##// END OF EJS Templates
Expand import * to allow Pyflakes to find problems
Joel Rosdahl -
r6211:f89fd07f default
parent child Browse files
Show More
@@ -1,124 +1,124 b''
1 1 # acl.py - changeset access control for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # this hook allows to allow or deny access to parts of a repo when
9 9 # taking incoming changesets.
10 10 #
11 11 # authorization is against local user name on system where hook is
12 12 # run, not committer of original changeset (since that is easy to
13 13 # spoof).
14 14 #
15 15 # acl hook is best to use if you use hgsh to set up restricted shells
16 16 # for authenticated users to only push to / pull from. not safe if
17 17 # user has interactive shell access, because they can disable hook.
18 18 # also not safe if remote users share one local account, because then
19 19 # no way to tell remote users apart.
20 20 #
21 21 # to use, configure acl extension in hgrc like this:
22 22 #
23 23 # [extensions]
24 24 # hgext.acl =
25 25 #
26 26 # [hooks]
27 27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 28 #
29 29 # [acl]
30 30 # sources = serve # check if source of incoming changes in this list
31 31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 32 #
33 33 # allow and deny lists have subtree pattern (default syntax is glob)
34 34 # on left, user names on right. deny list checked before allow list.
35 35 #
36 36 # [acl.allow]
37 37 # # if acl.allow not present, all users allowed by default
38 38 # # empty acl.allow = no users allowed
39 39 # docs/** = doc_writer
40 40 # .hgtags = release_engineer
41 41 #
42 42 # [acl.deny]
43 43 # # if acl.deny not present, no users denied by default
44 44 # # empty acl.deny = all users allowed
45 45 # glob pattern = user4, user5
46 46 # ** = user6
47 47
48 48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import bin, short
50 50 from mercurial import util
51 51 import getpass
52 52
53 53 class checker(object):
54 54 '''acl checker.'''
55 55
56 56 def buildmatch(self, key):
57 57 '''return tuple of (match function, list enabled).'''
58 58 if not self.ui.has_section(key):
59 59 self.ui.debug(_('acl: %s not enabled\n') % key)
60 60 return None, False
61 61
62 62 thisuser = self.getuser()
63 63 pats = [pat for pat, users in self.ui.configitems(key)
64 64 if thisuser in users.replace(',', ' ').split()]
65 65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
66 66 (key, len(pats), thisuser))
67 67 if pats:
68 68 match = util.matcher(self.repo.root, names=pats)[1]
69 69 else:
70 70 match = util.never
71 71 return match, True
72 72
73 73 def getuser(self):
74 74 '''return name of authenticated user.'''
75 75 return self.user
76 76
77 77 def __init__(self, ui, repo):
78 78 self.ui = ui
79 79 self.repo = repo
80 80 self.user = getpass.getuser()
81 81 cfg = self.ui.config('acl', 'config')
82 82 if cfg:
83 83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
84 84 self.allow, self.allowable = self.buildmatch('acl.allow')
85 85 self.deny, self.deniable = self.buildmatch('acl.deny')
86 86
87 87 def skipsource(self, source):
88 88 '''true if incoming changes from this source should be skipped.'''
89 89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
90 90 return source not in ok_sources
91 91
92 92 def check(self, node):
93 93 '''return if access allowed, raise exception if not.'''
94 94 files = self.repo.changectx(node).files()
95 95 if self.deniable:
96 96 for f in files:
97 97 if self.deny(f):
98 98 self.ui.debug(_('acl: user %s denied on %s\n') %
99 99 (self.getuser(), f))
100 100 raise util.Abort(_('acl: access denied for changeset %s') %
101 101 short(node))
102 102 if self.allowable:
103 103 for f in files:
104 104 if not self.allow(f):
105 105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
106 106 (self.getuser(), f))
107 107 raise util.Abort(_('acl: access denied for changeset %s') %
108 108 short(node))
109 109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110 110
111 111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
112 112 if hooktype != 'pretxnchangegroup':
113 113 raise util.Abort(_('config error - hook type "%s" cannot stop '
114 114 'incoming changesets') % hooktype)
115 115
116 116 c = checker(ui, repo)
117 117 if c.skipsource(source):
118 118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
119 119 return
120 120
121 121 start = repo.changelog.rev(bin(node))
122 122 end = repo.changelog.count()
123 123 for rev in xrange(start, end):
124 124 c.check(repo.changelog.node(rev))
@@ -1,311 +1,311 b''
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to update comments of bugzilla bugs when changesets
9 9 # that refer to bugs by id are seen. this hook does not change bug
10 10 # status, only comments.
11 11 #
12 12 # to configure, add items to '[bugzilla]' section of hgrc.
13 13 #
14 14 # to use, configure bugzilla extension and enable like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.bugzilla =
18 18 #
19 19 # [hooks]
20 20 # # run bugzilla hook on every change pulled or pushed in here
21 21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 22 #
23 23 # config items:
24 24 #
25 25 # section name is 'bugzilla'.
26 26 # [bugzilla]
27 27 #
28 28 # REQUIRED:
29 29 # host = bugzilla # mysql server where bugzilla database lives
30 30 # password = ** # user's password
31 31 # version = 2.16 # version of bugzilla installed
32 32 #
33 33 # OPTIONAL:
34 34 # bzuser = ... # fallback bugzilla user name to record comments with
35 35 # db = bugs # database to connect to
36 36 # notify = ... # command to run to get bugzilla to send mail
37 37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 38 # strip = 0 # number of slashes to strip for url paths
39 39 # style = ... # style file to use when formatting comments
40 40 # template = ... # template to use when formatting comments
41 41 # timeout = 5 # database connection timeout (seconds)
42 42 # user = bugs # user to connect to database as
43 43 # [web]
44 44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 45 #
46 46 # if hg committer names are not same as bugzilla user names, use
47 47 # "usermap" feature to map from committer email to bugzilla user name.
48 48 # usermap can be in hgrc or separate config file.
49 49 #
50 50 # [bugzilla]
51 51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 52 # [usermap]
53 53 # committer_email = bugzilla_user_name
54 54
55 55 from mercurial.i18n import _
56 from mercurial.node import *
56 from mercurial.node import short
57 57 from mercurial import cmdutil, templater, util
58 58 import os, re, time
59 59
60 60 MySQLdb = None
61 61
62 62 def buglist(ids):
63 63 return '(' + ','.join(map(str, ids)) + ')'
64 64
65 65 class bugzilla_2_16(object):
66 66 '''support for bugzilla version 2.16.'''
67 67
68 68 def __init__(self, ui):
69 69 self.ui = ui
70 70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 72 passwd = self.ui.config('bugzilla', 'password')
73 73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 75 usermap = self.ui.config('bugzilla', 'usermap')
76 76 if usermap:
77 77 self.ui.readsections(usermap, 'usermap')
78 78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 79 (host, db, user, '*' * len(passwd)))
80 80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 81 db=db, connect_timeout=timeout)
82 82 self.cursor = self.conn.cursor()
83 83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 84 ids = self.cursor.fetchall()
85 85 if len(ids) != 1:
86 86 raise util.Abort(_('unknown database schema'))
87 87 self.longdesc_id = ids[0][0]
88 88 self.user_ids = {}
89 89
90 90 def run(self, *args, **kwargs):
91 91 '''run a query.'''
92 92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 93 try:
94 94 self.cursor.execute(*args, **kwargs)
95 95 except MySQLdb.MySQLError, err:
96 96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 97 raise
98 98
99 99 def filter_real_bug_ids(self, ids):
100 100 '''filter not-existing bug ids from list.'''
101 101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 102 ids = [c[0] for c in self.cursor.fetchall()]
103 103 ids.sort()
104 104 return ids
105 105
106 106 def filter_unknown_bug_ids(self, node, ids):
107 107 '''filter bug ids from list that already refer to this changeset.'''
108 108
109 109 self.run('''select bug_id from longdescs where
110 110 bug_id in %s and thetext like "%%%s%%"''' %
111 111 (buglist(ids), short(node)))
112 112 unknown = dict.fromkeys(ids)
113 113 for (id,) in self.cursor.fetchall():
114 114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 115 (id, short(node)))
116 116 unknown.pop(id, None)
117 117 ids = unknown.keys()
118 118 ids.sort()
119 119 return ids
120 120
121 121 def notify(self, ids):
122 122 '''tell bugzilla to send mail.'''
123 123
124 124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 125 for id in ids:
126 126 self.ui.status(_(' bug %s\n') % id)
127 127 cmd = self.ui.config('bugzilla', 'notify',
128 128 'cd /var/www/html/bugzilla && '
129 129 './processmail %s nobody@nowhere.com') % id
130 130 fp = os.popen('(%s) 2>&1' % cmd)
131 131 out = fp.read()
132 132 ret = fp.close()
133 133 if ret:
134 134 self.ui.warn(out)
135 135 raise util.Abort(_('bugzilla notify command %s') %
136 136 util.explain_exit(ret)[0])
137 137 self.ui.status(_('done\n'))
138 138
139 139 def get_user_id(self, user):
140 140 '''look up numeric bugzilla user id.'''
141 141 try:
142 142 return self.user_ids[user]
143 143 except KeyError:
144 144 try:
145 145 userid = int(user)
146 146 except ValueError:
147 147 self.ui.note(_('looking up user %s\n') % user)
148 148 self.run('''select userid from profiles
149 149 where login_name like %s''', user)
150 150 all = self.cursor.fetchall()
151 151 if len(all) != 1:
152 152 raise KeyError(user)
153 153 userid = int(all[0][0])
154 154 self.user_ids[user] = userid
155 155 return userid
156 156
157 157 def map_committer(self, user):
158 158 '''map name of committer to bugzilla user name.'''
159 159 for committer, bzuser in self.ui.configitems('usermap'):
160 160 if committer.lower() == user.lower():
161 161 return bzuser
162 162 return user
163 163
164 164 def add_comment(self, bugid, text, committer):
165 165 '''add comment to bug. try adding comment as committer of
166 166 changeset, otherwise as default bugzilla user.'''
167 167 user = self.map_committer(committer)
168 168 try:
169 169 userid = self.get_user_id(user)
170 170 except KeyError:
171 171 try:
172 172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 173 if not defaultuser:
174 174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 175 user)
176 176 userid = self.get_user_id(defaultuser)
177 177 except KeyError:
178 178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 179 (user, defaultuser))
180 180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 181 self.run('''insert into longdescs
182 182 (bug_id, who, bug_when, thetext)
183 183 values (%s, %s, %s, %s)''',
184 184 (bugid, userid, now, text))
185 185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 186 values (%s, %s, %s, %s)''',
187 187 (bugid, userid, now, self.longdesc_id))
188 188
189 189 class bugzilla(object):
190 190 # supported versions of bugzilla. different versions have
191 191 # different schemas.
192 192 _versions = {
193 193 '2.16': bugzilla_2_16,
194 194 }
195 195
196 196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198 198
199 199 _bz = None
200 200
201 201 def __init__(self, ui, repo):
202 202 self.ui = ui
203 203 self.repo = repo
204 204
205 205 def bz(self):
206 206 '''return object that knows how to talk to bugzilla version in
207 207 use.'''
208 208
209 209 if bugzilla._bz is None:
210 210 bzversion = self.ui.config('bugzilla', 'version')
211 211 try:
212 212 bzclass = bugzilla._versions[bzversion]
213 213 except KeyError:
214 214 raise util.Abort(_('bugzilla version %s not supported') %
215 215 bzversion)
216 216 bugzilla._bz = bzclass(self.ui)
217 217 return bugzilla._bz
218 218
219 219 def __getattr__(self, key):
220 220 return getattr(self.bz(), key)
221 221
222 222 _bug_re = None
223 223 _split_re = None
224 224
225 225 def find_bug_ids(self, ctx):
226 226 '''find valid bug ids that are referred to in changeset
227 227 comments and that do not already have references to this
228 228 changeset.'''
229 229
230 230 if bugzilla._bug_re is None:
231 231 bugzilla._bug_re = re.compile(
232 232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 233 re.IGNORECASE)
234 234 bugzilla._split_re = re.compile(r'\D+')
235 235 start = 0
236 236 ids = {}
237 237 while True:
238 238 m = bugzilla._bug_re.search(ctx.description(), start)
239 239 if not m:
240 240 break
241 241 start = m.end()
242 242 for id in bugzilla._split_re.split(m.group(1)):
243 243 if not id: continue
244 244 ids[int(id)] = 1
245 245 ids = ids.keys()
246 246 if ids:
247 247 ids = self.filter_real_bug_ids(ids)
248 248 if ids:
249 249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 250 return ids
251 251
252 252 def update(self, bugid, ctx):
253 253 '''update bugzilla bug with reference to changeset.'''
254 254
255 255 def webroot(root):
256 256 '''strip leading prefix of repo root and turn into
257 257 url-safe path.'''
258 258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 259 root = util.pconvert(root)
260 260 while count > 0:
261 261 c = root.find('/')
262 262 if c == -1:
263 263 break
264 264 root = root[c+1:]
265 265 count -= 1
266 266 return root
267 267
268 268 mapfile = self.ui.config('bugzilla', 'style')
269 269 tmpl = self.ui.config('bugzilla', 'template')
270 270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 271 False, mapfile, False)
272 272 if not mapfile and not tmpl:
273 273 tmpl = _('changeset {node|short} in repo {root} refers '
274 274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 275 if tmpl:
276 276 tmpl = templater.parsestring(tmpl, quoted=False)
277 277 t.use_template(tmpl)
278 278 self.ui.pushbuffer()
279 279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 280 bug=str(bugid),
281 281 hgweb=self.ui.config('web', 'baseurl'),
282 282 root=self.repo.root,
283 283 webroot=webroot(self.repo.root))
284 284 data = self.ui.popbuffer()
285 285 self.add_comment(bugid, data, util.email(ctx.user()))
286 286
287 287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 288 '''add comment to bugzilla for each changeset that refers to a
289 289 bugzilla bug id. only add a comment once per bug, so same change
290 290 seen multiple times does not fill bug with duplicate data.'''
291 291 try:
292 292 import MySQLdb as mysql
293 293 global MySQLdb
294 294 MySQLdb = mysql
295 295 except ImportError, err:
296 296 raise util.Abort(_('python mysql support not available: %s') % err)
297 297
298 298 if node is None:
299 299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 300 hooktype)
301 301 try:
302 302 bz = bugzilla(ui, repo)
303 303 ctx = repo.changectx(node)
304 304 ids = bz.find_bug_ids(ctx)
305 305 if ids:
306 306 for id in ids:
307 307 bz.update(id, ctx)
308 308 bz.notify(ids)
309 309 except MySQLdb.MySQLError, err:
310 310 raise util.Abort(_('database error: %s') % err[1])
311 311
@@ -1,301 +1,301 b''
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 from mercurial.node import *
18 from mercurial.node import bin, hex, nullid
19 19 from mercurial import hg, lock, revlog, util
20 20
21 21 from common import NoRepo, commit, converter_source, converter_sink
22 22
23 23 class mercurial_sink(converter_sink):
24 24 def __init__(self, ui, path):
25 25 converter_sink.__init__(self, ui, path)
26 26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 29 self.lastbranch = None
30 30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 31 try:
32 32 self.repo = hg.repository(self.ui, path)
33 33 if not self.repo.local():
34 34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 35 except hg.RepoError, err:
36 36 ui.print_exc()
37 37 raise NoRepo(err.args[0])
38 38 else:
39 39 try:
40 40 ui.status(_('initializing destination %s repository\n') % path)
41 41 self.repo = hg.repository(self.ui, path, create=True)
42 42 if not self.repo.local():
43 43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 44 self.created.append(path)
45 45 except hg.RepoError, err:
46 46 ui.print_exc()
47 47 raise NoRepo("could not create hg repo %s as sink" % path)
48 48 self.lock = None
49 49 self.wlock = None
50 50 self.filemapmode = False
51 51
52 52 def before(self):
53 53 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 54 self.wlock = self.repo.wlock()
55 55 self.lock = self.repo.lock()
56 56 self.repo.dirstate.clear()
57 57
58 58 def after(self):
59 59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 60 self.repo.dirstate.invalidate()
61 61 self.lock = None
62 62 self.wlock = None
63 63
64 64 def revmapfile(self):
65 65 return os.path.join(self.path, ".hg", "shamap")
66 66
67 67 def authorfile(self):
68 68 return os.path.join(self.path, ".hg", "authormap")
69 69
70 70 def getheads(self):
71 71 h = self.repo.changelog.heads()
72 72 return [ hex(x) for x in h ]
73 73
74 74 def putfile(self, f, e, data):
75 75 self.repo.wwrite(f, data, e)
76 76 if f not in self.repo.dirstate:
77 77 self.repo.dirstate.normallookup(f)
78 78
79 79 def copyfile(self, source, dest):
80 80 self.repo.copy(source, dest)
81 81
82 82 def delfile(self, f):
83 83 try:
84 84 util.unlink(self.repo.wjoin(f))
85 85 #self.repo.remove([f])
86 86 except OSError:
87 87 pass
88 88
89 89 def setbranch(self, branch, pbranches):
90 90 if not self.clonebranches:
91 91 return
92 92
93 93 setbranch = (branch != self.lastbranch)
94 94 self.lastbranch = branch
95 95 if not branch:
96 96 branch = 'default'
97 97 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
98 98 pbranch = pbranches and pbranches[0][1] or 'default'
99 99
100 100 branchpath = os.path.join(self.path, branch)
101 101 if setbranch:
102 102 self.after()
103 103 try:
104 104 self.repo = hg.repository(self.ui, branchpath)
105 105 except:
106 106 self.repo = hg.repository(self.ui, branchpath, create=True)
107 107 self.before()
108 108
109 109 # pbranches may bring revisions from other branches (merge parents)
110 110 # Make sure we have them, or pull them.
111 111 missings = {}
112 112 for b in pbranches:
113 113 try:
114 114 self.repo.lookup(b[0])
115 115 except:
116 116 missings.setdefault(b[1], []).append(b[0])
117 117
118 118 if missings:
119 119 self.after()
120 120 for pbranch, heads in missings.iteritems():
121 121 pbranchpath = os.path.join(self.path, pbranch)
122 122 prepo = hg.repository(self.ui, pbranchpath)
123 123 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
124 124 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
125 125 self.before()
126 126
127 127 def putcommit(self, files, parents, commit):
128 128 seen = {}
129 129 pl = []
130 130 for p in parents:
131 131 if p not in seen:
132 132 pl.append(p)
133 133 seen[p] = 1
134 134 parents = pl
135 135 nparents = len(parents)
136 136 if self.filemapmode and nparents == 1:
137 137 m1node = self.repo.changelog.read(bin(parents[0]))[0]
138 138 parent = parents[0]
139 139
140 140 if len(parents) < 2: parents.append("0" * 40)
141 141 if len(parents) < 2: parents.append("0" * 40)
142 142 p2 = parents.pop(0)
143 143
144 144 text = commit.desc
145 145 extra = commit.extra.copy()
146 146 if self.branchnames and commit.branch:
147 147 extra['branch'] = commit.branch
148 148 if commit.rev:
149 149 extra['convert_revision'] = commit.rev
150 150
151 151 while parents:
152 152 p1 = p2
153 153 p2 = parents.pop(0)
154 154 a = self.repo.rawcommit(files, text, commit.author, commit.date,
155 155 bin(p1), bin(p2), extra=extra)
156 156 self.repo.dirstate.clear()
157 157 text = "(octopus merge fixup)\n"
158 158 p2 = hg.hex(self.repo.changelog.tip())
159 159
160 160 if self.filemapmode and nparents == 1:
161 161 man = self.repo.manifest
162 162 mnode = self.repo.changelog.read(bin(p2))[0]
163 163 if not man.cmp(m1node, man.revision(mnode)):
164 164 self.repo.rollback()
165 165 self.repo.dirstate.clear()
166 166 return parent
167 167 return p2
168 168
169 169 def puttags(self, tags):
170 170 try:
171 171 old = self.repo.wfile(".hgtags").read()
172 172 oldlines = old.splitlines(1)
173 173 oldlines.sort()
174 174 except:
175 175 oldlines = []
176 176
177 177 k = tags.keys()
178 178 k.sort()
179 179 newlines = []
180 180 for tag in k:
181 181 newlines.append("%s %s\n" % (tags[tag], tag))
182 182
183 183 newlines.sort()
184 184
185 185 if newlines != oldlines:
186 186 self.ui.status("updating tags\n")
187 187 f = self.repo.wfile(".hgtags", "w")
188 188 f.write("".join(newlines))
189 189 f.close()
190 190 if not oldlines: self.repo.add([".hgtags"])
191 191 date = "%s 0" % int(time.mktime(time.gmtime()))
192 192 extra = {}
193 193 if self.tagsbranch != 'default':
194 194 extra['branch'] = self.tagsbranch
195 195 try:
196 196 tagparent = self.repo.changectx(self.tagsbranch).node()
197 197 except hg.RepoError, inst:
198 198 tagparent = nullid
199 199 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
200 200 date, tagparent, nullid, extra=extra)
201 201 return hex(self.repo.changelog.tip())
202 202
203 203 def setfilemapmode(self, active):
204 204 self.filemapmode = active
205 205
206 206 class mercurial_source(converter_source):
207 207 def __init__(self, ui, path, rev=None):
208 208 converter_source.__init__(self, ui, path, rev)
209 209 self.saverev = ui.configbool('convert', 'hg.saverev', True)
210 210 try:
211 211 self.repo = hg.repository(self.ui, path)
212 212 # try to provoke an exception if this isn't really a hg
213 213 # repo, but some other bogus compatible-looking url
214 214 if not self.repo.local():
215 215 raise hg.RepoError()
216 216 except hg.RepoError:
217 217 ui.print_exc()
218 218 raise NoRepo("%s is not a local Mercurial repo" % path)
219 219 self.lastrev = None
220 220 self.lastctx = None
221 221 self._changescache = None
222 222 self.convertfp = None
223 223
224 224 def changectx(self, rev):
225 225 if self.lastrev != rev:
226 226 self.lastctx = self.repo.changectx(rev)
227 227 self.lastrev = rev
228 228 return self.lastctx
229 229
230 230 def getheads(self):
231 231 if self.rev:
232 232 return [hex(self.repo.changectx(self.rev).node())]
233 233 else:
234 234 return [hex(node) for node in self.repo.heads()]
235 235
236 236 def getfile(self, name, rev):
237 237 try:
238 238 return self.changectx(rev).filectx(name).data()
239 239 except revlog.LookupError, err:
240 240 raise IOError(err)
241 241
242 242 def getmode(self, name, rev):
243 243 m = self.changectx(rev).manifest()
244 244 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
245 245
246 246 def getchanges(self, rev):
247 247 ctx = self.changectx(rev)
248 248 if self._changescache and self._changescache[0] == rev:
249 249 m, a, r = self._changescache[1]
250 250 else:
251 251 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
252 252 changes = [(name, rev) for name in m + a + r]
253 253 changes.sort()
254 254 return (changes, self.getcopies(ctx, m + a))
255 255
256 256 def getcopies(self, ctx, files):
257 257 copies = {}
258 258 for name in files:
259 259 try:
260 260 copies[name] = ctx.filectx(name).renamed()[0]
261 261 except TypeError:
262 262 pass
263 263 return copies
264 264
265 265 def getcommit(self, rev):
266 266 ctx = self.changectx(rev)
267 267 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
268 268 if self.saverev:
269 269 crev = rev
270 270 else:
271 271 crev = None
272 272 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
273 273 desc=ctx.description(), rev=crev, parents=parents,
274 274 branch=ctx.branch(), extra=ctx.extra())
275 275
276 276 def gettags(self):
277 277 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
278 278 return dict([(name, hex(node)) for name, node in tags])
279 279
280 280 def getchangedfiles(self, rev, i):
281 281 ctx = self.changectx(rev)
282 282 i = i or 0
283 283 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
284 284
285 285 if i == 0:
286 286 self._changescache = (rev, changes)
287 287
288 288 return changes[0] + changes[1] + changes[2]
289 289
290 290 def converted(self, rev, destrev):
291 291 if self.convertfp is None:
292 292 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
293 293 'a')
294 294 self.convertfp.write('%s %s\n' % (destrev, rev))
295 295 self.convertfp.flush()
296 296
297 297 def before(self):
298 298 self.ui.debug(_('run hg source pre-conversion action\n'))
299 299
300 300 def after(self):
301 301 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,251 +1,251 b''
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''
9 9 The `extdiff' Mercurial extension allows you to use external programs
10 10 to compare revisions, or revision with working dir. The external diff
11 11 programs are called with a configurable set of options and two
12 12 non-option arguments: paths to directories containing snapshots of
13 13 files to compare.
14 14
15 15 To enable this extension:
16 16
17 17 [extensions]
18 18 hgext.extdiff =
19 19
20 20 The `extdiff' extension also allows to configure new diff commands, so
21 21 you do not need to type "hg extdiff -p kdiff3" always.
22 22
23 23 [extdiff]
24 24 # add new command that runs GNU diff(1) in 'context diff' mode
25 25 cdiff = gdiff -Nprc5
26 26 ## or the old way:
27 27 #cmd.cdiff = gdiff
28 28 #opts.cdiff = -Nprc5
29 29
30 30 # add new command called vdiff, runs kdiff3
31 31 vdiff = kdiff3
32 32
33 33 # add new command called meld, runs meld (no need to name twice)
34 34 meld =
35 35
36 36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 37 #(see http://www.vim.org/scripts/script.php?script_id=102)
38 38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 39 # your .vimrc
40 40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
41 41
42 42 You can use -I/-X and list of file or directory names like normal
43 43 "hg diff" command. The `extdiff' extension makes snapshots of only
44 44 needed files, so running the external diff program will actually be
45 45 pretty fast (at least faster than having to compare the entire tree).
46 46 '''
47 47
48 48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import short
50 50 from mercurial import cmdutil, util, commands
51 51 import os, shlex, shutil, tempfile
52 52
53 53 def snapshot_node(ui, repo, files, node, tmproot):
54 54 '''snapshot files as of some revision'''
55 55 mf = repo.changectx(node).manifest()
56 56 dirname = os.path.basename(repo.root)
57 57 if dirname == "":
58 58 dirname = "root"
59 59 dirname = '%s.%s' % (dirname, short(node))
60 60 base = os.path.join(tmproot, dirname)
61 61 os.mkdir(base)
62 62 ui.note(_('making snapshot of %d files from rev %s\n') %
63 63 (len(files), short(node)))
64 64 for fn in files:
65 65 if not fn in mf:
66 66 # skipping new file after a merge ?
67 67 continue
68 68 wfn = util.pconvert(fn)
69 69 ui.note(' %s\n' % wfn)
70 70 dest = os.path.join(base, wfn)
71 71 destdir = os.path.dirname(dest)
72 72 if not os.path.isdir(destdir):
73 73 os.makedirs(destdir)
74 74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
75 75 open(dest, 'wb').write(data)
76 76 return dirname
77 77
78 78
79 79 def snapshot_wdir(ui, repo, files, tmproot):
80 80 '''snapshot files from working directory.
81 81 if not using snapshot, -I/-X does not work and recursive diff
82 82 in tools like kdiff3 and meld displays too many files.'''
83 83 repo_root = repo.root
84 84
85 85 dirname = os.path.basename(repo_root)
86 86 if dirname == "":
87 87 dirname = "root"
88 88 base = os.path.join(tmproot, dirname)
89 89 os.mkdir(base)
90 90 ui.note(_('making snapshot of %d files from working dir\n') %
91 91 (len(files)))
92 92
93 93 fns_and_mtime = []
94 94
95 95 for fn in files:
96 96 wfn = util.pconvert(fn)
97 97 ui.note(' %s\n' % wfn)
98 98 dest = os.path.join(base, wfn)
99 99 destdir = os.path.dirname(dest)
100 100 if not os.path.isdir(destdir):
101 101 os.makedirs(destdir)
102 102
103 103 fp = open(dest, 'wb')
104 104 for chunk in util.filechunkiter(repo.wopener(wfn)):
105 105 fp.write(chunk)
106 106 fp.close()
107 107
108 108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
109 109 os.path.getmtime(dest)))
110 110
111 111
112 112 return dirname, fns_and_mtime
113 113
114 114
115 115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
116 116 '''Do the actuall diff:
117 117
118 118 - copy to a temp structure if diffing 2 internal revisions
119 119 - copy to a temp structure if diffing working revision with
120 120 another one and more than 1 file is changed
121 121 - just invoke the diff for a single file in the working dir
122 122 '''
123 123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
124 124 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
125 125 modified, added, removed, deleted, unknown = repo.status(
126 126 node1, node2, files, match=matchfn)[:5]
127 127 if not (modified or added or removed):
128 128 return 0
129 129
130 130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
131 131 dir2root = ''
132 132 try:
133 133 # Always make a copy of node1
134 134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
135 135 changes = len(modified) + len(removed) + len(added)
136 136
137 137 fns_and_mtime = []
138 138
139 139 # If node2 in not the wc or there is >1 change, copy it
140 140 if node2:
141 141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
142 142 elif changes > 1:
143 143 #we only actually need to get the files to copy back to the working
144 144 #dir in this case (because the other cases are: diffing 2 revisions
145 145 #or single file -- in which case the file is already directly passed
146 146 #to the diff tool).
147 147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
148 148 else:
149 149 # This lets the diff tool open the changed file directly
150 150 dir2 = ''
151 151 dir2root = repo.root
152 152
153 153 # If only one change, diff the files instead of the directories
154 154 if changes == 1 :
155 155 if len(modified):
156 156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
157 157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
158 158 elif len(removed) :
159 159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
160 160 dir2 = os.devnull
161 161 else:
162 162 dir1 = os.devnull
163 163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
164 164
165 165 cmdline = ('%s %s %s %s' %
166 166 (util.shellquote(diffcmd), ' '.join(diffopts),
167 167 util.shellquote(dir1), util.shellquote(dir2)))
168 168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
169 169 util.system(cmdline, cwd=tmproot)
170 170
171 171 for copy_fn, working_fn, mtime in fns_and_mtime:
172 172 if os.path.getmtime(copy_fn) != mtime:
173 173 ui.debug('File changed while diffing. '
174 174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
175 175 util.copyfile(copy_fn, working_fn)
176 176
177 177 return 1
178 178 finally:
179 179 ui.note(_('cleaning up temp directory\n'))
180 180 shutil.rmtree(tmproot)
181 181
182 182 def extdiff(ui, repo, *pats, **opts):
183 183 '''use external program to diff repository (or selected files)
184 184
185 185 Show differences between revisions for the specified files, using
186 186 an external program. The default program used is diff, with
187 187 default options "-Npru".
188 188
189 189 To select a different program, use the -p option. The program
190 190 will be passed the names of two directories to compare. To pass
191 191 additional options to the program, use the -o option. These will
192 192 be passed before the names of the directories to compare.
193 193
194 194 When two revision arguments are given, then changes are
195 195 shown between those revisions. If only one revision is
196 196 specified then that revision is compared to the working
197 197 directory, and, when no revisions are specified, the
198 198 working directory files are compared to its parent.'''
199 199 program = opts['program'] or 'diff'
200 200 if opts['program']:
201 201 option = opts['option']
202 202 else:
203 203 option = opts['option'] or ['-Npru']
204 204 return dodiff(ui, repo, program, option, pats, opts)
205 205
206 206 cmdtable = {
207 207 "extdiff":
208 208 (extdiff,
209 209 [('p', 'program', '', _('comparison program to run')),
210 210 ('o', 'option', [], _('pass option to comparison program')),
211 211 ('r', 'rev', [], _('revision')),
212 212 ] + commands.walkopts,
213 213 _('hg extdiff [OPT]... [FILE]...')),
214 214 }
215 215
216 216 def uisetup(ui):
217 217 for cmd, path in ui.configitems('extdiff'):
218 218 if cmd.startswith('cmd.'):
219 219 cmd = cmd[4:]
220 220 if not path: path = cmd
221 221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
222 222 diffopts = diffopts and [diffopts] or []
223 223 elif cmd.startswith('opts.'):
224 224 continue
225 225 else:
226 226 # command = path opts
227 227 if path:
228 228 diffopts = shlex.split(path)
229 229 path = diffopts.pop(0)
230 230 else:
231 231 path, diffopts = cmd, []
232 232 def save(cmd, path, diffopts):
233 233 '''use closure to save diff command to use'''
234 234 def mydiff(ui, repo, *pats, **opts):
235 235 return dodiff(ui, repo, path, diffopts, pats, opts)
236 236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
237 237
238 238 Show differences between revisions for the specified
239 239 files, using the %(path)s program.
240 240
241 241 When two revision arguments are given, then changes are
242 242 shown between those revisions. If only one revision is
243 243 specified then that revision is compared to the working
244 244 directory, and, when no revisions are specified, the
245 245 working directory files are compared to its parent.''' % {
246 246 'path': util.uirepr(path),
247 247 }
248 248 return mydiff
249 249 cmdtable[cmd] = (save(cmd, path, diffopts),
250 250 cmdtable['extdiff'][1][1:],
251 251 _('hg %s [OPTION]... [FILE]...') % cmd)
@@ -1,123 +1,123 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import nullid, short
10 10 from mercurial import commands, cmdutil, hg, node, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated to include the new changes.
21 21
22 22 When a merge occurs, the newly pulled changes are assumed to be
23 23 "authoritative". The head of the new changes is used as the first
24 24 parent, with local changes as the second. To switch the merge
25 25 order, use --switch-parent.
26 26
27 27 See 'hg help dates' for a list of formats valid for -d/--date.
28 28 '''
29 29
30 30 def postincoming(other, modheads):
31 31 if modheads == 0:
32 32 return 0
33 33 if modheads == 1:
34 34 return hg.clean(repo, repo.changelog.tip())
35 35 newheads = repo.heads(parent)
36 36 newchildren = [n for n in repo.heads(parent) if n != parent]
37 37 newparent = parent
38 38 if newchildren:
39 39 newparent = newchildren[0]
40 40 hg.clean(repo, newparent)
41 41 newheads = [n for n in repo.heads() if n != newparent]
42 42 if len(newheads) > 1:
43 43 ui.status(_('not merging with %d other new heads '
44 44 '(use "hg heads" and "hg merge" to merge them)') %
45 45 (len(newheads) - 1))
46 46 return
47 47 err = False
48 48 if newheads:
49 49 # By default, we consider the repository we're pulling
50 50 # *from* as authoritative, so we merge our changes into
51 51 # theirs.
52 52 if opts['switch_parent']:
53 53 firstparent, secondparent = newparent, newheads[0]
54 54 else:
55 55 firstparent, secondparent = newheads[0], newparent
56 56 ui.status(_('updating to %d:%s\n') %
57 57 (repo.changelog.rev(firstparent),
58 58 short(firstparent)))
59 59 hg.clean(repo, firstparent)
60 60 ui.status(_('merging with %d:%s\n') %
61 61 (repo.changelog.rev(secondparent), short(secondparent)))
62 62 err = hg.merge(repo, secondparent, remind=False)
63 63 if not err:
64 64 mod, add, rem = repo.status()[:3]
65 65 message = (cmdutil.logmessage(opts) or
66 66 (_('Automated merge with %s') %
67 67 util.removeauth(other.url())))
68 68 n = repo.commit(mod + add + rem, message,
69 69 opts['user'], opts['date'],
70 70 force_editor=opts.get('force_editor'))
71 71 ui.status(_('new changeset %d:%s merges remote changes '
72 72 'with local\n') % (repo.changelog.rev(n),
73 73 short(n)))
74 74
75 75 def pull():
76 76 cmdutil.setremoteconfig(ui, opts)
77 77
78 78 other = hg.repository(ui, ui.expandpath(source))
79 79 ui.status(_('pulling from %s\n') %
80 80 util.hidepassword(ui.expandpath(source)))
81 81 revs = None
82 82 if opts['rev']:
83 83 if not other.local():
84 84 raise util.Abort(_("fetch -r doesn't work for remote "
85 85 "repositories yet"))
86 86 else:
87 87 revs = [other.lookup(rev) for rev in opts['rev']]
88 88 modheads = repo.pull(other, heads=revs)
89 89 return postincoming(other, modheads)
90 90
91 91 date = opts.get('date')
92 92 if date:
93 93 opts['date'] = util.parsedate(date)
94 94
95 95 parent, p2 = repo.dirstate.parents()
96 96 if parent != repo.changelog.tip():
97 97 raise util.Abort(_('working dir not at tip '
98 98 '(use "hg update" to check out tip)'))
99 99 if p2 != nullid:
100 100 raise util.Abort(_('outstanding uncommitted merge'))
101 101 wlock = lock = None
102 102 try:
103 103 wlock = repo.wlock()
104 104 lock = repo.lock()
105 105 mod, add, rem = repo.status()[:3]
106 106 if mod or add or rem:
107 107 raise util.Abort(_('outstanding uncommitted changes'))
108 108 if len(repo.heads()) > 1:
109 109 raise util.Abort(_('multiple heads in this repository '
110 110 '(use "hg heads" and "hg merge" to merge)'))
111 111 return pull()
112 112 finally:
113 113 del lock, wlock
114 114
115 115 cmdtable = {
116 116 'fetch':
117 117 (fetch,
118 118 [('r', 'rev', [], _('a specific revision you would like to pull')),
119 119 ('f', 'force-editor', None, _('edit commit message')),
120 120 ('', 'switch-parent', None, _('switch parents when merging')),
121 121 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
122 122 _('hg fetch [SOURCE]')),
123 123 }
@@ -1,406 +1,406 b''
1 1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
2 2 # Published under the GNU GPL
3 3
4 4 '''
5 5 imerge - interactive merge
6 6 '''
7 7
8 8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import hex, short
10 10 from mercurial import commands, cmdutil, dispatch, fancyopts
11 11 from mercurial import hg, filemerge, util
12 12 import os, tarfile
13 13
14 14 class InvalidStateFileException(Exception): pass
15 15
16 16 class ImergeStateFile(object):
17 17 def __init__(self, im):
18 18 self.im = im
19 19
20 20 def save(self, dest):
21 21 tf = tarfile.open(dest, 'w:gz')
22 22
23 23 st = os.path.join(self.im.path, 'status')
24 24 tf.add(st, os.path.join('.hg', 'imerge', 'status'))
25 25
26 26 for f in self.im.resolved:
27 27 (fd, fo) = self.im.conflicts[f]
28 28 abssrc = self.im.repo.wjoin(fd)
29 29 tf.add(abssrc, fd)
30 30
31 31 tf.close()
32 32
33 33 def load(self, source):
34 34 wlock = self.im.repo.wlock()
35 35 lock = self.im.repo.lock()
36 36
37 37 tf = tarfile.open(source, 'r')
38 38 contents = tf.getnames()
39 39 # tarfile normalizes path separators to '/'
40 40 statusfile = '.hg/imerge/status'
41 41 if statusfile not in contents:
42 42 raise InvalidStateFileException('no status file')
43 43
44 44 tf.extract(statusfile, self.im.repo.root)
45 45 p1, p2 = self.im.load()
46 46 if self.im.repo.dirstate.parents()[0] != p1.node():
47 47 hg.clean(self.im.repo, p1.node())
48 48 self.im.start(p2.node())
49 49 for tarinfo in tf:
50 50 tf.extract(tarinfo, self.im.repo.root)
51 51 self.im.load()
52 52
53 53 class Imerge(object):
54 54 def __init__(self, ui, repo):
55 55 self.ui = ui
56 56 self.repo = repo
57 57
58 58 self.path = repo.join('imerge')
59 59 self.opener = util.opener(self.path)
60 60
61 61 self.wctx = self.repo.workingctx()
62 62 self.conflicts = {}
63 63 self.resolved = []
64 64
65 65 def merging(self):
66 66 return len(self.wctx.parents()) > 1
67 67
68 68 def load(self):
69 69 # status format. \0-delimited file, fields are
70 70 # p1, p2, conflict count, conflict filenames, resolved filenames
71 71 # conflict filenames are tuples of localname, remoteorig, remotenew
72 72
73 73 statusfile = self.opener('status')
74 74
75 75 status = statusfile.read().split('\0')
76 76 if len(status) < 3:
77 77 raise util.Abort('invalid imerge status file')
78 78
79 79 try:
80 80 parents = [self.repo.changectx(n) for n in status[:2]]
81 81 except LookupError:
82 82 raise util.Abort('merge parent %s not in repository' % short(p))
83 83
84 84 status = status[2:]
85 85 conflicts = int(status.pop(0)) * 3
86 86 self.resolved = status[conflicts:]
87 87 for i in xrange(0, conflicts, 3):
88 88 self.conflicts[status[i]] = (status[i+1], status[i+2])
89 89
90 90 return parents
91 91
92 92 def save(self):
93 93 lock = self.repo.lock()
94 94
95 95 if not os.path.isdir(self.path):
96 96 os.mkdir(self.path)
97 97 statusfile = self.opener('status', 'wb')
98 98
99 99 out = [hex(n.node()) for n in self.wctx.parents()]
100 100 out.append(str(len(self.conflicts)))
101 101 conflicts = self.conflicts.items()
102 102 conflicts.sort()
103 103 for fw, fd_fo in conflicts:
104 104 out.append(fw)
105 105 out.extend(fd_fo)
106 106 out.extend(self.resolved)
107 107
108 108 statusfile.write('\0'.join(out))
109 109
110 110 def remaining(self):
111 111 return [f for f in self.conflicts if f not in self.resolved]
112 112
113 113 def filemerge(self, fn, interactive=True):
114 114 wlock = self.repo.wlock()
115 115
116 116 (fd, fo) = self.conflicts[fn]
117 117 p1, p2 = self.wctx.parents()
118 118
119 119 # this could be greatly improved
120 120 realmerge = os.environ.get('HGMERGE')
121 121 if not interactive:
122 122 os.environ['HGMERGE'] = 'merge'
123 123
124 124 # The filemerge ancestor algorithm does not work if self.wctx
125 125 # already has two parents (in normal merge it doesn't yet). But
126 126 # this is very dirty.
127 127 self.wctx._parents.pop()
128 128 try:
129 129 # TODO: we should probably revert the file if merge fails
130 130 return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
131 131 finally:
132 132 self.wctx._parents.append(p2)
133 133 if realmerge:
134 134 os.environ['HGMERGE'] = realmerge
135 135 elif not interactive:
136 136 del os.environ['HGMERGE']
137 137
138 138 def start(self, rev=None):
139 139 _filemerge = filemerge.filemerge
140 140 def filemerge_(repo, fw, fd, fo, wctx, mctx):
141 141 self.conflicts[fw] = (fd, fo)
142 142
143 143 filemerge.filemerge = filemerge_
144 144 commands.merge(self.ui, self.repo, rev=rev)
145 145 filemerge.filemerge = _filemerge
146 146
147 147 self.wctx = self.repo.workingctx()
148 148 self.save()
149 149
150 150 def resume(self):
151 151 self.load()
152 152
153 153 dp = self.repo.dirstate.parents()
154 154 p1, p2 = self.wctx.parents()
155 155 if p1.node() != dp[0] or p2.node() != dp[1]:
156 156 raise util.Abort('imerge state does not match working directory')
157 157
158 158 def next(self):
159 159 remaining = self.remaining()
160 160 return remaining and remaining[0]
161 161
162 162 def resolve(self, files):
163 163 resolved = dict.fromkeys(self.resolved)
164 164 for fn in files:
165 165 if fn not in self.conflicts:
166 166 raise util.Abort('%s is not in the merge set' % fn)
167 167 resolved[fn] = True
168 168 self.resolved = resolved.keys()
169 169 self.resolved.sort()
170 170 self.save()
171 171 return 0
172 172
173 173 def unresolve(self, files):
174 174 resolved = dict.fromkeys(self.resolved)
175 175 for fn in files:
176 176 if fn not in resolved:
177 177 raise util.Abort('%s is not resolved' % fn)
178 178 del resolved[fn]
179 179 self.resolved = resolved.keys()
180 180 self.resolved.sort()
181 181 self.save()
182 182 return 0
183 183
184 184 def pickle(self, dest):
185 185 '''write current merge state to file to be resumed elsewhere'''
186 186 state = ImergeStateFile(self)
187 187 return state.save(dest)
188 188
189 189 def unpickle(self, source):
190 190 '''read merge state from file'''
191 191 state = ImergeStateFile(self)
192 192 return state.load(source)
193 193
194 194 def load(im, source):
195 195 if im.merging():
196 196 raise util.Abort('there is already a merge in progress '
197 197 '(update -C <rev> to abort it)' )
198 198 m, a, r, d = im.repo.status()[:4]
199 199 if m or a or r or d:
200 200 raise util.Abort('working directory has uncommitted changes')
201 201
202 202 rc = im.unpickle(source)
203 203 if not rc:
204 204 status(im)
205 205 return rc
206 206
207 207 def merge_(im, filename=None, auto=False):
208 208 success = True
209 209 if auto and not filename:
210 210 for fn in im.remaining():
211 211 rc = im.filemerge(fn, interactive=False)
212 212 if rc:
213 213 success = False
214 214 else:
215 215 im.resolve([fn])
216 216 if success:
217 217 im.ui.write('all conflicts resolved\n')
218 218 else:
219 219 status(im)
220 220 return 0
221 221
222 222 if not filename:
223 223 filename = im.next()
224 224 if not filename:
225 225 im.ui.write('all conflicts resolved\n')
226 226 return 0
227 227
228 228 rc = im.filemerge(filename, interactive=not auto)
229 229 if not rc:
230 230 im.resolve([filename])
231 231 if not im.next():
232 232 im.ui.write('all conflicts resolved\n')
233 233 return rc
234 234
235 235 def next(im):
236 236 n = im.next()
237 237 if n:
238 238 im.ui.write('%s\n' % n)
239 239 else:
240 240 im.ui.write('all conflicts resolved\n')
241 241 return 0
242 242
243 243 def resolve(im, *files):
244 244 if not files:
245 245 raise util.Abort('resolve requires at least one filename')
246 246 return im.resolve(files)
247 247
248 248 def save(im, dest):
249 249 return im.pickle(dest)
250 250
251 251 def status(im, **opts):
252 252 if not opts.get('resolved') and not opts.get('unresolved'):
253 253 opts['resolved'] = True
254 254 opts['unresolved'] = True
255 255
256 256 if im.ui.verbose:
257 257 p1, p2 = [short(p.node()) for p in im.wctx.parents()]
258 258 im.ui.note(_('merging %s and %s\n') % (p1, p2))
259 259
260 260 conflicts = im.conflicts.keys()
261 261 conflicts.sort()
262 262 remaining = dict.fromkeys(im.remaining())
263 263 st = []
264 264 for fn in conflicts:
265 265 if opts.get('no_status'):
266 266 mode = ''
267 267 elif fn in remaining:
268 268 mode = 'U '
269 269 else:
270 270 mode = 'R '
271 271 if ((opts.get('resolved') and fn not in remaining)
272 272 or (opts.get('unresolved') and fn in remaining)):
273 273 st.append((mode, fn))
274 274 st.sort()
275 275 for (mode, fn) in st:
276 276 if im.ui.verbose:
277 277 fo, fd = im.conflicts[fn]
278 278 if fd != fn:
279 279 fn = '%s (%s)' % (fn, fd)
280 280 im.ui.write('%s%s\n' % (mode, fn))
281 281 if opts.get('unresolved') and not remaining:
282 282 im.ui.write(_('all conflicts resolved\n'))
283 283
284 284 return 0
285 285
286 286 def unresolve(im, *files):
287 287 if not files:
288 288 raise util.Abort('unresolve requires at least one filename')
289 289 return im.unresolve(files)
290 290
291 291 subcmdtable = {
292 292 'load': (load, []),
293 293 'merge':
294 294 (merge_,
295 295 [('a', 'auto', None, _('automatically resolve if possible'))]),
296 296 'next': (next, []),
297 297 'resolve': (resolve, []),
298 298 'save': (save, []),
299 299 'status':
300 300 (status,
301 301 [('n', 'no-status', None, _('hide status prefix')),
302 302 ('', 'resolved', None, _('only show resolved conflicts')),
303 303 ('', 'unresolved', None, _('only show unresolved conflicts'))]),
304 304 'unresolve': (unresolve, [])
305 305 }
306 306
307 307 def dispatch_(im, args, opts):
308 308 def complete(s, choices):
309 309 candidates = []
310 310 for choice in choices:
311 311 if choice.startswith(s):
312 312 candidates.append(choice)
313 313 return candidates
314 314
315 315 c, args = args[0], list(args[1:])
316 316 cmd = complete(c, subcmdtable.keys())
317 317 if not cmd:
318 318 raise cmdutil.UnknownCommand('imerge ' + c)
319 319 if len(cmd) > 1:
320 320 cmd.sort()
321 321 raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
322 322 cmd = cmd[0]
323 323
324 324 func, optlist = subcmdtable[cmd]
325 325 opts = {}
326 326 try:
327 327 args = fancyopts.fancyopts(args, optlist, opts)
328 328 return func(im, *args, **opts)
329 329 except fancyopts.getopt.GetoptError, inst:
330 330 raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
331 331 except TypeError:
332 332 raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
333 333
334 334 def imerge(ui, repo, *args, **opts):
335 335 '''interactive merge
336 336
337 337 imerge lets you split a merge into pieces. When you start a merge
338 338 with imerge, the names of all files with conflicts are recorded.
339 339 You can then merge any of these files, and if the merge is
340 340 successful, they will be marked as resolved. When all files are
341 341 resolved, the merge is complete.
342 342
343 343 If no merge is in progress, hg imerge [rev] will merge the working
344 344 directory with rev (defaulting to the other head if the repository
345 345 only has two heads). You may also resume a saved merge with
346 346 hg imerge load <file>.
347 347
348 348 If a merge is in progress, hg imerge will default to merging the
349 349 next unresolved file.
350 350
351 351 The following subcommands are available:
352 352
353 353 status:
354 354 show the current state of the merge
355 355 options:
356 356 -n --no-status: do not print the status prefix
357 357 --resolved: only print resolved conflicts
358 358 --unresolved: only print unresolved conflicts
359 359 next:
360 360 show the next unresolved file merge
361 361 merge [<file>]:
362 362 merge <file>. If the file merge is successful, the file will be
363 363 recorded as resolved. If no file is given, the next unresolved
364 364 file will be merged.
365 365 resolve <file>...:
366 366 mark files as successfully merged
367 367 unresolve <file>...:
368 368 mark files as requiring merging.
369 369 save <file>:
370 370 save the state of the merge to a file to be resumed elsewhere
371 371 load <file>:
372 372 load the state of the merge from a file created by save
373 373 '''
374 374
375 375 im = Imerge(ui, repo)
376 376
377 377 if im.merging():
378 378 im.resume()
379 379 else:
380 380 rev = opts.get('rev')
381 381 if rev and args:
382 382 raise util.Abort('please specify just one revision')
383 383
384 384 if len(args) == 2 and args[0] == 'load':
385 385 pass
386 386 else:
387 387 if args:
388 388 rev = args[0]
389 389 im.start(rev=rev)
390 390 if opts.get('auto'):
391 391 args = ['merge', '--auto']
392 392 else:
393 393 args = ['status']
394 394
395 395 if not args:
396 396 args = ['merge']
397 397
398 398 return dispatch_(im, args, opts)
399 399
400 400 cmdtable = {
401 401 '^imerge':
402 402 (imerge,
403 403 [('r', 'rev', '', _('revision to merge')),
404 404 ('a', 'auto', None, _('automatically merge where possible'))],
405 405 'hg imerge [command]')
406 406 }
@@ -1,556 +1,556 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an audience
15 15 # not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 38 in tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in
41 41 the change history. The mechanism can be regarded as a convenience
42 42 for the current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections
45 45 of hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repos.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 63 with customized keywords and templates.
64 64 Again, run "hg kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertedly storing expanded keywords in the change history.
68 68
69 69 To force expansion after enabling it, or a configuration change, run
70 70 "hg kwexpand".
71 71
72 72 Also, when committing with the record extension or using mq's qrecord, be aware
73 73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 74 question to update keyword expansions after all changes have been checked in.
75 75
76 76 Expansions spanning more than one line and incremental expansions,
77 77 like CVS' $Log$, are not supported. A keyword template map
78 78 "Log = {desc}" expands to the first line of the changeset description.
79 79 '''
80 80
81 81 from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
82 82 from mercurial import patch, localrepo, templater, templatefilters, util
83 83 from mercurial.hgweb import webcommands
84 from mercurial.node import *
84 from mercurial.node import nullid, hex
85 85 from mercurial.i18n import _
86 86 import re, shutil, tempfile, time
87 87
88 88 commands.optionalrepo += ' kwdemo'
89 89
90 90 # hg commands that do not act on keywords
91 91 nokwcommands = ('add addremove bundle copy export grep incoming init'
92 92 ' log outgoing push rename rollback tip'
93 93 ' convert email glog')
94 94
95 95 # hg commands that trigger expansion only when writing to working dir,
96 96 # not when reading filelog, and unexpand when reading from working dir
97 97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98 98
99 99 def utcdate(date):
100 100 '''Returns hgdate in cvs-like UTC format.'''
101 101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102 102
103 103
104 104 # make keyword tools accessible
105 105 kwtools = {'templater': None, 'hgcmd': None}
106 106
107 107 # store originals of monkeypatches
108 108 _patchfile_init = patch.patchfile.__init__
109 109 _patch_diff = patch.diff
110 110 _dispatch_parse = dispatch._parse
111 111
112 112 def _kwpatchfile_init(self, ui, fname, missing=False):
113 113 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
114 114 rejects or conflicts due to expanded keywords in working dir.'''
115 115 _patchfile_init(self, ui, fname, missing=missing)
116 116 # shrink keywords read from working dir
117 117 kwt = kwtools['templater']
118 118 self.lines = kwt.shrinklines(self.fname, self.lines)
119 119
120 120 def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
121 121 fp=None, changes=None, opts=None):
122 122 '''Monkeypatch patch.diff to avoid expansion except when
123 123 comparing against working dir.'''
124 124 if node2 is not None:
125 125 kwtools['templater'].matcher = util.never
126 126 elif node1 is not None and node1 != repo.changectx().node():
127 127 kwtools['templater'].restrict = True
128 128 _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
129 129 fp=fp, changes=changes, opts=opts)
130 130
131 131 def _kwweb_changeset(web, req, tmpl):
132 132 '''Wraps webcommands.changeset turning off keyword expansion.'''
133 133 kwtools['templater'].matcher = util.never
134 134 return web.changeset(tmpl, web.changectx(req))
135 135
136 136 def _kwweb_filediff(web, req, tmpl):
137 137 '''Wraps webcommands.filediff turning off keyword expansion.'''
138 138 kwtools['templater'].matcher = util.never
139 139 return web.filediff(tmpl, web.filectx(req))
140 140
141 141 def _kwdispatch_parse(ui, args):
142 142 '''Monkeypatch dispatch._parse to obtain running hg command.'''
143 143 cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
144 144 kwtools['hgcmd'] = cmd
145 145 return cmd, func, args, options, cmdoptions
146 146
147 147 # dispatch._parse is run before reposetup, so wrap it here
148 148 dispatch._parse = _kwdispatch_parse
149 149
150 150
151 151 class kwtemplater(object):
152 152 '''
153 153 Sets up keyword templates, corresponding keyword regex, and
154 154 provides keyword substitution functions.
155 155 '''
156 156 templates = {
157 157 'Revision': '{node|short}',
158 158 'Author': '{author|user}',
159 159 'Date': '{date|utcdate}',
160 160 'RCSFile': '{file|basename},v',
161 161 'Source': '{root}/{file},v',
162 162 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
163 163 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
164 164 }
165 165
166 166 def __init__(self, ui, repo, inc, exc):
167 167 self.ui = ui
168 168 self.repo = repo
169 169 self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
170 170 self.restrict = kwtools['hgcmd'] in restricted.split()
171 171
172 172 kwmaps = self.ui.configitems('keywordmaps')
173 173 if kwmaps: # override default templates
174 174 kwmaps = [(k, templater.parsestring(v, quoted=False))
175 175 for (k, v) in kwmaps]
176 176 self.templates = dict(kwmaps)
177 177 escaped = map(re.escape, self.templates.keys())
178 178 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
179 179 self.re_kw = re.compile(kwpat)
180 180
181 181 templatefilters.filters['utcdate'] = utcdate
182 182 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
183 183 False, '', False)
184 184
185 185 def getnode(self, path, fnode):
186 186 '''Derives changenode from file path and filenode.'''
187 187 # used by kwfilelog.read and kwexpand
188 188 c = context.filectx(self.repo, path, fileid=fnode)
189 189 return c.node()
190 190
191 191 def substitute(self, data, path, node, subfunc):
192 192 '''Replaces keywords in data with expanded template.'''
193 193 def kwsub(mobj):
194 194 kw = mobj.group(1)
195 195 self.ct.use_template(self.templates[kw])
196 196 self.ui.pushbuffer()
197 197 self.ct.show(changenode=node, root=self.repo.root, file=path)
198 198 ekw = templatefilters.firstline(self.ui.popbuffer())
199 199 return '$%s: %s $' % (kw, ekw)
200 200 return subfunc(kwsub, data)
201 201
202 202 def expand(self, path, node, data):
203 203 '''Returns data with keywords expanded.'''
204 204 if not self.restrict and self.matcher(path) and not util.binary(data):
205 205 changenode = self.getnode(path, node)
206 206 return self.substitute(data, path, changenode, self.re_kw.sub)
207 207 return data
208 208
209 209 def iskwfile(self, path, islink):
210 210 '''Returns true if path matches [keyword] pattern
211 211 and is not a symbolic link.
212 212 Caveat: localrepository._link fails on Windows.'''
213 213 return self.matcher(path) and not islink(path)
214 214
215 215 def overwrite(self, node=None, expand=True, files=None):
216 216 '''Overwrites selected files expanding/shrinking keywords.'''
217 217 ctx = self.repo.changectx(node)
218 218 mf = ctx.manifest()
219 219 if node is not None: # commit
220 220 files = [f for f in ctx.files() if f in mf]
221 221 notify = self.ui.debug
222 222 else: # kwexpand/kwshrink
223 223 notify = self.ui.note
224 224 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
225 225 if candidates:
226 226 self.restrict = True # do not expand when reading
227 227 candidates.sort()
228 228 action = expand and 'expanding' or 'shrinking'
229 229 for f in candidates:
230 230 fp = self.repo.file(f)
231 231 data = fp.read(mf[f])
232 232 if util.binary(data):
233 233 continue
234 234 if expand:
235 235 changenode = node or self.getnode(f, mf[f])
236 236 data, found = self.substitute(data, f, changenode,
237 237 self.re_kw.subn)
238 238 else:
239 239 found = self.re_kw.search(data)
240 240 if found:
241 241 notify(_('overwriting %s %s keywords\n') % (f, action))
242 242 self.repo.wwrite(f, data, mf.flags(f))
243 243 self.repo.dirstate.normal(f)
244 244 self.restrict = False
245 245
246 246 def shrinktext(self, text):
247 247 '''Unconditionally removes all keyword substitutions from text.'''
248 248 return self.re_kw.sub(r'$\1$', text)
249 249
250 250 def shrink(self, fname, text):
251 251 '''Returns text with all keyword substitutions removed.'''
252 252 if self.matcher(fname) and not util.binary(text):
253 253 return self.shrinktext(text)
254 254 return text
255 255
256 256 def shrinklines(self, fname, lines):
257 257 '''Returns lines with keyword substitutions removed.'''
258 258 if self.matcher(fname):
259 259 text = ''.join(lines)
260 260 if not util.binary(text):
261 261 return self.shrinktext(text).splitlines(True)
262 262 return lines
263 263
264 264 def wread(self, fname, data):
265 265 '''If in restricted mode returns data read from wdir with
266 266 keyword substitutions removed.'''
267 267 return self.restrict and self.shrink(fname, data) or data
268 268
class kwfilelog(filelog.filelog):
    '''
    Subclass of filelog hooking into its read, add and cmp methods.
    Keywords are "stored" unexpanded and are only processed when read.
    '''
    def __init__(self, opener, path):
        super(kwfilelog, self).__init__(opener, path)
        self.path = path
        # shared templater installed by reposetup
        self.kwt = kwtools['templater']

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        raw = super(kwfilelog, self).read(node)
        return self.kwt.expand(self.path, node, raw)

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        shrunk = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(shrunk, meta, tr, link,
                                          p1=p1, p2=p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        shrunk = self.kwt.shrink(self.path, text)
        if not self.renamed(node):
            return revlog.revlog.cmp(self, node, shrunk)
        stored = super(kwfilelog, self).read(node)
        return stored != shrunk
296 296
def _status(ui, repo, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if not kwt:
        # extension loaded but no usable patterns: distinguish the
        # "patterns present but unusable" case from "none configured"
        if ui.configitems('keyword'):
            raise util.Abort(_('[keyword] patterns cannot match'))
        raise util.Abort(_('no [keyword] patterns configured'))
    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    return repo.status(files=files, match=match, list_clean=True)
306 306
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to kwtemplater.overwrite.

    Aborts when any selected file has uncommitted changes, so keyword
    rewriting never mixes with user edits.  Only clean files are
    handed to overwrite.'''
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    if modified or added or removed or deleted:
        raise util.Abort(_('outstanding uncommitted changes in given files'))
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        kwt.overwrite(expand=expand, files=clean)
    finally:
        # old-style lock release: dropping the last reference unlocks
        del wlock, lock
321 321
322 322
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps
    and their expansion.

    Extend current configuration by specifying maps as arguments
    and optionally by reading from an additional hgrc file.

    Override current keyword template maps with "default" option.

    Works by creating a throwaway repository in a temp directory,
    committing a demo file there and printing the expanded result;
    the temp repo is removed afterwards.
    '''
    def demostatus(stat):
        # emphasized one-line status header
        ui.status(_('\n\t%s\n') % stat)

    def demoitems(section, items):
        # print a config section in hgrc syntax
        ui.write('[%s]\n' % section)
        for k, v in items:
            ui.write('%s = %s\n' % (k, v))

    msg = 'hg keyword config and expansion example'
    kwstatus = 'current'
    fn = 'demo.txt'
    branchname = 'demobranch'
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repo at %s\n') % tmpdir)
    repo = localrepo.localrepository(ui, path=tmpdir, create=True)
    ui.setconfig('keyword', fn, '')
    if args or opts.get('rcfile'):
        kwstatus = 'custom'
        if opts.get('rcfile'):
            # additional maps from an external hgrc-style file
            ui.readconfig(opts.get('rcfile'))
    if opts.get('default'):
        kwstatus = 'default'
        kwmaps = kwtemplater.templates
        if ui.configitems('keywordmaps'):
            # override maps from optional rcfile
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v)
    elif args:
        # simulate hgrc parsing
        rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
        fp = repo.opener('hgrc', 'w')
        fp.writelines(rcmaps)
        fp.close()
        ui.readconfig(repo.join('hgrc'))
    if not opts.get('default'):
        kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
    # install the keyword machinery into the throwaway repo
    reposetup(ui, repo)
    for k, v in ui.configitems('extensions'):
        if k.endswith('keyword'):
            extension = '%s = %s' % (k, v)
            break
    # NOTE(review): 'extension' stays unbound if no [extensions] entry
    # ends in 'keyword' -- presumably the command only runs with the
    # extension enabled; confirm.
    demostatus('config using %s keyword template maps' % kwstatus)
    ui.write('[extensions]\n%s\n' % extension)
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordmaps', kwmaps.iteritems())
    # demo file contains one unexpanded "$kw$" per configured keyword
    keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
    repo.wopener(fn, 'w').write(keywords)
    repo.add([fn])
    path = repo.wjoin(fn)
    ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
    ui.note(keywords)
    ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
    # silence branch command if not verbose
    quiet = ui.quiet
    ui.quiet = not ui.verbose
    commands.branch(ui, repo, branchname)
    ui.quiet = quiet
    # disable commit hooks so the demo commit stays self-contained
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '')
    ui.note(_('unhooked all commit hooks\n'))
    ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
    repo.commit(text=msg)
    format = ui.verbose and ' in %s' % path or ''
    demostatus('%s keywords expanded%s' % (kwstatus, format))
    ui.write(repo.wread(fn))
    ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
    shutil.rmtree(tmpdir, ignore_errors=True)
402 402
def expand(ui, repo, *pats, **opts):
    '''expand keywords in working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # thin wrapper around _kwfwrite; 3rd argument sets expansion to True
    _kwfwrite(ui, repo, True, *pats, **opts)
412 412
def files(ui, repo, *pats, **opts):
    '''print files currently configured for keyword expansion

    Crosscheck which files in working directory are potential targets for
    keyword expansion.
    That is, files matched by [keyword] config patterns but not symlinks.
    '''
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    # tracked files (optionally plus untracked ones), sorted for output
    candidates = modified + added + clean
    if opts.get('untracked'):
        candidates += unknown
    candidates.sort()
    wctx = repo.workingctx()
    islink = lambda p: 'l' in wctx.fileflags(p)
    kwfiles = [f for f in candidates if kwt.iskwfile(f, islink)]
    cwd = repo.getcwd() if pats else ''
    # (flag, filelist) pairs: 'K' = keyword targets, 'I' = ignored
    kwfstats = () if opts.get('ignore') else (('K', kwfiles),)
    if opts.get('all') or opts.get('ignore'):
        excluded = [f for f in candidates if f not in kwfiles]
        kwfstats += (('I', excluded),)
    for char, filenames in kwfstats:
        if opts.get('all') or ui.verbose:
            fmt = '%s %%s\n' % char
        else:
            fmt = '%s\n'
        for f in filenames:
            ui.write(fmt % repo.pathto(f, cwd))
438 438
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in working directory

    Run before changing/disabling active keywords
    or if you experience problems with "hg import" or "hg merge".

    kwshrink refuses to run if given files contain local changes.
    '''
    # thin wrapper around _kwfwrite; 3rd argument sets expansion to False
    _kwfwrite(ui, repo, False, *pats, **opts)
449 449
450 450
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    This is done for local repos only, and only if there are
    files configured at all for keyword substitution.'''

    try:
        # bail out for non-local repos, commands that must not expand
        # (nokwcommands is defined earlier in this module), repos
        # nested inside .hg, and bundle repos
        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        pass

    # split [keyword] config into include/exclude patterns;
    # .hg* files are always excluded
    inc, exc = [], ['.hg*']
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            inc.append(pat)
        else:
            exc.append(pat)
    if not inc:
        # nothing configured for expansion: leave repo untouched
        return

    kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)

    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            # serve keyword-aware filelogs for all files
            return kwfilelog(self.sopener, f)

        def wread(self, filename):
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, files=None, text='', user=None, date=None,
                   match=util.always, force=False, force_editor=False,
                   p1=None, p2=None, extra={}, empty_ok=False):
            wlock = lock = None
            _p1 = _p2 = None
            try:
                wlock = self.wlock()
                lock = self.lock()
                # store and postpone commit hooks so they only fire
                # after keywords have been overwritten
                commithooks = {}
                for name, cmd in ui.configitems('hooks'):
                    if name.split('.', 1)[0] == 'commit':
                        commithooks[name] = cmd
                        ui.setconfig('hooks', name, None)
                if commithooks:
                    # store parents for commit hook environment
                    if p1 is None:
                        _p1, _p2 = repo.dirstate.parents()
                    else:
                        _p1, _p2 = p1, p2 or nullid
                    _p1 = hex(_p1)
                    if _p2 == nullid:
                        _p2 = ''
                    else:
                        _p2 = hex(_p2)

                node = super(kwrepo,
                             self).commit(files=files, text=text, user=user,
                                          date=date, match=match, force=force,
                                          force_editor=force_editor,
                                          p1=p1, p2=p2, extra=extra,
                                          empty_ok=empty_ok)

                # restore commit hooks
                for name, cmd in commithooks.iteritems():
                    ui.setconfig('hooks', name, cmd)
                if node is not None:
                    kwt.overwrite(node=node)
                    repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
                return node
            finally:
                # old-style lock release: dropping the references unlocks
                del wlock, lock

    repo.__class__ = kwrepo
    # monkeypatch patch/web machinery so diffs and hgweb views are
    # keyword-free (_kw* helpers are defined earlier in this module)
    patch.patchfile.__init__ = _kwpatchfile_init
    patch.diff = _kw_diff
    webcommands.changeset = webcommands.rev = _kwweb_changeset
    webcommands.filediff = webcommands.diff = _kwweb_filediff
537 537
538 538
# command table picked up by hg's dispatcher:
# command name -> (callback, options list, synopsis string)
cmdtable = {
    'kwdemo':
        (demo,
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', [], _('read maps from rcfile'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
    'kwexpand': (expand, commands.walkopts,
                 _('hg kwexpand [OPTION]... [FILE]...')),
    'kwfiles':
        (files,
         [('a', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'untracked', None, _('additionally show untracked files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...')),
    'kwshrink': (shrink, commands.walkopts,
                 _('hg kwshrink [OPTION]... [FILE]...')),
}
@@ -1,285 +1,285 b''
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets incoming at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # diffstat = True # add a diffstat before the diff content
44 44 # sources = serve # notify if source of incoming changes in this list
45 45 # # (serve == ssh or http, push, pull, bundle)
46 46 # [email]
47 47 # from = user@host.com # email address to send as if none given
48 48 # [web]
49 49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 50 #
51 51 # notify config file has same format as regular hgrc. it has two
52 52 # sections so you can express subscriptions in whatever way is handier
53 53 # for you.
54 54 #
55 55 # [usersubs]
56 56 # # key is subscriber email, value is ","-separated list of glob patterns
57 57 # user@host = pattern
58 58 #
59 59 # [reposubs]
60 60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 61 # pattern = user@host
62 62 #
63 63 # glob patterns are matched against path to repo root.
64 64 #
65 65 # if you like, you can put notify config file in repo that users can
66 66 # push changes to, they can manage their own subscriptions.
67 67
68 68 from mercurial.i18n import _
69 from mercurial.node import *
69 from mercurial.node import bin, short
70 70 from mercurial import patch, cmdutil, templater, util, mail
71 71 import email.Parser, fnmatch, socket, time
72 72
# template for single changeset can include email headers.
single_template = '''
Subject: changeset in {webroot}: {desc|firstline|strip}
From: {author}

changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
description:
\t{desc|tabindent|strip}
'''.lstrip()

# template for multiple changesets should not contain email headers,
# because only first set of headers will be used and result will look
# strange.
multiple_template = '''
changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
summary: {desc|firstline}
'''

# default template per hook type; hook types not listed here fall back
# to single_template (see notifier.__init__)
deftemplates = {
    'changegroup': multiple_template,
}
96 96
class notifier(object):
    '''email notification class.

    Reads subscriptions and formatting options from the [notify]
    section (plus an optional external config file) and renders
    changesets through a changeset templater into the ui buffer.'''

    def __init__(self, ui, repo, hooktype):
        self.ui = ui
        cfg = self.ui.config('notify', 'config')
        if cfg:
            # external file holding [usersubs]/[reposubs] sections
            self.ui.readsections(cfg, 'usersubs', 'reposubs')
        self.repo = repo
        self.stripcount = int(self.ui.config('notify', 'strip', 0))
        self.root = self.strip(self.repo.root)
        self.domain = self.ui.config('notify', 'domain')
        self.subs = self.subscribers()

        # hook-type-specific template wins over the generic one
        mapfile = self.ui.config('notify', 'style')
        template = (self.ui.config('notify', hooktype) or
                    self.ui.config('notify', 'template'))
        self.t = cmdutil.changeset_templater(self.ui, self.repo,
                                             False, mapfile, False)
        if not mapfile and not template:
            template = deftemplates.get(hooktype) or single_template
        if template:
            template = templater.parsestring(template, quoted=False)
            self.t.use_template(template)

    def strip(self, path):
        '''strip leading slashes from local path, turn into web-safe path.'''

        path = util.pconvert(path)
        count = self.stripcount
        while count > 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c+1:]
            count -= 1
        return path

    def fixmail(self, addr):
        '''try to clean up email addresses.'''

        addr = util.email(addr.strip())
        if self.domain:
            # strip '@localhost' and append the configured domain to
            # bare user names
            a = addr.find('@localhost')
            if a != -1:
                addr = addr[:a]
            if '@' not in addr:
                return addr + '@' + self.domain
        return addr

    def subscribers(self):
        '''return list of email addresses of subscribers to this repo.'''

        # collect in a dict to deduplicate addresses subscribed via
        # both [usersubs] and [reposubs]
        subs = {}
        for user, pats in self.ui.configitems('usersubs'):
            for pat in pats.split(','):
                if fnmatch.fnmatch(self.repo.root, pat.strip()):
                    subs[self.fixmail(user)] = 1
        for pat, users in self.ui.configitems('reposubs'):
            if fnmatch.fnmatch(self.repo.root, pat):
                for user in users.split(','):
                    subs[self.fixmail(user)] = 1
        subs = subs.keys()
        subs.sort()
        return subs

    def url(self, path=None):
        '''return web URL of repo (or path, if given).'''
        return self.ui.config('web', 'baseurl') + (path or self.root)

    def node(self, node):
        '''format one changeset.'''

        self.t.show(changenode=node, changes=self.repo.changelog.read(node),
                    baseurl=self.ui.config('web', 'baseurl'),
                    root=self.repo.root,
                    webroot=self.root)

    def skipsource(self, source):
        '''true if incoming changes from this source should be skipped.'''
        ok_sources = self.ui.config('notify', 'sources', 'serve').split()
        return source not in ok_sources

    def send(self, node, count, data):
        '''send message.'''

        # parse the templated output back into a message so existing
        # headers can be fixed up before delivery
        p = email.Parser.Parser()
        msg = p.parsestr(data)

        def fix_subject():
            '''try to make subject line exist and be useful.'''

            subject = msg['Subject']
            if not subject:
                if count > 1:
                    subject = _('%s: %d new changesets') % (self.root, count)
                else:
                    # first line of the changeset description
                    changes = self.repo.changelog.read(node)
                    s = changes[4].lstrip().split('\n', 1)[0].rstrip()
                    subject = '%s: %s' % (self.root, s)
            maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
            if maxsubject and len(subject) > maxsubject:
                subject = subject[:maxsubject-3] + '...'
            del msg['Subject']
            msg['Subject'] = subject

        def fix_sender():
            '''try to make message have proper sender.'''

            sender = msg['From']
            if not sender:
                sender = self.ui.config('email', 'from') or self.ui.username()
            if '@' not in sender or '@localhost' in sender:
                sender = self.fixmail(sender)
            del msg['From']
            msg['From'] = sender

        msg['Date'] = util.datestr(date=util.makedate(),
                                   format="%a, %d %b %Y %H:%M:%S",
                                   timezone=True)
        fix_subject()
        fix_sender()

        msg['X-Hg-Notification'] = 'changeset ' + short(node)
        if not msg['Message-Id']:
            msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                 (short(node), int(time.time()),
                                  hash(self.repo.root), socket.getfqdn()))
        msg['To'] = ', '.join(self.subs)

        msgtext = msg.as_string(0)
        if self.ui.configbool('notify', 'test', True):
            # test mode (the default): print instead of sending
            self.ui.write(msgtext)
            if not msgtext.endswith('\n'):
                self.ui.write('\n')
        else:
            self.ui.status(_('notify: sending %d subscribers %d changes\n') %
                           (len(self.subs), count))
            mail.sendmail(self.ui, util.email(msg['From']),
                          self.subs, msgtext)

    def diff(self, node, ref):
        '''append diffstat and (possibly truncated) diffs to the buffer,
        honoring the notify.maxdiff and notify.diffstat options.'''
        maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
        if maxdiff == 0:
            return
        prev = self.repo.changelog.parents(node)[0]
        self.ui.pushbuffer()
        patch.diff(self.repo, prev, ref)
        difflines = self.ui.popbuffer().splitlines(1)
        if self.ui.configbool('notify', 'diffstat', True):
            s = patch.diffstat(difflines)
            # s may be nil, don't include the header if it is
            if s:
                self.ui.write('\ndiffstat:\n\n%s' % s)
        if maxdiff > 0 and len(difflines) > maxdiff:
            self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
                          (len(difflines), maxdiff))
            difflines = difflines[:maxdiff]
        elif difflines:
            self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
        self.ui.write(*difflines)
257 257
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    '''send email notifications to interested subscribers.

    if used as changegroup hook, send one email for all changesets in
    changegroup. else send one email per changeset.'''
    notify = notifier(ui, repo, hooktype)
    if not notify.subs:
        ui.debug(_('notify: no subscribers to repo %s\n') % notify.root)
        return
    if notify.skipsource(source):
        ui.debug(_('notify: changes have source "%s" - skipping\n') %
                 source)
        return
    binnode = bin(node)
    ui.pushbuffer()
    if hooktype != 'changegroup':
        # one mail for this single changeset
        count = 1
        notify.node(binnode)
        notify.diff(binnode, binnode)
    else:
        # one mail covering every changeset the group added
        start = repo.changelog.rev(binnode)
        end = repo.changelog.count()
        count = end - start
        for rev in xrange(start, end):
            notify.node(repo.changelog.node(rev))
        notify.diff(binnode, repo.changelog.tip())
    data = ui.popbuffer()
    notify.send(binnode, count, data)
@@ -1,466 +1,466 b''
1 1 # Command for sending a collection of Mercurial changesets as a series
2 2 # of patch emails.
3 3 #
4 4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 5 # which describes the series as a whole.
6 6 #
7 7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 8 # the first line of the changeset description as the subject text.
9 9 # The message contains two or three body parts:
10 10 #
11 11 # The remainder of the changeset description.
12 12 #
13 13 # [Optional] If the diffstat program is installed, the result of
14 14 # running diffstat on the patch.
15 15 #
16 16 # The patch itself, as generated by "hg export".
17 17 #
18 18 # Each message refers to all of its predecessors using the In-Reply-To
19 19 # and References headers, so they will show up as a sequence in
20 20 # threaded mail and news readers, and in mail archives.
21 21 #
22 22 # For each changeset, you will be prompted with a diffstat summary and
23 23 # the changeset summary, so you can be sure you are sending the right
24 24 # changes.
25 25 #
26 26 # To enable this extension:
27 27 #
28 28 # [extensions]
29 29 # hgext.patchbomb =
30 30 #
31 31 # To configure other defaults, add a section like this to your hgrc
32 32 # file:
33 33 #
34 34 # [email]
35 35 # from = My Name <my@email>
36 36 # to = recipient1, recipient2, ...
37 37 # cc = cc1, cc2, ...
38 38 # bcc = bcc1, bcc2, ...
39 39 #
40 40 # Then you can use the "hg email" command to mail a series of changesets
41 41 # as a patchbomb.
42 42 #
43 43 # To avoid sending patches prematurely, it is a good idea to first run
44 44 # the "email" command with the "-n" option (test only). You will be
45 45 # prompted for an email recipient address, a subject an an introductory
46 46 # message describing the patches of your patchbomb. Then when all is
47 47 # done, patchbomb messages are displayed. If PAGER environment variable
48 48 # is set, your pager will be fired up once for each patchbomb message, so
49 49 # you can verify everything is alright.
50 50 #
51 51 # The "-m" (mbox) option is also very useful. Instead of previewing
52 52 # each patchbomb message in a pager or sending the messages directly,
53 53 # it will create a UNIX mailbox file with the patch emails. This
54 54 # mailbox file can be previewed with any mail user agent which supports
55 55 # UNIX mbox files, i.e. with mutt:
56 56 #
57 57 # % mutt -R -f mbox
58 58 #
59 59 # When you are previewing the patchbomb messages, you can use `formail'
60 60 # (a utility that is commonly installed as part of the procmail package),
61 61 # to send each message out:
62 62 #
63 63 # % formail -s sendmail -bm -t < mbox
64 64 #
65 65 # That should be all. Now your patchbomb is on its way out.
66 66
67 67 import os, errno, socket, tempfile
68 68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
69 69 import email.Utils, email.Encoders
70 70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
71 71 from mercurial.i18n import _
72 from mercurial.node import *
72 from mercurial.node import bin
73 73
74 74 def patchbomb(ui, repo, *revs, **opts):
75 75 '''send changesets by email
76 76
77 77 By default, diffs are sent in the format generated by hg export,
78 78 one per message. The series starts with a "[PATCH 0 of N]"
79 79 introduction, which describes the series as a whole.
80 80
81 81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 82 the first line of the changeset description as the subject text.
83 83 The message contains two or three body parts. First, the rest of
84 84 the changeset description. Next, (optionally) if the diffstat
85 85 program is installed, the result of running diffstat on the patch.
86 86 Finally, the patch itself, as generated by "hg export".
87 87
88 88 With --outgoing, emails will be generated for patches not
89 89 found in the destination repository (or only those which are
90 90 ancestors of the specified revisions if any are provided)
91 91
92 92 With --bundle, changesets are selected as for --outgoing,
93 93 but a single email containing a binary Mercurial bundle as an
94 94 attachment will be sent.
95 95
96 96 Examples:
97 97
98 98 hg email -r 3000 # send patch 3000 only
99 99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
100 100 hg email -r 3000:3005 # send patches 3000 through 3005
101 101 hg email 3000 # send patch 3000 (deprecated)
102 102
103 103 hg email -o # send all patches not in default
104 104 hg email -o DEST # send all patches not in DEST
105 105 hg email -o -r 3000 # send all ancestors of 3000 not in default
106 106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
107 107
108 108 hg email -b # send bundle of all patches not in default
109 109 hg email -b DEST # send bundle of all patches not in DEST
110 110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
111 111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
112 112
113 113 Before using this command, you will need to enable email in your hgrc.
114 114 See the [email] section in hgrc(5) for details.
115 115 '''
116 116
117 117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
118 118 if not ui.interactive:
119 119 return default
120 120 if default:
121 121 prompt += ' [%s]' % default
122 122 prompt += rest
123 123 while True:
124 124 r = ui.prompt(prompt, default=default)
125 125 if r:
126 126 return r
127 127 if default is not None:
128 128 return default
129 129 if empty_ok:
130 130 return r
131 131 ui.warn(_('Please enter a valid value.\n'))
132 132
133 133 def confirm(s, denial):
134 134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
135 135 raise util.Abort(denial)
136 136
137 137 def cdiffstat(summary, patchlines):
138 138 s = patch.diffstat(patchlines)
139 139 if s:
140 140 if summary:
141 141 ui.write(summary, '\n')
142 142 ui.write(s, '\n')
143 143 confirm(_('Does the diffstat above look okay'),
144 144 _('diffstat rejected'))
145 145 elif s is None:
146 146 ui.warn(_('No diffstat information available.\n'))
147 147 s = ''
148 148 return s
149 149
150 150 def makepatch(patch, idx, total):
151 151 desc = []
152 152 node = None
153 153 body = ''
154 154 for line in patch:
155 155 if line.startswith('#'):
156 156 if line.startswith('# Node ID'):
157 157 node = line.split()[-1]
158 158 continue
159 159 if line.startswith('diff -r') or line.startswith('diff --git'):
160 160 break
161 161 desc.append(line)
162 162 if not node:
163 163 raise ValueError
164 164
165 165 if opts['attach']:
166 166 body = ('\n'.join(desc[1:]).strip() or
167 167 'Patch subject is complete summary.')
168 168 body += '\n\n\n'
169 169
170 170 if opts.get('plain'):
171 171 while patch and patch[0].startswith('# '):
172 172 patch.pop(0)
173 173 if patch:
174 174 patch.pop(0)
175 175 while patch and not patch[0].strip():
176 176 patch.pop(0)
177 177 if opts.get('diffstat'):
178 178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
179 179 if opts.get('attach') or opts.get('inline'):
180 180 msg = email.MIMEMultipart.MIMEMultipart()
181 181 if body:
182 182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
183 183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
184 184 binnode = bin(node)
185 185 # if node is mq patch, it will have patch file name as tag
186 186 patchname = [t for t in repo.nodetags(binnode)
187 187 if t.endswith('.patch') or t.endswith('.diff')]
188 188 if patchname:
189 189 patchname = patchname[0]
190 190 elif total > 1:
191 191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
192 192 binnode, idx, total)
193 193 else:
194 194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
195 195 disposition = 'inline'
196 196 if opts['attach']:
197 197 disposition = 'attachment'
198 198 p['Content-Disposition'] = disposition + '; filename=' + patchname
199 199 msg.attach(p)
200 200 else:
201 201 body += '\n'.join(patch)
202 202 msg = email.MIMEText.MIMEText(body)
203 203
204 204 subj = desc[0].strip().rstrip('. ')
205 205 if total == 1:
206 206 subj = '[PATCH] ' + (opts.get('subject') or subj)
207 207 else:
208 208 tlen = len(str(total))
209 209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
210 210 msg['Subject'] = subj
211 211 msg['X-Mercurial-Node'] = node
212 212 return msg
213 213
214 214 def outgoing(dest, revs):
215 215 '''Return the revisions present locally but not in dest'''
216 216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
217 217 revs = [repo.lookup(rev) for rev in revs]
218 218 other = hg.repository(ui, dest)
219 219 ui.status(_('comparing with %s\n') % dest)
220 220 o = repo.findoutgoing(other)
221 221 if not o:
222 222 ui.status(_("no changes found\n"))
223 223 return []
224 224 o = repo.changelog.nodesbetween(o, revs or None)[0]
225 225 return [str(repo.changelog.rev(r)) for r in o]
226 226
227 227 def getbundle(dest):
228 228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
229 229 tmpfn = os.path.join(tmpdir, 'bundle')
230 230 try:
231 231 commands.bundle(ui, repo, tmpfn, dest, **opts)
232 232 return open(tmpfn, 'rb').read()
233 233 finally:
234 234 try:
235 235 os.unlink(tmpfn)
236 236 except:
237 237 pass
238 238 os.rmdir(tmpdir)
239 239
240 240 if not (opts.get('test') or opts.get('mbox')):
241 241 # really sending
242 242 mail.validateconfig(ui)
243 243
244 244 if not (revs or opts.get('rev')
245 245 or opts.get('outgoing') or opts.get('bundle')):
246 246 raise util.Abort(_('specify at least one changeset with -r or -o'))
247 247
248 248 cmdutil.setremoteconfig(ui, opts)
249 249 if opts.get('outgoing') and opts.get('bundle'):
250 250 raise util.Abort(_("--outgoing mode always on with --bundle;"
251 251 " do not re-specify --outgoing"))
252 252
253 253 if opts.get('outgoing') or opts.get('bundle'):
254 254 if len(revs) > 1:
255 255 raise util.Abort(_("too many destinations"))
256 256 dest = revs and revs[0] or None
257 257 revs = []
258 258
259 259 if opts.get('rev'):
260 260 if revs:
261 261 raise util.Abort(_('use only one form to specify the revision'))
262 262 revs = opts.get('rev')
263 263
264 264 if opts.get('outgoing'):
265 265 revs = outgoing(dest, opts.get('rev'))
266 266 if opts.get('bundle'):
267 267 opts['revs'] = revs
268 268
269 269 # start
270 270 if opts.get('date'):
271 271 start_time = util.parsedate(opts.get('date'))
272 272 else:
273 273 start_time = util.makedate()
274 274
275 275 def genmsgid(id):
276 276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
277 277
278 278 def getdescription(body, sender):
279 279 if opts.get('desc'):
280 280 body = open(opts.get('desc')).read()
281 281 else:
282 282 ui.write(_('\nWrite the introductory message for the '
283 283 'patch series.\n\n'))
284 284 body = ui.edit(body, sender)
285 285 return body
286 286
287 287 def getexportmsgs():
288 288 patches = []
289 289
290 290 class exportee:
291 291 def __init__(self, container):
292 292 self.lines = []
293 293 self.container = container
294 294 self.name = 'email'
295 295
296 296 def write(self, data):
297 297 self.lines.append(data)
298 298
299 299 def close(self):
300 300 self.container.append(''.join(self.lines).split('\n'))
301 301 self.lines = []
302 302
303 303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
304 304 'switch_parent': False,
305 305 'text': None,
306 306 'git': opts.get('git')})
307 307
308 308 jumbo = []
309 309 msgs = []
310 310
311 311 ui.write(_('This patch series consists of %d patches.\n\n')
312 312 % len(patches))
313 313
314 314 for p, i in zip(patches, xrange(len(patches))):
315 315 jumbo.extend(p)
316 316 msgs.append(makepatch(p, i + 1, len(patches)))
317 317
318 318 if len(patches) > 1:
319 319 tlen = len(str(len(patches)))
320 320
321 321 subj = '[PATCH %0*d of %d] %s' % (
322 322 tlen, 0, len(patches),
323 323 opts.get('subject') or
324 324 prompt('Subject:',
325 325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
326 326
327 327 body = ''
328 328 if opts.get('diffstat'):
329 329 d = cdiffstat(_('Final summary:\n'), jumbo)
330 330 if d:
331 331 body = '\n' + d
332 332
333 333 body = getdescription(body, sender)
334 334 msg = email.MIMEText.MIMEText(body)
335 335 msg['Subject'] = subj
336 336
337 337 msgs.insert(0, msg)
338 338 return msgs
339 339
340 340 def getbundlemsgs(bundle):
341 341 subj = (opts.get('subject')
342 342 or prompt('Subject:', default='A bundle for your repository'))
343 343
344 344 body = getdescription('', sender)
345 345 msg = email.MIMEMultipart.MIMEMultipart()
346 346 if body:
347 347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
348 348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
349 349 datapart.set_payload(bundle)
350 350 datapart.add_header('Content-Disposition', 'attachment',
351 351 filename='bundle.hg')
352 352 email.Encoders.encode_base64(datapart)
353 353 msg.attach(datapart)
354 354 msg['Subject'] = subj
355 355 return [msg]
356 356
357 357 sender = (opts.get('from') or ui.config('email', 'from') or
358 358 ui.config('patchbomb', 'from') or
359 359 prompt('From', ui.username()))
360 360
361 361 if opts.get('bundle'):
362 362 msgs = getbundlemsgs(getbundle(dest))
363 363 else:
364 364 msgs = getexportmsgs()
365 365
366 366 def getaddrs(opt, prpt, default = None):
367 367 addrs = opts.get(opt) or (ui.config('email', opt) or
368 368 ui.config('patchbomb', opt) or
369 369 prompt(prpt, default = default)).split(',')
370 370 return [a.strip() for a in addrs if a.strip()]
371 371
372 372 to = getaddrs('to', 'To')
373 373 cc = getaddrs('cc', 'Cc', '')
374 374
375 375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
376 376 ui.config('patchbomb', 'bcc') or '').split(',')
377 377 bcc = [a.strip() for a in bcc if a.strip()]
378 378
379 379 ui.write('\n')
380 380
381 381 parent = None
382 382
383 383 sender_addr = email.Utils.parseaddr(sender)[1]
384 384 sendmail = None
385 385 for m in msgs:
386 386 try:
387 387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
388 388 except TypeError:
389 389 m['Message-Id'] = genmsgid('patchbomb')
390 390 if parent:
391 391 m['In-Reply-To'] = parent
392 392 else:
393 393 parent = m['Message-Id']
394 394 m['Date'] = util.datestr(date=start_time,
395 395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
396 396
397 397 start_time = (start_time[0] + 1, start_time[1])
398 398 m['From'] = sender
399 399 m['To'] = ', '.join(to)
400 400 if cc:
401 401 m['Cc'] = ', '.join(cc)
402 402 if bcc:
403 403 m['Bcc'] = ', '.join(bcc)
404 404 if opts.get('test'):
405 405 ui.status('Displaying ', m['Subject'], ' ...\n')
406 406 ui.flush()
407 407 if 'PAGER' in os.environ:
408 408 fp = os.popen(os.environ['PAGER'], 'w')
409 409 else:
410 410 fp = ui
411 411 try:
412 412 fp.write(m.as_string(0))
413 413 fp.write('\n')
414 414 except IOError, inst:
415 415 if inst.errno != errno.EPIPE:
416 416 raise
417 417 if fp is not ui:
418 418 fp.close()
419 419 elif opts.get('mbox'):
420 420 ui.status('Writing ', m['Subject'], ' ...\n')
421 421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
422 422 date = util.datestr(date=start_time,
423 423 format='%a %b %d %H:%M:%S %Y', timezone=False)
424 424 fp.write('From %s %s\n' % (sender_addr, date))
425 425 fp.write(m.as_string(0))
426 426 fp.write('\n\n')
427 427 fp.close()
428 428 else:
429 429 if not sendmail:
430 430 sendmail = mail.connect(ui)
431 431 ui.status('Sending ', m['Subject'], ' ...\n')
432 432 # Exim does not remove the Bcc field
433 433 del m['Bcc']
434 434 sendmail(sender, to + bcc + cc, m.as_string(0))
435 435
436 436 cmdtable = {
437 437 "email":
438 438 (patchbomb,
439 439 [('a', 'attach', None, _('send patches as attachments')),
440 440 ('i', 'inline', None, _('send patches as inline attachments')),
441 441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
442 442 ('c', 'cc', [], _('email addresses of copy recipients')),
443 443 ('d', 'diffstat', None, _('add diffstat output to messages')),
444 444 ('', 'date', '', _('use the given date as the sending date')),
445 445 ('', 'desc', '', _('use the given file as the series description')),
446 446 ('g', 'git', None, _('use git extended diff format')),
447 447 ('f', 'from', '', _('email address of sender')),
448 448 ('', 'plain', None, _('omit hg patch header')),
449 449 ('n', 'test', None, _('print messages that would be sent')),
450 450 ('m', 'mbox', '',
451 451 _('write messages to mbox file instead of sending them')),
452 452 ('o', 'outgoing', None,
453 453 _('send changes not found in the target repository')),
454 454 ('b', 'bundle', None,
455 455 _('send changes not in target as a binary bundle')),
456 456 ('r', 'rev', [], _('a revision to send')),
457 457 ('s', 'subject', '',
458 458 _('subject of first message (intro or single patch)')),
459 459 ('t', 'to', [], _('email addresses of recipients')),
460 460 ('', 'force', None,
461 461 _('run even when remote repository is unrelated (with -b)')),
462 462 ('', 'base', [],
463 463 _('a base changeset to specify instead of a destination (with -b)')),
464 464 ] + commands.remoteopts,
465 465 _('hg email [OPTION]... [DEST]...'))
466 466 }
@@ -1,107 +1,107 b''
1 1 # win32text.py - LF <-> CRLF translation utilities for Windows users
2 2 #
3 3 # This software may be used and distributed according to the terms
4 4 # of the GNU General Public License, incorporated herein by reference.
5 5 #
6 6 # To perform automatic newline conversion, use:
7 7 #
8 8 # [extensions]
9 9 # hgext.win32text =
10 10 # [encode]
11 11 # ** = cleverencode:
12 12 # [decode]
13 13 # ** = cleverdecode:
14 14 #
15 15 # If not doing conversion, to make sure you do not commit CRLF by accident:
16 16 #
17 17 # [hooks]
18 18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
19 19 #
20 20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
21 21 #
22 22 # [hooks]
23 23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
24 24
25 25 from mercurial import util, ui
26 26 from mercurial.i18n import gettext as _
27 from mercurial.node import *
27 from mercurial.node import bin, short
28 28 import re
29 29
30 30 # regexp for single LF without CR preceding.
31 31 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
32 32
33 33 def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
34 34 # warn if already has CRLF in repository.
35 35 # it might cause unexpected eol conversion.
36 36 # see issue 302:
37 37 # http://www.selenic.com/mercurial/bts/issue302
38 38 if '\r\n' in s and ui and filename and repo:
39 39 ui.warn(_('WARNING: %s already has CRLF line endings\n'
40 40 'and does not need EOL conversion by the win32text plugin.\n'
41 41 'Before your next commit, please reconsider your '
42 42 'encode/decode settings in \nMercurial.ini or %s.\n') %
43 43 (filename, repo.join('hgrc')))
44 44 # replace single LF to CRLF
45 45 return re_single_lf.sub('\\1\r\n', s)
46 46
47 47 def dumbencode(s, cmd):
48 48 return s.replace('\r\n', '\n')
49 49
50 50 def clevertest(s, cmd):
51 51 if '\0' in s: return False
52 52 return True
53 53
54 54 def cleverdecode(s, cmd, **kwargs):
55 55 if clevertest(s, cmd):
56 56 return dumbdecode(s, cmd, **kwargs)
57 57 return s
58 58
59 59 def cleverencode(s, cmd):
60 60 if clevertest(s, cmd):
61 61 return dumbencode(s, cmd)
62 62 return s
63 63
64 64 _filters = {
65 65 'dumbdecode:': dumbdecode,
66 66 'dumbencode:': dumbencode,
67 67 'cleverdecode:': cleverdecode,
68 68 'cleverencode:': cleverencode,
69 69 }
70 70
71 71 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
72 72 halt = False
73 73 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
74 74 c = repo.changectx(rev)
75 75 for f in c.files():
76 76 if f not in c:
77 77 continue
78 78 data = c[f].data()
79 79 if '\0' not in data and '\r\n' in data:
80 80 if not halt:
81 81 ui.warn(_('Attempt to commit or push text file(s) '
82 82 'using CRLF line endings\n'))
83 83 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
84 84 halt = True
85 85 if halt and hooktype == 'pretxnchangegroup':
86 86 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
87 87 'add to Mercurial.ini or .hg/hgrc:\n'
88 88 '\n'
89 89 '[hooks]\n'
90 90 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
91 91 '\n'
92 92 'and also consider adding:\n'
93 93 '\n'
94 94 '[extensions]\n'
95 95 'hgext.win32text =\n'
96 96 '[encode]\n'
97 97 '** = cleverencode:\n'
98 98 '[decode]\n'
99 99 '** = cleverdecode:\n'))
100 100 return halt
101 101
102 102 def reposetup(ui, repo):
103 103 if not repo.local():
104 104 return
105 105 for name, fn in _filters.iteritems():
106 106 repo.adddatafilter(name, fn)
107 107
@@ -1,224 +1,224 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 from node import *
9 from node import hex
10 10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 11 import zlib, gzip
12 12
13 13 def tidyprefix(dest, prefix, suffixes):
14 14 '''choose prefix to use for names in archive. make sure prefix is
15 15 safe for consumers.'''
16 16
17 17 if prefix:
18 18 prefix = util.normpath(prefix)
19 19 else:
20 20 if not isinstance(dest, str):
21 21 raise ValueError('dest must be string if no prefix')
22 22 prefix = os.path.basename(dest)
23 23 lower = prefix.lower()
24 24 for sfx in suffixes:
25 25 if lower.endswith(sfx):
26 26 prefix = prefix[:-len(sfx)]
27 27 break
28 28 lpfx = os.path.normpath(util.localpath(prefix))
29 29 prefix = util.pconvert(lpfx)
30 30 if not prefix.endswith('/'):
31 31 prefix += '/'
32 32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 33 raise util.Abort(_('archive prefix contains illegal components'))
34 34 return prefix
35 35
36 36 class tarit:
37 37 '''write archive to tar file or stream. can write uncompressed,
38 38 or compress with gzip or bzip2.'''
39 39
40 40 class GzipFileWithTime(gzip.GzipFile):
41 41
42 42 def __init__(self, *args, **kw):
43 43 timestamp = None
44 44 if 'timestamp' in kw:
45 45 timestamp = kw.pop('timestamp')
46 46 if timestamp == None:
47 47 self.timestamp = time.time()
48 48 else:
49 49 self.timestamp = timestamp
50 50 gzip.GzipFile.__init__(self, *args, **kw)
51 51
52 52 def _write_gzip_header(self):
53 53 self.fileobj.write('\037\213') # magic header
54 54 self.fileobj.write('\010') # compression method
55 55 fname = self.filename[:-3]
56 56 flags = 0
57 57 if fname:
58 58 flags = gzip.FNAME
59 59 self.fileobj.write(chr(flags))
60 60 gzip.write32u(self.fileobj, long(self.timestamp))
61 61 self.fileobj.write('\002')
62 62 self.fileobj.write('\377')
63 63 if fname:
64 64 self.fileobj.write(fname + '\000')
65 65
66 66 def __init__(self, dest, prefix, mtime, kind=''):
67 67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 68 '.tgz', '.tbz2'])
69 69 self.mtime = mtime
70 70
71 71 def taropen(name, mode, fileobj=None):
72 72 if kind == 'gz':
73 73 mode = mode[0]
74 74 if not fileobj:
75 75 fileobj = open(name, mode + 'b')
76 76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 77 zlib.Z_BEST_COMPRESSION,
78 78 fileobj, timestamp=mtime)
79 79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 80 else:
81 81 return tarfile.open(name, mode + kind, fileobj)
82 82
83 83 if isinstance(dest, str):
84 84 self.z = taropen(dest, mode='w:')
85 85 else:
86 86 # Python 2.5-2.5.1 have a regression that requires a name arg
87 87 self.z = taropen(name='', mode='w|', fileobj=dest)
88 88
89 89 def addfile(self, name, mode, islink, data):
90 90 i = tarfile.TarInfo(self.prefix + name)
91 91 i.mtime = self.mtime
92 92 i.size = len(data)
93 93 if islink:
94 94 i.type = tarfile.SYMTYPE
95 95 i.mode = 0777
96 96 i.linkname = data
97 97 data = None
98 98 else:
99 99 i.mode = mode
100 100 data = cStringIO.StringIO(data)
101 101 self.z.addfile(i, data)
102 102
103 103 def done(self):
104 104 self.z.close()
105 105
106 106 class tellable:
107 107 '''provide tell method for zipfile.ZipFile when writing to http
108 108 response file object.'''
109 109
110 110 def __init__(self, fp):
111 111 self.fp = fp
112 112 self.offset = 0
113 113
114 114 def __getattr__(self, key):
115 115 return getattr(self.fp, key)
116 116
117 117 def write(self, s):
118 118 self.fp.write(s)
119 119 self.offset += len(s)
120 120
121 121 def tell(self):
122 122 return self.offset
123 123
124 124 class zipit:
125 125 '''write archive to zip file or stream. can write uncompressed,
126 126 or compressed with deflate.'''
127 127
128 128 def __init__(self, dest, prefix, mtime, compress=True):
129 129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 130 if not isinstance(dest, str):
131 131 try:
132 132 dest.tell()
133 133 except (AttributeError, IOError):
134 134 dest = tellable(dest)
135 135 self.z = zipfile.ZipFile(dest, 'w',
136 136 compress and zipfile.ZIP_DEFLATED or
137 137 zipfile.ZIP_STORED)
138 138 self.date_time = time.gmtime(mtime)[:6]
139 139
140 140 def addfile(self, name, mode, islink, data):
141 141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 142 i.compress_type = self.z.compression
143 143 # unzip will not honor unix file modes unless file creator is
144 144 # set to unix (id 3).
145 145 i.create_system = 3
146 146 ftype = stat.S_IFREG
147 147 if islink:
148 148 mode = 0777
149 149 ftype = stat.S_IFLNK
150 150 i.external_attr = (mode | ftype) << 16L
151 151 self.z.writestr(i, data)
152 152
153 153 def done(self):
154 154 self.z.close()
155 155
156 156 class fileit:
157 157 '''write archive as files in directory.'''
158 158
159 159 def __init__(self, name, prefix, mtime):
160 160 if prefix:
161 161 raise util.Abort(_('cannot give prefix when archiving to files'))
162 162 self.basedir = name
163 163 self.opener = util.opener(self.basedir)
164 164
165 165 def addfile(self, name, mode, islink, data):
166 166 if islink:
167 167 self.opener.symlink(data, name)
168 168 return
169 169 f = self.opener(name, "w", atomictemp=True)
170 170 f.write(data)
171 171 f.rename()
172 172 destfile = os.path.join(self.basedir, name)
173 173 os.chmod(destfile, mode)
174 174
175 175 def done(self):
176 176 pass
177 177
178 178 archivers = {
179 179 'files': fileit,
180 180 'tar': tarit,
181 181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 184 'zip': zipit,
185 185 }
186 186
187 187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 188 prefix=None, mtime=None):
189 189 '''create archive of repo as it was at node.
190 190
191 191 dest can be name of directory, name of archive file, or file
192 192 object to write archive to.
193 193
194 194 kind is type of archive to create.
195 195
196 196 decode tells whether to put files through decode filters from
197 197 hgrc.
198 198
199 199 matchfn is function to filter names of files to write to archive.
200 200
201 201 prefix is name of path to put before every archive member.'''
202 202
203 203 def write(name, mode, islink, getdata):
204 204 if matchfn and not matchfn(name): return
205 205 data = getdata()
206 206 if decode:
207 207 data = repo.wwritedata(name, data)
208 208 archiver.addfile(name, mode, islink, data)
209 209
210 210 ctx = repo.changectx(node)
211 211 if kind not in archivers:
212 212 raise util.Abort(_("unknown archive type '%s'" % kind))
213 213 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
214 214 m = ctx.manifest()
215 215 items = m.items()
216 216 items.sort()
217 217 if repo.ui.configbool("ui", "archivemeta", True):
218 218 write('.hg_archival.txt', 0644, False,
219 219 lambda: 'repo: %s\nnode: %s\n' % (
220 220 hex(repo.changelog.node(0)), hex(node)))
221 221 for filename, filenode in items:
222 222 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
223 223 lambda: repo.file(filename).read(filenode))
224 224 archiver.done()
@@ -1,282 +1,282 b''
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 from node import *
13 from node import hex, nullid, short
14 14 from i18n import _
15 15 import changegroup, util, os, struct, bz2, tempfile, mdiff
16 16 import localrepo, changelog, manifest, filelog, revlog
17 17
18 18 class bundlerevlog(revlog.revlog):
19 19 def __init__(self, opener, indexfile, bundlefile,
20 20 linkmapper=None):
21 21 # How it works:
22 22 # to retrieve a revision, we need to know the offset of
23 23 # the revision in the bundlefile (an opened file).
24 24 #
25 25 # We store this offset in the index (start), to differentiate a
26 26 # rev in the bundle and from a rev in the revlog, we check
27 27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 28 # (it is bigger since we store the node to which the delta is)
29 29 #
30 30 revlog.revlog.__init__(self, opener, indexfile)
31 31 self.bundlefile = bundlefile
32 32 self.basemap = {}
33 33 def chunkpositer():
34 34 for chunk in changegroup.chunkiter(bundlefile):
35 35 pos = bundlefile.tell()
36 36 yield chunk, pos - len(chunk)
37 37 n = self.count()
38 38 prev = None
39 39 for chunk, start in chunkpositer():
40 40 size = len(chunk)
41 41 if size < 80:
42 42 raise util.Abort("invalid changegroup")
43 43 start += 80
44 44 size -= 80
45 45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 46 if node in self.nodemap:
47 47 prev = node
48 48 continue
49 49 for p in (p1, p2):
50 50 if not p in self.nodemap:
51 51 raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1))
52 52 if linkmapper is None:
53 53 link = n
54 54 else:
55 55 link = linkmapper(cs)
56 56
57 57 if not prev:
58 58 prev = p1
59 59 # start, size, full unc. size, base (unused), link, p1, p2, node
60 60 e = (revlog.offset_type(start, 0), size, -1, -1, link,
61 61 self.rev(p1), self.rev(p2), node)
62 62 self.basemap[n] = prev
63 63 self.index.insert(-1, e)
64 64 self.nodemap[node] = n
65 65 prev = node
66 66 n += 1
67 67
68 68 def bundle(self, rev):
69 69 """is rev from the bundle"""
70 70 if rev < 0:
71 71 return False
72 72 return rev in self.basemap
73 73 def bundlebase(self, rev): return self.basemap[rev]
74 74 def chunk(self, rev, df=None, cachelen=4096):
75 75 # Warning: in case of bundle, the diff is against bundlebase,
76 76 # not against rev - 1
77 77 # XXX: could use some caching
78 78 if not self.bundle(rev):
79 79 return revlog.revlog.chunk(self, rev, df)
80 80 self.bundlefile.seek(self.start(rev))
81 81 return self.bundlefile.read(self.length(rev))
82 82
83 83 def revdiff(self, rev1, rev2):
84 84 """return or calculate a delta between two revisions"""
85 85 if self.bundle(rev1) and self.bundle(rev2):
86 86 # hot path for bundle
87 87 revb = self.rev(self.bundlebase(rev2))
88 88 if revb == rev1:
89 89 return self.chunk(rev2)
90 90 elif not self.bundle(rev1) and not self.bundle(rev2):
91 91 return revlog.revlog.revdiff(self, rev1, rev2)
92 92
93 93 return mdiff.textdiff(self.revision(self.node(rev1)),
94 94 self.revision(self.node(rev2)))
95 95
96 96 def revision(self, node):
97 97 """return an uncompressed revision of a given"""
98 98 if node == nullid: return ""
99 99
100 100 text = None
101 101 chain = []
102 102 iter_node = node
103 103 rev = self.rev(iter_node)
104 104 # reconstruct the revision if it is from a changegroup
105 105 while self.bundle(rev):
106 106 if self._cache and self._cache[0] == iter_node:
107 107 text = self._cache[2]
108 108 break
109 109 chain.append(rev)
110 110 iter_node = self.bundlebase(rev)
111 111 rev = self.rev(iter_node)
112 112 if text is None:
113 113 text = revlog.revlog.revision(self, iter_node)
114 114
115 115 while chain:
116 116 delta = self.chunk(chain.pop())
117 117 text = mdiff.patches(text, [delta])
118 118
119 119 p1, p2 = self.parents(node)
120 120 if node != revlog.hash(text, p1, p2):
121 121 raise revlog.RevlogError(_("integrity check failed on %s:%d")
122 122 % (self.datafile, self.rev(node)))
123 123
124 124 self._cache = (node, self.rev(node), text)
125 125 return text
126 126
127 127 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
128 128 raise NotImplementedError
129 129 def addgroup(self, revs, linkmapper, transaction, unique=0):
130 130 raise NotImplementedError
131 131 def strip(self, rev, minlink):
132 132 raise NotImplementedError
133 133 def checksize(self):
134 134 raise NotImplementedError
135 135
136 136 class bundlechangelog(bundlerevlog, changelog.changelog):
137 137 def __init__(self, opener, bundlefile):
138 138 changelog.changelog.__init__(self, opener)
139 139 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
140 140
141 141 class bundlemanifest(bundlerevlog, manifest.manifest):
142 142 def __init__(self, opener, bundlefile, linkmapper):
143 143 manifest.manifest.__init__(self, opener)
144 144 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
145 145 linkmapper)
146 146
147 147 class bundlefilelog(bundlerevlog, filelog.filelog):
148 148 def __init__(self, opener, path, bundlefile, linkmapper):
149 149 filelog.filelog.__init__(self, opener, path)
150 150 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
151 151 linkmapper)
152 152
153 153 class bundlerepository(localrepo.localrepository):
154 154 def __init__(self, ui, path, bundlename):
155 155 localrepo.localrepository.__init__(self, ui, path)
156 156
157 157 if path:
158 158 self._url = 'bundle:' + path + '+' + bundlename
159 159 else:
160 160 self._url = 'bundle:' + bundlename
161 161
162 162 self.tempfile = None
163 163 self.bundlefile = open(bundlename, "rb")
164 164 header = self.bundlefile.read(6)
165 165 if not header.startswith("HG"):
166 166 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
167 167 elif not header.startswith("HG10"):
168 168 raise util.Abort(_("%s: unknown bundle version") % bundlename)
169 169 elif header == "HG10BZ":
170 170 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
171 171 suffix=".hg10un", dir=self.path)
172 172 self.tempfile = temp
173 173 fptemp = os.fdopen(fdtemp, 'wb')
174 174 def generator(f):
175 175 zd = bz2.BZ2Decompressor()
176 176 zd.decompress("BZ")
177 177 for chunk in f:
178 178 yield zd.decompress(chunk)
179 179 gen = generator(util.filechunkiter(self.bundlefile, 4096))
180 180
181 181 try:
182 182 fptemp.write("HG10UN")
183 183 for chunk in gen:
184 184 fptemp.write(chunk)
185 185 finally:
186 186 fptemp.close()
187 187 self.bundlefile.close()
188 188
189 189 self.bundlefile = open(self.tempfile, "rb")
190 190 # seek right after the header
191 191 self.bundlefile.seek(6)
192 192 elif header == "HG10UN":
193 193 # nothing to do
194 194 pass
195 195 else:
196 196 raise util.Abort(_("%s: unknown bundle compression type")
197 197 % bundlename)
198 198 # dict with the mapping 'filename' -> position in the bundle
199 199 self.bundlefilespos = {}
200 200
201 201 def __getattr__(self, name):
202 202 if name == 'changelog':
203 203 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
204 204 self.manstart = self.bundlefile.tell()
205 205 return self.changelog
206 206 if name == 'manifest':
207 207 self.bundlefile.seek(self.manstart)
208 208 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
209 209 self.changelog.rev)
210 210 self.filestart = self.bundlefile.tell()
211 211 return self.manifest
212 212 if name == 'manstart':
213 213 self.changelog
214 214 return self.manstart
215 215 if name == 'filestart':
216 216 self.manifest
217 217 return self.filestart
218 218 return localrepo.localrepository.__getattr__(self, name)
219 219
220 220 def url(self):
221 221 return self._url
222 222
223 223 def dev(self):
224 224 return -1
225 225
226 226 def file(self, f):
227 227 if not self.bundlefilespos:
228 228 self.bundlefile.seek(self.filestart)
229 229 while 1:
230 230 chunk = changegroup.getchunk(self.bundlefile)
231 231 if not chunk:
232 232 break
233 233 self.bundlefilespos[chunk] = self.bundlefile.tell()
234 234 for c in changegroup.chunkiter(self.bundlefile):
235 235 pass
236 236
237 237 if f[0] == '/':
238 238 f = f[1:]
239 239 if f in self.bundlefilespos:
240 240 self.bundlefile.seek(self.bundlefilespos[f])
241 241 return bundlefilelog(self.sopener, f, self.bundlefile,
242 242 self.changelog.rev)
243 243 else:
244 244 return filelog.filelog(self.sopener, f)
245 245
246 246 def close(self):
247 247 """Close assigned bundle file immediately."""
248 248 self.bundlefile.close()
249 249
250 250 def __del__(self):
251 251 bundlefile = getattr(self, 'bundlefile', None)
252 252 if bundlefile and not bundlefile.closed:
253 253 bundlefile.close()
254 254 tempfile = getattr(self, 'tempfile', None)
255 255 if tempfile is not None:
256 256 os.unlink(tempfile)
257 257
258 258 def instance(ui, path, create):
259 259 if create:
260 260 raise util.Abort(_('cannot create new bundle repository'))
261 261 parentpath = ui.config("bundle", "mainreporoot", "")
262 262 if parentpath:
263 263 # Try to make the full path relative so we get a nice, short URL.
264 264 # In particular, we don't want temp dir names in test outputs.
265 265 cwd = os.getcwd()
266 266 if parentpath == cwd:
267 267 parentpath = ''
268 268 else:
269 269 cwd = os.path.join(cwd,'')
270 270 if parentpath.startswith(cwd):
271 271 parentpath = parentpath[len(cwd):]
272 272 path = util.drop_scheme('file', path)
273 273 if path.startswith('bundle:'):
274 274 path = util.drop_scheme('bundle', path)
275 275 s = path.split("+", 1)
276 276 if len(s) == 1:
277 277 repopath, bundlename = parentpath, s[0]
278 278 else:
279 279 repopath, bundlename = s
280 280 else:
281 281 repopath, bundlename = parentpath, path
282 282 return bundlerepository(ui, repopath, bundlename)
@@ -1,192 +1,193 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from revlog import *
8 from node import bin, hex, nullid
9 from revlog import revlog
9 10 from i18n import _
10 11 import os, time, util
11 12
12 13 def _string_escape(text):
13 14 """
14 15 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 16 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 17 >>> s
17 18 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 19 >>> res = _string_escape(s)
19 20 >>> s == res.decode('string_escape')
20 21 True
21 22 """
22 23 # subset of the string_escape codec
23 24 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 25 return text.replace('\0', '\\0')
25 26
26 27 class appender:
27 28 '''the changelog index must be update last on disk, so we use this class
28 29 to delay writes to it'''
29 30 def __init__(self, fp, buf):
30 31 self.data = buf
31 32 self.fp = fp
32 33 self.offset = fp.tell()
33 34 self.size = util.fstat(fp).st_size
34 35
35 36 def end(self):
36 37 return self.size + len("".join(self.data))
37 38 def tell(self):
38 39 return self.offset
39 40 def flush(self):
40 41 pass
41 42 def close(self):
42 43 self.fp.close()
43 44
44 45 def seek(self, offset, whence=0):
45 46 '''virtual file offset spans real file and data'''
46 47 if whence == 0:
47 48 self.offset = offset
48 49 elif whence == 1:
49 50 self.offset += offset
50 51 elif whence == 2:
51 52 self.offset = self.end() + offset
52 53 if self.offset < self.size:
53 54 self.fp.seek(self.offset)
54 55
55 56 def read(self, count=-1):
56 57 '''only trick here is reads that span real file and data'''
57 58 ret = ""
58 59 if self.offset < self.size:
59 60 s = self.fp.read(count)
60 61 ret = s
61 62 self.offset += len(s)
62 63 if count > 0:
63 64 count -= len(s)
64 65 if count != 0:
65 66 doff = self.offset - self.size
66 67 self.data.insert(0, "".join(self.data))
67 68 del self.data[1:]
68 69 s = self.data[0][doff:doff+count]
69 70 self.offset += len(s)
70 71 ret += s
71 72 return ret
72 73
73 74 def write(self, s):
74 75 self.data.append(str(s))
75 76 self.offset += len(s)
76 77
77 78 class changelog(revlog):
78 79 def __init__(self, opener):
79 80 revlog.__init__(self, opener, "00changelog.i")
80 81
81 82 def delayupdate(self):
82 83 "delay visibility of index updates to other readers"
83 84 self._realopener = self.opener
84 85 self.opener = self._delayopener
85 86 self._delaycount = self.count()
86 87 self._delaybuf = []
87 88 self._delayname = None
88 89
89 90 def finalize(self, tr):
90 91 "finalize index updates"
91 92 self.opener = self._realopener
92 93 # move redirected index data back into place
93 94 if self._delayname:
94 95 util.rename(self._delayname + ".a", self._delayname)
95 96 elif self._delaybuf:
96 97 fp = self.opener(self.indexfile, 'a')
97 98 fp.write("".join(self._delaybuf))
98 99 fp.close()
99 100 del self._delaybuf
100 101 # split when we're done
101 102 self.checkinlinesize(tr)
102 103
103 104 def _delayopener(self, name, mode='r'):
104 105 fp = self._realopener(name, mode)
105 106 # only divert the index
106 107 if not name == self.indexfile:
107 108 return fp
108 109 # if we're doing an initial clone, divert to another file
109 110 if self._delaycount == 0:
110 111 self._delayname = fp.name
111 112 return self._realopener(name + ".a", mode)
112 113 # otherwise, divert to memory
113 114 return appender(fp, self._delaybuf)
114 115
115 116 def checkinlinesize(self, tr, fp=None):
116 117 if self.opener == self._delayopener:
117 118 return
118 119 return revlog.checkinlinesize(self, tr, fp)
119 120
120 121 def decode_extra(self, text):
121 122 extra = {}
122 123 for l in text.split('\0'):
123 124 if l:
124 125 k, v = l.decode('string_escape').split(':', 1)
125 126 extra[k] = v
126 127 return extra
127 128
128 129 def encode_extra(self, d):
129 130 # keys must be sorted to produce a deterministic changelog entry
130 131 keys = d.keys()
131 132 keys.sort()
132 133 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
133 134 return "\0".join(items)
134 135
135 136 def read(self, node):
136 137 """
137 138 format used:
138 139 nodeid\n : manifest node in ascii
139 140 user\n : user, no \n or \r allowed
140 141 time tz extra\n : date (time is int or float, timezone is int)
141 142 : extra is metadatas, encoded and separated by '\0'
142 143 : older versions ignore it
143 144 files\n\n : files modified by the cset, no \n or \r allowed
144 145 (.*) : comment (free text, ideally utf-8)
145 146
146 147 changelog v0 doesn't use extra
147 148 """
148 149 text = self.revision(node)
149 150 if not text:
150 151 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
151 152 last = text.index("\n\n")
152 153 desc = util.tolocal(text[last + 2:])
153 154 l = text[:last].split('\n')
154 155 manifest = bin(l[0])
155 156 user = util.tolocal(l[1])
156 157
157 158 extra_data = l[2].split(' ', 2)
158 159 if len(extra_data) != 3:
159 160 time = float(extra_data.pop(0))
160 161 try:
161 162 # various tools did silly things with the time zone field.
162 163 timezone = int(extra_data[0])
163 164 except:
164 165 timezone = 0
165 166 extra = {}
166 167 else:
167 168 time, timezone, extra = extra_data
168 169 time, timezone = float(time), int(timezone)
169 170 extra = self.decode_extra(extra)
170 171 if not extra.get('branch'):
171 172 extra['branch'] = 'default'
172 173 files = l[3:]
173 174 return (manifest, user, (time, timezone), files, desc, extra)
174 175
175 176 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
176 177 user=None, date=None, extra={}):
177 178
178 179 user, desc = util.fromlocal(user), util.fromlocal(desc)
179 180
180 181 if date:
181 182 parseddate = "%d %d" % util.parsedate(date)
182 183 else:
183 184 parseddate = "%d %d" % util.makedate()
184 185 if extra and extra.get("branch") in ("default", ""):
185 186 del extra["branch"]
186 187 if extra:
187 188 extra = self.encode_extra(extra)
188 189 parseddate = "%s %s" % (parseddate, extra)
189 190 list.sort()
190 191 l = [hex(manifest), user, parseddate] + list + ["", desc]
191 192 text = "\n".join(l)
192 193 return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,1176 +1,1176 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12
13 13 revrangesep = ':'
14 14
class UnknownCommand(Exception):
    """Raised when a command name matches nothing in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command abbreviation matches more than one command."""
19 19
def findpossible(ui, cmd, table):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    strict = ui.config("ui", "strict")
    normal = {}
    debug = {}
    for entry in table.keys():
        # table keys look like "^name|alias1|alias2"; "^" flags priority
        aliases = entry.lstrip("^").split("|")
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            # allow unambiguous prefixes unless [ui] strict is set
            for alias in aliases:
                if alias.startswith(cmd):
                    match = alias
                    break
        if match is None:
            continue
        if aliases[0].startswith("debug") or match.startswith("debug"):
            debug[match] = (aliases, table[entry])
        else:
            normal[match] = (aliases, table[entry])

    # debug commands are only offered when nothing normal matched
    if not normal and debug:
        return debug
    return normal
48 48
def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string."""
    matches = findpossible(ui, cmd, table)

    # an exact hit always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        names = matches.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)

    if matches:
        return matches.values()[0]

    raise UnknownCommand(cmd)
65 65
def bail_if_changed(repo):
    """Abort if the working directory is not clean.

    Raises util.Abort when an uncommitted merge is in progress or when
    the working directory has modified/added/removed/deleted files.
    """
    # a non-null second parent means an uncommitted merge is in progress
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))
72 72
def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    # -m and -l cannot both be given
    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            # '-' means read the message from stdin
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
91 91
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # absent or empty limit means "unlimited"
        limit = sys.maxint
    return limit
104 104
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # propagate each remote-related command-line option into [ui]
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key)
        if value:
            ui.setconfig("ui", key, value)
111 111
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # empty spec (but not rev 0) falls back to the given default
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # nothing given: first dirstate parent vs. working directory
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            # "a:b" range; empty ends default to rev 0 and tip
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        # two explicit revisions; mixing in range syntax is rejected
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
139 139
def revrange(repo, revs):
    """Return a list of revision numbers from a list of revision
    specifications, in order, with duplicates removed."""

    def revfix(repo, val, defval):
        # empty spec (but not rev 0) falls back to the given default
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    # seen maps already-emitted revs so duplicates are dropped
    seen, l = {}, []
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            # ranges may be reversed; walk in the requested direction
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                l.append(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            l.append(rev)

    return l
168 168
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output filename pattern.

    Supported escapes: %% literal, %H/%h full/short node hex, %R/%r
    rev number (padded to revwidth for %r), %b repo basename, %N total,
    %n zero-padded sequence number, %s/%d/%p basename/dirname/full
    pathname.  Escapes are only registered when the matching argument
    was supplied; an unknown escape raises util.Abort.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        # NOTE(review): duplicated "if node:" guard -- could be merged
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        # scan the pattern, expanding "%x" pairs as we go
        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
213 213
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open (or pass through) the output/input file described by pat.

    An empty pattern or '-' selects stdout/stdin depending on mode; a
    file-like object is returned as-is; otherwise the pattern is
    expanded via make_filename() and opened with the given mode.
    """
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    # callers may hand in an already-open file-like object
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)
225 225
def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    """Build a (files, matchfn, anypats) matcher for the given patterns.

    NOTE(review): mutable defaults are safe here since neither is mutated.
    """
    cwd = repo.getcwd()
    return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
                           opts.get('exclude'), globbed=globbed,
                           default=default)
231 231
def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    """Yield (src, abspath, relative-path, exact) for matching files.

    'exact' is true for names returned directly by matchpats (i.e.
    given explicitly rather than matched by a pattern).
    """
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    exact = dict.fromkeys(files)
    cwd = repo.getcwd()
    for src, fn in repo.walk(node=node, files=files, match=matchfn,
                             badmatch=badmatch):
        yield src, fn, repo.pathto(fn, cwd), fn in exact
241 241
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        # track the best-scoring removed file above the threshold
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            # similarity = 2 * matching bytes / total bytes of both files
            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
269 269
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Add new files and remove missing ones, optionally detecting renames.

    Files matching pats that are unknown are scheduled for add; tracked
    files missing from disk are scheduled for removal.  With a positive
    similarity, findrenames() is used to record add/remove pairs as
    copies.  dry_run suppresses all repository changes.
    """
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    # mapping remembers (rel, exact) per file for later status messages
    mapping = {}
    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        if src == 'f' and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        # remove tracked files that are gone (or replaced by a directory)
        if repo.dirstate[abs] != 'r' and (not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
303 303
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, when rename=True, move) files matching pats.

    The last element of pats is the destination.  Returns the number of
    failed copies (nonzero is treated as an error by callers).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        # collect copyable sources for one pattern, warning about
        # untracked or to-be-removed files named explicitly
        srcs = []
        for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
            state = repo.dirstate[abs]
            if state in '?r':
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy one file and fix up dirstate; returns True on failure
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records copies of files that already exist
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            action = rename and "moving" or "copying"
            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a':
                # uncommitted source: copy data cannot be recorded
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], True)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors
503 503
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.

    With --daemon, re-spawns itself detached and waits on a pipe for the
    child's "y" handshake before returning (via parentfn) or exiting.
    The child (marked by --daemon-pipefds) detaches from the terminal,
    writes the handshake byte, and redirects std streams to the null
    device before running runfn.
    '''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side: spawn the detached child with the pipe fds attached
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child signals readiness
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side: detach, signal the parent, silence std streams
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # setsid is unavailable on this platform
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        # patch is a match function (or false) selecting files to diff
        self.patch = patch
        # per-rev buffered output, flushed in order by flush()
        self.header = {}
        self.hunk = {}
        self.lastheader = None

    def flush(self, rev):
        """Write buffered output for rev; return 1 if a hunk was written."""
        if rev in self.header:
            h = self.header[rev]
            # only emit a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        """Display one changeset, buffering output if requested."""
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # fill in whichever of rev/changenode was omitted
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # full modified/added/removed breakdown in debug mode
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        """Write the diff against the first parent if patch output is on."""
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
682 682
683 683
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templatefilters.filters.copy()
        # full node hex in debug mode, 12-char short form otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # fill in whichever of rev/changenode was omitted
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template args: dict, pair list, or scalar
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch is implicit and not shown
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        # lazily computed status triple, cached in the closed-over list
        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        # default template keywords; caller-supplied props take lower priority
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template available
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # pick the most specific changeset template available
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
871 871
def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        patch = matchfn or util.always

    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            # bare style names resolve to bundled map-cmdline.* files
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = changeset_templater(ui, repo, patch, mapfile, buffered)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo, patch, buffered)
915 915
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # record revs whose commit time matches the date spec
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter':
            # 'iter' visits revs in display order; first hit is tipmost
            if rev in results:
                ui.status("Found revision %s from %s\n" %
                          (rev, util.datestr(results[rev])))
                return str(rev)

    raise util.Abort(_("revision matching date not found"))
934 934
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    # Yield (start, length) windows covering start..end, doubling the
    # window size each step (capped at sizelimit) so long walks need
    # few windows while short walks stay cheap.  Handles both ascending
    # (start < end) and descending (start > end) walks.
    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    # empty repository: nothing to walk
    if repo.changelog.count() == 0:
        return [], matchfn

    if follow:
        # --follow starts from the working dir's parent, not tip
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = '-1:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    # wanted: revs to report; fncache: rev -> file names changed there
    wanted = {}
    slowpath = anypats or opts.get('removed')
    fncache = {}

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # explicit files first, then any rename sources discovered
            # along the way (copies grows while we iterate)
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1,
                                                nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    # Incrementally decides whether a rev is an ancestor/descendant of
    # the first rev it was asked about (used for --follow and --prune).
    # Relies on being queried with monotonically changing revs.
    class followfilter:
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            # first call establishes the reference revision
            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    # Produce the window/add/iter event stream documented above.
    def iterate():
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev)
                if not fns:
                    # slow/all-revs case: lazily match this rev's files
                    def fns_generator():
                        for f in change(rev)[3]:
                            if matchfn(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), matchfn
1128 1128
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes

    Validates the commit arguments (date, message, explicit file list)
    and then delegates the actual commit to commitfunc(ui, repo, files,
    message, match, opts).  Returns whatever commitfunc returns.
    Raises util.Abort on invalid input.'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        # explicit file list: commit only changed files among the named
        # ones, but sanity-check every name the user gave us first
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    # a directory is acceptable only if some changed file
                    # lives under it; binary-search the sorted file list
                    # for an entry with the "f/" prefix
                    name = f + '/'
                    if slist is None:
                        # built lazily: most commits never name a directory
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
    else:
        # no explicit files: empty list means "commit everything changed"
        files = []
    try:
        return commitfunc(ui, repo, files, message, match, opts)
    except ValueError, inst:
        # e.g. an invalid username from the commit backend
        raise util.Abort(str(inst))
@@ -1,3179 +1,3179 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 13 import errno, version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15
16 16 # Commands start here, listed alphabetically
17 17
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    rejected = None
    exact_names = {}
    to_add = []
    walk = cmdutil.walk(repo, pats, opts, badmatch=util.always)
    for src, abs, rel, exact in walk:
        if exact:
            # exactly-named files are always scheduled; be quiet about it
            # unless the user asked for verbosity
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
            exact_names[abs] = 1
        elif abs not in repo.dirstate:
            ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
    if not opts.get('dry_run'):
        # only failures on exactly-named files affect the exit code
        rejected = [p for p in repo.add(to_add) if p in exact_names]
    return rejected and 1 or 0
45 45 return rejected and 1 or 0
46 46
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # a missing/empty --similarity means 0, i.e. no rename detection
    rawsim = opts.get('similarity') or 0
    try:
        similarity = float(rawsim)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects a ratio in [0, 1]
    return cmdutil.addremove(repo, pats, opts, similarity=similarity/100.)
68 68
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # in quiet mode dates are shortened to YYYY-MM-DD
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) pairs; each formatter maps an annotated
    # line's (filectx, lineno) pair to the column text for that option
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # no columns requested: default to showing the revision number
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":<lineno>" to the last column; bind lastfunc now so the
        # lambda doesn't recurse into itself
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        # skip binary files unless -a was given
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # build one right-aligned column per requested formatter
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        # zip the columns back together with the file's line content
        # (l[1] already ends with a newline)
        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
129 129
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand %-escapes (e.g. %h) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    _files, matchfn, _anypats = cmdutil.matchpats(repo, [], opts)
    archivetype = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # streaming to stdout only works for single-file archive formats
        if archivetype == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, archivetype, not opts['no_decode'],
                     matchfn, prefix)
172 172
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See 'hg help dates' for a list of formats valid for -d/--date.
    '''
    # the revision may be given either positionally or via --rev,
    # but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run with uncommitted working-dir changes
    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # the target must be an ancestor of the working dir's first parent
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must pick which parent to
        # revert towards
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # check out the target rev, revert everything to its chosen parent,
    # then commit the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        # "rev:shorthash" for status messages
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # return to the pre-backout working dir parent and optionally
        # merge the new backout head into it
        hg.clean(repo, op1, show_stats=False)
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
256 256
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing. Once you have performed tests, mark the
    working directory as bad or good and bisect will either update to
    another candidate changeset or announce that it has found the bad
    revision.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset > 1:
        # NOTE(review): good/bad/skip/reset must be numeric/boolean here;
        # if any of them is still None (the signature default) the sum
        # raises TypeError before this check can Abort -- presumably the
        # command dispatcher always supplies concrete flag values; verify.
        raise util.Abort("Incompatible arguments")

    if reset:
        # --reset: drop all recorded state and stop
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    # load state: one "<kind> <node>" line per recorded revision
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # update state: record the given (or working dir parent) revision
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # save state: rewrite the whole file atomically under the wlock
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hg.hex(node)))
        f.rename()
    finally:
        del wlock

    # need at least one good and one bad rev before bisection can start
    if not state['good'] or not state['bad']:
        return

    # actually bisect
    node, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        # search has converged on a single revision
        ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
        displayer = cmdutil.show_changeset(ui, repo, {})
        displayer.show(changenode=node)
    elif node is not None:
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, hg.short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
340 340
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """

    if not label:
        # no argument: just report the current branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    if not opts.get('force') and label in repo.branchtags():
        # reusing an existing name is only allowed when it names a
        # branch of one of the working directory's parents
        parentbranches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
363 363
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    branchtags = repo.branchtags()
    headnodes = dict.fromkeys(repo.heads(), 1)
    # sort key: (is-a-head, revision number) descending, so active
    # branches come first, newest first
    entries = [((node in headnodes), repo.changelog.rev(node), node, tag)
               for tag, node in branchtags.items()]
    entries.sort()
    entries.reverse()
    for ishead, rev, node, tag in entries:
        if active and not ishead:
            # If we're only displaying active branches, abort the loop on
            # encountering the first inactive head
            break
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            padding = " " * (30 - util.locallen(tag))
            # The code only gets here if inactive branches are being
            # displayed or the branch is active.
            inactive = ((not ishead) and " (inactive)") or ''
            ui.write("%s%s %s:%s%s\n" % (tag, padding, rev, hexfunc(node),
                                         inactive))
394 394
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        # --all is shorthand for --base null (everything is outgoing)
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # fix: error message previously misspelled "specifying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk from the requested heads back towards the base set,
        # collecting (in topological order) every node not reachable
        # from a base node
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                # all parents are in the base: n is a root of the bundle
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no base given: compare against the (possibly default) remote
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
459 459
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    # exit status 1 unless at least one file matched
    exitcode = 1
    allpats = (file1,) + pats
    for src, abs, rel, exact in cmdutil.walk(repo, allpats, opts,
                                             ctx.node()):
        out = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        data = ctx.filectx(abs).data()
        if opts.get('decode'):
            # apply encode/decode filters as for a working-dir write
            data = repo.wwritedata(abs, data)
        out.write(data)
        exitcode = 0
    return exitcode
486 486
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    # --noupdate suppresses the checkout of the new working directory
    doupdate = not opts['noupdate']
    hg.clone(ui, source, dest, pull=opts['pull'],
             stream=opts['uncompressed'], rev=opts['rev'],
             update=doupdate)
532 532
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # callback invoked by cmdutil.commit once arguments are validated;
    # performs the actual repository commit
    def docommit(ui, repo, files, message, match, opts):
        return repo.commit(files, message, opts['user'], opts['date'], match,
                           force_editor=opts.get('force_editor'))
    cmdutil.commit(ui, repo, docommit, pats, opts)
550 550
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        # dropping the last reference releases the working dir lock
        del lock
570 570
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # explicit index file: open the revlog directly, no repo required
        index, rev1, rev2 = args
        log = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    elif len(args) == 2:
        # two args: use the current repository's changelog
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        log = repo.changelog
    else:
        raise util.Abort(_('either two or three arguments required'))
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
586 586
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # --options: list option switches instead of command names
        out = []
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd, table)
            opttables.append(entry[1])
        for tbl in opttables:
            for opt in tbl:
                if opt[0]:
                    out.append('-%s' % opt[0])
                out.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(out))
        return

    clist = cmdutil.findpossible(ui, cmd, table).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))
607 607
def debugfsinfo(ui, path = "."):
    """show capability information about path's filesystem (debug aid)"""
    # probe file used by the case-sensitivity check below
    file('.debugfsinfo', 'w').write('')
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
                                           and 'yes' or 'no'))
    finally:
        # fix: always remove the probe file, even if a capability
        # check raises -- previously it was leaked on error
        os.unlink('.debugfsinfo')
615 615
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # an empty revision argument means the current tip
    if rev == "":
        rev = repo.changelog.tip()
    manifest = repo.changectx(rev).manifest()
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, manifest)
    finally:
        # releasing the reference drops the working-directory lock
        del wlock
627 627
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    man1 = repo.changectx(p1).manifest()
    man2 = repo.changectx(p2).manifest()
    errors = 0
    # forward check: every dirstate entry must agree with the parent manifests
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in "nr" and fname not in man1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (fname, st))
            errors += 1
        if st in "a" and fname in man1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (fname, st))
            errors += 1
        if st in "m" and fname not in man1 and fname not in man2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, st))
            errors += 1
    # reverse check: every first-parent manifest entry must be tracked
    for fname in man1:
        st = repo.dirstate[fname]
        if st not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (fname, st))
            errors += 1
    if errors:
        raise util.Abort(
            _(".hg/dirstate inconsistent with current parent's manifest"))
654 654
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name item may be requested
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # a bare section name selects every item in that section
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # an exact section.name match prints the value alone
                ui.write(value, '\n')
680 680
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # an omitted second parent defaults to the null revision
    p2 = rev2
    if not p2:
        p2 = hex(nullid)

    wlock = repo.wlock()
    try:
        node1 = repo.lookup(rev1)
        node2 = repo.lookup(p2)
        repo.dirstate.setparents(node1, node2)
    finally:
        # releasing the reference drops the working-directory lock
        del wlock
696 696
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    # sort entries by filename for stable, readable output
    k = repo.dirstate._map.items()
    k.sort()
    for file_, ent in k:
        # ent appears to be (state, mode, size, mtime) — ent[3] is fed to
        # time.localtime and ent[1] is masked as a file mode below
        if ent[3] == -1:
            # mtime of -1 means "unset"; pad/slice 'unset' to the width a
            # formatted timestamp would occupy so columns stay aligned
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
        if ent[1] & 020000:
            # symlink bit set in the recorded mode (py2 octal literal)
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
    # finally list recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
716 716
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name to its ".i" index companion
    indexname = file_[:-2] + ".i"
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), indexname)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
724 724
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # with --extended, also accept the extended date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        # parsed[0] is the timestamp component of the parsed date
        ui.write("match: %s\n" % matcher(parsed[0]))
736 736
def debugindex(ui, file_):
    """dump the contents of an index file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        try:
            parents = rlog.parents(node)
        except:
            # a damaged entry still gets a row, shown with null parents
            parents = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, rlog.start(rev), rlog.length(rev), rlog.base(rev),
                rlog.linkrev(node), short(node), short(parents[0]),
                short(parents[1])))
751 751
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        parents = rlog.parents(node)
        # one edge per parent; the second parent is omitted when null
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
763 763
def debuginstall(ui):
    '''test Mercurial installation'''

    # write contents to a fresh temp file and return its path; the caller
    # is responsible for unlinking it
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # count of detected problems; also the return value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        # importing probes that the C extension modules were built
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        # instantiating the default template map verifies the install layout
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    # round-trip test: diff two known texts, apply the patch, and check
    # that the patched file matches the expected result
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            # a user-configured external patch tool was in use
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    # the editor setting may include arguments, so also try its first word
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    # precedence: HGUSER, then ui.username config, then EMAIL
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        # ui.username() reports its own diagnostic when no username is found
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
875 875
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    allpats = (file1,) + pats
    for src, abspath, relpath, exact in cmdutil.walk(repo, allpats, opts,
                                                     ctx.node()):
        fctx = ctx.filectx(abspath)
        renamed = fctx.filelog().renamed(fctx.filenode())
        if renamed:
            # renamed is (source path, source file node)
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
888 888
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # column widths sized to the longest absolute and relative paths
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        row = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % row.rstrip())
900 900
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the -r arguments into the two nodes being compared
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    # translate file patterns into a concrete file list and match function
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
928 928
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note based on how many revisions matched
    msg = (len(revs) > 1 and _('exporting patches:\n')
           or _('exporting patch:\n'))
    ui.note(msg)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
969 969
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # with --print0, both the field separator and line terminator are NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache filelog objects so each file is opened at most once
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    # yield (linenum, colstart, colend, line text) for each regexp match
    # in body; linenum is 1-based
    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    # one match within a line; equality compares line text only, so
    # difflinestates() can diff lists of matches by content
    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> filename -> [linestate]; copies: rev -> fn -> source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    # yield ('+'/'-', linestate) pairs describing how the match lists a
    # and b differ, using SequenceMatcher opcodes ('equal' is skipped)
    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: filename -> last revision in which it was displayed
    prev = {}
    def display(fn, rev, states, prevstates):
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            # build the output columns requested by the options
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts['files_with_matches']:
                # print each (file, rev) pair only once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    # memoized changeset accessor: rev -> changeset tuple
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # st is the event tag emitted by walkchangerevs: 'window' resets
    # per-window state, 'add' collects matches, 'iter' emits output
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo.changectx(rev)
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        # remember rename sources so matches follow copies
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except revlog.LookupError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first matching rev
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any remaining per-file state that was never displayed
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1136 1136
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    start = None
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        nodes = repo.heads(start)
    else:
        nodes = []
        seen = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in seen:
                # each branch is reported at most once
                continue
            seen.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # mention the revision only when it named the branch indirectly
                if branch == branchrev:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
            nodes.extend(bheads)
    if not nodes:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node)
1184 1184
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs collected by the helpers and printed at the end
    option_lists = []

    # queue a trailer describing how to get more help / global options
    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    # print help for a single command; raises UnknownCommand if not found
    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name, table)
        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # in quiet mode only the first line of the docstring is shown
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    # print a summary list of commands, optionally filtered by select()
    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            # table keys look like "name|alias..."; '^' marks basic commands
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    # print help for a named topic; raises UnknownCommand if not found
    def helptopic(name):
        v = None
        for i in help.helptable:
            # topic keys look like "name1|name2|...|header"
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topics may be callables producing their text lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    # print help for an extension and the commands it provides;
    # raises UnknownCommand if not found
    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three fail
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1365 1365
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # full 40-char hashes with --debug, short 12-char hashes otherwise
    hexfunc = ui.debugflag and hex or short
    # with no selector flags, print the default summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote lookup: only the changeset id can be queried
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # no revision given: describe the working directory state
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            # any modified or deleted file marks the state with a "+"
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # explicit local revision
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # default summary extras: non-default branch name and tags
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1435 1435
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run on a dirty working directory unless -f was given
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            # the patch source may be stdin, a local file, or a URL
            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            # extract() writes the diff to a temp file and parses metadata
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message precedence: -m/-l, then the patch itself,
                # then None (which launches the editor)
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    # update the working dir to the patch's recorded parent
                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # best effort: set the second parent for merge patches
                    # when the first parent matches the working directory
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record added/removed files even if patching failed
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # --exact: the resulting node must match the patch
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1551 1551
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # remove a stale bundle file possibly left over from an earlier
        # run.  Only ignore filesystem errors (file missing, empty name);
        # the previous bare "except:" also hid KeyboardInterrupt and
        # programming errors.
        try:
            os.unlink(opts["bundle"])
        except OSError:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            # honor --limit
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        # close the (possibly bundle-backed) other repo and delete the
        # temporary bundle file, if one was created and not kept
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1617 1617
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply remote-access options (e.g. --ssh/--remotecmd) to the ui
    # before touching dest, which may be a remote URL
    cmdutil.setremoteconfig(ui, opts)
    # create=1 makes hg.repository initialize a fresh repo at dest
    hg.repository(ui, dest, create=1)
1632 1632
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 terminates each name with NUL instead of newline
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    # exit status: 0 once at least one file has been printed
    ret = 1
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always,
                          default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            continue
        # without a revision, restrict to files tracked in the dirstate
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        else:
            name = (pats and rel) or abs
        ui.write(name, end)
        ret = 0

    return ret
1673 1673
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized access to a revision's changeset data (tuple from
    # changectx.changeset()); reused by several filters below
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # last revision we need rename data for (only with --copies)
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}  # fn -> {linkrev: rename info}
    ncache = {}  # fn -> {filelog node: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for fn: scan its filelog once, up to endrev
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    # --date filter: df is False (disabled) or a match predicate
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # changeiter yields ('add', rev, fns) when a revision enters the
    # window and ('iter', rev, ...) when it is ready to be emitted
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            # --no-merges / --only-merges filters (merges have 2 parents)
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            # --only-branch filter
            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            # --date filter
            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            # -k/--keyword: case-insensitive match against user,
            # description and file list; all keywords must match
            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            # --copies: collect (file, rename source) pairs for display
            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # honor --limit; flush() reports whether output was produced
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1796 1796
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    # node (positional) and --rev are alternatives, not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # changectx(None) resolves to the working directory's parent
    m = repo.changectx(node).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            # --debug: 40-char hex file revision hash
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            # -v: "*" = executable, "@" = symlink, " " = regular file
            type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
            perm = m.execf(f) and "755" or "644"
            ui.write("%3s %1s " % (perm, type))
        ui.write("%s\n" % f)
1829 1829
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    # node (positional) and --rev are alternatives, not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no explicit revision: pick the other head automatically, but
        # only when the choice is unambiguous
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        parent = repo.dirstate.parents()[0]
        if len(heads) == 1:
            # a single head means there is nothing to merge with
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.workingctx().branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # exactly two heads: merge with whichever is not the current parent
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)
1867 1867
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    # 'default-push' path takes precedence over 'default' as destination
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    # restrict the outgoing set to ancestors of the requested revs
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        # honor --limit
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        count += 1
        displayer.show(changenode=n)
1903 1903
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    if file_:
        # with a file argument, report the revision(s) that last
        # touched that one file before ctx
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = files[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # file does not exist in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        # map filelog nodes back to the changesets that introduced them
        p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # skip the null parent (e.g. a root changeset)
        if n != nullid:
            displayer.show(changenode=n)
1943 1943
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    pathitems = ui.configitems("paths")

    if not search:
        # no name given: list every configured path
        for name, path in pathitems:
            ui.write("%s = %s\n" % (name, path))
        return

    # look for the requested name; print its definition if found
    for name, path in pathitems:
        if name != search:
            continue
        ui.write("%s\n" % path)
        return
    ui.warn(_("not found!\n"))
    return 1
1963 1963
def postincoming(ui, repo, modheads, optupdate, checkout):
    """After changesets arrived: update the working dir if requested
    and safe, otherwise tell the user what to run next.

    modheads is the number of heads added; optupdate is the --update
    flag; checkout is the revision to update to, if any.
    """
    if modheads == 0:
        # nothing came in; stay quiet
        return
    if optupdate:
        if modheads <= 1 or checkout:
            # unambiguous target: do the update now
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    # suggest the appropriate follow-up command
    if modheads > 1:
        msg = _("(run 'hg heads' to see heads, 'hg merge' to merge)\n")
    else:
        msg = _("(run 'hg update' to get a working copy)\n")
    ui.status(msg)
1976 1976
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except repo.NoCapability:
            # the remote cannot resolve names to nodes, so --rev is unusable
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)

    # modheads = number of heads added by the pull; used below to decide
    # whether to update or just advise the user
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2033 2033
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # 'default-push' path takes precedence over 'default' as destination
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    # r is the remote's result code; invert it into a shell exit status
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2074 2074
2075 2075 def rawcommit(ui, repo, *pats, **opts):
2076 2076 """raw commit interface (DEPRECATED)
2077 2077
2078 2078 (DEPRECATED)
2079 2079 Lowlevel commit, for use in helper scripts.
2080 2080
2081 2081 This command is not intended to be used by normal users, as it is
2082 2082 primarily useful for importing from other SCMs.
2083 2083
2084 2084 This command is now deprecated and will be removed in a future
2085 2085 release, please use debugsetparents and commit instead.
2086 2086 """
2087 2087
2088 2088 ui.warn(_("(the rawcommit command is deprecated)\n"))
2089 2089
2090 2090 message = cmdutil.logmessage(opts)
2091 2091
2092 2092 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2093 2093 if opts['files']:
2094 2094 files += open(opts['files']).read().splitlines()
2095 2095
2096 2096 parents = [repo.lookup(p) for p in opts['parent']]
2097 2097
2098 2098 try:
2099 2099 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2100 2100 except ValueError, inst:
2101 2101 raise util.Abort(str(inst))
2102 2102
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # nothing to recover: report failure to the shell
    if not repo.recover():
        return 1
    # recovery succeeded; verify the repository and return its status
    return hg.verify(repo)
2114 2114
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    # turn the first five status lists (modified, added, removed,
    # deleted, unknown) into dicts for O(1) membership tests below
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # reason, if set, explains why the file is NOT being removed
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file just undoes the add
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
            exact = 1 # force the message
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without an exact name only records files that are
            # already gone from the working directory
            continue
        elif abs in removed:
            continue
        if reason:
            # only warn when the user named the file explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # unlink from the working dir unless --after was given without --force
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2165 2165
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # rename is copy + mark-source-removed; hold the working dir lock
    # for the duration of the dirstate updates
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # dropping the last reference releases the lock
        del wlock
2185 2185
2186 2186 def revert(ui, repo, *pats, **opts):
2187 2187 """restore individual files or dirs to an earlier state
2188 2188
2189 2189 (use update -r to check out earlier revisions, revert does not
2190 2190 change the working dir parents)
2191 2191
2192 2192 With no revision specified, revert the named files or directories
2193 2193 to the contents they had in the parent of the working directory.
2194 2194 This restores the contents of the affected files to an unmodified
2195 2195 state and unschedules adds, removes, copies, and renames. If the
2196 2196 working directory has two parents, you must explicitly specify the
2197 2197 revision to revert to.
2198 2198
2199 2199 Using the -r option, revert the given files or directories to their
2200 2200 contents as of a specific revision. This can be helpful to "roll
2201 2201 back" some or all of an earlier change.
2202 2202 See 'hg help dates' for a list of formats valid for -d/--date.
2203 2203
2204 2204 Revert modifies the working directory. It does not commit any
2205 2205 changes, or change the parent of the working directory. If you
2206 2206 revert to a revision other than the parent of the working
2207 2207 directory, the reverted files will thus appear modified
2208 2208 afterwards.
2209 2209
2210 2210 If a file has been deleted, it is restored. If the executable
2211 2211 mode of a file was changed, it is reset.
2212 2212
2213 2213 If names are given, all files matching the names are reverted.
2214 2214 If no arguments are given, no files are reverted.
2215 2215
2216 2216 Modified files are saved with a .orig suffix before reverting.
2217 2217 To disable these backups, use --no-backup.
2218 2218 """
2219 2219
2220 2220 if opts["date"]:
2221 2221 if opts["rev"]:
2222 2222 raise util.Abort(_("you can't specify a revision and a date"))
2223 2223 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2224 2224
2225 2225 if not pats and not opts['all']:
2226 2226 raise util.Abort(_('no files or directories specified; '
2227 2227 'use --all to revert the whole repo'))
2228 2228
2229 2229 parent, p2 = repo.dirstate.parents()
2230 2230 if not opts['rev'] and p2 != nullid:
2231 2231 raise util.Abort(_('uncommitted merge - please provide a '
2232 2232 'specific revision'))
2233 2233 ctx = repo.changectx(opts['rev'])
2234 2234 node = ctx.node()
2235 2235 mf = ctx.manifest()
2236 2236 if node == parent:
2237 2237 pmf = mf
2238 2238 else:
2239 2239 pmf = None
2240 2240
2241 2241 # need all matching names in dirstate and manifest of target rev,
2242 2242 # so have to walk both. do not print errors if files exist in one
2243 2243 # but not other.
2244 2244
2245 2245 names = {}
2246 2246
2247 2247 wlock = repo.wlock()
2248 2248 try:
2249 2249 # walk dirstate.
2250 2250 files = []
2251 2251 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2252 2252 badmatch=mf.has_key):
2253 2253 names[abs] = (rel, exact)
2254 2254 if src != 'b':
2255 2255 files.append(abs)
2256 2256
2257 2257 # walk target manifest.
2258 2258
2259 2259 def badmatch(path):
2260 2260 if path in names:
2261 2261 return True
2262 2262 path_ = path + '/'
2263 2263 for f in names:
2264 2264 if f.startswith(path_):
2265 2265 return True
2266 2266 return False
2267 2267
2268 2268 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2269 2269 badmatch=badmatch):
2270 2270 if abs in names or src == 'b':
2271 2271 continue
2272 2272 names[abs] = (rel, exact)
2273 2273
2274 2274 changes = repo.status(files=files, match=names.has_key)[:4]
2275 2275 modified, added, removed, deleted = map(dict.fromkeys, changes)
2276 2276
2277 2277 # if f is a rename, also revert the source
2278 2278 cwd = repo.getcwd()
2279 2279 for f in added:
2280 2280 src = repo.dirstate.copied(f)
2281 2281 if src and src not in names and repo.dirstate[src] == 'r':
2282 2282 removed[src] = None
2283 2283 names[src] = (repo.pathto(src, cwd), True)
2284 2284
2285 2285 def removeforget(abs):
2286 2286 if repo.dirstate[abs] == 'a':
2287 2287 return _('forgetting %s\n')
2288 2288 return _('removing %s\n')
2289 2289
2290 2290 revert = ([], _('reverting %s\n'))
2291 2291 add = ([], _('adding %s\n'))
2292 2292 remove = ([], removeforget)
2293 2293 undelete = ([], _('undeleting %s\n'))
2294 2294
2295 2295 disptable = (
2296 2296 # dispatch table:
2297 2297 # file state
2298 2298 # action if in target manifest
2299 2299 # action if not in target manifest
2300 2300 # make backup if in target manifest
2301 2301 # make backup if not in target manifest
2302 2302 (modified, revert, remove, True, True),
2303 2303 (added, revert, remove, True, False),
2304 2304 (removed, undelete, None, False, False),
2305 2305 (deleted, revert, remove, False, False),
2306 2306 )
2307 2307
2308 2308 entries = names.items()
2309 2309 entries.sort()
2310 2310
2311 2311 for abs, (rel, exact) in entries:
2312 2312 mfentry = mf.get(abs)
2313 2313 target = repo.wjoin(abs)
2314 2314 def handle(xlist, dobackup):
2315 2315 xlist[0].append(abs)
2316 2316 if dobackup and not opts['no_backup'] and util.lexists(target):
2317 2317 bakname = "%s.orig" % rel
2318 2318 ui.note(_('saving current version of %s as %s\n') %
2319 2319 (rel, bakname))
2320 2320 if not opts.get('dry_run'):
2321 2321 util.copyfile(target, bakname)
2322 2322 if ui.verbose or not exact:
2323 2323 msg = xlist[1]
2324 2324 if not isinstance(msg, basestring):
2325 2325 msg = msg(abs)
2326 2326 ui.status(msg % rel)
2327 2327 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2328 2328 if abs not in table: continue
2329 2329 # file has changed in dirstate
2330 2330 if mfentry:
2331 2331 handle(hitlist, backuphit)
2332 2332 elif misslist is not None:
2333 2333 handle(misslist, backupmiss)
2334 2334 break
2335 2335 else:
2336 2336 if abs not in repo.dirstate:
2337 2337 if mfentry:
2338 2338 handle(add, True)
2339 2339 elif exact:
2340 2340 ui.warn(_('file not managed: %s\n') % rel)
2341 2341 continue
2342 2342 # file has not changed in dirstate
2343 2343 if node == parent:
2344 2344 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2345 2345 continue
2346 2346 if pmf is None:
2347 2347 # only need parent manifest in this unlikely case,
2348 2348 # so do not read by default
2349 2349 pmf = repo.changectx(parent).manifest()
2350 2350 if abs in pmf:
2351 2351 if mfentry:
2352 2352 # if version of file is same in parent and target
2353 2353 # manifests, do nothing
2354 2354 if (pmf[abs] != mfentry or
2355 2355 pmf.flags(abs) != mf.flags(abs)):
2356 2356 handle(revert, False)
2357 2357 else:
2358 2358 handle(remove, False)
2359 2359
2360 2360 if not opts.get('dry_run'):
2361 2361 def checkout(f):
2362 2362 fc = ctx[f]
2363 2363 repo.wwrite(f, fc.data(), fc.fileflags())
2364 2364
2365 2365 audit_path = util.path_auditor(repo.root)
2366 2366 for f in remove[0]:
2367 2367 if repo.dirstate[f] == 'a':
2368 2368 repo.dirstate.forget(f)
2369 2369 continue
2370 2370 audit_path(f)
2371 2371 try:
2372 2372 util.unlink(repo.wjoin(f))
2373 2373 except OSError:
2374 2374 pass
2375 2375 repo.dirstate.remove(f)
2376 2376
2377 2377 for f in revert[0]:
2378 2378 checkout(f)
2379 2379
2380 2380 for f in add[0]:
2381 2381 checkout(f)
2382 2382 repo.dirstate.add(f)
2383 2383
2384 2384 normal = repo.dirstate.normallookup
2385 2385 if node == parent and p2 == nullid:
2386 2386 normal = repo.dirstate.normal
2387 2387 for f in undelete[0]:
2388 2388 checkout(f)
2389 2389 normal(f)
2390 2390
2391 2391 finally:
2392 2392 del wlock
2393 2393
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All of the work (undoing the journal, restoring the dirstate)
    # lives in the repository object; this command is a thin wrapper.
    repo.rollback()
2421 2421
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # Emit the repository root followed by a newline.
    ui.write("%s\n" % repo.root)
2428 2428
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio mode: serve a single ssh client over stdin/stdout
    # instead of starting an HTTP server; serve_forever() does not
    # return normally.
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    # Copy the relevant command-line options into the [web] config
    # section so they override hgrc settings for this run.  If the repo
    # has its own ui object distinct from parentui, mirror the setting
    # there as well.
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # Without a local repo we can still serve multiple repos via a
    # webdir_conf file; otherwise there is nothing to serve.
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # Small adapter object handed to cmdutil.service(), which takes
    # care of daemonizing, pid files, etc.
    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            # Only announce the listening address in verbose mode.
            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # Omit the port from the URL when it is the HTTP default.
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/%s\n') %
                          (self.httpd.addr, self.httpd.port, prefix))
            else:
                ui.status(_('listening at http://%s/%s\n') %
                          (self.httpd.addr, prefix))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2486 2486
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    # renamed from 'all'/'format' to avoid shadowing the builtins
    show_all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # Ask the repo for exactly the categories we may need to display;
    # ignored/clean/unknown scans are skipped when no option requires them.
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=opts['ignored']
                                            or show_all and not ui.quiet,
                               list_clean=opts['clean'] or show_all,
                               list_unknown=opts['unknown']
                                            or not (ui.quiet or
                                                    opts['modified'] or
                                                    opts['added'] or
                                                    opts['removed'] or
                                                    opts['deleted'] or
                                                    opts['ignored']))]

    # (option name, status letter, file list) triples, in display order
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # clean files are only shown when explicitly requested (or -A)
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # -0/--print0 terminates entries with NUL for xargs -0
    end = opts['print0'] and '\0' or '\n'

    # show the requested categories, falling back to the default set
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if show_all or opts[ct[0]]]
                               or changetypes):

        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(fmt % repo.pathto(f, cwd))
            # with -C/-A, print the copy source on the following line
            if ((show_all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write('  %s%s' % (repo.pathto(copied, cwd), end))
2563 2563
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # these names are reserved revision identifiers and cannot be tags
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    # the positional REV form is deprecated in favor of -r REV
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # --remove requires an existing tag of a matching scope
        tagtype = repo.tagtype(name)

        if not tagtype:
            raise util.Abort(_('tag %s does not exist') % name)
        if opts['local'] and tagtype == 'global':
            raise util.Abort(_('%s tag is global') % name)
        if not opts['local'] and tagtype == 'local':
            raise util.Abort(_('%s tag is local') % name)

        # removal is recorded as the tag pointing at the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    # with no explicit revision, refuse to guess across an uncommitted merge
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2621 2621
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    # display newest tags first
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # node is not in the changelog; no revision number available
            # NOTE(review): in this branch nothing is written for the
            # tag — 'r' is computed but the ui.write below sits in the
            # else clause. Confirm whether skipping the tag is intended.
            r = "    ?:%s" % hn
        else:
            spaces = " " * (30 - util.locallen(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2654 2654
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # the tip is the last revision in the changelog
    tiprev = nullrev + repo.changelog.count()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tiprev)
2661 2661
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    # fold the mandatory first name and any extras into one sequence
    all_fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for bundlename in all_fnames:
            # local files are opened directly; anything else is
            # treated as a URL
            if os.path.exists(bundlename):
                bundlefile = open(bundlename, "rb")
            else:
                bundlefile = urllib.urlopen(bundlename)
            gen = changegroup.readbundle(bundlefile, bundlename)
            modheads = repo.addchangegroup(gen, 'unbundle',
                                           'bundle:' + bundlename)
    finally:
        # dropping the reference releases the repository lock
        del lock

    # report on the heads added by the last changegroup applied
    return postincoming(ui, repo, modheads, opts['update'], None)
2684 2684
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.
    See 'hg help dates' for a list of formats valid for -d/--date.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # the revision may be given positionally or via -r, but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # fall back to the positional argument when -r was not used
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # resolve the date spec to a concrete revision
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2717 2717
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all of the checking is implemented in hg.verify(); propagate
    # its result as the command's return value
    return hg.verify(repo)
2729 2729
def version_(ui):
    """output version and copyright information"""
    # the version line goes to normal output; the copyright blurb is
    # status-level, so -q suppresses it
    vstring = version.get_version()
    ui.write(_("Mercurial Distributed SCM (version %s)\n") % vstring)
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2740 2740
2741 2741 # Command options and aliases are listed here, alphabetically
2742 2742
# Each option entry is a (shortname, longname, default, help) tuple.
# globalopts are accepted by every command and handled by the dispatcher.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# Shared option groups, appended to individual command option lists
# in the command table below.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# options for commands that talk to a remote repository
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# include/exclude patterns for commands that walk the working directory
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# commit message options
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

# commit authorship/date options
commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

# changeset display template options
templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

# options shared by log-like commands (log, incoming, outgoing)
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts
2797 2797
2798 2798 table = {
2799 2799 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2800 2800 "addremove":
2801 2801 (addremove,
2802 2802 [('s', 'similarity', '',
2803 2803 _('guess renamed files by similarity (0<=s<=100)')),
2804 2804 ] + walkopts + dryrunopts,
2805 2805 _('hg addremove [OPTION]... [FILE]...')),
2806 2806 "^annotate|blame":
2807 2807 (annotate,
2808 2808 [('r', 'rev', '', _('annotate the specified revision')),
2809 2809 ('f', 'follow', None, _('follow file copies and renames')),
2810 2810 ('a', 'text', None, _('treat all files as text')),
2811 2811 ('u', 'user', None, _('list the author (long with -v)')),
2812 2812 ('d', 'date', None, _('list the date (short with -q)')),
2813 2813 ('n', 'number', None, _('list the revision number (default)')),
2814 2814 ('c', 'changeset', None, _('list the changeset')),
2815 2815 ('l', 'line-number', None,
2816 2816 _('show line number at the first appearance'))
2817 2817 ] + walkopts,
2818 2818 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2819 2819 "archive":
2820 2820 (archive,
2821 2821 [('', 'no-decode', None, _('do not pass files through decoders')),
2822 2822 ('p', 'prefix', '', _('directory prefix for files in archive')),
2823 2823 ('r', 'rev', '', _('revision to distribute')),
2824 2824 ('t', 'type', '', _('type of distribution to create')),
2825 2825 ] + walkopts,
2826 2826 _('hg archive [OPTION]... DEST')),
2827 2827 "backout":
2828 2828 (backout,
2829 2829 [('', 'merge', None,
2830 2830 _('merge with old dirstate parent after backout')),
2831 2831 ('', 'parent', '', _('parent to choose when backing out merge')),
2832 2832 ('r', 'rev', '', _('revision to backout')),
2833 2833 ] + walkopts + commitopts + commitopts2,
2834 2834 _('hg backout [OPTION]... [-r] REV')),
2835 2835 "bisect":
2836 2836 (bisect,
2837 2837 [('r', 'reset', False, _('reset bisect state')),
2838 2838 ('g', 'good', False, _('mark changeset good')),
2839 2839 ('b', 'bad', False, _('mark changeset bad')),
2840 2840 ('s', 'skip', False, _('skip testing changeset')),
2841 2841 ('U', 'noupdate', False, _('do not update to target'))],
2842 2842 _("hg bisect [-gbsr] [REV]")),
2843 2843 "branch":
2844 2844 (branch,
2845 2845 [('f', 'force', None,
2846 2846 _('set branch name even if it shadows an existing branch'))],
2847 2847 _('hg branch [-f] [NAME]')),
2848 2848 "branches":
2849 2849 (branches,
2850 2850 [('a', 'active', False,
2851 2851 _('show only branches that have unmerged heads'))],
2852 2852 _('hg branches [-a]')),
2853 2853 "bundle":
2854 2854 (bundle,
2855 2855 [('f', 'force', None,
2856 2856 _('run even when remote repository is unrelated')),
2857 2857 ('r', 'rev', [],
2858 2858 _('a changeset you would like to bundle')),
2859 2859 ('', 'base', [],
2860 2860 _('a base changeset to specify instead of a destination')),
2861 2861 ('a', 'all', None,
2862 2862 _('bundle all changesets in the repository')),
2863 2863 ] + remoteopts,
2864 2864 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
2865 2865 "cat":
2866 2866 (cat,
2867 2867 [('o', 'output', '', _('print output to file with formatted name')),
2868 2868 ('r', 'rev', '', _('print the given revision')),
2869 2869 ('', 'decode', None, _('apply any matching decode filter')),
2870 2870 ] + walkopts,
2871 2871 _('hg cat [OPTION]... FILE...')),
2872 2872 "^clone":
2873 2873 (clone,
2874 2874 [('U', 'noupdate', None, _('do not update the new working directory')),
2875 2875 ('r', 'rev', [],
2876 2876 _('a changeset you would like to have after cloning')),
2877 2877 ('', 'pull', None, _('use pull protocol to copy metadata')),
2878 2878 ('', 'uncompressed', None,
2879 2879 _('use uncompressed transfer (fast over LAN)')),
2880 2880 ] + remoteopts,
2881 2881 _('hg clone [OPTION]... SOURCE [DEST]')),
2882 2882 "^commit|ci":
2883 2883 (commit,
2884 2884 [('A', 'addremove', None,
2885 2885 _('mark new/missing files as added/removed before committing')),
2886 2886 ] + walkopts + commitopts + commitopts2,
2887 2887 _('hg commit [OPTION]... [FILE]...')),
2888 2888 "copy|cp":
2889 2889 (copy,
2890 2890 [('A', 'after', None, _('record a copy that has already occurred')),
2891 2891 ('f', 'force', None,
2892 2892 _('forcibly copy over an existing managed file')),
2893 2893 ] + walkopts + dryrunopts,
2894 2894 _('hg copy [OPTION]... [SOURCE]... DEST')),
2895 2895 "debugancestor": (debugancestor, [],
2896 2896 _('hg debugancestor [INDEX] REV1 REV2')),
2897 2897 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2898 2898 "debugcomplete":
2899 2899 (debugcomplete,
2900 2900 [('o', 'options', None, _('show the command options'))],
2901 2901 _('hg debugcomplete [-o] CMD')),
2902 2902 "debugdate":
2903 2903 (debugdate,
2904 2904 [('e', 'extended', None, _('try extended date formats'))],
2905 2905 _('hg debugdate [-e] DATE [RANGE]')),
2906 2906 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2907 2907 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2908 2908 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2909 2909 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2910 2910 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2911 2911 "debugrawcommit|rawcommit":
2912 2912 (rawcommit,
2913 2913 [('p', 'parent', [], _('parent')),
2914 2914 ('F', 'files', '', _('file list'))
2915 2915 ] + commitopts + commitopts2,
2916 2916 _('hg debugrawcommit [OPTION]... [FILE]...')),
2917 2917 "debugrebuildstate":
2918 2918 (debugrebuildstate,
2919 2919 [('r', 'rev', '', _('revision to rebuild to'))],
2920 2920 _('hg debugrebuildstate [-r REV] [REV]')),
2921 2921 "debugrename":
2922 2922 (debugrename,
2923 2923 [('r', 'rev', '', _('revision to debug'))],
2924 2924 _('hg debugrename [-r REV] FILE')),
2925 2925 "debugsetparents":
2926 2926 (debugsetparents,
2927 2927 [],
2928 2928 _('hg debugsetparents REV1 [REV2]')),
2929 2929 "debugstate": (debugstate, [], _('hg debugstate')),
2930 2930 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2931 2931 "^diff":
2932 2932 (diff,
2933 2933 [('r', 'rev', [], _('revision')),
2934 2934 ('a', 'text', None, _('treat all files as text')),
2935 2935 ('p', 'show-function', None,
2936 2936 _('show which function each change is in')),
2937 2937 ('g', 'git', None, _('use git extended diff format')),
2938 2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 2939 ('w', 'ignore-all-space', None,
2940 2940 _('ignore white space when comparing lines')),
2941 2941 ('b', 'ignore-space-change', None,
2942 2942 _('ignore changes in the amount of white space')),
2943 2943 ('B', 'ignore-blank-lines', None,
2944 2944 _('ignore changes whose lines are all blank')),
2945 2945 ('U', 'unified', 3,
2946 2946 _('number of lines of context to show'))
2947 2947 ] + walkopts,
2948 2948 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2949 2949 "^export":
2950 2950 (export,
2951 2951 [('o', 'output', '', _('print output to file with formatted name')),
2952 2952 ('a', 'text', None, _('treat all files as text')),
2953 2953 ('g', 'git', None, _('use git extended diff format')),
2954 2954 ('', 'nodates', None, _("don't include dates in diff headers")),
2955 2955 ('', 'switch-parent', None, _('diff against the second parent'))],
2956 2956 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2957 2957 "grep":
2958 2958 (grep,
2959 2959 [('0', 'print0', None, _('end fields with NUL')),
2960 2960 ('', 'all', None, _('print all revisions that match')),
2961 2961 ('f', 'follow', None,
2962 2962 _('follow changeset history, or file history across copies and renames')),
2963 2963 ('i', 'ignore-case', None, _('ignore case when matching')),
2964 2964 ('l', 'files-with-matches', None,
2965 2965 _('print only filenames and revs that match')),
2966 2966 ('n', 'line-number', None, _('print matching line numbers')),
2967 2967 ('r', 'rev', [], _('search in given revision range')),
2968 2968 ('u', 'user', None, _('list the author (long with -v)')),
2969 2969 ('d', 'date', None, _('list the date (short with -q)')),
2970 2970 ] + walkopts,
2971 2971 _('hg grep [OPTION]... PATTERN [FILE]...')),
2972 2972 "heads":
2973 2973 (heads,
2974 2974 [('r', 'rev', '', _('show only heads which are descendants of rev')),
2975 2975 ] + templateopts,
2976 2976 _('hg heads [-r REV] [REV]...')),
2977 2977 "help": (help_, [], _('hg help [COMMAND]')),
2978 2978 "identify|id":
2979 2979 (identify,
2980 2980 [('r', 'rev', '', _('identify the specified rev')),
2981 2981 ('n', 'num', None, _('show local revision number')),
2982 2982 ('i', 'id', None, _('show global revision id')),
2983 2983 ('b', 'branch', None, _('show branch')),
2984 2984 ('t', 'tags', None, _('show tags'))],
2985 2985 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2986 2986 "import|patch":
2987 2987 (import_,
2988 2988 [('p', 'strip', 1,
2989 2989 _('directory strip option for patch. This has the same\n'
2990 2990 'meaning as the corresponding patch option')),
2991 2991 ('b', 'base', '', _('base path')),
2992 2992 ('f', 'force', None,
2993 2993 _('skip check for outstanding uncommitted changes')),
2994 2994 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2995 2995 ('', 'exact', None,
2996 2996 _('apply patch to the nodes from which it was generated')),
2997 2997 ('', 'import-branch', None,
2998 2998 _('Use any branch information in patch (implied by --exact)'))] +
2999 2999 commitopts + commitopts2,
3000 3000 _('hg import [OPTION]... PATCH...')),
3001 3001 "incoming|in":
3002 3002 (incoming,
3003 3003 [('f', 'force', None,
3004 3004 _('run even when remote repository is unrelated')),
3005 3005 ('n', 'newest-first', None, _('show newest record first')),
3006 3006 ('', 'bundle', '', _('file to store the bundles into')),
3007 3007 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3008 3008 ] + logopts + remoteopts,
3009 3009 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3010 3010 ' [--bundle FILENAME] [SOURCE]')),
3011 3011 "^init":
3012 3012 (init,
3013 3013 remoteopts,
3014 3014 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3015 3015 "locate":
3016 3016 (locate,
3017 3017 [('r', 'rev', '', _('search the repository as it stood at rev')),
3018 3018 ('0', 'print0', None,
3019 3019 _('end filenames with NUL, for use with xargs')),
3020 3020 ('f', 'fullpath', None,
3021 3021 _('print complete paths from the filesystem root')),
3022 3022 ] + walkopts,
3023 3023 _('hg locate [OPTION]... [PATTERN]...')),
3024 3024 "^log|history":
3025 3025 (log,
3026 3026 [('f', 'follow', None,
3027 3027 _('follow changeset history, or file history across copies and renames')),
3028 3028 ('', 'follow-first', None,
3029 3029 _('only follow the first parent of merge changesets')),
3030 3030 ('d', 'date', '', _('show revs matching date spec')),
3031 3031 ('C', 'copies', None, _('show copied files')),
3032 3032 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3033 3033 ('r', 'rev', [], _('show the specified revision or range')),
3034 3034 ('', 'removed', None, _('include revs where files were removed')),
3035 3035 ('m', 'only-merges', None, _('show only merges')),
3036 3036 ('b', 'only-branch', [],
3037 3037 _('show only changesets within the given named branch')),
3038 3038 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3039 3039 ] + logopts + walkopts,
3040 3040 _('hg log [OPTION]... [FILE]')),
3041 3041 "manifest":
3042 3042 (manifest,
3043 3043 [('r', 'rev', '', _('revision to display'))],
3044 3044 _('hg manifest [-r REV]')),
3045 3045 "^merge":
3046 3046 (merge,
3047 3047 [('f', 'force', None, _('force a merge with outstanding changes')),
3048 3048 ('r', 'rev', '', _('revision to merge')),
3049 3049 ],
3050 3050 _('hg merge [-f] [[-r] REV]')),
3051 3051 "outgoing|out":
3052 3052 (outgoing,
3053 3053 [('f', 'force', None,
3054 3054 _('run even when remote repository is unrelated')),
3055 3055 ('r', 'rev', [], _('a specific revision you would like to push')),
3056 3056 ('n', 'newest-first', None, _('show newest record first')),
3057 3057 ] + logopts + remoteopts,
3058 3058 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3059 3059 "^parents":
3060 3060 (parents,
3061 3061 [('r', 'rev', '', _('show parents from the specified rev')),
3062 3062 ] + templateopts,
3063 3063 _('hg parents [-r REV] [FILE]')),
3064 3064 "paths": (paths, [], _('hg paths [NAME]')),
3065 3065 "^pull":
3066 3066 (pull,
3067 3067 [('u', 'update', None,
3068 3068 _('update to new tip if changesets were pulled')),
3069 3069 ('f', 'force', None,
3070 3070 _('run even when remote repository is unrelated')),
3071 3071 ('r', 'rev', [],
3072 3072 _('a specific revision up to which you would like to pull')),
3073 3073 ] + remoteopts,
3074 3074 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3075 3075 "^push":
3076 3076 (push,
3077 3077 [('f', 'force', None, _('force push')),
3078 3078 ('r', 'rev', [], _('a specific revision you would like to push')),
3079 3079 ] + remoteopts,
3080 3080 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3081 3081 "recover": (recover, [], _('hg recover')),
3082 3082 "^remove|rm":
3083 3083 (remove,
3084 3084 [('A', 'after', None, _('record remove without deleting')),
3085 3085 ('f', 'force', None, _('remove file even if modified')),
3086 3086 ] + walkopts,
3087 3087 _('hg remove [OPTION]... FILE...')),
3088 3088 "rename|mv":
3089 3089 (rename,
3090 3090 [('A', 'after', None, _('record a rename that has already occurred')),
3091 3091 ('f', 'force', None,
3092 3092 _('forcibly copy over an existing managed file')),
3093 3093 ] + walkopts + dryrunopts,
3094 3094 _('hg rename [OPTION]... SOURCE... DEST')),
3095 3095 "revert":
3096 3096 (revert,
3097 3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3098 3098 ('d', 'date', '', _('tipmost revision matching date')),
3099 3099 ('r', 'rev', '', _('revision to revert to')),
3100 3100 ('', 'no-backup', None, _('do not save backup copies of files')),
3101 3101 ] + walkopts + dryrunopts,
3102 3102 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3103 3103 "rollback": (rollback, [], _('hg rollback')),
3104 3104 "root": (root, [], _('hg root')),
3105 3105 "^serve":
3106 3106 (serve,
3107 3107 [('A', 'accesslog', '', _('name of access log file to write to')),
3108 3108 ('d', 'daemon', None, _('run server in background')),
3109 3109 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3110 3110 ('E', 'errorlog', '', _('name of error log file to write to')),
3111 3111 ('p', 'port', 0, _('port to use (default: 8000)')),
3112 3112 ('a', 'address', '', _('address to use')),
3113 3113 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3114 3114 ('n', 'name', '',
3115 3115 _('name to show in web pages (default: working dir)')),
3116 3116 ('', 'webdir-conf', '', _('name of the webdir config file'
3117 3117 ' (serve more than one repo)')),
3118 3118 ('', 'pid-file', '', _('name of file to write process ID to')),
3119 3119 ('', 'stdio', None, _('for remote clients')),
3120 3120 ('t', 'templates', '', _('web templates to use')),
3121 3121 ('', 'style', '', _('template style to use')),
3122 3122 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3123 3123 ('', 'certificate', '', _('SSL certificate file'))],
3124 3124 _('hg serve [OPTION]...')),
3125 3125 "showconfig|debugconfig":
3126 3126 (showconfig,
3127 3127 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3128 3128 _('hg showconfig [-u] [NAME]...')),
3129 3129 "^status|st":
3130 3130 (status,
3131 3131 [('A', 'all', None, _('show status of all files')),
3132 3132 ('m', 'modified', None, _('show only modified files')),
3133 3133 ('a', 'added', None, _('show only added files')),
3134 3134 ('r', 'removed', None, _('show only removed files')),
3135 3135 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3136 3136 ('c', 'clean', None, _('show only files without changes')),
3137 3137 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3138 3138 ('i', 'ignored', None, _('show only ignored files')),
3139 3139 ('n', 'no-status', None, _('hide status prefix')),
3140 3140 ('C', 'copies', None, _('show source of copied files')),
3141 3141 ('0', 'print0', None,
3142 3142 _('end filenames with NUL, for use with xargs')),
3143 3143 ('', 'rev', [], _('show difference from revision')),
3144 3144 ] + walkopts,
3145 3145 _('hg status [OPTION]... [FILE]...')),
3146 3146 "tag":
3147 3147 (tag,
3148 3148 [('f', 'force', None, _('replace existing tag')),
3149 3149 ('l', 'local', None, _('make the tag local')),
3150 3150 ('r', 'rev', '', _('revision to tag')),
3151 3151 ('', 'remove', None, _('remove a tag')),
3152 3152 # -l/--local is already there, commitopts cannot be used
3153 3153 ('m', 'message', '', _('use <text> as commit message')),
3154 3154 ] + commitopts2,
3155 3155 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3156 3156 "tags": (tags, [], _('hg tags')),
3157 3157 "tip":
3158 3158 (tip,
3159 3159 [('p', 'patch', None, _('show patch')),
3160 3160 ] + templateopts,
3161 3161 _('hg tip [-p]')),
3162 3162 "unbundle":
3163 3163 (unbundle,
3164 3164 [('u', 'update', None,
3165 3165 _('update to new tip if changesets were unbundled'))],
3166 3166 _('hg unbundle [-u] FILE...')),
3167 3167 "^update|up|checkout|co":
3168 3168 (update,
3169 3169 [('C', 'clean', None, _('overwrite locally modified files')),
3170 3170 ('d', 'date', '', _('tipmost revision matching date')),
3171 3171 ('r', 'rev', '', _('revision'))],
3172 3172 _('hg update [-C] [-d DATE] [[-r] REV]')),
3173 3173 "verify": (verify, [], _('hg verify')),
3174 3174 "version": (version_, [], _('hg version')),
3175 3175 }
3176 3176
3177 3177 norepo = ("clone init version help debugcomplete debugdata"
3178 3178 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3179 3179 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,620 +1,620 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullid, nullrev, short
9 9 from i18n import _
10 10 import ancestor, bdiff, repo, revlog, util, os, errno
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=None):
16 16 """changeid is a revision number, node, or tag"""
17 17 self._repo = repo
18 18
19 19 if not changeid and changeid != 0:
20 20 p1, p2 = self._repo.dirstate.parents()
21 21 self._rev = self._repo.changelog.rev(p1)
22 22 if self._rev == -1:
23 23 changeid = 'tip'
24 24 else:
25 25 self._node = p1
26 26 return
27 27
28 28 self._node = self._repo.lookup(changeid)
29 29 self._rev = self._repo.changelog.rev(self._node)
30 30
31 31 def __str__(self):
32 32 return short(self.node())
33 33
34 34 def __repr__(self):
35 35 return "<changectx %s>" % str(self)
36 36
37 37 def __eq__(self, other):
38 38 try:
39 39 return self._rev == other._rev
40 40 except AttributeError:
41 41 return False
42 42
43 43 def __ne__(self, other):
44 44 return not (self == other)
45 45
46 46 def __nonzero__(self):
47 47 return self._rev != nullrev
48 48
49 49 def __getattr__(self, name):
50 50 if name == '_changeset':
51 51 self._changeset = self._repo.changelog.read(self.node())
52 52 return self._changeset
53 53 elif name == '_manifest':
54 54 self._manifest = self._repo.manifest.read(self._changeset[0])
55 55 return self._manifest
56 56 elif name == '_manifestdelta':
57 57 md = self._repo.manifest.readdelta(self._changeset[0])
58 58 self._manifestdelta = md
59 59 return self._manifestdelta
60 60 else:
61 61 raise AttributeError, name
62 62
63 63 def __contains__(self, key):
64 64 return key in self._manifest
65 65
66 66 def __getitem__(self, key):
67 67 return self.filectx(key)
68 68
69 69 def __iter__(self):
70 70 a = self._manifest.keys()
71 71 a.sort()
72 72 for f in a:
73 73 yield f
74 74
75 75 def changeset(self): return self._changeset
76 76 def manifest(self): return self._manifest
77 77
78 78 def rev(self): return self._rev
79 79 def node(self): return self._node
80 80 def user(self): return self._changeset[1]
81 81 def date(self): return self._changeset[2]
82 82 def files(self): return self._changeset[3]
83 83 def description(self): return self._changeset[4]
84 84 def branch(self): return self._changeset[5].get("branch")
85 85 def extra(self): return self._changeset[5]
86 86 def tags(self): return self._repo.nodetags(self._node)
87 87
88 88 def parents(self):
89 89 """return contexts for each parent changeset"""
90 90 p = self._repo.changelog.parents(self._node)
91 91 return [changectx(self._repo, x) for x in p]
92 92
93 93 def children(self):
94 94 """return contexts for each child changeset"""
95 95 c = self._repo.changelog.children(self._node)
96 96 return [changectx(self._repo, x) for x in c]
97 97
98 98 def _fileinfo(self, path):
99 99 if '_manifest' in self.__dict__:
100 100 try:
101 101 return self._manifest[path], self._manifest.flags(path)
102 102 except KeyError:
103 103 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
104 104 if '_manifestdelta' in self.__dict__ or path in self.files():
105 105 if path in self._manifestdelta:
106 106 return self._manifestdelta[path], self._manifestdelta.flags(path)
107 107 node, flag = self._repo.manifest.find(self._changeset[0], path)
108 108 if not node:
109 109 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
110 110
111 111 return node, flag
112 112
113 113 def filenode(self, path):
114 114 return self._fileinfo(path)[0]
115 115
116 116 def fileflags(self, path):
117 117 try:
118 118 return self._fileinfo(path)[1]
119 119 except revlog.LookupError:
120 120 return ''
121 121
122 122 def filectx(self, path, fileid=None, filelog=None):
123 123 """get a file context from this changeset"""
124 124 if fileid is None:
125 125 fileid = self.filenode(path)
126 126 return filectx(self._repo, path, fileid=fileid,
127 127 changectx=self, filelog=filelog)
128 128
129 129 def filectxs(self):
130 130 """generate a file context for each file in this changeset's
131 131 manifest"""
132 132 mf = self.manifest()
133 133 m = mf.keys()
134 134 m.sort()
135 135 for f in m:
136 136 yield self.filectx(f, fileid=mf[f])
137 137
138 138 def ancestor(self, c2):
139 139 """
140 140 return the ancestor context of self and c2
141 141 """
142 142 n = self._repo.changelog.ancestor(self._node, c2._node)
143 143 return changectx(self._repo, n)
144 144
145 145 class filectx(object):
146 146 """A filecontext object makes access to data related to a particular
147 147 filerevision convenient."""
148 148 def __init__(self, repo, path, changeid=None, fileid=None,
149 149 filelog=None, changectx=None):
150 150 """changeid can be a changeset revision, node, or tag.
151 151 fileid can be a file revision or node."""
152 152 self._repo = repo
153 153 self._path = path
154 154
155 155 assert (changeid is not None
156 156 or fileid is not None
157 157 or changectx is not None)
158 158
159 159 if filelog:
160 160 self._filelog = filelog
161 161
162 162 if changeid is not None:
163 163 self._changeid = changeid
164 164 if changectx is not None:
165 165 self._changectx = changectx
166 166 if fileid is not None:
167 167 self._fileid = fileid
168 168
169 169 def __getattr__(self, name):
170 170 if name == '_changectx':
171 171 self._changectx = changectx(self._repo, self._changeid)
172 172 return self._changectx
173 173 elif name == '_filelog':
174 174 self._filelog = self._repo.file(self._path)
175 175 return self._filelog
176 176 elif name == '_changeid':
177 177 if '_changectx' in self.__dict__:
178 178 self._changeid = self._changectx.rev()
179 179 else:
180 180 self._changeid = self._filelog.linkrev(self._filenode)
181 181 return self._changeid
182 182 elif name == '_filenode':
183 183 if '_fileid' in self.__dict__:
184 184 self._filenode = self._filelog.lookup(self._fileid)
185 185 else:
186 186 self._filenode = self._changectx.filenode(self._path)
187 187 return self._filenode
188 188 elif name == '_filerev':
189 189 self._filerev = self._filelog.rev(self._filenode)
190 190 return self._filerev
191 191 else:
192 192 raise AttributeError, name
193 193
194 194 def __nonzero__(self):
195 195 try:
196 196 n = self._filenode
197 197 return True
198 198 except revlog.LookupError:
199 199 # file is missing
200 200 return False
201 201
202 202 def __str__(self):
203 203 return "%s@%s" % (self.path(), short(self.node()))
204 204
205 205 def __repr__(self):
206 206 return "<filectx %s>" % str(self)
207 207
208 208 def __eq__(self, other):
209 209 try:
210 210 return (self._path == other._path
211 211 and self._fileid == other._fileid)
212 212 except AttributeError:
213 213 return False
214 214
215 215 def __ne__(self, other):
216 216 return not (self == other)
217 217
218 218 def filectx(self, fileid):
219 219 '''opens an arbitrary revision of the file without
220 220 opening a new filelog'''
221 221 return filectx(self._repo, self._path, fileid=fileid,
222 222 filelog=self._filelog)
223 223
224 224 def filerev(self): return self._filerev
225 225 def filenode(self): return self._filenode
226 226 def fileflags(self): return self._changectx.fileflags(self._path)
227 227 def isexec(self): return 'x' in self.fileflags()
228 228 def islink(self): return 'l' in self.fileflags()
229 229 def filelog(self): return self._filelog
230 230
231 231 def rev(self):
232 232 if '_changectx' in self.__dict__:
233 233 return self._changectx.rev()
234 234 if '_changeid' in self.__dict__:
235 235 return self._changectx.rev()
236 236 return self._filelog.linkrev(self._filenode)
237 237
238 238 def linkrev(self): return self._filelog.linkrev(self._filenode)
239 239 def node(self): return self._changectx.node()
240 240 def user(self): return self._changectx.user()
241 241 def date(self): return self._changectx.date()
242 242 def files(self): return self._changectx.files()
243 243 def description(self): return self._changectx.description()
244 244 def branch(self): return self._changectx.branch()
245 245 def manifest(self): return self._changectx.manifest()
246 246 def changectx(self): return self._changectx
247 247
248 248 def data(self): return self._filelog.read(self._filenode)
249 249 def path(self): return self._path
250 250 def size(self): return self._filelog.size(self._filerev)
251 251
252 252 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
253 253
254 254 def renamed(self):
255 255 """check if file was actually renamed in this changeset revision
256 256
257 257 If rename logged in file revision, we report copy for changeset only
258 258 if file revisions linkrev points back to the changeset in question
259 259 or both changeset parents contain different file revisions.
260 260 """
261 261
262 262 renamed = self._filelog.renamed(self._filenode)
263 263 if not renamed:
264 264 return renamed
265 265
266 266 if self.rev() == self.linkrev():
267 267 return renamed
268 268
269 269 name = self.path()
270 270 fnode = self._filenode
271 271 for p in self._changectx.parents():
272 272 try:
273 273 if fnode == p.filenode(name):
274 274 return None
275 275 except revlog.LookupError:
276 276 pass
277 277 return renamed
278 278
279 279 def parents(self):
280 280 p = self._path
281 281 fl = self._filelog
282 282 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
283 283
284 284 r = self._filelog.renamed(self._filenode)
285 285 if r:
286 286 pl[0] = (r[0], r[1], None)
287 287
288 288 return [filectx(self._repo, p, fileid=n, filelog=l)
289 289 for p,n,l in pl if n != nullid]
290 290
291 291 def children(self):
292 292 # hard for renames
293 293 c = self._filelog.children(self._filenode)
294 294 return [filectx(self._repo, self._path, fileid=x,
295 295 filelog=self._filelog) for x in c]
296 296
297 297 def annotate(self, follow=False, linenumber=None):
298 298 '''returns a list of tuples of (ctx, line) for each line
299 299 in the file, where ctx is the filectx of the node where
300 300 that line was last changed.
301 301 This returns tuples of ((ctx, linenumber), line) for each line,
302 302 if "linenumber" parameter is NOT "None".
303 303 In such tuples, linenumber means one at the first appearance
304 304 in the managed file.
305 305 To reduce annotation cost,
306 306 this returns fixed value(False is used) as linenumber,
307 307 if "linenumber" parameter is "False".'''
308 308
309 309 def decorate_compat(text, rev):
310 310 return ([rev] * len(text.splitlines()), text)
311 311
312 312 def without_linenumber(text, rev):
313 313 return ([(rev, False)] * len(text.splitlines()), text)
314 314
315 315 def with_linenumber(text, rev):
316 316 size = len(text.splitlines())
317 317 return ([(rev, i) for i in xrange(1, size + 1)], text)
318 318
319 319 decorate = (((linenumber is None) and decorate_compat) or
320 320 (linenumber and with_linenumber) or
321 321 without_linenumber)
322 322
323 323 def pair(parent, child):
324 324 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
325 325 child[0][b1:b2] = parent[0][a1:a2]
326 326 return child
327 327
328 328 getlog = util.cachefunc(lambda x: self._repo.file(x))
329 329 def getctx(path, fileid):
330 330 log = path == self._path and self._filelog or getlog(path)
331 331 return filectx(self._repo, path, fileid=fileid, filelog=log)
332 332 getctx = util.cachefunc(getctx)
333 333
334 334 def parents(f):
335 335 # we want to reuse filectx objects as much as possible
336 336 p = f._path
337 337 if f._filerev is None: # working dir
338 338 pl = [(n.path(), n.filerev()) for n in f.parents()]
339 339 else:
340 340 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
341 341
342 342 if follow:
343 343 r = f.renamed()
344 344 if r:
345 345 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
346 346
347 347 return [getctx(p, n) for p, n in pl if n != nullrev]
348 348
349 349 # use linkrev to find the first changeset where self appeared
350 350 if self.rev() != self.linkrev():
351 351 base = self.filectx(self.filerev())
352 352 else:
353 353 base = self
354 354
355 355 # find all ancestors
356 356 needed = {base: 1}
357 357 visit = [base]
358 358 files = [base._path]
359 359 while visit:
360 360 f = visit.pop(0)
361 361 for p in parents(f):
362 362 if p not in needed:
363 363 needed[p] = 1
364 364 visit.append(p)
365 365 if p._path not in files:
366 366 files.append(p._path)
367 367 else:
368 368 # count how many times we'll use this
369 369 needed[p] += 1
370 370
371 371 # sort by revision (per file) which is a topological order
372 372 visit = []
373 373 for f in files:
374 374 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
375 375 visit.extend(fn)
376 376 visit.sort()
377 377 hist = {}
378 378
379 379 for r, f in visit:
380 380 curr = decorate(f.data(), f)
381 381 for p in parents(f):
382 382 if p != nullid:
383 383 curr = pair(hist[p], curr)
384 384 # trim the history of unneeded revs
385 385 needed[p] -= 1
386 386 if not needed[p]:
387 387 del hist[p]
388 388 hist[f] = curr
389 389
390 390 return zip(hist[f][0], hist[f][1].splitlines(1))
391 391
392 392 def ancestor(self, fc2):
393 393 """
394 394 find the common ancestor file context, if any, of self, and fc2
395 395 """
396 396
397 397 acache = {}
398 398
399 399 # prime the ancestor cache for the working directory
400 400 for c in (self, fc2):
401 401 if c._filerev == None:
402 402 pl = [(n.path(), n.filenode()) for n in c.parents()]
403 403 acache[(c._path, None)] = pl
404 404
405 405 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
406 406 def parents(vertex):
407 407 if vertex in acache:
408 408 return acache[vertex]
409 409 f, n = vertex
410 410 if f not in flcache:
411 411 flcache[f] = self._repo.file(f)
412 412 fl = flcache[f]
413 413 pl = [(f, p) for p in fl.parents(n) if p != nullid]
414 414 re = fl.renamed(n)
415 415 if re:
416 416 pl.append(re)
417 417 acache[vertex] = pl
418 418 return pl
419 419
420 420 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
421 421 v = ancestor.ancestor(a, b, parents)
422 422 if v:
423 423 f, n = v
424 424 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
425 425
426 426 return None
427 427
428 428 class workingctx(changectx):
429 429 """A workingctx object makes access to data related to
430 430 the current working directory convenient."""
431 431 def __init__(self, repo):
432 432 self._repo = repo
433 433 self._rev = None
434 434 self._node = None
435 435
436 436 def __str__(self):
437 437 return str(self._parents[0]) + "+"
438 438
439 439 def __nonzero__(self):
440 440 return True
441 441
442 442 def __getattr__(self, name):
443 443 if name == '_parents':
444 444 self._parents = self._repo.parents()
445 445 return self._parents
446 446 if name == '_status':
447 447 self._status = self._repo.status()
448 448 return self._status
449 449 if name == '_manifest':
450 450 self._buildmanifest()
451 451 return self._manifest
452 452 else:
453 453 raise AttributeError, name
454 454
455 455 def _buildmanifest(self):
456 456 """generate a manifest corresponding to the working directory"""
457 457
458 458 man = self._parents[0].manifest().copy()
459 459 copied = self._repo.dirstate.copies()
460 460 is_exec = util.execfunc(self._repo.root,
461 461 lambda p: man.execf(copied.get(p,p)))
462 462 is_link = util.linkfunc(self._repo.root,
463 463 lambda p: man.linkf(copied.get(p,p)))
464 464 modified, added, removed, deleted, unknown = self._status[:5]
465 465 for i, l in (("a", added), ("m", modified), ("u", unknown)):
466 466 for f in l:
467 467 man[f] = man.get(copied.get(f, f), nullid) + i
468 468 try:
469 469 man.set(f, is_exec(f), is_link(f))
470 470 except OSError:
471 471 pass
472 472
473 473 for f in deleted + removed:
474 474 if f in man:
475 475 del man[f]
476 476
477 477 self._manifest = man
478 478
479 479 def manifest(self): return self._manifest
480 480
481 481 def user(self): return self._repo.ui.username()
482 482 def date(self): return util.makedate()
483 483 def description(self): return ""
484 484 def files(self):
485 485 f = self.modified() + self.added() + self.removed()
486 486 f.sort()
487 487 return f
488 488
489 489 def modified(self): return self._status[0]
490 490 def added(self): return self._status[1]
491 491 def removed(self): return self._status[2]
492 492 def deleted(self): return self._status[3]
493 493 def unknown(self): return self._status[4]
494 494 def clean(self): return self._status[5]
495 495 def branch(self): return self._repo.dirstate.branch()
496 496
497 497 def tags(self):
498 498 t = []
499 499 [t.extend(p.tags()) for p in self.parents()]
500 500 return t
501 501
502 502 def parents(self):
503 503 """return contexts for each parent changeset"""
504 504 return self._parents
505 505
506 506 def children(self):
507 507 return []
508 508
509 509 def fileflags(self, path):
510 510 if '_manifest' in self.__dict__:
511 511 try:
512 512 return self._manifest.flags(path)
513 513 except KeyError:
514 514 return ''
515 515
516 516 pnode = self._parents[0].changeset()[0]
517 517 orig = self._repo.dirstate.copies().get(path, path)
518 518 node, flag = self._repo.manifest.find(pnode, orig)
519 519 is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
520 520 is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
521 521 try:
522 522 return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
523 523 except OSError:
524 524 pass
525 525
526 526 if not node or path in self.deleted() or path in self.removed():
527 527 return ''
528 528 return flag
529 529
530 530 def filectx(self, path, filelog=None):
531 531 """get a file context from the working directory"""
532 532 return workingfilectx(self._repo, path, workingctx=self,
533 533 filelog=filelog)
534 534
535 535 def ancestor(self, c2):
536 536 """return the ancestor context of self and c2"""
537 537 return self._parents[0].ancestor(c2) # punt on two parents for now
538 538
539 539 class workingfilectx(filectx):
540 540 """A workingfilectx object makes access to data related to a particular
541 541 file in the working directory convenient."""
542 542 def __init__(self, repo, path, filelog=None, workingctx=None):
543 543 """changeid can be a changeset revision, node, or tag.
544 544 fileid can be a file revision or node."""
545 545 self._repo = repo
546 546 self._path = path
547 547 self._changeid = None
548 548 self._filerev = self._filenode = None
549 549
550 550 if filelog:
551 551 self._filelog = filelog
552 552 if workingctx:
553 553 self._changectx = workingctx
554 554
555 555 def __getattr__(self, name):
556 556 if name == '_changectx':
557 557 self._changectx = workingctx(self._repo)
558 558 return self._changectx
559 559 elif name == '_repopath':
560 560 self._repopath = (self._repo.dirstate.copied(self._path)
561 561 or self._path)
562 562 return self._repopath
563 563 elif name == '_filelog':
564 564 self._filelog = self._repo.file(self._repopath)
565 565 return self._filelog
566 566 else:
567 567 raise AttributeError, name
568 568
569 569 def __nonzero__(self):
570 570 return True
571 571
572 572 def __str__(self):
573 573 return "%s@%s" % (self.path(), self._changectx)
574 574
575 575 def filectx(self, fileid):
576 576 '''opens an arbitrary revision of the file without
577 577 opening a new filelog'''
578 578 return filectx(self._repo, self._repopath, fileid=fileid,
579 579 filelog=self._filelog)
580 580
581 581 def rev(self):
582 582 if '_changectx' in self.__dict__:
583 583 return self._changectx.rev()
584 584 return self._filelog.linkrev(self._filenode)
585 585
586 586 def data(self): return self._repo.wread(self._path)
587 587 def renamed(self):
588 588 rp = self._repopath
589 589 if rp == self._path:
590 590 return None
591 591 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
592 592
593 593 def parents(self):
594 594 '''return parent filectxs, following copies if necessary'''
595 595 p = self._path
596 596 rp = self._repopath
597 597 pcl = self._changectx._parents
598 598 fl = self._filelog
599 599 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
600 600 if len(pcl) > 1:
601 601 if rp != p:
602 602 fl = None
603 603 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
604 604
605 605 return [filectx(self._repo, p, fileid=n, filelog=l)
606 606 for p,n,l in pl if n != nullid]
607 607
608 608 def children(self):
609 609 return []
610 610
611 611 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
612 612 def date(self):
613 613 t, tz = self._changectx.date()
614 614 try:
615 615 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
616 616 except OSError, err:
617 617 if err.errno != errno.ENOENT: raise
618 618 return (t, tz)
619 619
620 620 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,598 +1,598 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 from node import *
10 from node import nullid
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO, osutil
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._dirtypl = False
25 25 self._ui = ui
26 26
27 27 def __getattr__(self, name):
28 28 if name == '_map':
29 29 self._read()
30 30 return self._map
31 31 elif name == '_copymap':
32 32 self._read()
33 33 return self._copymap
34 34 elif name == '_branch':
35 35 try:
36 36 self._branch = (self._opener("branch").read().strip()
37 37 or "default")
38 38 except IOError:
39 39 self._branch = "default"
40 40 return self._branch
41 41 elif name == '_pl':
42 42 self._pl = [nullid, nullid]
43 43 try:
44 44 st = self._opener("dirstate").read(40)
45 45 if len(st) == 40:
46 46 self._pl = st[:20], st[20:40]
47 47 except IOError, err:
48 48 if err.errno != errno.ENOENT: raise
49 49 return self._pl
50 50 elif name == '_dirs':
51 51 self._dirs = {}
52 52 for f in self._map:
53 53 if self[f] != 'r':
54 54 self._incpath(f)
55 55 return self._dirs
56 56 elif name == '_ignore':
57 57 files = [self._join('.hgignore')]
58 58 for name, path in self._ui.configitems("ui"):
59 59 if name == 'ignore' or name.startswith('ignore.'):
60 60 files.append(os.path.expanduser(path))
61 61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
62 62 return self._ignore
63 63 elif name == '_slash':
64 64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
65 65 return self._slash
66 66 else:
67 67 raise AttributeError, name
68 68
69 69 def _join(self, f):
70 70 return os.path.join(self._root, f)
71 71
72 72 def getcwd(self):
73 73 cwd = os.getcwd()
74 74 if cwd == self._root: return ''
75 75 # self._root ends with a path separator if self._root is '/' or 'C:\'
76 76 rootsep = self._root
77 77 if not util.endswithsep(rootsep):
78 78 rootsep += os.sep
79 79 if cwd.startswith(rootsep):
80 80 return cwd[len(rootsep):]
81 81 else:
82 82 # we're outside the repo. return an absolute path.
83 83 return cwd
84 84
85 85 def pathto(self, f, cwd=None):
86 86 if cwd is None:
87 87 cwd = self.getcwd()
88 88 path = util.pathto(self._root, cwd, f)
89 89 if self._slash:
90 90 return util.normpath(path)
91 91 return path
92 92
93 93 def __getitem__(self, key):
94 94 ''' current states:
95 95 n normal
96 96 m needs merging
97 97 r marked for removal
98 98 a marked for addition
99 99 ? not tracked'''
100 100 return self._map.get(key, ("?",))[0]
101 101
102 102 def __contains__(self, key):
103 103 return key in self._map
104 104
105 105 def __iter__(self):
106 106 a = self._map.keys()
107 107 a.sort()
108 108 for x in a:
109 109 yield x
110 110
111 111 def parents(self):
112 112 return self._pl
113 113
114 114 def branch(self):
115 115 return self._branch
116 116
117 117 def setparents(self, p1, p2=nullid):
118 118 self._dirty = self._dirtypl = True
119 119 self._pl = p1, p2
120 120
121 121 def setbranch(self, branch):
122 122 self._branch = branch
123 123 self._opener("branch", "w").write(branch + '\n')
124 124
125 125 def _read(self):
126 126 self._map = {}
127 127 self._copymap = {}
128 128 if not self._dirtypl:
129 129 self._pl = [nullid, nullid]
130 130 try:
131 131 st = self._opener("dirstate").read()
132 132 except IOError, err:
133 133 if err.errno != errno.ENOENT: raise
134 134 return
135 135 if not st:
136 136 return
137 137
138 138 if not self._dirtypl:
139 139 self._pl = [st[:20], st[20: 40]]
140 140
141 141 # deref fields so they will be local in loop
142 142 dmap = self._map
143 143 copymap = self._copymap
144 144 unpack = struct.unpack
145 145 e_size = struct.calcsize(_format)
146 146 pos1 = 40
147 147 l = len(st)
148 148
149 149 # the inner loop
150 150 while pos1 < l:
151 151 pos2 = pos1 + e_size
152 152 e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
153 153 pos1 = pos2 + e[4]
154 154 f = st[pos2:pos1]
155 155 if '\0' in f:
156 156 f, c = f.split('\0')
157 157 copymap[f] = c
158 158 dmap[f] = e # we hold onto e[4] because making a subtuple is slow
159 159
160 160 def invalidate(self):
161 161 for a in "_map _copymap _branch _pl _dirs _ignore".split():
162 162 if a in self.__dict__:
163 163 delattr(self, a)
164 164 self._dirty = False
165 165
166 166 def copy(self, source, dest):
167 167 self._dirty = True
168 168 self._copymap[dest] = source
169 169
170 170 def copied(self, file):
171 171 return self._copymap.get(file, None)
172 172
173 173 def copies(self):
174 174 return self._copymap
175 175
176 176 def _incpath(self, path):
177 177 c = path.rfind('/')
178 178 if c >= 0:
179 179 dirs = self._dirs
180 180 base = path[:c]
181 181 if base not in dirs:
182 182 self._incpath(base)
183 183 dirs[base] = 1
184 184 else:
185 185 dirs[base] += 1
186 186
187 187 def _decpath(self, path):
188 188 c = path.rfind('/')
189 189 if c >= 0:
190 190 base = path[:c]
191 191 dirs = self._dirs
192 192 if dirs[base] == 1:
193 193 del dirs[base]
194 194 self._decpath(base)
195 195 else:
196 196 dirs[base] -= 1
197 197
198 198 def _incpathcheck(self, f):
199 199 if '\r' in f or '\n' in f:
200 200 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
201 201 % f)
202 202 # shadows
203 203 if f in self._dirs:
204 204 raise util.Abort(_('directory %r already in dirstate') % f)
205 205 for c in strutil.rfindall(f, '/'):
206 206 d = f[:c]
207 207 if d in self._dirs:
208 208 break
209 209 if d in self._map and self[d] != 'r':
210 210 raise util.Abort(_('file %r in dirstate clashes with %r') %
211 211 (d, f))
212 212 self._incpath(f)
213 213
214 214 def _changepath(self, f, newstate, relaxed=False):
215 215 # handle upcoming path changes
216 216 oldstate = self[f]
217 217 if oldstate not in "?r" and newstate in "?r":
218 218 if "_dirs" in self.__dict__:
219 219 self._decpath(f)
220 220 return
221 221 if oldstate in "?r" and newstate not in "?r":
222 222 if relaxed and oldstate == '?':
223 223 # XXX
224 224 # in relaxed mode we assume the caller knows
225 225 # what it is doing, workaround for updating
226 226 # dir-to-file revisions
227 227 if "_dirs" in self.__dict__:
228 228 self._incpath(f)
229 229 return
230 230 self._incpathcheck(f)
231 231 return
232 232
233 233 def normal(self, f):
234 234 'mark a file normal and clean'
235 235 self._dirty = True
236 236 self._changepath(f, 'n', True)
237 237 s = os.lstat(self._join(f))
238 238 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
239 239 if f in self._copymap:
240 240 del self._copymap[f]
241 241
242 242 def normallookup(self, f):
243 243 'mark a file normal, but possibly dirty'
244 244 self._dirty = True
245 245 self._changepath(f, 'n', True)
246 246 self._map[f] = ('n', 0, -1, -1, 0)
247 247 if f in self._copymap:
248 248 del self._copymap[f]
249 249
250 250 def normaldirty(self, f):
251 251 'mark a file normal, but dirty'
252 252 self._dirty = True
253 253 self._changepath(f, 'n', True)
254 254 self._map[f] = ('n', 0, -2, -1, 0)
255 255 if f in self._copymap:
256 256 del self._copymap[f]
257 257
258 258 def add(self, f):
259 259 'mark a file added'
260 260 self._dirty = True
261 261 self._changepath(f, 'a')
262 262 self._map[f] = ('a', 0, -1, -1, 0)
263 263 if f in self._copymap:
264 264 del self._copymap[f]
265 265
266 266 def remove(self, f):
267 267 'mark a file removed'
268 268 self._dirty = True
269 269 self._changepath(f, 'r')
270 270 self._map[f] = ('r', 0, 0, 0, 0)
271 271 if f in self._copymap:
272 272 del self._copymap[f]
273 273
274 274 def merge(self, f):
275 275 'mark a file merged'
276 276 self._dirty = True
277 277 s = os.lstat(self._join(f))
278 278 self._changepath(f, 'm', True)
279 279 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
280 280 if f in self._copymap:
281 281 del self._copymap[f]
282 282
283 283 def forget(self, f):
284 284 'forget a file'
285 285 self._dirty = True
286 286 try:
287 287 self._changepath(f, '?')
288 288 del self._map[f]
289 289 except KeyError:
290 290 self._ui.warn(_("not in dirstate: %s\n") % f)
291 291
292 292 def clear(self):
293 293 self._map = {}
294 294 if "_dirs" in self.__dict__:
295 295 delattr(self, "_dirs");
296 296 self._copymap = {}
297 297 self._pl = [nullid, nullid]
298 298 self._dirty = True
299 299
    def rebuild(self, parent, files):
        # Rebuild the dirstate from a manifest: every file becomes clean
        # ('n') with unknown size (-1); mode carries only the exec bit.
        # NOTE: 0777/0666 are Python 2 octal literals.
        self.clear()
        for f in files:
            if files.execf(f):
                self._map[f] = ('n', 0777, -1, 0, 0)
            else:
                self._map[f] = ('n', 0666, -1, 0, 0)
        self._pl = (parent, nullid)
        self._dirty = True
309 309
    def write(self):
        # Serialize the dirstate to .hg/dirstate: the two parent nodes,
        # then one struct-packed entry per file.  A copy source is
        # appended to the filename after a NUL separator.
        if not self._dirty:
            return
        cs = cStringIO.StringIO()
        copymap = self._copymap
        # bind hot names as locals for the loop below
        pack = struct.pack
        write = cs.write
        write("".join(self._pl))
        for f, e in self._map.iteritems():
            if f in copymap:
                f = "%s\0%s" % (f, copymap[f])
            e = pack(_format, e[0], e[1], e[2], e[3], len(f))
            write(e)
            write(f)
        # atomictemp makes the on-disk update all-or-nothing
        st = self._opener("dirstate", "w", atomictemp=True)
        st.write(cs.getvalue())
        st.rename()
        self._dirty = self._dirtypl = False
328 328
329 329 def _filter(self, files):
330 330 ret = {}
331 331 unknown = []
332 332
333 333 for x in files:
334 334 if x == '.':
335 335 return self._map.copy()
336 336 if x not in self._map:
337 337 unknown.append(x)
338 338 else:
339 339 ret[x] = self._map[x]
340 340
341 341 if not unknown:
342 342 return ret
343 343
344 344 b = self._map.keys()
345 345 b.sort()
346 346 blen = len(b)
347 347
348 348 for x in unknown:
349 349 bs = bisect.bisect(b, "%s%s" % (x, '/'))
350 350 while bs < blen:
351 351 s = b[bs]
352 352 if len(s) > len(x) and s.startswith(x):
353 353 ret[s] = self._map[s]
354 354 else:
355 355 break
356 356 bs += 1
357 357 return ret
358 358
    def _supported(self, f, mode, verbose=False):
        # Only regular files and symlinks can be tracked; with verbose,
        # warn about anything else (devices, fifos, sockets, directories).
        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
            return True
        if verbose:
            kind = 'unknown'
            if stat.S_ISCHR(mode): kind = _('character device')
            elif stat.S_ISBLK(mode): kind = _('block device')
            elif stat.S_ISFIFO(mode): kind = _('fifo')
            elif stat.S_ISSOCK(mode): kind = _('socket')
            elif stat.S_ISDIR(mode): kind = _('directory')
            self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                          % (self.pathto(f), kind))
        return False
372 372
373 373 def _dirignore(self, f):
374 374 if self._ignore(f):
375 375 return True
376 376 for c in strutil.findall(f, '/'):
377 377 if self._ignore(f[:c]):
378 378 return True
379 379 return False
380 380
381 381 def walk(self, files=None, match=util.always, badmatch=None):
382 382 # filter out the stat
383 383 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
384 384 yield src, f
385 385
    def statwalk(self, files=None, match=util.always, unknown=True,
                 ignored=False, badmatch=None, directories=False):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function

        results are yielded in a tuple (src, filename, st), where src
        is one of:
        'f' the file was found in the directory tree
        'd' the file is a directory of the tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        and st is the stat result if the file was found in the directory.
        '''

        # walk all files by default
        if not files:
            files = ['.']
            dc = self._map.copy()
        else:
            files = util.unique(files)
            dc = self._filter(files)

        def imatch(file_):
            # ignored files are only interesting when tracked in the dirstate
            if file_ not in dc and self._ignore(file_):
                return False
            return match(file_)

        # TODO: don't walk unknown directories if unknown and ignored are False
        ignore = self._ignore
        dirignore = self._dirignore
        if ignored:
            # caller wants ignored files too: disable all ignore filtering
            imatch = match
            ignore = util.never
            dirignore = util.never

        # self._root may end with a path separator when self._root == '/'
        common_prefix_len = len(self._root)
        if not util.endswithsep(self._root):
            common_prefix_len += 1

        # bind hot names as locals for the walker below
        normpath = util.normpath
        listdir = osutil.listdir
        lstat = os.lstat
        bisect_left = bisect.bisect_left
        isdir = os.path.isdir
        pconvert = util.pconvert
        join = os.path.join
        s_isdir = stat.S_ISDIR
        supported = self._supported
        _join = self._join
        known = {'.hg': 1}

        # recursion free walker, faster than os.walk.
        def findfiles(s):
            work = [s]
            wadd = work.append
            found = []
            add = found.append
            if directories:
                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
            while work:
                top = work.pop()
                entries = listdir(top, stat=True)
                # nd is the top of the repository dir tree
                nd = normpath(top[common_prefix_len:])
                if nd == '.':
                    nd = ''
                else:
                    # do not recurse into a repo contained in this
                    # one. use bisect to find .hg directory so speed
                    # is good on big directory.
                    names = [e[0] for e in entries]
                    hg = bisect_left(names, '.hg')
                    if hg < len(names) and names[hg] == '.hg':
                        if isdir(join(top, '.hg')):
                            continue
                for f, kind, st in entries:
                    np = pconvert(join(nd, f))
                    if np in known:
                        continue
                    known[np] = 1
                    p = join(top, f)
                    # don't trip over symlinks
                    if kind == stat.S_IFDIR:
                        if not ignore(np):
                            wadd(p)
                            if directories:
                                add((np, 'd', st))
                        if np in dc and match(np):
                            add((np, 'm', st))
                    elif imatch(np):
                        if supported(np, st.st_mode):
                            add((np, 'f', st))
                    elif np in dc:
                        add((np, 'm', st))
            found.sort()
            return found

        # step one, find all files that match our criteria
        files.sort()
        for ff in files:
            nf = normpath(ff)
            f = _join(ff)
            try:
                st = lstat(f)
            except OSError, inst:
                # the path does not exist on disk; it may still be tracked
                found = False
                for fn in dc:
                    if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
                        found = True
                        break
                if not found:
                    if inst.errno != errno.ENOENT or not badmatch:
                        self._ui.warn('%s: %s\n' %
                                      (self.pathto(ff), inst.strerror))
                    elif badmatch and badmatch(ff) and imatch(nf):
                        yield 'b', ff, None
                continue
            if s_isdir(st.st_mode):
                if not dirignore(nf):
                    for f, src, st in findfiles(f):
                        yield src, f, st
            else:
                if nf in known:
                    continue
                known[nf] = 1
                if match(nf):
                    if supported(ff, st.st_mode, verbose=True):
                        yield 'f', nf, st
                elif ff in dc:
                    yield 'm', nf, st

        # step two run through anything left in the dc hash and yield
        # if we haven't already seen it
        ks = dc.keys()
        ks.sort()
        for k in ks:
            if k in known:
                continue
            known[k] = 1
            if imatch(k):
                yield 'm', k, None
530 530
    def status(self, files, match, list_ignored, list_clean, list_unknown=True):
        # Compare dirstate with the working directory and classify every
        # file of interest into the buckets returned at the bottom.
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        files = files or []
        # bind hot names as locals for the walk loop
        _join = self._join
        lstat = os.lstat
        cmap = self._copymap
        dmap = self._map
        ladd = lookup.append
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append

        for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
                                         ignored=list_ignored):
            if fn in dmap:
                type_, mode, size, time, foo = dmap[fn]
            else:
                # not tracked: classify as ignored or unknown
                if (list_ignored or fn in files) and self._dirignore(fn):
                    if list_ignored:
                        iadd(fn)
                elif list_unknown:
                    uadd(fn)
                continue
            if src == 'm':
                # tracked but not seen on disk during the walk
                nonexistent = True
                if not st:
                    try:
                        st = lstat(_join(fn))
                    except OSError, inst:
                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
                            raise
                        st = None
                # We need to re-check that it is a valid file
                if st and self._supported(fn, st.st_mode):
                    nonexistent = False
                # XXX: what to do with file no longer present in the fs
                # who are not removed in the dirstate ?
                if nonexistent and type_ in "nma":
                    dadd(fn)
                    continue
            # check the common case first
            if type_ == 'n':
                if not st:
                    st = lstat(_join(fn))
                # size -2 means "dirty"; 0100 (Py2 octal) is the exec bit
                if (size >= 0 and (size != st.st_size
                                   or (mode ^ st.st_mode) & 0100)
                    or size == -2
                    or fn in self._copymap):
                    madd(fn)
                elif time != int(st.st_mtime):
                    # same size but different mtime: needs content lookup
                    ladd(fn)
                elif list_clean:
                    cadd(fn)
            elif type_ == 'm':
                madd(fn)
            elif type_ == 'a':
                aadd(fn)
            elif type_ == 'r':
                radd(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored,
                clean)
@@ -1,417 +1,416 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
9 8 from i18n import _
10 9 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
11 10 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
12 11 import cmdutil
13 12 import ui as _ui
14 13
15 14 class ParseError(Exception):
16 15 """Exception raised on errors in parsing the command line."""
17 16
def run():
    """Entry point: dispatch sys.argv and exit with its status code."""
    code = dispatch(sys.argv[1:])
    sys.exit(code)
21 20
def dispatch(args):
    "run the command specified in args"
    # build the ui first so --traceback is honoured even during setup
    try:
        u = _ui.ui(traceback='--traceback' in args)
    except util.Abort, inst:
        sys.stderr.write(_("abort: %s\n") % inst)
        return -1
    return _runcatch(u, args)
30 29
def _runcatch(ui, args):
    # Run _dispatch(args), translating every known exception type into a
    # user-facing message; returns the command's status, or -1 on error.
    def catchterm(*args):
        raise util.SignalInterrupt

    # turn termination signals into a catchable exception
    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
        num = getattr(signal, name, None)
        if num: signal.signal(num, catchterm)

    try:
        try:
            # enter the debugger before command execution
            if '--debugger' in args:
                pdb.set_trace()
            try:
                return _dispatch(ui, args)
            finally:
                ui.flush()
        except:
            # enter the debugger when we hit an exception
            if '--debugger' in args:
                pdb.post_mortem(sys.exc_info()[2])
            ui.print_exc()
            raise

    except ParseError, inst:
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0])
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except cmdutil.AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except cmdutil.UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        commands.help_(ui, 'shortlist')
    except hg.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except lock.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except lock.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except revlog.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except util.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            # the warn itself may hit a closed pipe
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst[1])
    except IOError, inst:
        # IOError covers several shapes: urllib2 HTTPError ("code"),
        # URLError ("reason"), plain OS-level errors ("strerror")
        if hasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif hasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except: # it might be anything, for example a string
                reason = inst.reason
            ui.warn(_("abort: error: %s\n") % reason)
        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None):
            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except util.UnexpectedOutput, inst:
        ui.warn(_("abort: %s") % inst[0])
        if not isinstance(inst[1], basestring):
            ui.warn(" %r\n" % (inst[1],))
        elif not inst[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
    except ImportError, inst:
        m = str(inst).split()[-1]
        ui.warn(_("abort: could not import module %s!\n") % m)
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))

    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except:
        ui.warn(_("** unknown exception encountered, details follow\n"))
        ui.warn(_("** report bug details to "
                  "http://www.selenic.com/mercurial/bts\n"))
        ui.warn(_("** or mercurial@selenic.com\n"))
        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
                % version.get_version())
        raise

    return -1
152 151
153 152 def _findrepo(p):
154 153 while not os.path.isdir(os.path.join(p, ".hg")):
155 154 oldp, p = p, os.path.dirname(p)
156 155 if p == oldp:
157 156 return None
158 157
159 158 return p
160 159
def _parse(ui, args):
    # Split the raw command line into
    # (cmd, func, args, global-options, command-options).
    options = {}
    cmdoptions = {}

    # first pass: peel off the global options
    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
        cmd = aliases[0]
        # prepend per-command defaults from the [defaults] section
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    # second pass: parse command options (globals included for reparse)
    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
198 197
199 198 def _parseconfig(config):
200 199 """parse the --config options from the command line"""
201 200 parsed = []
202 201 for cfg in config:
203 202 try:
204 203 name, value = cfg.split('=', 1)
205 204 section, name = name.split('.', 1)
206 205 if not section or not name:
207 206 raise IndexError
208 207 parsed.append((section, name, value))
209 208 except (IndexError, ValueError):
210 209 raise util.Abort(_('malformed --config option: %s') % cfg)
211 210 return parsed
212 211
213 212 def _earlygetopt(aliases, args):
214 213 """Return list of values for an option (or aliases).
215 214
216 215 The values are listed in the order they appear in args.
217 216 The options and values are removed from args.
218 217 """
219 218 try:
220 219 argcount = args.index("--")
221 220 except ValueError:
222 221 argcount = len(args)
223 222 shortopts = [opt for opt in aliases if len(opt) == 2]
224 223 values = []
225 224 pos = 0
226 225 while pos < argcount:
227 226 if args[pos] in aliases:
228 227 if pos + 1 >= argcount:
229 228 # ignore and let getopt report an error if there is no value
230 229 break
231 230 del args[pos]
232 231 values.append(args.pop(pos))
233 232 argcount -= 2
234 233 elif args[pos][:2] in shortopts:
235 234 # short option can have no following space, e.g. hg log -Rfoo
236 235 values.append(args.pop(pos)[2:])
237 236 argcount -= 1
238 237 else:
239 238 pos += 1
240 239 return values
241 240
_loaded = {}  # extension names already merged into commands.table
def _dispatch(ui, args):
    # Full command execution pipeline: early options, repo-local config,
    # extensions, option parsing, repo construction, hooks, execution.
    # read --config before doing anything else
    # (e.g. to change trust settings for reading .hg/hgrc)
    config = _earlygetopt(['--config'], args)
    if config:
        ui.updateopts(config=_parseconfig(config))

    # check for cwd
    cwd = _earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    # read the local repository .hgrc into a local ui object
    path = _findrepo(os.getcwd()) or ""
    if not path:
        lui = ui
    if path:
        try:
            lui = _ui.ui(parentui=ui)
            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
        except IOError:
            pass

    # now we can expand paths, even ones in .hg/hgrc
    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
    if rpath:
        path = lui.expandpath(rpath[-1])
        lui = _ui.ui(parentui=ui)
        lui.readconfig(os.path.join(path, ".hg", "hgrc"))

    extensions.loadall(lui)
    for name, module in extensions.extensions():
        if name in _loaded:
            continue

        # setup extensions
        # TODO this should be generalized to scheme, where extensions can
        # redepend on other extensions. then we should toposort them, and
        # do initialization in correct order
        extsetup = getattr(module, 'extsetup', None)
        if extsetup:
            extsetup()

        cmdtable = getattr(module, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        commands.table.update(cmdtable)
        _loaded[name] = 1
    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        util._fallbackencoding = fallback

    fullargs = args
    cmd, func, args, options, cmdoptions = _parse(lui, args)

    # these options were consumed by _earlygetopt above; seeing them
    # here means the user spelled them in an unsupported way
    if options["config"]:
        raise util.Abort(_("Option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("Option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "Option -R has to be separated from other options (i.e. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        util._encoding = options["encoding"]
    if options["encodingmode"]:
        util._encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
                  not options["noninteractive"], options["traceback"])

    if options['help']:
        return commands.help_(ui, cmd, options['version'])
    elif options['version']:
        return commands.version_(ui)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    if cmd not in commands.norepo.split():
        try:
            repo = hg.repository(ui, path=path)
            ui = repo.ui
            if not repo.local():
                raise util.Abort(_("repository '%s' is not local") % path)
            ui.setconfig("bundle", "mainreporoot", repo.root)
        except hg.RepoError:
            if cmd not in commands.optionalrepo.split():
                if args and not path: # try to infer -R from command args
                    repos = map(_findrepo, args)
                    guess = repos[0]
                    if guess and repos.count(guess) == len(repos):
                        return _dispatch(ui, ['--repository', guess] + fullargs)
                if not path:
                    raise hg.RepoError(_("There is no Mercurial repository here"
                                         " (.hg not found)"))
                raise
        d = lambda: func(ui, repo, *args, **cmdoptions)
    else:
        d = lambda: func(ui, *args, **cmdoptions)

    # run pre-hook, and abort if it fails
    ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
    if ret:
        return ret
    ret = _runcommand(ui, options, cmd, d)
    # run post-hook, passing command result
    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
              result = ret)
    return ret
369 368
def _runcommand(ui, options, cmd, cmdfunc):
    # Invoke cmdfunc, optionally under a profiler (--profile / --lsprof).
    def checkargs():
        try:
            return cmdfunc()
        except TypeError, inst:
            # was this an argument error?
            tb = traceback.extract_tb(sys.exc_info()[2])
            if len(tb) != 2: # no
                raise
            raise ParseError(cmd, _("invalid arguments"))

    if options['profile']:
        import hotshot, hotshot.stats
        prof = hotshot.Profile("hg.prof")
        try:
            try:
                return prof.runcall(checkargs)
            except:
                try:
                    ui.warn(_('exception raised - generating '
                              'profile anyway\n'))
                except:
                    pass
                raise
        finally:
            # always dump the profile, even when the command failed
            prof.close()
            stats = hotshot.stats.load("hg.prof")
            stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            stats.print_stats(40)
    elif options['lsprof']:
        try:
            from mercurial import lsprof
        except ImportError:
            raise util.Abort(_(
                'lsprof not available - install from '
                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
        p = lsprof.Profiler()
        p.enable(subcalls=True)
        try:
            return checkargs()
        finally:
            p.disable()
            stats = lsprof.Stats(p.getstats())
            stats.sort()
            stats.pprint(top=10, file=sys.stderr, climit=5)
    else:
        return checkargs()
@@ -1,83 +1,84 b''
1 1 # filelog.py - file history class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from revlog import *
8 from node import bin, nullid
9 from revlog import revlog
9 10 import os
10 11
class filelog(revlog):
    """Per-file revision storage.

    Revisions may carry a metadata block (copy/rename info) framed by
    '\\1\\n' markers before the actual file data.
    """

    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        "/".join(("data", self.encodedir(path + ".i"))))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d by escaping those suffixes in store paths.
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        """Return file data for *node*, stripping any metadata header."""
        text = self.revision(node)
        if not text.startswith('\1\n'):
            return text
        end = text.index('\1\n', 2)
        return text[end + 2:]

    def _readmeta(self, node):
        """Return the metadata dict embedded in *node* (possibly empty)."""
        text = self.revision(node)
        meta = {}
        if text.startswith('\1\n'):
            end = text.index('\1\n', 2)
            for line in text[2:end].splitlines():
                key, value = line.split(": ", 1)
                meta[key] = value
        return meta

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        """Store a new revision, framing metadata (or escaping data that
        happens to begin with the frame marker) in a header block."""
        if meta or text.startswith('\1\n'):
            header = ""
            if meta:
                header = "".join(["%s: %s\n" % (k, v)
                                  for k, v in meta.items()])
            text = "\1\n%s\1\n%s" % (header, text)
        return self.addrevision(text, transaction, link, p1, p2)

    def renamed(self, node):
        """Return (source path, source node) if *node* records a copy,
        else False.  Only root revisions (null first parent) can."""
        if self.parents(node)[0] != nullid:
            return False
        meta = self._readmeta(node)
        if meta and "copy" in meta:
            return (meta["copy"], bin(meta["copyrev"]))
        return False

    def size(self, rev):
        """Return the size of revision *rev*'s file data."""
        # renamed revisions store a metadata header, so the raw stored
        # length lies; take the slow path and measure the real data
        node = self.node(rev)
        if self.renamed(node):
            return len(self.read(node))
        return revlog.size(self, rev)

    def cmp(self, node, text):
        """Compare *text* with revision *node*; True when different."""
        # for renames, we have to go the slow way
        if self.renamed(node):
            return self.read(node) != text
        return revlog.cmp(self, node, text)
@@ -1,217 +1,217 b''
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullrev
9 9 from i18n import _
10 10 import util, os, tempfile, context, simplemerge, re, filecmp
11 11
12 12 def _toolstr(ui, tool, part, default=""):
13 13 return ui.config("merge-tools", tool + "." + part, default)
14 14
15 15 def _toolbool(ui, tool, part, default=False):
16 16 return ui.configbool("merge-tools", tool + "." + part, default)
17 17
def _findtool(ui, tool):
    """Locate the executable for *tool*, trying a Windows-registry
    lookup first, then the configured/implicit executable name."""
    regkey = _toolstr(ui, tool, "regkey")
    if regkey:
        found = util.lookup_reg(regkey, _toolstr(ui, tool, "regname"))
        if found:
            found = util.find_exe(found + _toolstr(ui, tool, "regappend"))
            if found:
                return found
    return util.find_exe(_toolstr(ui, tool, "executable", tool))
27 27
def _picktool(repo, ui, path, binary, symlink):
    # Choose a merge tool for *path*.  Precedence: $HGMERGE, then
    # [merge-patterns], then [merge-tools] ordered by priority, then the
    # old "hgmerge" script, then the builtin merge.  Returns
    # (toolname, quoted executable path or None).
    def check(tool, pat, symlink, binary):
        # verify the candidate can handle this file; warn when it cannot
        tmsg = tool
        if pat:
            tmsg += " specified for " + pat
        if pat and not _findtool(ui, tool): # skip search if not matching
            ui.warn(_("couldn't find merge tool %s\n") % tmsg)
        elif symlink and not _toolbool(ui, tool, "symlink"):
            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
        elif binary and not _toolbool(ui, tool, "binary"):
            ui.warn(_("tool %s can't handle binary\n") % tmsg)
        elif not util.gui() and _toolbool(ui, tool, "gui"):
            ui.warn(_("tool %s requires a GUI\n") % tmsg)
        else:
            return True
        return False

    # HGMERGE takes precedence
    hgmerge = os.environ.get("HGMERGE")
    if hgmerge:
        return (hgmerge, hgmerge)

    # then patterns
    for pat, tool in ui.configitems("merge-patterns"):
        mf = util.matcher(repo.root, "", [pat], [], [])[1]
        if mf(path) and check(tool, pat, symlink, False):
            toolpath = _findtool(ui, tool)
            return (tool, '"' + toolpath + '"')

    # then merge tools
    tools = {}
    for k,v in ui.configitems("merge-tools"):
        t = k.split('.')[0]
        if t not in tools:
            tools[t] = int(_toolstr(ui, t, "priority", "0"))
    names = tools.keys()
    # sort by descending priority (negated for ascending sort)
    tools = [(-p,t) for t,p in tools.items()]
    tools.sort()
    uimerge = ui.config("ui", "merge")
    if uimerge:
        if uimerge not in names:
            return (uimerge, uimerge)
        tools.insert(0, (None, uimerge)) # highest priority
    tools.append((None, "hgmerge")) # the old default, if found
    for p,t in tools:
        toolpath = _findtool(ui, t)
        if toolpath and check(t, None, symlink, binary):
            return (t, '"' + toolpath + '"')
    # internal merge as last resort
    return (not (symlink or binary) and "internal:merge" or None, None)
78 78
79 79 def _eoltype(data):
80 80 "Guess the EOL type of a file"
81 81 if '\0' in data: # binary
82 82 return None
83 83 if '\r\n' in data: # Windows
84 84 return '\r\n'
85 85 if '\r' in data: # Old Mac
86 86 return '\r'
87 87 if '\n' in data: # UNIX
88 88 return '\n'
89 89 return None # unknown
90 90
def _matcheol(file, origfile):
    """Rewrite *file* so its EOL markers match those of *origfile*."""
    target = _eoltype(open(origfile, "rb").read())
    if not target:
        return  # origfile is binary or has no line endings
    data = open(file, "rb").read()
    current = _eoltype(data)
    if current:
        converted = data.replace(current, target)
        if converted != data:
            open(file, "wb").write(converted)
101 101
def filemerge(repo, fw, fd, fo, wctx, mctx):
    """perform a 3-way merge in the working directory

    fw = original filename in the working directory
    fd = destination filename in the working directory
    fo = filename in other parent
    wctx, mctx = working and merge changecontexts

    Returns None when the files are identical, 0 on success, nonzero
    on merge failure.
    """

    def temp(prefix, ctx):
        # dump a filectx's data into a uniquely-named temporary file
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        # NOTE: this local fd shadows the outer filemerge() parameter
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    def isbin(ctx):
        try:
            return util.binary(ctx.data())
        except IOError:
            return False

    fco = mctx.filectx(fo)
    if not fco.cmp(wctx.filectx(fd).data()): # files identical?
        return None

    ui = repo.ui
    fcm = wctx.filectx(fw)
    # fall back to an empty ancestor when there is no common one
    fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
    binary = isbin(fcm) or isbin(fco) or isbin(fca)
    symlink = fcm.islink() or fco.islink()
    tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
    ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
             (tool, fw, binary, symlink))

    if not tool:
        # no usable external tool: ask the user to pick a side
        tool = "internal:local"
        if ui.prompt(_(" no tool found to merge %s\n"
                       "keep (l)ocal or take (o)ther?") % fw,
                     _("[lo]"), _("l")) != _("l"):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.fileflags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if fw != fo:
        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
    else:
        repo.ui.status(_("merging %s\n") % fw)
    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))

    # do we attempt to simplemerge first?
    if _toolbool(ui, tool, "premerge", not (binary or symlink)):
        r = simplemerge.simplemerge(a, b, c, quiet=True)
        if not r:
            ui.debug(_(" premerge successful\n"))
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        util.copyfile(back, a) # restore from backup and try again

    # environment passed to an external merge tool
    env = dict(HG_FILE=fd,
               HG_MY_NODE=str(wctx.parents()[0]),
               HG_OTHER_NODE=str(mctx),
               HG_MY_ISLINK=fcm.islink(),
               HG_OTHER_ISLINK=fco.islink(),
               HG_BASE_ISLINK=fca.islink())

    if tool == "internal:merge":
        r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
    else:
        # substitute $local/$base/$other/$output placeholders in args
        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = re.sub("\$(local|base|other|output)",
                      lambda x: '"%s"' % replace[x.group()[1:]], args)
        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)

    if not r and _toolbool(ui, tool, "checkconflicts"):
        # treat leftover conflict markers as a failed merge
        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
            r = 1

    if not r and _toolbool(ui, tool, "checkchanged"):
        # an unchanged output file is suspicious; confirm with the user
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.prompt(_(" output file %s appears unchanged\n"
                           "was merge successful (yn)?") % fd,
                         _("[yn]"), _("n")) != _("y"):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        repo.ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
@@ -1,313 +1,313 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 from node import *
10 from repo import *
9 from node import bin, hex, nullid, nullrev, short
10 from repo import NoCapability, RepoError
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import errno, lock, os, shutil, util, extensions
14 14 import merge as _merge
15 15 import verify as _verify
16 16
def _local(path):
    """Pick the repository module for a local path: bundlerepo when the
    path is a plain file (an overlay bundle), localrepo otherwise."""
    stripped = util.drop_scheme('file', path)
    if os.path.isfile(stripped):
        return bundlerepo
    return localrepo
20 20
def parseurl(url, revs):
    '''parse url#branch, returning url, branch + revs

    Returns a three-tuple (url, revs, checkout):
      - url with any "#fragment" stripped
      - the revision list extended with the fragment revision, or None
        when no revisions were requested at all
      - the fragment revision (default checkout target), or None
    '''

    if '#' not in url:
        return url, (revs or None), None

    url, rev = url.split('#', 1)
    # revs may be None (e.g. clone() defaults rev=None); guard so the
    # fragment revision can still be appended instead of raising TypeError
    return url, (revs or []) + [rev], rev
29 29
# map a url scheme prefix to the repository module that handles it; a
# value may also be a callable that picks the module lazily (see _lookup)
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
    }
38 38
def _lookup(path):
    """Map *path* to its repository module using the "scheme:" prefix,
    falling back to the 'file' handler for unknown or missing schemes."""
    scheme = 'file'
    if path:
        sep = path.find(':')
        if sep > 0:
            scheme = path[:sep]
    handler = schemes.get(scheme) or schemes['file']
    try:
        # the entry may be a chooser function taking the path
        return handler(path)
    except TypeError:
        # plain module entry: not callable with a path
        return handler
50 50
def islocal(repo):
    '''return true if repo or path is local'''
    if not isinstance(repo, str):
        return repo.local()
    try:
        return _lookup(repo).islocal(repo)
    except AttributeError:
        # remote repo modules provide no islocal()
        return False
59 59
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    # prefer the repository's own ui so extensions see repo-local config
    ui = getattr(repo, "ui", ui)
    for name, module in extensions.extensions():
        reposetup = getattr(module, 'reposetup', None)
        if reposetup is not None:
            reposetup(ui, repo)
    return repo
69 69
def defaultdest(source):
    '''return default destination of clone if none is given'''
    # normalize first so a trailing separator does not yield an empty name
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
73 73
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
          name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository
    """

    if isinstance(source, str):
        # expand path aliases and split off any "#rev" fragment
        origsource = ui.expandpath(source)
        source, rev, checkout = parseurl(origsource, rev)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        origsource = source = src_repo.url()
        checkout = None

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)

    def localpath(path):
        # strip the various file:// spellings down to a filesystem path
        if path.startswith('file://localhost/'):
            return path[16:]
        if path.startswith('file://'):
            return path[7:]
        if path.startswith('file:'):
            return path[5:]
        return path

    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        raise util.Abort(_("destination '%s' already exists") % dest)

    # removes the partly-created destination directory if the clone
    # fails; close() disarms it once the clone has fully succeeded
    class DirCleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        # a raw file copy is only possible local-to-local, and only when
        # neither --pull nor a revision limit was requested
        if src_repo.local() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock()
            except lock.LockException:
                # cannot lock the source: fall back to a pull-based clone
                copy = False

        if copy:
            def force_copy(src, dst):
                if not os.path.exists(src):
                    # Tolerate empty source repository and optional files
                    return
                util.copyfiles(src, dst)

            src_store = os.path.realpath(src_repo.spath)
            if not os.path.exists(dest):
                os.mkdir(dest)
            try:
                dest_path = os.path.realpath(os.path.join(dest, ".hg"))
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise
            if src_repo.spath != src_repo.path:
                # XXX racy
                dummy_changelog = os.path.join(dest_path, "00changelog.i")
                # copy the dummy changelog
                force_copy(src_repo.join("00changelog.i"), dummy_changelog)
                dest_store = os.path.join(dest_path, "store")
                os.mkdir(dest_store)
            else:
                dest_store = dest_path
            # copy the requires file
            force_copy(src_repo.join("requires"),
                       os.path.join(dest_path, "requires"))
            # we lock here to avoid premature writing to the target
            dest_lock = lock.lock(os.path.join(dest_store, "lock"))

            files = ("data",
                     "00manifest.d", "00manifest.i",
                     "00changelog.d", "00changelog.i")
            for f in files:
                src = os.path.join(src_store, f)
                dst = os.path.join(dest_store, f)
                force_copy(src, dst)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)

        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

        revs = None
        if rev:
            if 'lookup' not in src_repo.capabilities:
                raise util.Abort(_("src repository does not support revision "
                                   "lookup and so doesn't support clone by "
                                   "revision"))
            revs = [src_repo.lookup(r) for r in rev]

        # transfer the changesets: pull into a local dest, or push from a
        # local source into a remote dest; remote-to-remote is impossible
        if dest_repo.local():
            dest_repo.clone(src_repo, heads=revs, stream=stream)
        elif src_repo.local():
            src_repo.push(dest_repo, revs=revs)
        else:
            raise util.Abort(_("clone from remote to remote not supported"))

        # clone succeeded: keep the destination directory
        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            if update:
                if not checkout:
                    try:
                        checkout = dest_repo.lookup("default")
                    except:
                        checkout = dest_repo.changelog.tip()
                _update(dest_repo, checkout)

        return src_repo, dest_repo
    finally:
        # dropping the references releases the locks and, on failure,
        # triggers DirCleanup.__del__ to remove the partial clone
        del src_lock, dest_lock, dir_cleanup
258 258
def _showstats(repo, stats):
    """Print a one-line summary of a merge stats 4-tuple
    (updated, merged, removed, unresolved)."""
    labels = (_("updated"), _("merged"), _("removed"), _("unresolved"))
    parts = [_("%d files %s") % pair for pair in zip(stats, labels)]
    repo.ui.status("%s\n" % ", ".join(parts))
266 266
def _update(repo, node):
    """Module-internal alias of update()."""
    return update(repo, node)
268 268
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    parents = repo.parents()
    stats = _merge.update(repo, node, False, False, None)
    _showstats(repo, stats)
    unresolved = stats[3]
    if unresolved:
        repo.ui.status(_("There are unresolved merges with"
                         " locally modified files.\n"))
        if stats[1]:
            repo.ui.status(_("You can finish the partial merge using:\n"))
        else:
            repo.ui.status(_("You can redo the full merge using:\n"))
        # len(parents)==1, otherwise _merge.update() would have raised
        # util.Abort:
        repo.ui.status(_(" hg update %s\n hg update %s\n")
                       % (parents[0].rev(), repo.changectx(node).rev()))
    return unresolved > 0
285 285
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    stats = _merge.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, stats)
    # non-zero unresolved count signals failure
    return stats[3] > 0
291 291
def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes"""
    stats = _merge.update(repo, node, True, force, False)
    _showstats(repo, stats)
    unresolved = stats[3]
    if unresolved:
        parents = repo.parents()
        repo.ui.status(_("There are unresolved merges,"
                         " you can redo the full merge using:\n"
                         " hg update -C %s\n"
                         " hg merge %s\n")
                       % (parents[0].rev(), parents[1].rev()))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return unresolved > 0
306 306
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    stats = _merge.update(repo, node, False, True, choose)
    return stats[3] > 0
310 310
def verify(repo):
    """verify the consistency of a repository"""
    # delegate to the verify module (imported as _verify to avoid
    # shadowing this wrapper)
    result = _verify.verify(repo)
    return result
@@ -1,951 +1,951 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re
10 from mercurial.node import *
10 from mercurial.node import hex, nullid, short
11 11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
12 12 from mercurial import revlog, templater, templatefilters, changegroup
13 13 from common import get_mtime, style_map, paritygen, countgen, get_contact
14 14 from common import ErrorResponse
15 15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 16 from request import wsgirequest
17 17 import webcommands, protocol
18 18
# legacy single-letter query shortcuts, expanded by run_wsgi into full
# form fields (a None value means "substitute the shortcut's own value",
# e.g. "?cs=<node>" becomes cmd=changeset, node=<node>)
shortcuts = {
    'cl': [('cmd', ['changelog']), ('rev', None)],
    'sl': [('cmd', ['shortlog']), ('rev', None)],
    'cs': [('cmd', ['changeset']), ('node', None)],
    'f': [('cmd', ['file']), ('filenode', None)],
    'fl': [('cmd', ['filelog']), ('filenode', None)],
    'fd': [('cmd', ['filediff']), ('node', None)],
    'fa': [('cmd', ['annotate']), ('filenode', None)],
    'mf': [('cmd', ['manifest']), ('manifest', None)],
    'ca': [('cmd', ['archive']), ('node', None)],
    'tags': [('cmd', ['tags'])],
    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
    'static': [('cmd', ['static']), ('file', None)]
}
33 33
34 34 def _up(p):
35 35 if p[0] != "/":
36 36 p = "/" + p
37 37 if p[-1] == "/":
38 38 p = p[:-1]
39 39 up = os.path.dirname(p)
40 40 if up == "/":
41 41 return "/"
42 42 return up + "/"
43 43
def revnavgen(pos, pagelen, limit, nodefunc):
    """Return a generator of navigation links around revision *pos*.

    pos: current revision number; pagelen: page size; limit: total
    revision count; nodefunc: maps a revision to a context whose node
    is linked.  The produced entries are "-N"/"+N" jump labels plus the
    fixed "(0)" and "tip" endpoints.
    """
    def seq(factor, limit=None):
        # yields an increasing series of jump sizes (1, 3, 10, 30, ...);
        # when a page-size limit is given, it is emitted first, with a
        # round 50 injected for mid-sized pages
        if limit:
            yield limit
            if limit >= 20 and limit <= 40:
                yield 50
        else:
            yield 1 * factor
        yield 3 * factor
        for f in seq(factor * 10):
            yield f

    def nav(**map):
        l = []
        last = 0
        for f in seq(1, pagelen):
            # skip steps smaller than one page or already emitted
            if f < pagelen or f <= last:
                continue
            if f > limit:
                break
            last = f
            if pos + f < limit:
                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
            if pos - f >= 0:
                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))

        try:
            yield {"label": "(0)", "node": hex(nodefunc('0').node())}

            for label, node in l:
                yield {"label": label, "node": node}

            yield {"label": "tip", "node": "tip"}
        except hg.RepoError:
            # the repo may have no revisions yet; emit nothing
            pass

    return nav
81 81
82 82 class hgweb(object):
    def __init__(self, repo, name=None):
        """Wrap *repo* (a repository object, or a path to open one) for
        web serving; *name* overrides the displayed repository name."""
        if isinstance(repo, str):
            # open by path with a quiet, untrusting ui of our own
            parentui = ui.ui(report_untrusted=False, interactive=False)
            self.repo = hg.repository(parentui, repo)
        else:
            self.repo = repo

        hook.redirect(True)
        # -1 forces refresh() to (re)load config on the first request
        self.mtime = -1
        self.reponame = name
        self.archives = 'zip', 'gz', 'bz2'
        self.stripecount = 1
        self._capabilities = None
        # a repo owner may set web.templates in .hg/hgrc to get any file
        # readable by the user running the CGI script
        self.templatepath = self.config("web", "templates",
                                        templater.templatepath(),
                                        untrusted=False)
101 101
102 102 # The CGI scripts are often run by a user different from the repo owner.
103 103 # Trust the settings from the .hg/hgrc files by default.
104 104 def config(self, section, name, default=None, untrusted=True):
105 105 return self.repo.ui.config(section, name, default,
106 106 untrusted=untrusted)
107 107
108 108 def configbool(self, section, name, default=False, untrusted=True):
109 109 return self.repo.ui.configbool(section, name, default,
110 110 untrusted=untrusted)
111 111
112 112 def configlist(self, section, name, default=None, untrusted=True):
113 113 return self.repo.ui.configlist(section, name, default,
114 114 untrusted=untrusted)
115 115
116 116 def refresh(self):
117 117 mtime = get_mtime(self.repo.root)
118 118 if mtime != self.mtime:
119 119 self.mtime = mtime
120 120 self.repo = hg.repository(self.repo.ui, self.repo.root)
121 121 self.maxchanges = int(self.config("web", "maxchanges", 10))
122 122 self.stripecount = int(self.config("web", "stripes", 1))
123 123 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
124 124 self.maxfiles = int(self.config("web", "maxfiles", 10))
125 125 self.allowpull = self.configbool("web", "allowpull", True)
126 126 self.encoding = self.config("web", "encoding", util._encoding)
127 127 self._capabilities = None
128 128
129 129 def capabilities(self):
130 130 if self._capabilities is not None:
131 131 return self._capabilities
132 132 caps = ['lookup', 'changegroupsubset']
133 133 if self.configbool('server', 'uncompressed'):
134 134 caps.append('stream=%d' % self.repo.changelog.version)
135 135 if changegroup.bundlepriority:
136 136 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
137 137 self._capabilities = caps
138 138 return caps
139 139
140 140 def run(self):
141 141 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
142 142 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
143 143 import mercurial.hgweb.wsgicgi as wsgicgi
144 144 wsgicgi.launch(self)
145 145
146 146 def __call__(self, env, respond):
147 147 req = wsgirequest(env, respond)
148 148 self.run_wsgi(req)
149 149 return req
150 150
    def run_wsgi(self, req):
        """Dispatch one web request: expand shortcuts, translate the
        user-visible URL into form fields, then hand off to either a
        protocol command or a webcommands page renderer."""

        self.refresh()

        # expand form shortcuts

        for k in shortcuts.iterkeys():
            if k in req.form:
                for name, value in shortcuts[k]:
                    if value is None:
                        # None means: substitute the shortcut's own value
                        value = req.form[k]
                    req.form[name] = value
                del req.form[k]

        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME

        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
        if 'REPO_NAME' in req.env:
            req.url += req.env['REPO_NAME'] + '/'

        if req.env.get('PATH_INFO'):
            parts = req.env.get('PATH_INFO').strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            # drop the leading repo-name components from the path
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].split('&', 1)[0]
            query = query.split(';', 1)[0]

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:

            cmd = args.pop(0)
            # a "style-cmd" prefix selects a template style
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style+1:]

            # avoid accepting e.g. style parameter as command
            if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
                req.form['cmd'] = [cmd]

            if args and args[0]:
                node = args.pop(0)
                req.form['node'] = [node]
            if args:
                req.form['file'] = args

            if cmd == 'static':
                req.form['file'] = req.form['node']
            elif cmd == 'archive':
                # split "node.ext" into the revision and the archive type
                fn = req.form['node'][0]
                for type_, spec in self.archive_specs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        # process this if it's a protocol request

        cmd = req.form.get('cmd', [''])[0]
        if cmd in protocol.__all__:
            method = getattr(protocol, cmd)
            method(self, req)
            return

        # process the web interface request

        try:

            tmpl = self.templater(req)
            ctype = tmpl('mimetype', encoding=self.encoding)
            ctype = templater.stringify(ctype)

            if cmd == '':
                # no command given: fall back to the template default
                req.form['cmd'] = [tmpl.cache['default']]
                cmd = req.form['cmd'][0]

            if cmd not in webcommands.__all__:
                msg = 'No such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            elif cmd == 'file' and 'raw' in req.form.get('style', []):
                # raw file downloads set their own content-type
                self.ctype = ctype
                content = webcommands.rawfile(self, req, tmpl)
            else:
                content = getattr(webcommands, cmd)(self, req, tmpl)
                req.respond(HTTP_OK, ctype)

            req.write(content)
            del tmpl

        except revlog.LookupError, err:
            req.respond(HTTP_NOT_FOUND, ctype)
            req.write(tmpl('error', error='revision not found: %s' % err.name))
        except (hg.RepoError, revlog.RevlogError), inst:
            req.respond(HTTP_SERVER_ERROR, ctype)
            req.write(tmpl('error', error=str(inst)))
        except ErrorResponse, inst:
            req.respond(inst.code, ctype)
            req.write(tmpl('error', error=inst.message))
257 257
    def templater(self, req):
        """Build a templater for this request, wiring in the absolute
        url base, static-file url, chosen style, and the header /
        footer / motd / sessionvars helper generators."""

        # determine scheme, port and server name
        # this is needed to create absolute urls

        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = "443"
        else:
            proto = 'http'
            default_port = "80"

        port = req.env["SERVER_PORT"]
        # only show an explicit port when it isn't the scheme default
        port = port != default_port and (":" + port) or ""
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        staticurl = self.config("web", "staticurl") or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def header(**map):
            yield tmpl('header', encoding=self.encoding, **map)

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            yield self.config("web", "motd", "")

        def sessionvars(**map):
            # carry a non-default style choice through generated links
            fields = []
            if 'style' in req.form:
                style = req.form['style'][0]
                if style != self.config('web', 'style', ''):
                    fields.append(('style', style))

            separator = req.url[-1] == '?' and ';' or '?'
            for name, value in fields:
                yield dict(name=name, value=value, separator=separator)
                separator = ';'

        # figure out which style to use

        style = self.config("web", "style", "")
        if 'style' in req.form:
            style = req.form['style'][0]
        mapfile = style_map(self.templatepath, style)

        if not self.reponame:
            self.reponame = (self.config("web", "name")
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        # create the templater

        tmpl = templater.templater(mapfile, templatefilters.filters,
                                   defaults={"url": req.url,
                                             "staticurl": staticurl,
                                             "urlbase": urlbase,
                                             "repo": self.reponame,
                                             "header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "sessionvars": sessionvars
                                             })
        return tmpl
326 326
327 327 def archivelist(self, nodeid):
328 328 allowed = self.configlist("web", "allow_archive")
329 329 for i, spec in self.archive_specs.iteritems():
330 330 if i in allowed or self.configbool("web", "allow" + i):
331 331 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
332 332
333 333 def listfilediffs(self, tmpl, files, changeset):
334 334 for f in files[:self.maxfiles]:
335 335 yield tmpl("filedifflink", node=hex(changeset), file=f)
336 336 if len(files) > self.maxfiles:
337 337 yield tmpl("fileellipses")
338 338
339 339 def siblings(self, siblings=[], hiderev=None, **args):
340 340 siblings = [s for s in siblings if s.node() != nullid]
341 341 if len(siblings) == 1 and siblings[0].rev() == hiderev:
342 342 return
343 343 for s in siblings:
344 344 d = {'node': hex(s.node()), 'rev': s.rev()}
345 345 if hasattr(s, 'path'):
346 346 d['file'] = s.path()
347 347 d.update(args)
348 348 yield d
349 349
350 350 def renamelink(self, fl, node):
351 351 r = fl.renamed(node)
352 352 if r:
353 353 return [dict(file=r[0], node=hex(r[1]))]
354 354 return []
355 355
356 356 def nodetagsdict(self, node):
357 357 return [{"name": i} for i in self.repo.nodetags(node)]
358 358
359 359 def nodebranchdict(self, ctx):
360 360 branches = []
361 361 branch = ctx.branch()
362 362 # If this is an empty repo, ctx.node() == nullid,
363 363 # ctx.branch() == 'default', but branchtags() is
364 364 # an empty dict. Using dict.get avoids a traceback.
365 365 if self.repo.branchtags().get(branch) == ctx.node():
366 366 branches.append({"name": branch})
367 367 return branches
368 368
369 369 def showtag(self, tmpl, t1, node=nullid, **args):
370 370 for t in self.repo.nodetags(node):
371 371 yield tmpl(t1, tag=t, **args)
372 372
    def diff(self, tmpl, node1, node2, files):
        """Yield rendered diff blocks between changesets node1 and node2,
        optionally restricted to *files* (exact paths or directory
        prefixes)."""
        def filterfiles(filters, files):
            # keep exact matches plus anything under a filter directory
            l = [x for x in files if x in filters]

            for t in filters:
                if t and t[-1] != os.sep:
                    t += os.sep
                l += [x for x in files if x.startswith(t)]
            return l

        parity = paritygen(self.stripecount)
        def diffblock(diff, f, fn):
            yield tmpl("diffblock",
                       lines=prettyprintlines(diff),
                       parity=parity.next(),
                       file=f,
                       filenode=hex(fn or nullid))

        blockcount = countgen()
        def prettyprintlines(diff):
            # classify each diff line (+/-/@/context) for per-line
            # templates; line ids are "l<n>" in the first block and
            # "l<block>.<n>" in later ones
            blockno = blockcount.next()
            for lineno, l in enumerate(diff.splitlines(1)):
                if blockno == 0:
                    lineno = lineno + 1
                else:
                    lineno = "%d.%d" % (blockno, lineno + 1)
                if l.startswith('+'):
                    ltype = "difflineplus"
                elif l.startswith('-'):
                    ltype = "difflineminus"
                elif l.startswith('@'):
                    ltype = "difflineat"
                else:
                    ltype = "diffline"
                yield tmpl(ltype,
                           line=l,
                           lineid="l%s" % lineno,
                           linenumber="% 8s" % lineno)

        r = self.repo
        c1 = r.changectx(node1)
        c2 = r.changectx(node2)
        date1 = util.datestr(c1.date())
        date2 = util.datestr(c2.date())

        modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
        if files:
            modified, added, removed = map(lambda x: filterfiles(files, x),
                                           (modified, added, removed))

        diffopts = patch.diffopts(self.repo.ui, untrusted=True)
        for f in modified:
            to = c1.filectx(f).data()
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)
        for f in added:
            # added files have no "before" content
            to = None
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)
        for f in removed:
            # removed files have no "after" content
            to = c1.filectx(f).data()
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)
439 439
    def changelog(self, tmpl, ctx, shortlog=False):
        """Render the changelog (or shortlog) page centered on *ctx*."""
        def changelist(limit=0,**map):
            cl = self.repo.changelog
            l = [] # build a list in forward order for efficiency
            for i in xrange(start, end):
                ctx = self.repo.changectx(i)
                n = ctx.node()
                showtags = self.showtag(tmpl, 'changelogtag', n)

                # insert at 0 so the newest revision is listed first
                l.insert(0, {"parity": parity.next(),
                             "author": ctx.user(),
                             "parent": self.siblings(ctx.parents(), i - 1),
                             "child": self.siblings(ctx.children(), i + 1),
                             "changelogtag": showtags,
                             "desc": ctx.description(),
                             "date": ctx.date(),
                             "files": self.listfilediffs(tmpl, ctx.files(), n),
                             "rev": i,
                             "node": hex(n),
                             "tags": self.nodetagsdict(n),
                             "branches": self.nodebranchdict(ctx)})

            if limit > 0:
                l = l[:limit]

            for e in l:
                yield e

        maxchanges = shortlog and self.maxshortchanges or self.maxchanges
        cl = self.repo.changelog
        count = cl.count()
        pos = ctx.rev()
        # page window [start, end) around the requested revision
        start = max(0, pos - maxchanges + 1)
        end = min(count, start + maxchanges)
        pos = end - 1
        parity = paritygen(self.stripecount, offset=start-end)

        changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)

        return tmpl(shortlog and 'shortlog' or 'changelog',
                    changenav=changenav,
                    node=hex(cl.tip()),
                    rev=pos, changesets=count,
                    entries=lambda **x: changelist(limit=0,**x),
                    latestentry=lambda **x: changelist(limit=1,**x),
                    archives=self.archivelist("tip"))
486 486
    def search(self, tmpl, query):
        """Render the search page: changesets whose user, description or
        file list contains every word of *query* (case-insensitive)."""

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                # walk the changelog tip-to-root in batches of 100,
                # yielding each batch newest-first
                for i in xrange(cl.count() - 1, 0, -100):
                    l = []
                    for j in xrange(max(0, i - 100), i + 1):
                        ctx = self.repo.changectx(j)
                        l.append(ctx)
                    l.reverse()
                    for e in l:
                        yield e

            for ctx in revgen():
                # every query word must match somewhere in the changeset
                miss = 0
                for q in qw:
                    if not (q in ctx.user().lower() or
                            q in ctx.description().lower() or
                            q in " ".join(ctx.files()).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                n = ctx.node()
                showtags = self.showtag(tmpl, 'changelogtag', n)

                yield tmpl('searchentry',
                           parity=parity.next(),
                           author=ctx.user(),
                           parent=self.siblings(ctx.parents()),
                           child=self.siblings(ctx.children()),
                           changelogtag=showtags,
                           desc=ctx.description(),
                           date=ctx.date(),
                           files=self.listfilediffs(tmpl, ctx.files(), n),
                           rev=ctx.rev(),
                           node=hex(n),
                           tags=self.nodetagsdict(n),
                           branches=self.nodebranchdict(ctx))

                # cap the number of results at one page
                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog
        parity = paritygen(self.stripecount)

        return tmpl('search',
                    query=query,
                    node=hex(cl.tip()),
                    entries=changelist,
                    archives=self.archivelist("tip"))
544 544
    def changeset(self, tmpl, ctx):
        """Render the changeset page for *ctx*, including the diff
        against its first parent."""
        n = ctx.node()
        showtags = self.showtag(tmpl, 'changesettag', n)
        parents = ctx.parents()
        p1 = parents[0].node()

        files = []
        parity = paritygen(self.stripecount)
        for f in ctx.files():
            files.append(tmpl("filenodelink",
                              node=hex(n), file=f,
                              parity=parity.next()))

        def diff(**map):
            # lazy: the diff is only computed when the template asks
            yield self.diff(tmpl, p1, n, None)

        return tmpl('changeset',
                    diff=diff,
                    rev=ctx.rev(),
                    node=hex(n),
                    parent=self.siblings(parents),
                    child=self.siblings(ctx.children()),
                    changesettag=showtags,
                    author=ctx.user(),
                    desc=ctx.description(),
                    date=ctx.date(),
                    files=files,
                    archives=self.archivelist(hex(n)),
                    tags=self.nodetagsdict(n),
                    branches=self.nodebranchdict(ctx))
575 575
    def filelog(self, tmpl, fctx):
        """Render the revision-history page for the file of *fctx*."""
        f = fctx.path()
        fl = fctx.filelog()
        count = fl.count()
        pagelen = self.maxshortchanges
        pos = fctx.filerev()
        # page window [start, end) around the requested file revision
        start = max(0, pos - pagelen + 1)
        end = min(count, start + pagelen)
        pos = end - 1
        parity = paritygen(self.stripecount, offset=start-end)

        def entries(limit=0, **map):
            l = []

            for i in xrange(start, end):
                ctx = fctx.filectx(i)
                n = fl.node(i)

                # insert at 0 so the newest revision is listed first
                l.insert(0, {"parity": parity.next(),
                             "filerev": i,
                             "file": f,
                             "node": hex(ctx.node()),
                             "author": ctx.user(),
                             "date": ctx.date(),
                             "rename": self.renamelink(fl, n),
                             "parent": self.siblings(fctx.parents()),
                             "child": self.siblings(fctx.children()),
                             "desc": ctx.description()})

            if limit > 0:
                l = l[:limit]

            for e in l:
                yield e

        nodefunc = lambda x: fctx.filectx(fileid=x)
        nav = revnavgen(pos, pagelen, count, nodefunc)
        return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
                    entries=lambda **x: entries(limit=0, **x),
                    latestentry=lambda **x: entries(limit=1, **x))
616 616
    def filerevision(self, tmpl, fctx):
        """Render the contents page for the single file revision *fctx*."""
        f = fctx.path()
        text = fctx.data()
        fl = fctx.filelog()
        n = fctx.filenode()
        parity = paritygen(self.stripecount)

        if util.binary(text):
            # don't inline binary data; show a mimetype placeholder instead
            mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
            text = '(binary:%s)' % mt

        def lines():
            for lineno, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "lineid": "l%d" % (lineno + 1),
                       "linenumber": "% 6d" % (lineno + 1),
                       "parity": parity.next()}

        return tmpl("filerevision",
                    file=f,
                    path=_up(f),
                    text=lines(),
                    rev=fctx.rev(),
                    node=hex(fctx.node()),
                    author=fctx.user(),
                    date=fctx.date(),
                    desc=fctx.description(),
                    parent=self.siblings(fctx.parents()),
                    child=self.siblings(fctx.children()),
                    rename=self.renamelink(fl, n),
                    permissions=fctx.manifest().flags(f))
648 648
    def fileannotate(self, tmpl, fctx):
        """Render the "fileannotate" template: per-line blame for a file."""
        f = fctx.path()
        n = fctx.filenode()
        fl = fctx.filelog()
        parity = paritygen(self.stripecount)

        def annotate(**map):
            last = None
            if util.binary(fctx.data()):
                # binary files cannot be annotated: fake a single-line
                # result holding a placeholder
                mt = (mimetypes.guess_type(fctx.path())[0]
                      or 'application/octet-stream')
                lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
                                    '(binary:%s)' % mt)])
            else:
                lines = enumerate(fctx.annotate(follow=True, linenumber=True))
            for lineno, ((f, targetline), l) in lines:
                fnode = f.filenode()
                name = self.repo.ui.shortuser(f.user())

                # track revision boundaries (last is only updated here,
                # not otherwise consumed in this version)
                if last != fnode:
                    last = fnode

                yield {"parity": parity.next(),
                       "node": hex(f.node()),
                       "rev": f.rev(),
                       "author": name,
                       "file": f.path(),
                       "targetline": targetline,
                       "line": l,
                       "lineid": "l%d" % (lineno + 1),
                       "linenumber": "% 6d" % (lineno + 1)}

        return tmpl("fileannotate",
                    file=f,
                    annotate=annotate,
                    path=_up(f),
                    rev=fctx.rev(),
                    node=hex(fctx.node()),
                    author=fctx.user(),
                    date=fctx.date(),
                    desc=fctx.description(),
                    rename=self.renamelink(fl, n),
                    parent=self.siblings(fctx.parents()),
                    child=self.siblings(fctx.children()),
                    permissions=fctx.manifest().flags(f))
694 694
    def manifest(self, tmpl, ctx, path):
        """Render the "manifest" template: one directory level of the
        manifest of changeset ctx.

        path names the directory to list; entries directly below it are
        split into files (fentries) and sub-directories (dentries).
        Raises ErrorResponse(HTTP_NOT_FOUND) when nothing matches.
        """
        mf = ctx.manifest()
        node = ctx.node()

        files = {}
        parity = paritygen(self.stripecount)

        # normalize to a trailing-slash directory prefix
        if path and path[-1] != "/":
            path += "/"
        l = len(path)
        abspath = "/" + path

        for f, n in mf.items():
            if f[:l] != path:
                continue
            remain = f[l:]
            if "/" in remain:
                # a sub-directory: store with no filenode to mark it
                short = remain[:remain.index("/") + 1] # bleah
                files[short] = (f, None)
            else:
                short = os.path.basename(remain)
                files[short] = (f, n)

        if not files:
            raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)

        def filelist(**map):
            # plain files only (entries that carry a filenode)
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if not fnode:
                    continue

                fctx = ctx.filectx(full)
                yield {"file": full,
                       "parity": parity.next(),
                       "basename": f,
                       "date": fctx.changectx().date(),
                       "size": fctx.size(),
                       "permissions": mf.flags(full)}

        def dirlist(**map):
            # sub-directories only (entries stored with fnode None)
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if fnode:
                    continue

                yield {"parity": parity.next(),
                       "path": "%s%s" % (abspath, f),
                       "basename": f[:-1]}

        return tmpl("manifest",
                    rev=ctx.rev(),
                    node=hex(node),
                    path=abspath,
                    up=_up(abspath),
                    upparity=parity.next(),
                    fentries=filelist,
                    dentries=dirlist,
                    archives=self.archivelist(hex(node)),
                    tags=self.nodetagsdict(node),
                    branches=self.nodebranchdict(ctx))
760 760
761 761 def tags(self, tmpl):
762 762 i = self.repo.tagslist()
763 763 i.reverse()
764 764 parity = paritygen(self.stripecount)
765 765
766 766 def entries(notip=False,limit=0, **map):
767 767 count = 0
768 768 for k, n in i:
769 769 if notip and k == "tip":
770 770 continue
771 771 if limit > 0 and count >= limit:
772 772 continue
773 773 count = count + 1
774 774 yield {"parity": parity.next(),
775 775 "tag": k,
776 776 "date": self.repo.changectx(n).date(),
777 777 "node": hex(n)}
778 778
779 779 return tmpl("tags",
780 780 node=hex(self.repo.changelog.tip()),
781 781 entries=lambda **x: entries(False,0, **x),
782 782 entriesnotip=lambda **x: entries(True,0, **x),
783 783 latestentry=lambda **x: entries(True,1, **x))
784 784
785 785 def summary(self, tmpl):
786 786 i = self.repo.tagslist()
787 787 i.reverse()
788 788
789 789 def tagentries(**map):
790 790 parity = paritygen(self.stripecount)
791 791 count = 0
792 792 for k, n in i:
793 793 if k == "tip": # skip tip
794 794 continue;
795 795
796 796 count += 1
797 797 if count > 10: # limit to 10 tags
798 798 break;
799 799
800 800 yield tmpl("tagentry",
801 801 parity=parity.next(),
802 802 tag=k,
803 803 node=hex(n),
804 804 date=self.repo.changectx(n).date())
805 805
806 806
807 807 def branches(**map):
808 808 parity = paritygen(self.stripecount)
809 809
810 810 b = self.repo.branchtags()
811 811 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
812 812 l.sort()
813 813
814 814 for r,n,t in l:
815 815 ctx = self.repo.changectx(n)
816 816
817 817 yield {'parity': parity.next(),
818 818 'branch': t,
819 819 'node': hex(n),
820 820 'date': ctx.date()}
821 821
822 822 def changelist(**map):
823 823 parity = paritygen(self.stripecount, offset=start-end)
824 824 l = [] # build a list in forward order for efficiency
825 825 for i in xrange(start, end):
826 826 ctx = self.repo.changectx(i)
827 827 n = ctx.node()
828 828 hn = hex(n)
829 829
830 830 l.insert(0, tmpl(
831 831 'shortlogentry',
832 832 parity=parity.next(),
833 833 author=ctx.user(),
834 834 desc=ctx.description(),
835 835 date=ctx.date(),
836 836 rev=i,
837 837 node=hn,
838 838 tags=self.nodetagsdict(n),
839 839 branches=self.nodebranchdict(ctx)))
840 840
841 841 yield l
842 842
843 843 cl = self.repo.changelog
844 844 count = cl.count()
845 845 start = max(0, count - self.maxchanges)
846 846 end = min(count, start + self.maxchanges)
847 847
848 848 return tmpl("summary",
849 849 desc=self.config("web", "description", "unknown"),
850 850 owner=get_contact(self.config) or "unknown",
851 851 lastchange=cl.read(cl.tip())[2],
852 852 tags=tagentries,
853 853 branches=branches,
854 854 shortlog=changelist,
855 855 node=hex(cl.tip()),
856 856 archives=self.archivelist("tip"))
857 857
858 858 def filediff(self, tmpl, fctx):
859 859 n = fctx.node()
860 860 path = fctx.path()
861 861 parents = fctx.parents()
862 862 p1 = parents and parents[0].node() or nullid
863 863
864 864 def diff(**map):
865 865 yield self.diff(tmpl, p1, n, [path])
866 866
867 867 return tmpl("filediff",
868 868 file=path,
869 869 node=hex(n),
870 870 rev=fctx.rev(),
871 871 parent=self.siblings(parents),
872 872 child=self.siblings(fctx.children()),
873 873 diff=diff)
874 874
    # archive format -> (content-type, archival.archive kind, file
    # extension, content-encoding); a None encoding means the archiver
    # itself compresses the output.
    archive_specs = {
        'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
        'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
        'zip': ('application/zip', 'zip', '.zip', None),
        }
880 880
    def archive(self, tmpl, req, key, type_):
        """Stream an archive of revision `key` to the client.

        type_ must be a key of archive_specs and selects the archive
        format.  Unknown revisions raise through self.repo.lookup().
        """
        reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
        cnode = self.repo.lookup(key)
        arch_version = key
        # for symbolic names that resolved, use the short hash in the
        # archive name so it identifies the exact revision
        if cnode == key or key == 'tip':
            arch_version = short(cnode)
        name = "%s-%s" % (reponame, arch_version)
        mimetype, artype, extension, encoding = self.archive_specs[type_]
        headers = [
            ('Content-Type', mimetype),
            ('Content-Disposition', 'attachment; filename=%s%s' %
                (name, extension))
        ]
        if encoding:
            headers.append(('Content-Encoding', encoding))
        req.header(headers)
        req.respond(HTTP_OK)
        archival.archive(self.repo, req, cnode, artype, prefix=name)
899 899
900 900 # add tags to things
901 901 # tags -> list of changesets corresponding to tags
902 902 # find tag, changeset, file
903 903
904 904 def cleanpath(self, path):
905 905 path = path.lstrip('/')
906 906 return util.canonpath(self.repo.root, '', path)
907 907
908 908 def changectx(self, req):
909 909 if 'node' in req.form:
910 910 changeid = req.form['node'][0]
911 911 elif 'manifest' in req.form:
912 912 changeid = req.form['manifest'][0]
913 913 else:
914 914 changeid = self.repo.changelog.count() - 1
915 915
916 916 try:
917 917 ctx = self.repo.changectx(changeid)
918 918 except hg.RepoError:
919 919 man = self.repo.manifest
920 920 mn = man.lookup(changeid)
921 921 ctx = self.repo.changectx(man.linkrev(mn))
922 922
923 923 return ctx
924 924
925 925 def filectx(self, req):
926 926 path = self.cleanpath(req.form['file'][0])
927 927 if 'node' in req.form:
928 928 changeid = req.form['node'][0]
929 929 else:
930 930 changeid = req.form['filenode'][0]
931 931 try:
932 932 ctx = self.repo.changectx(changeid)
933 933 fctx = ctx.filectx(path)
934 934 except hg.RepoError:
935 935 fctx = self.repo.filectx(path, fileid=changeid)
936 936
937 937 return fctx
938 938
939 939 def check_perm(self, req, op, default):
940 940 '''check permission for operation based on user auth.
941 941 return true if op allowed, else false.
942 942 default is policy to use if no config given.'''
943 943
944 944 user = req.env.get('REMOTE_USER')
945 945
946 946 deny = self.configlist('web', 'deny_' + op)
947 947 if deny and (not user or deny == ['*'] or user in deny):
948 948 return False
949 949
950 950 allow = self.configlist('web', 'allow_' + op)
951 951 return (allow and (allow == ['*'] or user in allow)) or default
@@ -1,221 +1,221 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
9 9 from mercurial import util, streamclone
10 10 from mercurial.i18n import gettext as _
11 from mercurial.node import *
11 from mercurial.node import bin, hex
12 12 from mercurial import changegroup as changegroupmod
13 13 from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 14
# __all__ is populated with the allowed commands. Be sure to add to it if
# you're adding a new command, or the new command won't work.

__all__ = [
   'lookup', 'heads', 'branches', 'between', 'changegroup',
   'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
]

# MIME type used for all wire-protocol responses
HGTYPE = 'application/mercurial-0.1'
24 24
25 25 def lookup(web, req):
26 26 try:
27 27 r = hex(web.repo.lookup(req.form['key'][0]))
28 28 success = 1
29 29 except Exception,inst:
30 30 r = str(inst)
31 31 success = 0
32 32 resp = "%s %s\n" % (success, r)
33 33 req.respond(HTTP_OK, HGTYPE, length=len(resp))
34 34 req.write(resp)
35 35
def heads(web, req):
    """Send the repository heads as one line of hex node ids."""
    resp = "%s\n" % " ".join([hex(h) for h in web.repo.heads()])
    req.respond(HTTP_OK, HGTYPE, length=len(resp))
    req.write(resp)
40 40
def branches(web, req):
    """Send branch information for the requested nodes.

    One line per node, each holding four space-separated hex node ids.
    """
    nodes = []
    if 'nodes' in req.form:
        nodes = [bin(x) for x in req.form['nodes'][0].split(" ")]
    out = []
    for b in web.repo.branches(nodes):
        out.append(" ".join([hex(n) for n in b]) + "\n")
    resp = "".join(out)
    req.respond(HTTP_OK, HGTYPE, length=len(resp))
    req.write(resp)
51 51
def between(web, req):
    """Send, for each requested (top, bottom) pair, the nodes between.

    The "pairs" form field holds space-separated "top-bottom" hex
    pairs; one line of hex node ids is written per pair.
    """
    # default to no pairs: the old code left `pairs` unbound when the
    # form field was missing, raising NameError below
    pairs = []
    if 'pairs' in req.form:
        pairs = [map(bin, p.split("-"))
                 for p in req.form['pairs'][0].split(" ")]
    resp = cStringIO.StringIO()
    for b in web.repo.between(pairs):
        resp.write(" ".join(map(hex, b)) + "\n")
    resp = resp.getvalue()
    req.respond(HTTP_OK, HGTYPE, length=len(resp))
    req.write(resp)
62 62
def changegroup(web, req):
    """Stream a zlib-compressed changegroup for the requested roots.

    Sends nothing beyond the headers when pulling is disabled.
    """
    req.respond(HTTP_OK, HGTYPE)
    if not web.allowpull:
        return

    roots = []
    if 'roots' in req.form:
        roots = [bin(x) for x in req.form['roots'][0].split(" ")]

    deflate = zlib.compressobj()
    cg = web.repo.changegroup(roots, 'serve')
    while True:
        data = cg.read(4096)
        if not data:
            break
        req.write(deflate.compress(data))

    req.write(deflate.flush())
81 81
def changegroupsubset(web, req):
    """Stream a zlib-compressed changegroup between bases and heads.

    Sends nothing beyond the headers when pulling is disabled.
    """
    req.respond(HTTP_OK, HGTYPE)
    if not web.allowpull:
        return

    bases = []
    heads = []
    if 'bases' in req.form:
        bases = [bin(x) for x in req.form['bases'][0].split(' ')]
    if 'heads' in req.form:
        heads = [bin(x) for x in req.form['heads'][0].split(' ')]

    deflate = zlib.compressobj()
    cg = web.repo.changegroupsubset(bases, heads, 'serve')
    while True:
        data = cg.read(4096)
        if not data:
            break
        req.write(deflate.compress(data))

    req.write(deflate.flush())
103 103
def capabilities(web, req):
    """Send the server capability list as a space-separated string."""
    caps = ' '.join(web.capabilities())
    req.respond(HTTP_OK, HGTYPE, length=len(caps))
    req.write(caps)
108 108
def unbundle(web, req):
    """Receive a pushed bundle, apply it, and report the result.

    The reply body is "<return code>\n<addchangegroup output>"; a
    leading "0" line signals failure.  The incoming bundle is spooled
    to a temporary file so the repository is only locked while the
    changegroup is applied.
    """
    def bail(response, headers={}):
        # the request body must be consumed even on error, or a client
        # run outside a cgi script will never see our response
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            # drain incoming bundle, else client will not see
            # response when run outside cgi script
            pass
        req.header(headers.items())
        req.respond(HTTP_OK, HGTYPE)
        req.write('0\n')
        req.write(response)

    # require ssl by default, auth info cannot be sniffed and
    # replayed
    ssl_req = web.configbool('web', 'push_ssl', True)
    if ssl_req:
        if req.env.get('wsgi.url_scheme') != 'https':
            bail('ssl required\n')
            return
        proto = 'https'
    else:
        proto = 'http'

    # do not allow push unless explicitly allowed
    if not web.check_perm(req, 'push', False):
        bail('push not authorized\n',
             headers={'status': '401 Unauthorized'})
        return

    their_heads = req.form['heads'][0].split(' ')

    def check_heads():
        # the client can force the push past this check by sending
        # hex('force') as its head list
        heads = map(hex, web.repo.heads())
        return their_heads == [hex('force')] or their_heads == heads

    # fail early if possible
    if not check_heads():
        bail('unsynced changes\n')
        return

    req.respond(HTTP_OK, HGTYPE)

    # do not lock repo until all changegroup data is
    # streamed. save to temporary file.

    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    try:
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            fp.write(s)

        try:
            lock = web.repo.lock()
            try:
                # re-check: the repo may have changed while we streamed
                if not check_heads():
                    req.write('0\n')
                    req.write('unsynced changes\n')
                    return

                fp.seek(0)
                # sniff the bundle header to pick the decompressor
                header = fp.read(6)
                if header.startswith('HG') and not header.startswith('HG10'):
                    raise ValueError('unknown bundle version')
                elif header not in changegroupmod.bundletypes:
                    raise ValueError('unknown bundle compression type')
                gen = changegroupmod.unbundle(header, fp)

                # send addchangegroup output to client

                old_stdout = sys.stdout
                sys.stdout = cStringIO.StringIO()

                try:
                    url = 'remote:%s:%s' % (proto,
                                            req.env.get('REMOTE_HOST', ''))
                    try:
                        ret = web.repo.addchangegroup(gen, 'serve', url)
                    except util.Abort, inst:
                        sys.stdout.write("abort: %s\n" % inst)
                        ret = 0
                finally:
                    # always restore stdout before replying
                    val = sys.stdout.getvalue()
                    sys.stdout = old_stdout
                req.write('%d\n' % ret)
                req.write(val)
            finally:
                del lock
        except ValueError, inst:
            req.write('0\n')
            req.write(str(inst) + '\n')
        except (OSError, IOError), inst:
            req.write('0\n')
            filename = getattr(inst, 'filename', '')
            # Don't send our filesystem layout to the client
            if filename.startswith(web.repo.root):
                filename = filename[len(web.repo.root)+1:]
            else:
                filename = ''
            error = getattr(inst, 'strerror', 'Unknown error')
            if inst.errno == errno.ENOENT:
                code = HTTP_NOT_FOUND
            else:
                code = HTTP_SERVER_ERROR
            req.respond(code)
            req.write('%s: %s\n' % (error, filename))
    finally:
        fp.close()
        os.unlink(tempname)
218 218
def stream_out(web, req):
    """Stream raw repository data for streaming clone."""
    req.respond(HTTP_OK, HGTYPE)
    streamclone.stream_out(web.repo, req, untrusted=True)
@@ -1,458 +1,458 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 from node import *
10 from remoterepo import *
9 from node import bin, hex
10 from remoterepo import remoterepository
11 11 from i18n import _
12 12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 13 import errno, keepalive, tempfile, socket, changegroup
14 14
class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
    """Password manager that falls back to prompting the user on the
    terminal when no stored credentials match."""

    def __init__(self, ui):
        urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
        self.ui = ui

    def find_user_password(self, realm, authuri):
        # first try whatever urllib2 already has stored
        authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
            self, realm, authuri)
        user, passwd = authinfo
        if user and passwd:
            return (user, passwd)

        # cannot prompt without an interactive terminal
        if not self.ui.interactive:
            raise util.Abort(_('http authorization required'))

        self.ui.write(_("http authorization required\n"))
        self.ui.status(_("realm: %s\n") % realm)
        if user:
            self.ui.status(_("user: %s\n") % user)
        else:
            user = self.ui.prompt(_("user:"), default=None)

        if not passwd:
            passwd = self.ui.getpass()

        # remember the credentials for subsequent requests
        self.add_password(realm, authuri, user, passwd)
        return (user, passwd)
42 42
def netlocsplit(netloc):
    """Split "[user[:passwd]@]host[:port]" into (host, port, user, passwd).

    Missing components come back as None; user and passwd are
    URL-unquoted.
    """
    user = passwd = None
    if '@' in netloc:
        userpass, netloc = netloc.split('@', 1)
        if ':' in userpass:
            rawuser, rawpass = userpass.split(':', 1)
            user = urllib.unquote(rawuser)
            passwd = urllib.unquote(rawpass)
        else:
            user = urllib.unquote(userpass)
    if ':' in netloc:
        host, port = netloc.split(':', 1)
    else:
        host, port = netloc, None
    return host, port, user, passwd
63 63
def netlocunsplit(host, port, user=None, passwd=None):
    """Join host/port/user/passwd into "[user[:passwd]@]host[:port]".

    The inverse of netlocsplit; user and passwd are URL-quoted.
    """
    if port:
        hostport = '%s:%s' % (host, port)
    else:
        hostport = host
    if not user:
        return hostport
    if passwd:
        auth = '%s:%s' % (urllib.quote(user), urllib.quote(passwd))
    else:
        auth = urllib.quote(user)
    return auth + '@' + hostport
77 77
# work around a bug in Python < 2.4.2
# (it leaves a "\n" at the end of Proxy-authorization headers)
class request(urllib2.Request):
    def add_header(self, key, val):
        # strip the trailing newline the buggy urllib2 leaves behind
        if key.lower() == 'proxy-authorization':
            val = val.strip()
        return urllib2.Request.add_header(self, key, val)
85 85
class httpsendfile(file):
    """A file whose len() is its on-disk size, so urllib2 can compute
    Content-Length when a bundle file is used as POST data."""
    def __len__(self):
        return os.fstat(self.fileno()).st_size
89 89
def _gen_sendfile(connection):
    """Build a send() method for `connection` that streams an
    httpsendfile in chunks instead of loading it into memory."""
    def _sendfile(self, data):
        if not isinstance(data, httpsendfile):
            connection.send(self, data)
            return
        # if auth is required, the data may be sent twice: rewind first
        data.seek(0)
        for piece in util.filechunkiter(data):
            connection.send(self, piece)
    return _sendfile
101 101
class httpconnection(keepalive.HTTPConnection):
    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection)
105 105
class httphandler(keepalive.HTTPHandler):
    """Keepalive-aware HTTP handler using the streaming connection."""
    def http_open(self, req):
        return self.do_open(httpconnection, req)

    def __del__(self):
        # tear down any cached keepalive connections with the handler
        self.close_all()
112 112
# HTTPS support is optional in Python builds without SSL
has_https = hasattr(urllib2, 'HTTPSHandler')
if has_https:
    # mirror httpconnection/httphandler for SSL connections
    class httpsconnection(httplib.HTTPSConnection):
        response_class = keepalive.HTTPResponse
        # must be able to send big bundle as stream.
        send = _gen_sendfile(httplib.HTTPSConnection)

    class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
        def https_open(self, req):
            return self.do_open(httpsconnection, req)
123 123
# In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
# it doesn't know about the auth type requested. This can happen if
# somebody is using BasicAuth and types a bad password.
class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
    def http_error_auth_reqed(self, auth_header, host, req, headers):
        try:
            return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
                self, auth_header, host, req, headers)
        except ValueError, inst:
            arg = inst.args[0]
            if arg.startswith("AbstractDigestAuthHandler doesn't know "):
                # unsupported auth scheme: give up quietly instead of
                # propagating a confusing ValueError
                return
            raise
137 137
def zgenerator(f):
    """Yield decompressed chunks of the zlib stream read from f.

    A mid-stream HTTP failure is surfaced as an IOError.
    """
    zd = zlib.decompressobj()
    try:
        for chunk in util.filechunkiter(f):
            yield zd.decompress(chunk)
    except httplib.HTTPException, inst:
        raise IOError(None, _('connection ended unexpectedly'))
    yield zd.flush()
146 146
# characters that never need quoting in a URL path
_safe = ('abcdefghijklmnopqrstuvwxyz'
         'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
         '0123456789' '_.-/')
# lazily-built lookup sets, initialized on first quotepath() call
_safeset = None
_hex = None
def quotepath(path):
    '''quote the path part of a URL

    This is similar to urllib.quote, but it also tries to avoid
    quoting things twice (inspired by wget):

    >>> quotepath('abc def')
    'abc%20def'
    >>> quotepath('abc%20def')
    'abc%20def'
    >>> quotepath('abc%20 def')
    'abc%20%20def'
    >>> quotepath('abc def%20')
    'abc%20def%20'
    >>> quotepath('abc def%2')
    'abc%20def%252'
    >>> quotepath('abc def%')
    'abc%20def%25'
    '''
    global _safeset, _hex
    if _safeset is None:
        _safeset = util.set(_safe)
        _hex = util.set('abcdefABCDEF0123456789')
    l = list(path)
    for i in xrange(len(l)):
        c = l[i]
        # a '%' followed by two hex digits is an existing escape: keep it
        if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
            pass
        elif c not in _safeset:
            l[i] = '%%%02X' % ord(c)
    return ''.join(l)
183 183
class httprepository(remoterepository):
    """Remote repository proxy speaking the hg wire protocol over HTTP."""

    def __init__(self, ui, path):
        """Parse the URL, set up proxy and auth handlers, and install a
        global urllib2 opener for this repository."""
        self.path = path
        self.caps = None
        self.handler = None
        scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
        if query or frag:
            raise util.Abort(_('unsupported URL component: "%s"') %
                             (query or frag))
        if not urlpath:
            urlpath = '/'
        urlpath = quotepath(urlpath)
        host, port, user, passwd = netlocsplit(netloc)

        # urllib cannot handle URLs with embedded user or passwd
        self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
                                         urlpath, '', ''))
        self.ui = ui
        self.ui.debug(_('using %s\n') % self._url)

        proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
        # XXX proxyauthinfo = None
        handlers = [httphandler()]
        if has_https:
            handlers.append(httpshandler())

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            snpqf = urlparse.urlsplit(proxyurl)
            proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
            hpup = netlocsplit(proxynetloc)

            proxyhost, proxyport, proxyuser, proxypasswd = hpup
            if not proxyuser:
                proxyuser = ui.config("http_proxy", "user")
                proxypasswd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = [ "localhost", "127.0.0.1" ]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in os.getenv("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if (not ui.configbool("http_proxy", "always") and
                host.lower() in no_list):
                # avoid auto-detection of proxy settings by appending
                # a ProxyHandler with no proxies defined.
                handlers.append(urllib2.ProxyHandler({}))
                ui.debug(_('disabling proxy for %s\n') % host)
            else:
                proxyurl = urlparse.urlunsplit((
                    proxyscheme, netlocunsplit(proxyhost, proxyport,
                                               proxyuser, proxypasswd or ''),
                    proxypath, proxyquery, proxyfrag))
                handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
                ui.debug(_('proxying through http://%s:%s\n') %
                          (proxyhost, proxyport))

        # urllib2 takes proxy values from the environment and those
        # will take precedence if found, so drop them
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if env in os.environ:
                    del os.environ[env]
            except OSError:
                pass

        passmgr = passwordmgr(ui)
        if user:
            ui.debug(_('http auth: user %s, password %s\n') %
                     (user, passwd and '*' * len(passwd) or 'not set'))
            netloc = host
            if port:
                netloc += ':' + port
            # Python < 2.4.3 uses only the netloc to search for a password
            passmgr.add_password(None, (self._url, netloc), user, passwd or '')

        handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
                         httpdigestauthhandler(passmgr)))
        opener = urllib2.build_opener(*handlers)

        # 1.0 here is the _protocol_ version
        opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
        urllib2.install_opener(opener)

    def url(self):
        return self.path

    # look up capabilities only when needed

    def get_caps(self):
        """Fetch (once) and cache the server capability set."""
        if self.caps is None:
            try:
                self.caps = util.set(self.do_read('capabilities').split())
            except repo.RepoError:
                # pre-capability servers: assume none
                self.caps = util.set()
            self.ui.debug(_('capabilities: %s\n') %
                          (' '.join(self.caps or ['none'])))
        return self.caps

    capabilities = property(get_caps)

    def lock(self):
        raise util.Abort(_('operation not supported over http'))

    def do_cmd(self, cmd, **args):
        """Issue one wire-protocol command and return the open response.

        'data' and 'headers' kwargs are reserved for the request body
        and extra headers; everything else becomes query parameters.
        Validates the response Content-Type and protocol version.
        """
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug(_("sending %s command\n") % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        try:
            if data:
                self.ui.debug(_("sending %s bytes\n") % len(data))
            resp = urllib2.urlopen(request(cu, data, headers))
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug(_('http error while sending %s command\n') % cmd)
            self.ui.print_exc()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))
        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url != resp_url:
            self.ui.status(_('real URL is %s\n') % resp_url)
            self._url = resp_url
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not (proto.startswith('application/mercurial-') or
                proto.startswith('text/plain') or
                proto.startswith('application/hg-changegroup')):
            self.ui.debug(_("Requested URL: '%s'\n") % cu)
            raise repo.RepoError(_("'%s' does not appear to be an hg repository")
                                 % self._url)

        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise repo.RepoError(_("'%s' sent a broken Content-Type "
                                       "header (%s)") % (self._url, proto))
            if version_info > (0, 1):
                raise repo.RepoError(_("'%s' uses newer protocol %s") %
                                     (self._url, version))

        return resp

    def do_read(self, cmd, **args):
        """Run cmd and return the whole response body as a string."""
        fp = self.do_cmd(cmd, **args)
        try:
            return fp.read()
        finally:
            # if using keepalive, allow connection to be reused
            fp.close()

    def lookup(self, key):
        """Resolve key to a binary node id on the server."""
        self.requirecap('lookup', _('look up remote revision'))
        d = self.do_cmd("lookup", key = key).read()
        success, data = d[:-1].split(' ', 1)
        if int(success):
            return bin(data)
        raise repo.RepoError(data)

    def heads(self):
        """Return the list of remote head nodes."""
        d = self.do_read("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def branches(self, nodes):
        """Return branch tuples (4 nodes each) for the given nodes."""
        n = " ".join(map(hex, nodes))
        d = self.do_read("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def between(self, pairs):
        """Return, for each (top, bottom) pair, the nodes between them."""
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_read("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def changegroup(self, nodes, kind):
        """Return a chunkbuffer over the decompressed changegroup."""
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return util.chunkbuffer(zgenerator(f))

    def changegroupsubset(self, bases, heads, source):
        """Return a chunkbuffer over the changegroup between bases/heads."""
        self.requirecap('changegroupsubset', _('look up remote changes'))
        baselst = " ".join([hex(n) for n in bases])
        headlst = " ".join([hex(n) for n in heads])
        f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
        return util.chunkbuffer(zgenerator(f))

    def unbundle(self, cg, heads, source):
        """Push changegroup cg to the remote; return its result code."""
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        type = ""
        types = self.capable('unbundle')
        # servers older than d1b16a746db6 will send 'unbundle' as a
        # boolean capability
        try:
            types = types.split(',')
        except AttributeError:
            types = [""]
        if types:
            # pick the first bundle type the server supports
            for x in types:
                if x in changegroup.bundletypes:
                    type = x
                    break

        tempname = changegroup.writebundle(cg, None, type)
        fp = httpsendfile(tempname, "rb")
        try:
            try:
                rfp = self.do_cmd(
                    'unbundle', data=fp,
                    headers={'Content-Type': 'application/octet-stream'},
                    heads=' '.join(map(hex, heads)))
                try:
                    ret = int(rfp.readline())
                    self.ui.write(rfp.read())
                    return ret
                finally:
                    rfp.close()
            except socket.error, err:
                if err[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err[1])
                raise util.Abort(err[1])
        finally:
            fp.close()
            os.unlink(tempname)

    def stream_out(self):
        return self.do_cmd('stream_out')
445 445
class httpsrepository(httprepository):
    """HTTPS flavour of httprepository; aborts when Python lacks SSL."""
    def __init__(self, ui, path):
        if not has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)
452 452
def instance(ui, path, create):
    """Repository factory for http:// and https:// URLs (read-only:
    creation is not supported over http)."""
    if create:
        raise util.Abort(_('cannot create new http repository'))
    if path.startswith('https:'):
        cls = httpsrepository
    else:
        cls = httprepository
    return cls(ui, path)
@@ -1,2126 +1,2126 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 17 supported = ('revlogv1', 'store')
18 18
    def __init__(self, parentui, path=None, create=0):
        # Open the repository at *path*, or initialize a new one when
        # create is true.  Raises repo.RepoError when the repository is
        # missing, already exists, or has unsupported requirements.
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)    # rooted at .hg/
        self.wopener = util.opener(self.root)   # rooted at the working dir

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # old repository with no requires file
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with the "store" requirement, store files live in
        # .hg/store with encoded filenames
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path

        try:
            # files in .hg/ will be created using this mode
            mode = os.stat(self.spath).st_mode
            # avoid some useless chmods
            if (0777 & ~util._umask) == (0777 & mode):
                mode = None
        except OSError:
            mode = None

        self._createmode = mode
        self.opener.createmode = mode
        sopener = util.opener(self.spath)
        sopener.createmode = mode
        self.sopener = util.encodedopener(sopener, self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            # per-repository configuration and extensions
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        # lazily-populated caches; see tags(), branchtags(), nodetags()
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
103 103
    def __getattr__(self, name):
        # Lazily create the expensive changelog/manifest/dirstate
        # objects the first time each is accessed.
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            # propagate the changelog's revlog version to the store opener
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch the changelog first so sopener.defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError, name
118 118
119 119 def url(self):
120 120 return 'file:' + self.root
121 121
    def hook(self, name, throw=False, **args):
        # run the named hook with repo=self; with throw=True a failing
        # hook raises instead of returning an error status
        return hook.hook(self.ui, self, name, throw, **args)
124 124
    # characters that may not appear in a tag name
    tag_disallowed = ':\r\n'
126 126
127 127 def _tag(self, name, node, message, local, user, date, parent=None,
128 128 extra={}):
129 129 use_dirstate = parent is None
130 130
131 131 for c in self.tag_disallowed:
132 132 if c in name:
133 133 raise util.Abort(_('%r cannot be used in a tag name') % c)
134 134
135 135 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
136 136
137 137 def writetag(fp, name, munge, prevtags):
138 138 fp.seek(0, 2)
139 139 if prevtags and prevtags[-1] != '\n':
140 140 fp.write('\n')
141 141 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
142 142 fp.close()
143 143
144 144 prevtags = ''
145 145 if local:
146 146 try:
147 147 fp = self.opener('localtags', 'r+')
148 148 except IOError, err:
149 149 fp = self.opener('localtags', 'a')
150 150 else:
151 151 prevtags = fp.read()
152 152
153 153 # local tags are stored in the current charset
154 154 writetag(fp, name, None, prevtags)
155 155 self.hook('tag', node=hex(node), tag=name, local=local)
156 156 return
157 157
158 158 if use_dirstate:
159 159 try:
160 160 fp = self.wfile('.hgtags', 'rb+')
161 161 except IOError, err:
162 162 fp = self.wfile('.hgtags', 'ab')
163 163 else:
164 164 prevtags = fp.read()
165 165 else:
166 166 try:
167 167 prevtags = self.filectx('.hgtags', parent).data()
168 168 except revlog.LookupError:
169 169 pass
170 170 fp = self.wfile('.hgtags', 'wb')
171 171 if prevtags:
172 172 fp.write(prevtags)
173 173
174 174 # committed tags are stored in UTF-8
175 175 writetag(fp, name, util.fromlocal, prevtags)
176 176
177 177 if use_dirstate and '.hgtags' not in self.dirstate:
178 178 self.add(['.hgtags'])
179 179
180 180 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
181 181 extra=extra)
182 182
183 183 self.hook('tag', node=hex(node), tag=name, local=local)
184 184
185 185 return tagnode
186 186
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        date = util.parsedate(date)
        # refuse to tag while .hgtags itself appears in any of the
        # modified/added/removed/deleted/unknown status lists
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self._tag(name, node, message, local, user, date)
213 213
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}
        tagtypes = {}

        def readtags(lines, fn, tagtype):
            # Parse "<hex-node> <tag>" lines from fn and merge them into
            # globaltags/tagtypes.  Repeated entries for a tag collect
            # the older nodes into a history list used for ranking.
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h holds the superseded nodes for this tag
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            # reuse the previous filectx when possible to share caches
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            pass

        # flatten the merged result into the caches; a tag mapped to
        # nullid means "deleted" and is omitted from tagscache
        self.tagscache = {}
        self._tagstypecache = {}
        for k, nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
298 298
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        # populate tagscache/_tagstypecache as a side effect
        self.tags()

        return self._tagstypecache.get(tagname)
311 311
    def _hgtagsnodes(self):
        # Return [(rev, node, fnode)] for each head carrying a .hgtags
        # file, oldest head first and ending with the tip; when several
        # heads share the same .hgtags file node, only the newest is kept.
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # newer entry supersedes the older duplicate
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
329 329
    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                # NOTE(review): bare except; presumably meant for unknown
                # nodes (e.g. stale localtags) — confirm
                r = -2 # sort to the beginning of the list if unknown
            l.append((r, t, n))
        l.sort()
        return [(t, n) for r, t, n in l]
341 341
342 342 def nodetags(self, node):
343 343 '''return the tags associated with a node'''
344 344 if not self.nodetagscache:
345 345 self.nodetagscache = {}
346 346 for t, n in self.tags().items():
347 347 self.nodetagscache.setdefault(n, []).append(t)
348 348 return self.nodetagscache.get(node, [])
349 349
    def _branchtags(self, partial, lrev):
        # Bring *partial* (a branch map valid up to revision *lrev*) up
        # to the current tip and write the refreshed cache to disk.
        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
357 357
    def branchtags(self):
        # Return {branchname: tipmost-node}, maintained incrementally
        # from the on-disk branch.cache file.
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # cold cache, or cache built for a now-stripped revision:
            # reload from disk
            partial, last, lrev = self._readbranchcache()
        else:
            # warm cache: only scan revisions added since oldtip
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
383 383
    def _readbranchcache(self):
        # Parse branch.cache and return (branchmap, tipnode, tiprev).
        # File format: first line "<tipnode-hex> <tiprev>", then one
        # "<node-hex> <branchname>" line per branch.
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no usable cache: caller will rebuild from scratch
            return {}, nullid, nullrev

        try:
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # a corrupt cache is not fatal — fall back to a full rebuild
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
411 411
    def _writebranchcache(self, branches, tip, tiprev):
        # Best-effort write of branch.cache; failures (e.g. a read-only
        # repository) are deliberately ignored.
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
421 421
422 422 def _updatebranchcache(self, partial, start, end):
423 423 for r in xrange(start, end):
424 424 c = self.changectx(r)
425 425 b = c.branch()
426 426 partial[b] = c.node()
427 427
    def lookup(self, key):
        # Translate a revision symbol ('.', 'null', tag, branch name,
        # node prefix, or binary node) into a binary changelog node.
        if key == '.':
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        try:
            # a binary node that matched nothing: report it as hex
            if len(key) == 20:
                key = hex(key)
        except:
            # NOTE(review): bare except — presumably guards hex() on
            # non-string keys; confirm before narrowing
            pass
        raise repo.RepoError(_("unknown revision '%s'") % key)
454 454
    def dev(self):
        # device number of the repository directory
        return os.lstat(self.path).st_dev

    def local(self):
        # this is a local, on-disk repository
        return True

    def join(self, f):
        # path of f inside .hg/
        return os.path.join(self.path, f)

    def sjoin(self, f):
        # path of f inside the store, with filename encoding applied
        f = self.encodefn(f)
        return os.path.join(self.spath, f)

    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)

    def file(self, f):
        # return the filelog for tracked file f (leading '/' stripped)
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid=None):
        # changeset context for changeid (default: working dir parent)
        return context.changectx(self, changeid)

    def workingctx(self):
        # context object for the working directory
        return context.workingctx(self)

    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        # omit a null second parent
        if pl[1] == nullid:
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        # current working directory relative to the repository root
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        # repository path f expressed relative to cwd
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        # open file f from the working directory
        return self.wopener(f, mode)

    def _link(self, f):
        # is f a symlink in the working directory?
        return os.path.islink(self.wjoin(f))
511 511
    def _filter(self, filter, filename, data):
        # Run *data* through the first configured filter of section
        # *filter* ("encode" or "decode") whose pattern matches
        # *filename*; return the (possibly transformed) data.
        if filter not in self.filterpats:
            # compile and cache the pattern/command list for this section
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        # registered in-process data filter
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to piping through the shell command
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
540 540
    def adddatafilter(self, name, filter):
        # register an in-process data filter callable under *name*;
        # picked up by _filter when an encode/decode command starts
        # with that name
        self._datafilters[name] = filter
543 543
    def wread(self, filename):
        # Read a working-directory file (the link target for symlinks),
        # applying any configured "encode" filters.
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
550 550
    def wwrite(self, filename, data, flags):
        # Write *data* into the working directory after the "decode"
        # filters, then apply *flags* ('x'/'l') to the result.
        data = self._filter("decode", filename, data)
        try:
            # remove any existing file first; missing files are fine
            os.unlink(self.wjoin(filename))
        except OSError:
            pass
        self.wopener(filename, 'w').write(data)
        util.set_flags(self.wjoin(filename), flags)
559 559
    def wwritedata(self, filename, data):
        # apply "decode" filters without touching the working directory
        return self._filter("decode", filename, data)
562 562
    def transaction(self):
        # Return a new store transaction, nesting inside the active one
        # when present.
        if self._transref and self._transref():
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on close the journal files are renamed to undo files, which is
        # what rollback() later consumes
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self._createmode)
        # only a weak reference, so an abandoned transaction can abort
        # itself when collected
        self._transref = weakref.ref(tr)
        return tr
588 588
    def recover(self):
        # Roll back an interrupted transaction (a leftover journal).
        # Returns True when something was recovered.
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            # dropping the last reference releases the lock
            del l
602 602
    def rollback(self):
        # Undo the last committed transaction, restoring the saved
        # dirstate and branch name.
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            # dropping the references releases both locks
            del lock, wlock
625 625
    def invalidate(self):
        # Drop all cached repository state so it is re-read from disk on
        # next access.
        for a in "changelog manifest".split():
            if hasattr(self, a):
                # recreated lazily by __getattr__
                self.__delattr__(a)
        self.tagscache = None
        self._tagstypecache = None
        self.nodetagscache = None
        self.branchcache = None
        self._ubranchcache = None
        self._branchcachetip = None
636 636
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        # Acquire *lockname*.  With wait=True, warn and block (up to the
        # ui 'timeout' setting) instead of raising LockHeld immediately.
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
651 651
    def lock(self, wait=True):
        # Lock the store; reuse an existing live lock when possible.
        if self._lockref and self._lockref():
            return self._lockref()

        # invalidate caches on acquisition so we see others' changes
        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
660 660
    def wlock(self, wait=True):
        # Lock the working directory; the dirstate is invalidated on
        # acquisition and written out on release.
        if self._wlockref and self._wlockref():
            return self._wlockref()

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
670 670
    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the (possibly pre-existing) filelog node for fn; appends
        fn to changelist only when a new filelog revision is created.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            elif cp in manifest2: # directory rename on local side
                meta["copyrev"] = hex(manifest2[cp])
            else: # directory rename on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
732 732
    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
        # Commit with explicit parents, bypassing the dirstate; when p1
        # is not given, default to the current dirstate parents.
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, extra=extra, empty_ok=True)
738 738
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Create a new changeset and return its node, or None when
        nothing changed.  With p1 given, commits against explicit
        parents (rawcommit mode) instead of the dirstate."""
        wlock = lock = tr = None
        valid = 0 # don't save the dirstate if this isn't set
        if files:
            files = util.unique(files)
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            if use_dirstate:
                if files:
                    # classify an explicit file list by dirstate state
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    # no file list: commit everything status reports
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    # reject branch names that are not valid UTF-8
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            wlock = self.wlock()
            lock = self.lock()
            tr = self.transaction()
            # hand filelogs only a weak proxy so they cannot keep the
            # transaction alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # rawcommit: treat an unreadable file as removed
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if (not empty_ok and not text) or force_editor:
                # compose the editor template and let the user edit it
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            if branchname:
                extra["branch"] = branchname

            if use_dirstate:
                # normalize the message: strip trailing whitespace and
                # leading blank lines, refuse an empty result
                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache:
                # refresh the branch cache with the new head
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
            if use_dirstate:
                for f in removed:
                    self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr, lock, wlock
918 918
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            # walking a committed changeset: match against its manifest
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was requested but not in the manifest
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            # no node: delegate to the dirstate walker
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
960 960
    def status(self, node1=None, node2=None, files=[], match=util.always,
               list_ignored=False, list_clean=False, list_unknown=True):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean)
        """

        # full content comparison of a file against its node1 version;
        # used only when size/flags alone cannot decide
        def fcmp(fn, getnode):
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        # manifest of `node` restricted to files accepted by `match`
        def mfmatches(node):
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        # "compareworking" == comparing the working dir with its first
        # dirstate parent, which lets us answer from the dirstate alone
        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean,
                                                    list_unknown)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    mexec = lambda f: 'x' in ctx.fileflags(f)
                    mlink = lambda f: 'l' in ctx.fileflags(f)
                    is_exec = util.execfunc(self.root, mexec)
                    is_link = util.linkfunc(self.root, mlink)
                    def flags(f):
                        return is_link(f) and 'l' or is_exec(f) and 'x' or ''
                    for f in lookup:
                        if (f not in ctx or flags(f) != ctx.fileflags(f)
                            or ctx[f].cmp(self.wread(f))):
                            modified.append(f)
                        else:
                            # content identical: dirstate entry was stale
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        wlock = None
                        try:
                            try:
                                # non-blocking: fixing up the dirstate is
                                # an optimization, not a requirement
                                wlock = self.wlock(False)
                            except lock.LockException:
                                pass
                            if wlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del wlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    # empty node means "compare contents on demand"
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if fn in mf1:
                    # differing flags, or differing nodes where an empty
                    # mf2 node forces a real content comparison
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    # consume mf1 so the leftovers are the removals
                    del mf1[fn]
                else:
                    added.append(fn)

            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1080 1080
1081 1081 def add(self, list):
1082 1082 wlock = self.wlock()
1083 1083 try:
1084 1084 rejected = []
1085 1085 for f in list:
1086 1086 p = self.wjoin(f)
1087 1087 try:
1088 1088 st = os.lstat(p)
1089 1089 except:
1090 1090 self.ui.warn(_("%s does not exist!\n") % f)
1091 1091 rejected.append(f)
1092 1092 continue
1093 1093 if st.st_size > 10000000:
1094 1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 1095 " performance problems\n"
1096 1096 "(use 'hg revert %s' to unadd the file)\n")
1097 1097 % (f, f))
1098 1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 1099 self.ui.warn(_("%s not added: only files and symlinks "
1100 1100 "supported currently\n") % f)
1101 1101 rejected.append(p)
1102 1102 elif self.dirstate[f] in 'amn':
1103 1103 self.ui.warn(_("%s already tracked!\n") % f)
1104 1104 elif self.dirstate[f] == 'r':
1105 1105 self.dirstate.normallookup(f)
1106 1106 else:
1107 1107 self.dirstate.add(f)
1108 1108 return rejected
1109 1109 finally:
1110 1110 del wlock
1111 1111
1112 1112 def forget(self, list):
1113 1113 wlock = self.wlock()
1114 1114 try:
1115 1115 for f in list:
1116 1116 if self.dirstate[f] != 'a':
1117 1117 self.ui.warn(_("%s not added!\n") % f)
1118 1118 else:
1119 1119 self.dirstate.forget(f)
1120 1120 finally:
1121 1121 del wlock
1122 1122
    def remove(self, list, unlink=False):
        """Mark the files in `list` as removed in the dirstate.

        With unlink=True the files are deleted from the working
        directory first (already-missing files are tolerated).
        """
        wlock = None
        try:
            if unlink:
                # delete before taking the wlock so we do not hold the
                # lock during filesystem I/O on every file
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        # already gone is fine; anything else is real
                        if inst.errno != errno.ENOENT:
                            raise
            wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    # deletion failed or the file reappeared: skip it
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    # never committed: just drop the pending add
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            del wlock
1145 1145
    def undelete(self, list):
        """Restore files marked 'r' (removed) from their parent revision
        and mark them normal again in the dirstate."""
        wlock = None
        try:
            # manifests of the dirstate parents; contents are restored
            # from whichever parent still carries the file
            manifests = [self.manifest.read(self.changelog.read(p)[0])
                         for p in self.dirstate.parents() if p != nullid]
            wlock = self.wlock()
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn("%s not removed!\n" % f)
                else:
                    # py2 and/or ternary: first parent's copy if present,
                    # otherwise the second parent's
                    m = f in manifests[0] and manifests[0] or manifests[1]
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            del wlock
1162 1162
1163 1163 def copy(self, source, dest):
1164 1164 wlock = None
1165 1165 try:
1166 1166 p = self.wjoin(dest)
1167 1167 if not (os.path.exists(p) or os.path.islink(p)):
1168 1168 self.ui.warn(_("%s does not exist!\n") % dest)
1169 1169 elif not (os.path.isfile(p) or os.path.islink(p)):
1170 1170 self.ui.warn(_("copy failed: %s is not a file or a "
1171 1171 "symbolic link\n") % dest)
1172 1172 else:
1173 1173 wlock = self.wlock()
1174 1174 if dest not in self.dirstate:
1175 1175 self.dirstate.add(dest)
1176 1176 self.dirstate.copy(source, dest)
1177 1177 finally:
1178 1178 del wlock
1179 1179
1180 1180 def heads(self, start=None):
1181 1181 heads = self.changelog.heads(start)
1182 1182 # sort the output in rev descending order
1183 1183 heads = [(-self.changelog.rev(h), h) for h in heads]
1184 1184 heads.sort()
1185 1185 return [n for (r, n) in heads]
1186 1186
    def branchheads(self, branch, start=None):
        """Return the head nodes of the named branch, optionally
        restricted to descendants of `start`."""
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        # util.set is the file's pre-2.4 compat shim for the builtin set
        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            # keep only heads reachable from start
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1232 1232
1233 1233 def branches(self, nodes):
1234 1234 if not nodes:
1235 1235 nodes = [self.changelog.tip()]
1236 1236 b = []
1237 1237 for n in nodes:
1238 1238 t = n
1239 1239 while 1:
1240 1240 p = self.changelog.parents(n)
1241 1241 if p[1] != nullid or p[0] == nullid:
1242 1242 b.append((t, n, p[0], p[1]))
1243 1243 break
1244 1244 n = p[0]
1245 1245 return b
1246 1246
1247 1247 def between(self, pairs):
1248 1248 r = []
1249 1249
1250 1250 for top, bottom in pairs:
1251 1251 n, l, i = top, [], 0
1252 1252 f = 1
1253 1253
1254 1254 while n != bottom:
1255 1255 p = self.changelog.parents(n)[0]
1256 1256 if i == f:
1257 1257 l.append(n)
1258 1258 f = f * 2
1259 1259 n = p
1260 1260 i += 1
1261 1261
1262 1262 r.append(l)
1263 1263
1264 1264 return r
1265 1265
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}      # earliest-unknown nodes, the function's result
        seen = {}       # branch heads already examined
        seenbranch = {} # branch tuples already scheduled for search
        # NOTE(review): '== None' rather than 'is None'; behavior is the
        # same here since a dict never compares equal to None
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # we are empty: everything the remote has is missing here
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue unknown parents for the next request batch
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                # ask the remote in batches of 10 branches at a time
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most one node: p is the earliest unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                          short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1406 1406
1407 1407 def findoutgoing(self, remote, base=None, heads=None, force=False):
1408 1408 """Return list of nodes that are roots of subsets not in remote
1409 1409
1410 1410 If base dict is specified, assume that these nodes and their parents
1411 1411 exist on the remote side.
1412 1412 If a list of heads is specified, return only nodes which are heads
1413 1413 or ancestors of these heads, and return a second element which
1414 1414 contains all remote heads which get new children.
1415 1415 """
1416 1416 if base == None:
1417 1417 base = {}
1418 1418 self.findincoming(remote, base, heads, force=force)
1419 1419
1420 1420 self.ui.debug(_("common changesets up to ")
1421 1421 + " ".join(map(short, base.keys())) + "\n")
1422 1422
1423 1423 remain = dict.fromkeys(self.changelog.nodemap)
1424 1424
1425 1425 # prune everything remote has from the tree
1426 1426 del remain[nullid]
1427 1427 remove = base.keys()
1428 1428 while remove:
1429 1429 n = remove.pop(0)
1430 1430 if n in remain:
1431 1431 del remain[n]
1432 1432 for p in self.changelog.parents(n):
1433 1433 remove.append(p)
1434 1434
1435 1435 # find every node whose parents have been pruned
1436 1436 subset = []
1437 1437 # find every remote head that will get new children
1438 1438 updated_heads = {}
1439 1439 for n in remain:
1440 1440 p1, p2 = self.changelog.parents(n)
1441 1441 if p1 not in remain and p2 not in remain:
1442 1442 subset.append(n)
1443 1443 if heads:
1444 1444 if p1 in heads:
1445 1445 updated_heads[p1] = True
1446 1446 if p2 in heads:
1447 1447 updated_heads[p2] = True
1448 1448
1449 1449 # this is the set of all roots we have to push
1450 1450 if heads:
1451 1451 return subset, updated_heads.keys()
1452 1452 else:
1453 1453 return subset
1454 1454
1455 1455 def pull(self, remote, heads=None, force=False):
1456 1456 lock = self.lock()
1457 1457 try:
1458 1458 fetch = self.findincoming(remote, heads=heads, force=force)
1459 1459 if fetch == [nullid]:
1460 1460 self.ui.status(_("requesting all changes\n"))
1461 1461
1462 1462 if not fetch:
1463 1463 self.ui.status(_("no changes found\n"))
1464 1464 return 0
1465 1465
1466 1466 if heads is None:
1467 1467 cg = remote.changegroup(fetch, 'pull')
1468 1468 else:
1469 1469 if 'changegroupsubset' not in remote.capabilities:
1470 1470 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1471 1471 cg = remote.changegroupsubset(fetch, heads, 'pull')
1472 1472 return self.addchangegroup(cg, 'pull', remote.url())
1473 1473 finally:
1474 1474 del lock
1475 1475
1476 1476 def push(self, remote, force=False, revs=None):
1477 1477 # there are two ways to push to remote repo:
1478 1478 #
1479 1479 # addchangegroup assumes local user can lock remote
1480 1480 # repo (local filesystem, old ssh servers).
1481 1481 #
1482 1482 # unbundle assumes local user cannot lock remote repo (new ssh
1483 1483 # servers, http servers).
1484 1484
1485 1485 if remote.capable('unbundle'):
1486 1486 return self.push_unbundle(remote, force, revs)
1487 1487 return self.push_addchangegroup(remote, force, revs)
1488 1488
    def prepush(self, remote, force, revs):
        """Compute the changegroup needed to push to `remote`.

        Returns (changegroup, remote_heads) on success, or (None,
        status) when there is nothing to do: status 1 means no changes
        found, status 0 means the push was refused because it would
        create new remote heads.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # empty remote: any push is fine
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                # simulate the post-push remote head set
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # no outgoing head descends from r: r stays
                            newheads.append(r)
                    else:
                        # unknown remote head: it remains a head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1544 1544
1545 1545 def push_addchangegroup(self, remote, force, revs):
1546 1546 lock = remote.lock()
1547 1547 try:
1548 1548 ret = self.prepush(remote, force, revs)
1549 1549 if ret[0] is not None:
1550 1550 cg, remote_heads = ret
1551 1551 return remote.addchangegroup(cg, 'push', self.url())
1552 1552 return ret[1]
1553 1553 finally:
1554 1554 del lock
1555 1555
1556 1556 def push_unbundle(self, remote, force, revs):
1557 1557 # local repo finds heads on server, finds out what revs it
1558 1558 # must push. once revs transferred, if server finds it has
1559 1559 # different heads (someone else won commit/push race), server
1560 1560 # aborts.
1561 1561
1562 1562 ret = self.prepush(remote, force, revs)
1563 1563 if ret[0] is not None:
1564 1564 cg, remote_heads = ret
1565 1565 if force: remote_heads = ['force']
1566 1566 return remote.unbundle(cg, remote_heads, 'push')
1567 1567 return ret[1]
1568 1568
1569 1569 def changegroupinfo(self, nodes, source):
1570 1570 if self.ui.verbose or source == 'bundle':
1571 1571 self.ui.status(_("%d changesets found\n") % len(nodes))
1572 1572 if self.ui.debugflag:
1573 1573 self.ui.debug(_("List of changesets:\n"))
1574 1574 for node in nodes:
1575 1575 self.ui.debug("%s\n" % hex(node))
1576 1576
1577 1577 def changegroupsubset(self, bases, heads, source, extranodes=None):
1578 1578 """This function generates a changegroup consisting of all the nodes
1579 1579 that are descendents of any of the bases, and ancestors of any of
1580 1580 the heads.
1581 1581
1582 1582 It is fairly complex as determining which filenodes and which
1583 1583 manifest nodes need to be included for the changeset to be complete
1584 1584 is non-trivial.
1585 1585
1586 1586 Another wrinkle is doing the reverse, figuring out which changeset in
1587 1587 the changegroup a particular filenode or manifestnode belongs to.
1588 1588
1589 1589 The caller can specify some nodes that must be included in the
1590 1590 changegroup using the extranodes argument. It should be a dict
1591 1591 where the keys are the filenames (or 1 for the manifest), and the
1592 1592 values are lists of (node, linknode) tuples, where node is a wanted
1593 1593 node and linknode is the changelog node that should be transmitted as
1594 1594 the linkrev.
1595 1595 """
1596 1596
1597 1597 self.hook('preoutgoing', throw=True, source=source)
1598 1598
1599 1599 # Set up some initial variables
1600 1600 # Make it easy to refer to self.changelog
1601 1601 cl = self.changelog
1602 1602 # msng is short for missing - compute the list of changesets in this
1603 1603 # changegroup.
1604 1604 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1605 1605 self.changegroupinfo(msng_cl_lst, source)
1606 1606 # Some bases may turn out to be superfluous, and some heads may be
1607 1607 # too. nodesbetween will return the minimal set of bases and heads
1608 1608 # necessary to re-create the changegroup.
1609 1609
1610 1610 # Known heads are the list of heads that it is assumed the recipient
1611 1611 # of this changegroup will know about.
1612 1612 knownheads = {}
1613 1613 # We assume that all parents of bases are known heads.
1614 1614 for n in bases:
1615 1615 for p in cl.parents(n):
1616 1616 if p != nullid:
1617 1617 knownheads[p] = 1
1618 1618 knownheads = knownheads.keys()
1619 1619 if knownheads:
1620 1620 # Now that we know what heads are known, we can compute which
1621 1621 # changesets are known. The recipient must know about all
1622 1622 # changesets required to reach the known heads from the null
1623 1623 # changeset.
1624 1624 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1625 1625 junk = None
1626 1626 # Transform the list into an ersatz set.
1627 1627 has_cl_set = dict.fromkeys(has_cl_set)
1628 1628 else:
1629 1629 # If there were no known heads, the recipient cannot be assumed to
1630 1630 # know about any changesets.
1631 1631 has_cl_set = {}
1632 1632
1633 1633 # Make it easy to refer to self.manifest
1634 1634 mnfst = self.manifest
1635 1635 # We don't know which manifests are missing yet
1636 1636 msng_mnfst_set = {}
1637 1637 # Nor do we know which filenodes are missing.
1638 1638 msng_filenode_set = {}
1639 1639
1640 1640 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1641 1641 junk = None
1642 1642
1643 1643 # A changeset always belongs to itself, so the changenode lookup
1644 1644 # function for a changenode is identity.
1645 1645 def identity(x):
1646 1646 return x
1647 1647
1648 1648 # A function generating function. Sets up an environment for the
1649 1649 # inner function.
1650 1650 def cmp_by_rev_func(revlog):
1651 1651 # Compare two nodes by their revision number in the environment's
1652 1652 # revision history. Since the revision number both represents the
1653 1653 # most efficient order to read the nodes in, and represents a
1654 1654 # topological sorting of the nodes, this function is often useful.
1655 1655 def cmp_by_rev(a, b):
1656 1656 return cmp(revlog.rev(a), revlog.rev(b))
1657 1657 return cmp_by_rev
1658 1658
1659 1659 # If we determine that a particular file or manifest node must be a
1660 1660 # node that the recipient of the changegroup will already have, we can
1661 1661 # also assume the recipient will have all the parents. This function
1662 1662 # prunes them from the set of missing nodes.
1663 1663 def prune_parents(revlog, hasset, msngset):
1664 1664 haslst = hasset.keys()
1665 1665 haslst.sort(cmp_by_rev_func(revlog))
1666 1666 for node in haslst:
1667 1667 parentlst = [p for p in revlog.parents(node) if p != nullid]
1668 1668 while parentlst:
1669 1669 n = parentlst.pop()
1670 1670 if n not in hasset:
1671 1671 hasset[n] = 1
1672 1672 p = [p for p in revlog.parents(n) if p != nullid]
1673 1673 parentlst.extend(p)
1674 1674 for n in hasset:
1675 1675 msngset.pop(n, None)
1676 1676
1677 1677 # This is a function generating function used to set up an environment
1678 1678 # for the inner function to execute in.
1679 1679 def manifest_and_file_collector(changedfileset):
1680 1680 # This is an information gathering function that gathers
1681 1681 # information from each changeset node that goes out as part of
1682 1682 # the changegroup. The information gathered is a list of which
1683 1683 # manifest nodes are potentially required (the recipient may
1684 1684 # already have them) and total list of all files which were
1685 1685 # changed in any changeset in the changegroup.
1686 1686 #
1687 1687 # We also remember the first changenode we saw any manifest
1688 1688 # referenced by so we can later determine which changenode 'owns'
1689 1689 # the manifest.
1690 1690 def collect_manifests_and_files(clnode):
1691 1691 c = cl.read(clnode)
1692 1692 for f in c[3]:
1693 1693 # This is to make sure we only have one instance of each
1694 1694 # filename string for each filename.
1695 1695 changedfileset.setdefault(f, f)
1696 1696 msng_mnfst_set.setdefault(c[0], clnode)
1697 1697 return collect_manifests_and_files
1698 1698
1699 1699 # Figure out which manifest nodes (of the ones we think might be part
1700 1700 # of the changegroup) the recipient must know about and remove them
1701 1701 # from the changegroup.
1702 1702 def prune_manifests():
1703 1703 has_mnfst_set = {}
1704 1704 for n in msng_mnfst_set:
1705 1705 # If a 'missing' manifest thinks it belongs to a changenode
1706 1706 # the recipient is assumed to have, obviously the recipient
1707 1707 # must have that manifest.
1708 1708 linknode = cl.node(mnfst.linkrev(n))
1709 1709 if linknode in has_cl_set:
1710 1710 has_mnfst_set[n] = 1
1711 1711 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1712 1712
1713 1713 # Use the information collected in collect_manifests_and_files to say
1714 1714 # which changenode any manifestnode belongs to.
1715 1715 def lookup_manifest_link(mnfstnode):
1716 1716 return msng_mnfst_set[mnfstnode]
1717 1717
1718 1718 # A function generating function that sets up the initial environment
1719 1719 # the inner function.
1720 1720 def filenode_collector(changedfiles):
1721 1721 next_rev = [0]
1722 1722 # This gathers information from each manifestnode included in the
1723 1723 # changegroup about which filenodes the manifest node references
1724 1724 # so we can include those in the changegroup too.
1725 1725 #
1726 1726 # It also remembers which changenode each filenode belongs to. It
1727 1727 # does this by assuming the a filenode belongs to the changenode
1728 1728 # the first manifest that references it belongs to.
1729 1729 def collect_msng_filenodes(mnfstnode):
1730 1730 r = mnfst.rev(mnfstnode)
1731 1731 if r == next_rev[0]:
1732 1732 # If the last rev we looked at was the one just previous,
1733 1733 # we only need to see a diff.
1734 1734 deltamf = mnfst.readdelta(mnfstnode)
1735 1735 # For each line in the delta
1736 1736 for f, fnode in deltamf.items():
1737 1737 f = changedfiles.get(f, None)
1738 1738 # And if the file is in the list of files we care
1739 1739 # about.
1740 1740 if f is not None:
1741 1741 # Get the changenode this manifest belongs to
1742 1742 clnode = msng_mnfst_set[mnfstnode]
1743 1743 # Create the set of filenodes for the file if
1744 1744 # there isn't one already.
1745 1745 ndset = msng_filenode_set.setdefault(f, {})
1746 1746 # And set the filenode's changelog node to the
1747 1747 # manifest's if it hasn't been set already.
1748 1748 ndset.setdefault(fnode, clnode)
1749 1749 else:
1750 1750 # Otherwise we need a full manifest.
1751 1751 m = mnfst.read(mnfstnode)
1752 1752 # For every file in we care about.
1753 1753 for f in changedfiles:
1754 1754 fnode = m.get(f, None)
1755 1755 # If it's in the manifest
1756 1756 if fnode is not None:
1757 1757 # See comments above.
1758 1758 clnode = msng_mnfst_set[mnfstnode]
1759 1759 ndset = msng_filenode_set.setdefault(f, {})
1760 1760 ndset.setdefault(fnode, clnode)
1761 1761 # Remember the revision we hope to see next.
1762 1762 next_rev[0] = r + 1
1763 1763 return collect_msng_filenodes
1764 1764
1765 1765 # We have a list of filenodes we think we need for a file, lets remove
1766 1766 # all those we now the recipient must have.
1767 1767 def prune_filenodes(f, filerevlog):
1768 1768 msngset = msng_filenode_set[f]
1769 1769 hasset = {}
1770 1770 # If a 'missing' filenode thinks it belongs to a changenode we
1771 1771 # assume the recipient must have, then the recipient must have
1772 1772 # that filenode.
1773 1773 for n in msngset:
1774 1774 clnode = cl.node(filerevlog.linkrev(n))
1775 1775 if clnode in has_cl_set:
1776 1776 hasset[n] = 1
1777 1777 prune_parents(filerevlog, hasset, msngset)
1778 1778
1779 1779 # A function generator function that sets up the a context for the
1780 1780 # inner function.
1781 1781 def lookup_filenode_link_func(fname):
1782 1782 msngset = msng_filenode_set[fname]
1783 1783 # Lookup the changenode the filenode belongs to.
1784 1784 def lookup_filenode_link(fnode):
1785 1785 return msngset[fnode]
1786 1786 return lookup_filenode_link
1787 1787
1788 1788 # Add the nodes that were explicitly requested.
1789 1789 def add_extra_nodes(name, nodes):
1790 1790 if not extranodes or name not in extranodes:
1791 1791 return
1792 1792
1793 1793 for node, linknode in extranodes[name]:
1794 1794 if node not in nodes:
1795 1795 nodes[node] = linknode
1796 1796
1797 1797 # Now that we have all theses utility functions to help out and
1798 1798 # logically divide up the task, generate the group.
1799 1799 def gengroup():
1800 1800 # The set of changed files starts empty.
1801 1801 changedfiles = {}
1802 1802 # Create a changenode group generator that will call our functions
1803 1803 # back to lookup the owning changenode and collect information.
1804 1804 group = cl.group(msng_cl_lst, identity,
1805 1805 manifest_and_file_collector(changedfiles))
1806 1806 for chnk in group:
1807 1807 yield chnk
1808 1808
1809 1809 # The list of manifests has been collected by the generator
1810 1810 # calling our functions back.
1811 1811 prune_manifests()
1812 1812 add_extra_nodes(1, msng_mnfst_set)
1813 1813 msng_mnfst_lst = msng_mnfst_set.keys()
1814 1814 # Sort the manifestnodes by revision number.
1815 1815 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1816 1816 # Create a generator for the manifestnodes that calls our lookup
1817 1817 # and data collection functions back.
1818 1818 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1819 1819 filenode_collector(changedfiles))
1820 1820 for chnk in group:
1821 1821 yield chnk
1822 1822
1823 1823 # These are no longer needed, dereference and toss the memory for
1824 1824 # them.
1825 1825 msng_mnfst_lst = None
1826 1826 msng_mnfst_set.clear()
1827 1827
1828 1828 if extranodes:
1829 1829 for fname in extranodes:
1830 1830 if isinstance(fname, int):
1831 1831 continue
1832 1832 add_extra_nodes(fname,
1833 1833 msng_filenode_set.setdefault(fname, {}))
1834 1834 changedfiles[fname] = 1
1835 1835 changedfiles = changedfiles.keys()
1836 1836 changedfiles.sort()
1837 1837 # Go through all our files in order sorted by name.
1838 1838 for fname in changedfiles:
1839 1839 filerevlog = self.file(fname)
1840 1840 if filerevlog.count() == 0:
1841 1841 raise util.Abort(_("empty or missing revlog for %s") % fname)
1842 1842 # Toss out the filenodes that the recipient isn't really
1843 1843 # missing.
1844 1844 if fname in msng_filenode_set:
1845 1845 prune_filenodes(fname, filerevlog)
1846 1846 msng_filenode_lst = msng_filenode_set[fname].keys()
1847 1847 else:
1848 1848 msng_filenode_lst = []
1849 1849 # If any filenodes are left, generate the group for them,
1850 1850 # otherwise don't bother.
1851 1851 if len(msng_filenode_lst) > 0:
1852 1852 yield changegroup.chunkheader(len(fname))
1853 1853 yield fname
1854 1854 # Sort the filenodes by their revision #
1855 1855 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1856 1856 # Create a group generator and only pass in a changenode
1857 1857 # lookup function as we need to collect no information
1858 1858 # from filenodes.
1859 1859 group = filerevlog.group(msng_filenode_lst,
1860 1860 lookup_filenode_link_func(fname))
1861 1861 for chnk in group:
1862 1862 yield chnk
1863 1863 if fname in msng_filenode_set:
1864 1864 # Don't need this anymore, toss it to free memory.
1865 1865 del msng_filenode_set[fname]
1866 1866 # Signal that no more groups are left.
1867 1867 yield changegroup.closechunk()
1868 1868
1869 1869 if msng_cl_lst:
1870 1870 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1871 1871
1872 1872 return util.chunkbuffer(gengroup())
1873 1873
1874 1874 def changegroup(self, basenodes, source):
1875 1875 """Generate a changegroup of all nodes that we have that a recipient
1876 1876 doesn't.
1877 1877
1878 1878 This is much easier than the previous function as we can assume that
1879 1879 the recipient has any changenode we aren't sending them."""
1880 1880
1881 1881 self.hook('preoutgoing', throw=True, source=source)
1882 1882
1883 1883 cl = self.changelog
1884 1884 nodes = cl.nodesbetween(basenodes, None)[0]
1885 1885 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1886 1886 self.changegroupinfo(nodes, source)
1887 1887
1888 1888 def identity(x):
1889 1889 return x
1890 1890
1891 1891 def gennodelst(revlog):
1892 1892 for r in xrange(0, revlog.count()):
1893 1893 n = revlog.node(r)
1894 1894 if revlog.linkrev(n) in revset:
1895 1895 yield n
1896 1896
1897 1897 def changed_file_collector(changedfileset):
1898 1898 def collect_changed_files(clnode):
1899 1899 c = cl.read(clnode)
1900 1900 for fname in c[3]:
1901 1901 changedfileset[fname] = 1
1902 1902 return collect_changed_files
1903 1903
1904 1904 def lookuprevlink_func(revlog):
1905 1905 def lookuprevlink(n):
1906 1906 return cl.node(revlog.linkrev(n))
1907 1907 return lookuprevlink
1908 1908
1909 1909 def gengroup():
1910 1910 # construct a list of all changed files
1911 1911 changedfiles = {}
1912 1912
1913 1913 for chnk in cl.group(nodes, identity,
1914 1914 changed_file_collector(changedfiles)):
1915 1915 yield chnk
1916 1916 changedfiles = changedfiles.keys()
1917 1917 changedfiles.sort()
1918 1918
1919 1919 mnfst = self.manifest
1920 1920 nodeiter = gennodelst(mnfst)
1921 1921 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1922 1922 yield chnk
1923 1923
1924 1924 for fname in changedfiles:
1925 1925 filerevlog = self.file(fname)
1926 1926 if filerevlog.count() == 0:
1927 1927 raise util.Abort(_("empty or missing revlog for %s") % fname)
1928 1928 nodeiter = gennodelst(filerevlog)
1929 1929 nodeiter = list(nodeiter)
1930 1930 if nodeiter:
1931 1931 yield changegroup.chunkheader(len(fname))
1932 1932 yield fname
1933 1933 lookup = lookuprevlink_func(filerevlog)
1934 1934 for chnk in filerevlog.group(nodeiter, lookup):
1935 1935 yield chnk
1936 1936
1937 1937 yield changegroup.closechunk()
1938 1938
1939 1939 if nodes:
1940 1940 self.hook('outgoing', node=hex(nodes[0]), source=source)
1941 1941
1942 1942 return util.chunkbuffer(gengroup())
1943 1943
1944 1944 def addchangegroup(self, source, srctype, url, emptyok=False):
1945 1945 """add changegroup to repo.
1946 1946
1947 1947 return values:
1948 1948 - nothing changed or no source: 0
1949 1949 - more heads than before: 1+added heads (2..n)
1950 1950 - less heads than before: -1-removed heads (-2..-n)
1951 1951 - number of heads stays the same: 1
1952 1952 """
1953 1953 def csmap(x):
1954 1954 self.ui.debug(_("add changeset %s\n") % short(x))
1955 1955 return cl.count()
1956 1956
1957 1957 def revmap(x):
1958 1958 return cl.rev(x)
1959 1959
1960 1960 if not source:
1961 1961 return 0
1962 1962
1963 1963 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1964 1964
1965 1965 changesets = files = revisions = 0
1966 1966
1967 1967 # write changelog data to temp files so concurrent readers will not see
1968 1968 # inconsistent view
1969 1969 cl = self.changelog
1970 1970 cl.delayupdate()
1971 1971 oldheads = len(cl.heads())
1972 1972
1973 1973 tr = self.transaction()
1974 1974 try:
1975 1975 trp = weakref.proxy(tr)
1976 1976 # pull off the changeset group
1977 1977 self.ui.status(_("adding changesets\n"))
1978 1978 cor = cl.count() - 1
1979 1979 chunkiter = changegroup.chunkiter(source)
1980 1980 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1981 1981 raise util.Abort(_("received changelog group is empty"))
1982 1982 cnr = cl.count() - 1
1983 1983 changesets = cnr - cor
1984 1984
1985 1985 # pull off the manifest group
1986 1986 self.ui.status(_("adding manifests\n"))
1987 1987 chunkiter = changegroup.chunkiter(source)
1988 1988 # no need to check for empty manifest group here:
1989 1989 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1990 1990 # no new manifest will be created and the manifest group will
1991 1991 # be empty during the pull
1992 1992 self.manifest.addgroup(chunkiter, revmap, trp)
1993 1993
1994 1994 # process the files
1995 1995 self.ui.status(_("adding file changes\n"))
1996 1996 while 1:
1997 1997 f = changegroup.getchunk(source)
1998 1998 if not f:
1999 1999 break
2000 2000 self.ui.debug(_("adding %s revisions\n") % f)
2001 2001 fl = self.file(f)
2002 2002 o = fl.count()
2003 2003 chunkiter = changegroup.chunkiter(source)
2004 2004 if fl.addgroup(chunkiter, revmap, trp) is None:
2005 2005 raise util.Abort(_("received file revlog group is empty"))
2006 2006 revisions += fl.count() - o
2007 2007 files += 1
2008 2008
2009 2009 # make changelog see real files again
2010 2010 cl.finalize(trp)
2011 2011
2012 2012 newheads = len(self.changelog.heads())
2013 2013 heads = ""
2014 2014 if oldheads and newheads != oldheads:
2015 2015 heads = _(" (%+d heads)") % (newheads - oldheads)
2016 2016
2017 2017 self.ui.status(_("added %d changesets"
2018 2018 " with %d changes to %d files%s\n")
2019 2019 % (changesets, revisions, files, heads))
2020 2020
2021 2021 if changesets > 0:
2022 2022 self.hook('pretxnchangegroup', throw=True,
2023 2023 node=hex(self.changelog.node(cor+1)), source=srctype,
2024 2024 url=url)
2025 2025
2026 2026 tr.close()
2027 2027 finally:
2028 2028 del tr
2029 2029
2030 2030 if changesets > 0:
2031 2031 # forcefully update the on-disk branch cache
2032 2032 self.ui.debug(_("updating the branch cache\n"))
2033 2033 self.branchtags()
2034 2034 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2035 2035 source=srctype, url=url)
2036 2036
2037 2037 for i in xrange(cor + 1, cnr + 1):
2038 2038 self.hook("incoming", node=hex(self.changelog.node(i)),
2039 2039 source=srctype, url=url)
2040 2040
2041 2041 # never return 0 here:
2042 2042 if newheads < oldheads:
2043 2043 return newheads - oldheads - 1
2044 2044 else:
2045 2045 return newheads - oldheads + 1
2046 2046
2047 2047
2048 2048 def stream_in(self, remote):
2049 2049 fp = remote.stream_out()
2050 2050 l = fp.readline()
2051 2051 try:
2052 2052 resp = int(l)
2053 2053 except ValueError:
2054 2054 raise util.UnexpectedOutput(
2055 2055 _('Unexpected response from remote server:'), l)
2056 2056 if resp == 1:
2057 2057 raise util.Abort(_('operation forbidden by server'))
2058 2058 elif resp == 2:
2059 2059 raise util.Abort(_('locking the remote repository failed'))
2060 2060 elif resp != 0:
2061 2061 raise util.Abort(_('the server sent an unknown error code'))
2062 2062 self.ui.status(_('streaming all changes\n'))
2063 2063 l = fp.readline()
2064 2064 try:
2065 2065 total_files, total_bytes = map(int, l.split(' ', 1))
2066 2066 except ValueError, TypeError:
2067 2067 raise util.UnexpectedOutput(
2068 2068 _('Unexpected response from remote server:'), l)
2069 2069 self.ui.status(_('%d files to transfer, %s of data\n') %
2070 2070 (total_files, util.bytecount(total_bytes)))
2071 2071 start = time.time()
2072 2072 for i in xrange(total_files):
2073 2073 # XXX doesn't support '\n' or '\r' in filenames
2074 2074 l = fp.readline()
2075 2075 try:
2076 2076 name, size = l.split('\0', 1)
2077 2077 size = int(size)
2078 2078 except ValueError, TypeError:
2079 2079 raise util.UnexpectedOutput(
2080 2080 _('Unexpected response from remote server:'), l)
2081 2081 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2082 2082 ofp = self.sopener(name, 'w')
2083 2083 for chunk in util.filechunkiter(fp, limit=size):
2084 2084 ofp.write(chunk)
2085 2085 ofp.close()
2086 2086 elapsed = time.time() - start
2087 2087 if elapsed <= 0:
2088 2088 elapsed = 0.001
2089 2089 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2090 2090 (util.bytecount(total_bytes), elapsed,
2091 2091 util.bytecount(total_bytes / elapsed)))
2092 2092 self.invalidate()
2093 2093 return len(self.heads()) + 1
2094 2094
2095 2095 def clone(self, remote, heads=[], stream=False):
2096 2096 '''clone remote repository.
2097 2097
2098 2098 keyword arguments:
2099 2099 heads: list of revs to clone (forces use of pull)
2100 2100 stream: use streaming clone if possible'''
2101 2101
2102 2102 # now, all clients that can request uncompressed clones can
2103 2103 # read repo formats supported by all servers that can serve
2104 2104 # them.
2105 2105
2106 2106 # if revlog format changes, client will have to check version
2107 2107 # and format flags on "stream" capability, and use
2108 2108 # uncompressed only if compatible.
2109 2109
2110 2110 if stream and not heads and remote.capable('stream'):
2111 2111 return self.stream_in(remote)
2112 2112 return self.pull(remote, heads)
2113 2113
2114 2114 # used to avoid circular references so destructors work
2115 2115 def aftertrans(files):
2116 2116 renamefiles = [tuple(t) for t in files]
2117 2117 def a():
2118 2118 for src, dest in renamefiles:
2119 2119 util.rename(src, dest)
2120 2120 return a
2121 2121
2122 2122 def instance(ui, path, create):
2123 2123 return localrepository(ui, util.drop_scheme('file', path), create)
2124 2124
2125 2125 def islocal(path):
2126 2126 return True
@@ -1,213 +1,214 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from revlog import *
8 from node import bin, hex, nullid
9 from revlog import revlog, RevlogError
9 10 from i18n import _
10 11 import array, bisect, struct, mdiff
11 12
12 13 class manifestdict(dict):
13 14 def __init__(self, mapping=None, flags=None):
14 15 if mapping is None: mapping = {}
15 16 if flags is None: flags = {}
16 17 dict.__init__(self, mapping)
17 18 self._flags = flags
18 19 def flags(self, f):
19 20 return self._flags.get(f, "")
20 21 def execf(self, f):
21 22 "test for executable in manifest flags"
22 23 return "x" in self.flags(f)
23 24 def linkf(self, f):
24 25 "test for symlink in manifest flags"
25 26 return "l" in self.flags(f)
26 27 def set(self, f, execf=False, linkf=False):
27 28 if linkf: self._flags[f] = "l"
28 29 elif execf: self._flags[f] = "x"
29 30 else: self._flags[f] = ""
30 31 def copy(self):
31 32 return manifestdict(dict.copy(self), dict.copy(self._flags))
32 33
33 34 class manifest(revlog):
34 35 def __init__(self, opener):
35 36 self.mapcache = None
36 37 self.listcache = None
37 38 revlog.__init__(self, opener, "00manifest.i")
38 39
39 40 def parse(self, lines):
40 41 mfdict = manifestdict()
41 42 fdict = mfdict._flags
42 43 for l in lines.splitlines():
43 44 f, n = l.split('\0')
44 45 if len(n) > 40:
45 46 fdict[f] = n[40:]
46 47 mfdict[f] = bin(n[:40])
47 48 else:
48 49 mfdict[f] = bin(n)
49 50 return mfdict
50 51
51 52 def readdelta(self, node):
52 53 return self.parse(mdiff.patchtext(self.delta(node)))
53 54
54 55 def read(self, node):
55 56 if node == nullid: return manifestdict() # don't upset local cache
56 57 if self.mapcache and self.mapcache[0] == node:
57 58 return self.mapcache[1]
58 59 text = self.revision(node)
59 60 self.listcache = array.array('c', text)
60 61 mapping = self.parse(text)
61 62 self.mapcache = (node, mapping)
62 63 return mapping
63 64
64 65 def _search(self, m, s, lo=0, hi=None):
65 66 '''return a tuple (start, end) that says where to find s within m.
66 67
67 68 If the string is found m[start:end] are the line containing
68 69 that string. If start == end the string was not found and
69 70 they indicate the proper sorted insertion point. This was
70 71 taken from bisect_left, and modified to find line start/end as
71 72 it goes along.
72 73
73 74 m should be a buffer or a string
74 75 s is a string'''
75 76 def advance(i, c):
76 77 while i < lenm and m[i] != c:
77 78 i += 1
78 79 return i
79 80 lenm = len(m)
80 81 if not hi:
81 82 hi = lenm
82 83 while lo < hi:
83 84 mid = (lo + hi) // 2
84 85 start = mid
85 86 while start > 0 and m[start-1] != '\n':
86 87 start -= 1
87 88 end = advance(start, '\0')
88 89 if m[start:end] < s:
89 90 # we know that after the null there are 40 bytes of sha1
90 91 # this translates to the bisect lo = mid + 1
91 92 lo = advance(end + 40, '\n') + 1
92 93 else:
93 94 # this translates to the bisect hi = mid
94 95 hi = start
95 96 end = advance(lo, '\0')
96 97 found = m[lo:end]
97 98 if cmp(s, found) == 0:
98 99 # we know that after the null there are 40 bytes of sha1
99 100 end = advance(end + 40, '\n')
100 101 return (lo, end+1)
101 102 else:
102 103 return (lo, lo)
103 104
104 105 def find(self, node, f):
105 106 '''look up entry for a single file efficiently.
106 107 return (node, flags) pair if found, (None, None) if not.'''
107 108 if self.mapcache and node == self.mapcache[0]:
108 109 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
109 110 text = self.revision(node)
110 111 start, end = self._search(text, f)
111 112 if start == end:
112 113 return None, None
113 114 l = text[start:end]
114 115 f, n = l.split('\0')
115 116 return bin(n[:40]), n[40:-1]
116 117
117 118 def add(self, map, transaction, link, p1=None, p2=None,
118 119 changed=None):
119 120 # apply the changes collected during the bisect loop to our addlist
120 121 # return a delta suitable for addrevision
121 122 def addlistdelta(addlist, x):
122 123 # start from the bottom up
123 124 # so changes to the offsets don't mess things up.
124 125 i = len(x)
125 126 while i > 0:
126 127 i -= 1
127 128 start = x[i][0]
128 129 end = x[i][1]
129 130 if x[i][2]:
130 131 addlist[start:end] = array.array('c', x[i][2])
131 132 else:
132 133 del addlist[start:end]
133 134 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
134 135 for d in x ])
135 136
136 137 def checkforbidden(f):
137 138 if '\n' in f or '\r' in f:
138 139 raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
139 140
140 141 # if we're using the listcache, make sure it is valid and
141 142 # parented by the same node we're diffing against
142 143 if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
143 144 files = map.keys()
144 145 files.sort()
145 146
146 147 for f in files:
147 148 checkforbidden(f)
148 149
149 150 # if this is changed to support newlines in filenames,
150 151 # be sure to check the templates/ dir again (especially *-raw.tmpl)
151 152 text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
152 153 for f in files]
153 154 self.listcache = array.array('c', "".join(text))
154 155 cachedelta = None
155 156 else:
156 157 addlist = self.listcache
157 158
158 159 for f in changed[0]:
159 160 checkforbidden(f)
160 161 # combine the changed lists into one list for sorting
161 162 work = [[x, 0] for x in changed[0]]
162 163 work[len(work):] = [[x, 1] for x in changed[1]]
163 164 work.sort()
164 165
165 166 delta = []
166 167 dstart = None
167 168 dend = None
168 169 dline = [""]
169 170 start = 0
170 171 # zero copy representation of addlist as a buffer
171 172 addbuf = buffer(addlist)
172 173
173 174 # start with a readonly loop that finds the offset of
174 175 # each line and creates the deltas
175 176 for w in work:
176 177 f = w[0]
177 178 # bs will either be the index of the item or the insert point
178 179 start, end = self._search(addbuf, f, start)
179 180 if w[1] == 0:
180 181 l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
181 182 else:
182 183 l = ""
183 184 if start == end and w[1] == 1:
184 185 # item we want to delete was not found, error out
185 186 raise AssertionError(
186 187 _("failed to remove %s from manifest") % f)
187 188 if dstart != None and dstart <= start and dend >= start:
188 189 if dend < end:
189 190 dend = end
190 191 if l:
191 192 dline.append(l)
192 193 else:
193 194 if dstart != None:
194 195 delta.append([dstart, dend, "".join(dline)])
195 196 dstart = start
196 197 dend = end
197 198 dline = [l]
198 199
199 200 if dstart != None:
200 201 delta.append([dstart, dend, "".join(dline)])
201 202 # apply the delta to the addlist, and get a delta for addrevision
202 203 cachedelta = addlistdelta(addlist, delta)
203 204
204 205 # the delta is only valid if we've been processing the tip revision
205 206 if self.mapcache[0] != self.tip():
206 207 cachedelta = None
207 208 self.listcache = addlist
208 209
209 210 n = self.addrevision(buffer(self.listcache), transaction, link,
210 211 p1, p2, cachedelta)
211 212 self.mapcache = (n, map)
212 213
213 214 return n
@@ -1,632 +1,632 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullid, nullrev
9 9 from i18n import _
10 10 import errno, util, os, heapq, filemerge
11 11
12 12 def checkunknown(wctx, mctx):
13 13 "check for collisions between unknown files and files in mctx"
14 14 man = mctx.manifest()
15 15 for f in wctx.unknown():
16 16 if f in man:
17 17 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
18 18 raise util.Abort(_("untracked file in working directory differs"
19 19 " from file in requested revision: '%s'")
20 20 % f)
21 21
22 22 def checkcollision(mctx):
23 23 "check for case folding collisions in the destination context"
24 24 folded = {}
25 25 for fn in mctx.manifest():
26 26 fold = fn.lower()
27 27 if fold in folded:
28 28 raise util.Abort(_("case-folding collision between %s and %s")
29 29 % (fn, folded[fold]))
30 30 folded[fold] = fn
31 31
32 32 def forgetremoved(wctx, mctx):
33 33 """
34 34 Forget removed files
35 35
36 36 If we're jumping between revisions (as opposed to merging), and if
37 37 neither the working directory nor the target rev has the file,
38 38 then we need to remove it from the dirstate, to prevent the
39 39 dirstate from listing the file when it is no longer in the
40 40 manifest.
41 41 """
42 42
43 43 action = []
44 44 man = mctx.manifest()
45 45 for f in wctx.deleted() + wctx.removed():
46 46 if f not in man:
47 47 action.append((f, "f"))
48 48
49 49 return action
50 50
51 51 def findcopies(repo, m1, m2, ma, limit):
52 52 """
53 53 Find moves and copies between m1 and m2 back to limit linkrev
54 54 """
55 55
56 56 def nonoverlap(d1, d2, d3):
57 57 "Return list of elements in d1 not in d2 or d3"
58 58 l = [d for d in d1 if d not in d3 and d not in d2]
59 59 l.sort()
60 60 return l
61 61
62 62 def dirname(f):
63 63 s = f.rfind("/")
64 64 if s == -1:
65 65 return ""
66 66 return f[:s]
67 67
68 68 def dirs(files):
69 69 d = {}
70 70 for f in files:
71 71 f = dirname(f)
72 72 while f not in d:
73 73 d[f] = True
74 74 f = dirname(f)
75 75 return d
76 76
77 77 wctx = repo.workingctx()
78 78
79 79 def makectx(f, n):
80 80 if len(n) == 20:
81 81 return repo.filectx(f, fileid=n)
82 82 return wctx.filectx(f)
83 83 ctx = util.cachefunc(makectx)
84 84
85 85 def findold(fctx):
86 86 "find files that path was copied from, back to linkrev limit"
87 87 old = {}
88 88 seen = {}
89 89 orig = fctx.path()
90 90 visit = [fctx]
91 91 while visit:
92 92 fc = visit.pop()
93 93 s = str(fc)
94 94 if s in seen:
95 95 continue
96 96 seen[s] = 1
97 97 if fc.path() != orig and fc.path() not in old:
98 98 old[fc.path()] = 1
99 99 if fc.rev() < limit:
100 100 continue
101 101 visit += fc.parents()
102 102
103 103 old = old.keys()
104 104 old.sort()
105 105 return old
106 106
107 107 copy = {}
108 108 fullcopy = {}
109 109 diverge = {}
110 110
111 111 def checkcopies(c, man, aman):
112 112 '''check possible copies for filectx c'''
113 113 for of in findold(c):
114 114 fullcopy[c.path()] = of # remember for dir rename detection
115 115 if of not in man: # original file not in other manifest?
116 116 if of in ma:
117 117 diverge.setdefault(of, []).append(c.path())
118 118 continue
119 119 # if the original file is unchanged on the other branch,
120 120 # no merge needed
121 121 if man[of] == aman.get(of):
122 122 continue
123 123 c2 = ctx(of, man[of])
124 124 ca = c.ancestor(c2)
125 125 if not ca: # unrelated?
126 126 continue
127 127 # named changed on only one side?
128 128 if ca.path() == c.path() or ca.path() == c2.path():
129 129 if c == ca and c2 == ca: # no merge needed, ignore copy
130 130 continue
131 131 copy[c.path()] = of
132 132
133 133 if not repo.ui.configbool("merge", "followcopies", True):
134 134 return {}, {}
135 135
136 136 # avoid silly behavior for update from empty dir
137 137 if not m1 or not m2 or not ma:
138 138 return {}, {}
139 139
140 140 repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
141 141
142 142 u1 = nonoverlap(m1, m2, ma)
143 143 u2 = nonoverlap(m2, m1, ma)
144 144
145 145 if u1:
146 146 repo.ui.debug(_(" unmatched files in local:\n %s\n")
147 147 % "\n ".join(u1))
148 148 if u2:
149 149 repo.ui.debug(_(" unmatched files in other:\n %s\n")
150 150 % "\n ".join(u2))
151 151
152 152 for f in u1:
153 153 checkcopies(ctx(f, m1[f]), m2, ma)
154 154
155 155 for f in u2:
156 156 checkcopies(ctx(f, m2[f]), m1, ma)
157 157
158 158 diverge2 = {}
159 159 for of, fl in diverge.items():
160 160 if len(fl) == 1:
161 161 del diverge[of] # not actually divergent
162 162 else:
163 163 diverge2.update(dict.fromkeys(fl)) # reverse map for below
164 164
165 165 if fullcopy:
166 166 repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
167 167 for f in fullcopy:
168 168 note = ""
169 169 if f in copy: note += "*"
170 170 if f in diverge2: note += "!"
171 171 repo.ui.debug(_(" %s -> %s %s\n") % (f, fullcopy[f], note))
172 172
173 173 del diverge2
174 174
175 175 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
176 176 return copy, diverge
177 177
178 178 repo.ui.debug(_(" checking for directory renames\n"))
179 179
180 180 # generate a directory move map
181 181 d1, d2 = dirs(m1), dirs(m2)
182 182 invalid = {}
183 183 dirmove = {}
184 184
185 185 # examine each file copy for a potential directory move, which is
186 186 # when all the files in a directory are moved to a new directory
187 187 for dst, src in fullcopy.items():
188 188 dsrc, ddst = dirname(src), dirname(dst)
189 189 if dsrc in invalid:
190 190 # already seen to be uninteresting
191 191 continue
192 192 elif dsrc in d1 and ddst in d1:
193 193 # directory wasn't entirely moved locally
194 194 invalid[dsrc] = True
195 195 elif dsrc in d2 and ddst in d2:
196 196 # directory wasn't entirely moved remotely
197 197 invalid[dsrc] = True
198 198 elif dsrc in dirmove and dirmove[dsrc] != ddst:
199 199 # files from the same directory moved to two different places
200 200 invalid[dsrc] = True
201 201 else:
202 202 # looks good so far
203 203 dirmove[dsrc + "/"] = ddst + "/"
204 204
205 205 for i in invalid:
206 206 if i in dirmove:
207 207 del dirmove[i]
208 208
209 209 del d1, d2, invalid
210 210
211 211 if not dirmove:
212 212 return copy, diverge
213 213
214 214 for d in dirmove:
215 215 repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
216 216
217 217 # check unaccounted nonoverlapping files against directory moves
218 218 for f in u1 + u2:
219 219 if f not in fullcopy:
220 220 for d in dirmove:
221 221 if f.startswith(d):
222 222 # new file added in a directory that was moved, move it
223 223 copy[f] = dirmove[d] + f[len(d):]
224 224 repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
225 225 break
226 226
227 227 return copy, diverge
228 228
229 229 def symmetricdifference(repo, rev1, rev2):
230 230 """symmetric difference of the sets of ancestors of rev1 and rev2
231 231
232 232 I.e. revisions that are ancestors of rev1 or rev2, but not both.
233 233 """
234 234 # basic idea:
235 235 # - mark rev1 and rev2 with different colors
236 236 # - walk the graph in topological order with the help of a heap;
237 237 # for each revision r:
238 238 # - if r has only one color, we want to return it
239 239 # - add colors[r] to its parents
240 240 #
241 241 # We keep track of the number of revisions in the heap that
242 242 # we may be interested in. We stop walking the graph as soon
243 243 # as this number reaches 0.
244 244 WHITE = 1
245 245 BLACK = 2
246 246 ALLCOLORS = WHITE | BLACK
247 247 colors = {rev1: WHITE, rev2: BLACK}
248 248
249 249 cl = repo.changelog
250 250
251 251 visit = [-rev1, -rev2]
252 252 heapq.heapify(visit)
253 253 n_wanted = len(visit)
254 254 ret = []
255 255
256 256 while n_wanted:
257 257 r = -heapq.heappop(visit)
258 258 wanted = colors[r] != ALLCOLORS
259 259 n_wanted -= wanted
260 260 if wanted:
261 261 ret.append(r)
262 262
263 263 for p in cl.parentrevs(r):
264 264 if p == nullrev:
265 265 continue
266 266 if p not in colors:
267 267 # first time we see p; add it to visit
268 268 n_wanted += wanted
269 269 colors[p] = colors[r]
270 270 heapq.heappush(visit, -p)
271 271 elif colors[p] != ALLCOLORS and colors[p] != colors[r]:
272 272 # at first we thought we wanted p, but now
273 273 # we know we don't really want it
274 274 n_wanted -= 1
275 275 colors[p] |= colors[r]
276 276
277 277 del colors[r]
278 278
279 279 return ret
280 280
281 281 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
282 282 """
283 283 Merge p1 and p2 with ancestor ma and generate merge action list
284 284
285 285 overwrite = whether we clobber working files
286 286 partial = function to filter file lists
287 287 """
288 288
289 289 repo.ui.note(_("resolving manifests\n"))
290 290 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
291 291 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
292 292
293 293 m1 = p1.manifest()
294 294 m2 = p2.manifest()
295 295 ma = pa.manifest()
296 296 backwards = (pa == p2)
297 297 action = []
298 298 copy = {}
299 299 diverge = {}
300 300
301 301 def fmerge(f, f2=None, fa=None):
302 302 """merge flags"""
303 303 if not f2:
304 304 f2 = f
305 305 fa = f
306 306 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
307 307 if m == n: # flags agree
308 308 return m # unchanged
309 309 if m and n: # flags are set but don't agree
310 310 if not a: # both differ from parent
311 311 r = repo.ui.prompt(
312 312 _(" conflicting flags for %s\n"
313 313 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
314 314 return r != "n" and r or ''
315 315 if m == a:
316 316 return n # changed from m to n
317 317 return m # changed from n to m
318 318 if m and m != a: # changed from a to m
319 319 return m
320 320 if n and n != a: # changed from a to n
321 321 return n
322 322 return '' # flag was cleared
323 323
324 324 def act(msg, m, f, *args):
325 325 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
326 326 action.append((f, m) + args)
327 327
328 328 if not (backwards or overwrite):
329 329 rev1 = p1.rev()
330 330 if rev1 is None:
331 331 # p1 is a workingctx
332 332 rev1 = p1.parents()[0].rev()
333 333 limit = min(symmetricdifference(repo, rev1, p2.rev()))
334 334 copy, diverge = findcopies(repo, m1, m2, ma, limit)
335 335
336 336 for of, fl in diverge.items():
337 337 act("divergent renames", "dr", of, fl)
338 338
339 339 copied = dict.fromkeys(copy.values())
340 340
341 341 # Compare manifests
342 342 for f, n in m1.iteritems():
343 343 if partial and not partial(f):
344 344 continue
345 345 if f in m2:
346 346 if overwrite or backwards:
347 347 rflags = m2.flags(f)
348 348 else:
349 349 rflags = fmerge(f)
350 350 # are files different?
351 351 if n != m2[f]:
352 352 a = ma.get(f, nullid)
353 353 # are we clobbering?
354 354 if overwrite:
355 355 act("clobbering", "g", f, rflags)
356 356 # or are we going back in time and clean?
357 357 elif backwards and not n[20:]:
358 358 act("reverting", "g", f, rflags)
359 359 # are both different from the ancestor?
360 360 elif n != a and m2[f] != a:
361 361 act("versions differ", "m", f, f, f, rflags, False)
362 362 # is remote's version newer?
363 363 elif m2[f] != a:
364 364 act("remote is newer", "g", f, rflags)
365 365 # local is newer, not overwrite, check mode bits
366 366 elif m1.flags(f) != rflags:
367 367 act("update permissions", "e", f, rflags)
368 368 # contents same, check mode bits
369 369 elif m1.flags(f) != rflags:
370 370 act("update permissions", "e", f, rflags)
371 371 elif f in copied:
372 372 continue
373 373 elif f in copy:
374 374 f2 = copy[f]
375 375 if f2 not in m2: # directory rename
376 376 act("remote renamed directory to " + f2, "d",
377 377 f, None, f2, m1.flags(f))
378 378 elif f2 in m1: # case 2 A,B/B/B
379 379 act("local copied to " + f2, "m",
380 380 f, f2, f, fmerge(f, f2, f2), False)
381 381 else: # case 4,21 A/B/B
382 382 act("local moved to " + f2, "m",
383 383 f, f2, f, fmerge(f, f2, f2), False)
384 384 elif f in ma:
385 385 if n != ma[f] and not overwrite:
386 386 if repo.ui.prompt(
387 387 _(" local changed %s which remote deleted\n"
388 388 "use (c)hanged version or (d)elete?") % f,
389 389 _("[cd]"), _("c")) == _("d"):
390 390 act("prompt delete", "r", f)
391 391 else:
392 392 act("other deleted", "r", f)
393 393 else:
394 394 # file is created on branch or in working directory
395 395 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
396 396 act("remote deleted", "r", f)
397 397
398 398 for f, n in m2.iteritems():
399 399 if partial and not partial(f):
400 400 continue
401 401 if f in m1:
402 402 continue
403 403 if f in copied:
404 404 continue
405 405 if f in copy:
406 406 f2 = copy[f]
407 407 if f2 not in m1: # directory rename
408 408 act("local renamed directory to " + f2, "d",
409 409 None, f, f2, m2.flags(f))
410 410 elif f2 in m2: # rename case 1, A/A,B/A
411 411 act("remote copied to " + f, "m",
412 412 f2, f, f, fmerge(f2, f, f2), False)
413 413 else: # case 3,20 A/B/A
414 414 act("remote moved to " + f, "m",
415 415 f2, f, f, fmerge(f2, f, f2), True)
416 416 elif f in ma:
417 417 if overwrite or backwards:
418 418 act("recreating", "g", f, m2.flags(f))
419 419 elif n != ma[f]:
420 420 if repo.ui.prompt(
421 421 _("remote changed %s which local deleted\n"
422 422 "use (c)hanged version or leave (d)eleted?") % f,
423 423 _("[cd]"), _("c")) == _("c"):
424 424 act("prompt recreating", "g", f, m2.flags(f))
425 425 else:
426 426 act("remote created", "g", f, m2.flags(f))
427 427
428 428 return action
429 429
def applyupdates(repo, action, wctx, mctx):
    """Apply the merge action list to the working directory.

    action is a list of tuples (filename, code, args...); the codes seen
    here are "r" remove, "m" merge, "g" get, "d" directory rename,
    "dr" divergent renames and "e" exec-flag change.  wctx is the
    working (local) context, mctx the context being merged in.

    Returns a tuple of counters (updated, merged, removed, unresolved).
    """

    updated, merged, removed, unresolved = 0, 0, 0, 0
    action.sort()
    # prescan for copy/renames: materialize the merge target before
    # filemerge runs so it has a file to work on
    for a in action:
        f, m = a[:2]
        if m == 'm': # merge
            f2, fd, flags, move = a[2:]
            if f != fd:
                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
                repo.wwrite(fd, repo.wread(f), flags)

    # refuse to touch paths that escape the repository root
    audit_path = util.path_auditor(repo.root)

    for a in action:
        f, m = a[:2]
        if f and f[0] == "/":
            continue
        if m == "r": # remove
            repo.ui.note(_("removing %s\n") % f)
            audit_path(f)
            try:
                util.unlink(repo.wjoin(f))
            except OSError, inst:
                # an already-missing file is fine; report anything else
                if inst.errno != errno.ENOENT:
                    repo.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
            removed += 1
        elif m == "m": # merge
            f2, fd, flags, move = a[2:]
            # filemerge result: None -> no merge needed (counted updated),
            # 0 -> clean merge, >0 -> unresolved conflict
            r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx)
            if r > 0:
                unresolved += 1
            else:
                if r is None:
                    updated += 1
                else:
                    merged += 1
            util.set_flags(repo.wjoin(fd), flags)
            if f != fd and move and util.lexists(repo.wjoin(f)):
                repo.ui.debug(_("removing %s\n") % f)
                os.unlink(repo.wjoin(f))
        elif m == "g": # get
            flags = a[2]
            repo.ui.note(_("getting %s\n") % f)
            t = mctx.filectx(f).data()
            repo.wwrite(f, t, flags)
            updated += 1
        elif m == "d": # directory rename
            f2, fd, flags = a[2:]
            if f: # local file moved into the renamed directory
                repo.ui.note(_("moving %s to %s\n") % (f, fd))
                t = wctx.filectx(f).data()
                repo.wwrite(fd, t, flags)
                util.unlink(repo.wjoin(f))
            if f2: # remote file fetched into the renamed directory
                repo.ui.note(_("getting %s to %s\n") % (f2, fd))
                t = mctx.filectx(f2).data()
                repo.wwrite(fd, t, flags)
            updated += 1
        elif m == "dr": # divergent renames
            fl = a[2]
            repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)
        elif m == "e": # exec
            flags = a[2]
            util.set_flags(repo.wjoin(f), flags)

    return updated, merged, removed, unresolved
502 502
def recordupdates(repo, action, branchmerge):
    """Record merge actions to the dirstate.

    Walks the same action list applyupdates consumed and updates the
    dirstate entry for each file so status/commit see the right state.
    branchmerge selects merge semantics (two parents) versus plain
    update semantics.
    """

    for a in action:
        f, m = a[:2]
        if m == "r": # remove
            if branchmerge:
                repo.dirstate.remove(f)
            else:
                repo.dirstate.forget(f)
        elif m == "f": # forget
            repo.dirstate.forget(f)
        elif m in "ge": # get or exec change
            if branchmerge:
                repo.dirstate.normaldirty(f)
            else:
                repo.dirstate.normal(f)
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            if branchmerge:
                # We've done a branch merge, mark this file as merged
                # so that we properly record the merger later
                repo.dirstate.merge(fd)
                if f != f2: # copy/rename
                    if move:
                        repo.dirstate.remove(f)
                    if f != fd:
                        repo.dirstate.copy(f, fd)
                    else:
                        repo.dirstate.copy(f2, fd)
            else:
                # We've update-merged a locally modified file, so
                # we set the dirstate to emulate a normal checkout
                # of that file some time in the past. Thus our
                # merge will appear as a normal local file
                # modification.
                repo.dirstate.normallookup(fd)
                if move:
                    repo.dirstate.forget(f)
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if not f2 and f not in repo.dirstate:
                # untracked file moved
                continue
            if branchmerge:
                repo.dirstate.add(fd)
                if f:
                    repo.dirstate.remove(f)
                    repo.dirstate.copy(f, fd)
                if f2:
                    repo.dirstate.copy(f2, fd)
            else:
                repo.dirstate.normal(fd)
                if f:
                    repo.dirstate.forget(f)
558 558
def update(repo, node, branchmerge, force, partial):
    """
    Perform a merge between the working directory and the given node

    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    partial = a function to filter file lists (dirstate not updated)

    Returns the (updated, merged, removed, unresolved) counters from
    applyupdates.
    """

    wlock = repo.wlock()
    try:
        wc = repo.workingctx()
        if node is None:
            # tip of current branch
            try:
                node = repo.branchtags()[wc.branch()]
            except KeyError:
                if wc.branch() == "default": # no default branch!
                    node = repo.lookup("tip") # update to tip
                else:
                    raise util.Abort(_("branch %s not found") % wc.branch())
        overwrite = force and not branchmerge
        forcemerge = force and branchmerge
        pl = wc.parents()
        p1, p2 = pl[0], repo.changectx(node)
        pa = p1.ancestor(p2)
        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
        fastforward = False

        ### check phase
        if not overwrite and len(pl) > 1:
            raise util.Abort(_("outstanding uncommitted merges"))
        if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
            if branchmerge:
                # linear path between different named branches is a
                # fast-forward merge; same branch has nothing to merge
                if p1.branch() != p2.branch() and pa != p2:
                    fastforward = True
                else:
                    raise util.Abort(_("there is nothing to merge, just use "
                                       "'hg update' or look at 'hg heads'"))
        elif not (overwrite or branchmerge):
            raise util.Abort(_("update spans branches, use 'hg merge' "
                               "or 'hg update -C' to lose changes"))
        if branchmerge and not forcemerge:
            if wc.files():
                raise util.Abort(_("outstanding uncommitted changes"))

        ### calculate phase
        action = []
        if not force:
            checkunknown(wc, p2)
        # case-folding filesystems need a collision check
        if not util.checkfolding(repo.path):
            checkcollision(p2)
        if not branchmerge:
            action += forgetremoved(wc, p2)
        action += manifestmerge(repo, wc, p2, pa, overwrite, partial)

        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)

        stats = applyupdates(repo, action, wc, p2)

        if not partial:
            recordupdates(repo, action, branchmerge)
            repo.dirstate.setparents(fp1, fp2)
            if not branchmerge and not fastforward:
                repo.dirstate.setbranch(p2.branch())
            repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])

        return stats
    finally:
        # dropping the last reference releases the working dir lock
        del wlock
@@ -1,1393 +1,1393 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 from node import *
10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 12 import cStringIO, email.Parser, os, popen2, re, sha, errno
13 13 import sys, tempfile, zlib
14 14
class PatchError(Exception):
    """Base exception for errors while parsing or applying a patch."""
    pass
17 17
class NoHunks(PatchError):
    """Raised when a patch contains no hunks the builtin patcher understands."""
    pass
20 20
21 21 # helper functions
22 22
def copyfile(src, dst, basedir=None):
    """Copy src to dst, both taken relative to basedir (default: cwd).

    Refuses to overwrite an existing destination and creates any
    missing parent directories of the target.
    """
    basedir = basedir or os.getcwd()

    abssrc = os.path.join(basedir, src)
    absdst = os.path.join(basedir, dst)
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    dstdir = os.path.dirname(absdst)
    if not os.path.isdir(dstdir):
        os.makedirs(dstdir)

    util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    # the patch body is spooled to a temp file owned by the caller
    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            # strip a leading "[PATCH n/m]" style prefix
            if subject.startswith('[PATCH'):
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend+1:].lstrip()
            subject = subject.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # text before the diff is the commit message; "# ..."
                # header lines from 'hg export' override the mail headers
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatch:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and gitsendmail:
                        # git-send-email puts a '---' separator before diffstat
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except:
        # on any failure, clean up the temp file before re-raising
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
141 141
142 142 GP_PATCH = 1 << 0 # we have to run patch
143 143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 144 GP_BINARY = 1 << 2 # there's a binary patch
145 145
def readgitpatch(fp, firstline=None):
    """extract git-style metadata about patches from <patchname>

    Returns (dopatch, gitpatches) where dopatch is a bitmask of
    GP_PATCH/GP_FILTER/GP_BINARY flags and gitpatches is a list of
    per-file gitpatch metadata objects.
    """
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path
            self.oldpath = None
            self.mode = None
            self.op = 'MODIFY'
            self.lineno = 0
            self.binary = False

    def reader(fp, firstline):
        # iterate fp, optionally with a pushed-back first line
        if firstline is not None:
            yield firstline
        for line in fp:
            yield line

    # Filter patch for git information
    gitre = re.compile('diff --git a/(.*) b/(.*)')
    gp = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in reader(fp, firstline):
        lineno += 1
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                src, dst = m.group(1, 2)
                gp = gitpatch(dst)
                gp.lineno = lineno
        elif gp:
            if line.startswith('--- '):
                # start of the actual hunk text ends this file's metadata
                if gp.op in ('COPY', 'RENAME'):
                    dopatch |= GP_FILTER
                gitpatches.append(gp)
                gp = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                gp.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                gp.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                # last six characters are the octal mode bits
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('new mode '):
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    if not gitpatches:
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
217 217
218 218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 219 """apply <patchname> to the working directory.
220 220 returns whether patch was applied with fuzz factor."""
221 221 patcher = ui.config('ui', 'patch')
222 222 args = []
223 223 try:
224 224 if patcher:
225 225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 226 files)
227 227 else:
228 228 try:
229 229 return internalpatch(patchname, ui, strip, cwd, files)
230 230 except NoHunks:
231 231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 233 patcher)
234 234 if util.needbinarypatch():
235 235 args.append('--binary')
236 236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 237 files)
238 238 except PatchError, err:
239 239 s = str(err)
240 240 if s:
241 241 raise util.Abort(s)
242 242 else:
243 243 raise util.Abort(_('patch failed to apply'))
244 244
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Touched file names (parsed from the patcher's output) are recorded
    as keys in files.  Raises PatchError when the patcher exits
    non-zero.
    """

    fuzz = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))

    # scan the patcher's output for per-file fuzz/reject reports
    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            # NOTE(review): pf/printed_file are only bound after a
            # 'patching file' line; a fuzz/FAILED report appearing first
            # would raise NameError -- assumes patch(1) always announces
            # the file before reporting on it, confirm
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
280 280
def internalpatch(patchobj, ui, strip, cwd, files=None):
    """use builtin patch to apply <patchobj> to the working directory.

    patchobj may be a file name or an open file-like object.  Touched
    file names are recorded in files.  returns whether patch was
    applied with fuzz factor.
    """
    # avoid a mutable {} default argument: the shared dict would
    # accumulate file names across unrelated calls
    if files is None:
        files = {}
    try:
        fp = file(patchobj, 'rb')
    except TypeError:
        # not a file name: assume an already-open file object
        fp = patchobj
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        # always restore the original working directory
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    return ret > 0
299 299
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
# context-diff range lines: '*** start[,end] ***' / '--- start[,end] ---'
contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303 303
class patchfile:
    """In-memory representation of one file being patched.

    Loads the target file's lines (unless known missing), applies hunks
    with optional offset/fuzz matching, and on close() writes back the
    result plus a .rej file containing any rejected hunks.
    """
    def __init__(self, ui, fname, missing=False):
        self.fname = fname
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = missing
        if not missing:
            try:
                fp = file(fname, 'rb')
                self.lines = fp.readlines()
                self.exists = True
            except IOError:
                # nonexistent file is fine: a hunk may create it
                pass
        else:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        if not self.exists:
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                os.makedirs(dirname)

        self.hash = {}          # line content -> list of line indices
        self.dirty = 0          # nonzero when self.lines needs flushing
        self.offset = 0         # cumulative line drift from applied hunks
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def printfile(self, warn):
        # announce the file name at most once; warnings force it out,
        # otherwise only verbose mode shows it
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(sorter)
        return cand

    def hashlines(self):
        # index every line of the file by content, for fuzzy matching
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return
        if self.hunks != 1:
            hunkstr = "s"
        else:
            hunkstr = ""

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, hunkstr, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'wb')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        # flush modified lines to dest (default: the patched file);
        # hardlinked targets are unlinked first so other links keep
        # their original content
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                st = os.lstat(dest)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise
            if st and st.st_nlink > 1:
                os.unlink(dest)
            fp = file(dest, 'wb')
            if st and st.st_nlink > 1:
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        # apply hunk h: returns 0 on a clean apply, the positive fuzz
        # factor when fuzz was needed, -1 when the hunk is rejected
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace the whole file content
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
506 506
class hunk:
    """One text hunk parsed from a unified or context diff.

    self.a holds the old-side lines (with their '-'/' ' markers),
    self.b the new-side lines with markers stripped; self.hunk keeps
    the raw hunk text starting with the @@ description line.
    """
    def __init__(self, desc, num, lr, context, gitpatch=None):
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)
        self.gitpatch = gitpatch

    def read_unified_hunk(self, lr):
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        # omitted lengths default to 1 per unified diff format
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # read the old-side ("***") lines
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # read the new-side ("---") lines, merging them into self.hunk
        # at the right positions
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        # swap the old and new sides of the hunk in place
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): 'n' (the sign-flipped line) is computed but
            # 'o' is stored back, so the raw hunk text keeps its original
            # direction -- looks like a bug; verify against callers
            self.hunk[x] = o

    def fix_newline(self):
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        # a hunk with an empty, position-0 old side creates the file
        create = self.gitpatch is None or self.gitpatch.op == 'ADD'
        return self.starta == 0 and self.lena == 0 and create

    def rmfile(self):
        # a hunk with an empty, position-0 new side removes the file
        remove = self.gitpatch is None or self.gitpatch.op == 'DELETE'
        return self.startb == 0 and self.lenb == 0 and remove

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        # new-side lines, keeping their control characters
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        return self.fuzzit(self.b, fuzz, toponly)
730 730
class binhunk:
    'A binary patch file. Only understands literals so far.'
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None        # decoded file content, set by extract()
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        # binary patches carry whole-file content, so any of these git
        # operations amounts to creating the target
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        return self.text is not None

    def new(self):
        return [self.text]

    def extract(self, fp):
        """Read a 'literal' base85 section from fp and decode it into
        self.text, raising PatchError on truncated or malformed data."""
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        # 'literal <size>': size of the decompressed payload
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first character encodes the decoded length of this line:
            # 'A'-'Z' -> 1-26, 'a'-'z' -> 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # bug fix: the original applied % to len(text) alone and
            # passed size as a second argument to PatchError, raising
            # TypeError instead of the intended message
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
776 776
def parsefilename(str):
    """Extract the filename from a '--- '/'+++ ' patch header line.

    The name runs from after the 4-byte prefix up to the first tab,
    or, lacking a tab, the first space; trailing line endings are
    stripped first.
    """
    name = str[4:].rstrip('\r\n')
    # prefer a tab separator, then fall back to a space
    for sep in ('\t', ' '):
        cut = name.find(sep)
        if cut >= 0:
            return name[:cut]
    return name
786 786
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose the on-disk file a hunk should be applied to.

    Strips *strip* leading path components from both header names, then
    picks between them based on which file exists and whether the hunk
    (or its reverse) creates the target.  Returns (fname, missing) where
    missing is True when the target does not exist and the hunk does not
    create it.  Raises PatchError when a name cannot be stripped or no
    target can be determined.
    """
    def pathstrip(path, count=1):
        # drop 'count' leading components; collapse runs of '/' so
        # 'a//b' strips like 'a/b'
        pathlen = len(path)
        i = 0
        if count == 0:
            return path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    afile = pathstrip(afile_orig, strip)
    gooda = not nulla and os.path.exists(afile)
    bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    # when reversing, the hunk's delete side is what creates the file
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    missing = not goodb and not gooda and not createfunc()
    fname = None
    if not missing:
        if gooda and goodb:
            # prefer the shorter of two existing names (substring test)
            fname = (afile in bfile) and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        # fall back to the header names even if the files don't exist yet
        if not nullb:
            fname = (afile in bfile) and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    return fname, missing
834 834
class linereader:
    """Wrap a file object so lines can be pushed back onto the stream.

    Pushed-back lines are returned in push order (FIFO) before any
    further reading from the underlying file.
    """
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        """Queue *line* to be returned by a later readline()."""
        self.buf.append(line)

    def readline(self):
        """Return the oldest pushed-back line, or read from the file."""
        if not self.buf:
            return self.fp.readline()
        return self.buf.pop(0)
850 850
def iterhunks(ui, fp, sourcefile=None):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """

    def scangitpatch(fp, firstline):
        '''git patches can modify a file, then copy that file to
        a new file, but expect the source to be the unmodified form.
        So we scan the patch looking for that case so we can do
        the copies ahead of time.'''

        pos = 0
        try:
            pos = fp.tell()
        except IOError:
            # not seekable (e.g. a pipe): buffer the whole patch in memory
            fp = cStringIO.StringIO(fp.read())

        (dopatch, gitpatches) = readgitpatch(fp, firstline)
        # rewind so the main loop re-reads the patch from the start
        fp.seek(pos)

        return fp, dopatch, gitpatches

    changed = {}                  # filename -> (git op, gitpatch record)
    current_hunk = None
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = False              # a 'file' event is pending

    git = False
    gitre = re.compile('diff --git (a/.*) (b/.*)')

    # our states
    BFILE = 1
    # context is tri-state: None = diff flavor unknown yet,
    # False = unified diff, True = context diff
    context = None
    lr = linereader(fp)
    dopatch = True
    # gitworkdone is True if a git operation (copy, rename, ...) was
    # performed already for the current file. Useful when the file
    # section may have no hunk.
    gitworkdone = False

    while True:
        newfile = False
        x = lr.readline()
        if not x:
            break
        if current_hunk:
            if x.startswith('\ '):
                # '\ No newline at end of file'
                current_hunk.fix_newline()
            yield 'hunk', current_hunk
            current_hunk = None
            gitworkdone = False
        if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
            ((context or context == None) and x.startswith('***************')))):
            try:
                if context == None and x.startswith('***************'):
                    context = True
                gpatch = changed.get(bfile[2:], (None, None))[1]
                current_hunk = hunk(x, hunknum + 1, lr, context, gpatch)
            except PatchError, err:
                # NOTE(review): err is an exception object, not a string;
                # presumably ui.debug expects str(err) -- confirm
                ui.debug(err)
                current_hunk = None
                continue
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
        elif state == BFILE and x.startswith('GIT binary patch'):
            current_hunk = binhunk(changed[bfile[2:]][1])
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
            current_hunk.extract(fp)
        elif x.startswith('diff --git'):
            # check for git diff, scanning the whole patch file if needed
            m = gitre.match(x)
            if m:
                afile, bfile = m.group(1, 2)
                if not git:
                    git = True
                    fp, dopatch, gitpatches = scangitpatch(fp, x)
                    yield 'git', gitpatches
                    for gp in gitpatches:
                        changed[gp.path] = (gp.op, gp)
                # else error?
                # copy/rename + modify should modify target, not source
                gitop = changed.get(bfile[2:], (None, None))[0]
                if gitop in ('COPY', 'DELETE', 'RENAME'):
                    afile = bfile
                    gitworkdone = True
                newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            emitfile = True
            state = BFILE
            hunknum = 0
    # flush a trailing hunk left over when input ended
    if current_hunk:
        if current_hunk.complete():
            yield 'hunk', current_hunk
        else:
            raise PatchError(_("malformed patch %s %s") % (afile,
                             current_hunk.desc))

    if hunknum == 0 and dopatch and not gitworkdone:
        raise NoHunks
988 988
989 989 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
990 990 rejmerge=None, updatedir=None):
991 991 """reads a patch from fp and tries to apply it. The dict 'changed' is
992 992 filled in with all of the filenames changed by the patch. Returns 0
993 993 for a clean patch, -1 if any rejects were found and 1 if there was
994 994 any fuzz."""
995 995
996 996 rejects = 0
997 997 err = 0
998 998 current_file = None
999 999 gitpatches = None
1000 1000
1001 1001 def closefile():
1002 1002 if not current_file:
1003 1003 return 0
1004 1004 current_file.close()
1005 1005 if rejmerge:
1006 1006 rejmerge(current_file)
1007 1007 return len(current_file.rej)
1008 1008
1009 1009 for state, values in iterhunks(ui, fp, sourcefile):
1010 1010 if state == 'hunk':
1011 1011 if not current_file:
1012 1012 continue
1013 1013 current_hunk = values
1014 1014 ret = current_file.apply(current_hunk, reverse)
1015 1015 if ret >= 0:
1016 1016 changed.setdefault(current_file.fname, (None, None))
1017 1017 if ret > 0:
1018 1018 err = 1
1019 1019 elif state == 'file':
1020 1020 rejects += closefile()
1021 1021 afile, bfile, first_hunk = values
1022 1022 try:
1023 1023 if sourcefile:
1024 1024 current_file = patchfile(ui, sourcefile)
1025 1025 else:
1026 1026 current_file, missing = selectfile(afile, bfile, first_hunk,
1027 1027 strip, reverse)
1028 1028 current_file = patchfile(ui, current_file, missing)
1029 1029 except PatchError, err:
1030 1030 ui.warn(str(err) + '\n')
1031 1031 current_file, current_hunk = None, None
1032 1032 rejects += 1
1033 1033 continue
1034 1034 elif state == 'git':
1035 1035 gitpatches = values
1036 1036 for gp in gitpatches:
1037 1037 if gp.op in ('COPY', 'RENAME'):
1038 1038 copyfile(gp.oldpath, gp.path)
1039 1039 changed[gp.path] = (gp.op, gp)
1040 1040 else:
1041 1041 raise util.Abort(_('unsupported parser state: %s') % state)
1042 1042
1043 1043 rejects += closefile()
1044 1044
1045 1045 if updatedir and gitpatches:
1046 1046 updatedir(gitpatches)
1047 1047 if rejects:
1048 1048 return -1
1049 1049 return err
1050 1050
def diffopts(ui, opts=None, untrusted=False):
    """Build an mdiff.diffopts from command options and [diff] config.

    opts may be None (treated as empty).  The original default was a
    mutable {} — harmless here since opts is only read, but replaced
    with None to avoid the shared-mutable-default pitfall.
    """
    if opts is None:
        opts = {}
    def get(key, name=None):
        # a command-line flag wins; otherwise consult the [diff] section
        return (opts.get(key) or
                ui.configbool('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
        context=get('unified'))
1064 1064
def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata

    patches maps filename -> (git op, gitpatch record).  Performs the
    recorded copies/renames/deletes, applies git mode bits, then runs
    addremove.  Returns the sorted list of affected filenames.
    '''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # make names relative to the current directory for addremove
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst in copies:
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            # translate git mode bits: 0100 -> executable, 020000 -> symlink
            flags = ''
            if gp.mode & 0100:
                flags = 'x'
            elif gp.mode & 020000:
                flags = 'l'
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', flags)
            else:
                util.set_flags(dst, flags)
    cmdutil.addremove(repo, cfiles)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files
1110 1110
def b85diff(to, tn):
    '''print base85-encoded binary diff

    Returns a git-style binary patch body ('literal' form, zlib
    compressed then base85 encoded) turning *to* into *tn*, or the
    empty string when the contents are identical.
    '''
    def gitindex(text):
        # git blob id: sha1 of 'blob <len>\0<data>'; all zeros for no file
        if not text:
            return '0' * 40
        l = len(text)
        s = sha.new('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # prefix char encodes decoded length: 'A'-'Z' -> 1-26,
        # 'a'-'z' -> 27-52 (mirrors binhunk.extract)
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # yield fixed-size slices (52 raw bytes per encoded output line)
        l = len(text)
        i = 0
        while i < l:
            yield text[i:i+csize]
            i += csize

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        # unchanged: nothing to emit
        return ""

    # TODO: deltas
    ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
           (tohash, tnhash, len(tn))]
    for l in chunk(zlib.compress(tn)):
        ret.append(fmtline(l))
    ret.append('\n')
    return ''.join(ret)
1148 1148
def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # cache changectx objects by revision
    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    # share one filelog per filename across filectx objects
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
        execf2 = ctx2.manifest().execf
        linkf2 = ctx2.manifest().linkf
    else:
        # diff against the working directory
        ctx2 = context.workingctx(repo)
        execf2 = util.execfunc(repo.root, None)
        linkf2 = util.linkfunc(repo.root, None)
        if execf2 is None:
            # filesystem can't express exec bits; borrow the parent manifest
            mc = ctx2.parents()[0].manifest().copy()
            execf2 = mc.execf
            linkf2 = mc.linkf

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    # This will only really work if c1 is the Nth 1st parent of c2.
    def renamed(c1, c2, man, f):
        startrev = c1.rev()
        c = c2
        crev = c.rev()
        if crev is None:
            # working context: pretend it is the tip's successor
            crev = repo.changelog.count()
        orig = f
        files = (f,)
        while crev > startrev:
            if f in files:
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
            files = c.files()
        if f not in man:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # map new name -> old name for copies/renames
        copied = {}
        c1, c2 = ctx1, ctx2
        files = added
        man = man1
        if node2 and ctx1.rev() >= ctx2.rev():
            # renamed() starts at c2 and walks back in history until c1.
            # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
            # detect (inverted) copies.
            c1, c2 = ctx2, ctx1
            files = removed
            man = ctx2.manifest()
        for f in files:
            src = renamed(c1, c2, man, f)
            if src:
                copied[f] = src
        if ctx1 == c2:
            # invert the copied dict
            copied = dict([(v, k) for (k, v) in copied.iteritems()])
        # If we've renamed file foo to bar (copied['bar'] = 'foo'),
        # avoid showing a diff for foo if we're going to show
        # the rename to bar.
        srcs = [x[1] for x in copied.iteritems() if x[0] in added]

    all = modified + added + removed
    all.sort()
    # rename sources already reported, so their delete needs no diff
    gone = {}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            def gitmode(x, l):
                return l and '120000' or (x and '100755' or '100644')
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            if f in added:
                mode = gitmode(execf2(f), linkf2(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a), man1.linkf(a))
                    addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f), man1.linkf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f), man1.linkf(f))
                nmode = gitmode(execf2(f), linkf2(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git diffs carry no revision hashes in the hunk headers
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
1330 1330
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    revs: revisions to export.  template: filename template expanded per
    revision (unless fp is given).  switch_parent diffs against the
    second parent of a merge instead of the first.
    '''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # write one changeset as a patch, opening a per-revision file
        # from the template when no file object was supplied
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            # record the second parent of merges as well
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        # only close files we opened ourselves
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno+1, fp)
1371 1371
def diffstat(patchlines):
    """Return a diffstat(1) summary of *patchlines*, totals line first.

    Returns None when the diffstat executable is unavailable or fails.
    """
    if not util.find_exe('diffstat'):
        return
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        # name comes from mkstemp, so the shell redirection target is safe
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patchlines:
            p.tochild.write(line + "\n")
        p.tochild.close()
        if p.wait():
            return
        fp = os.fdopen(fd, 'r')
        stat = []
        for line in fp:
            stat.append(line.lstrip())
        # move the totals line to the front
        last = stat.pop()
        stat.insert(0, last)
        return ''.join(stat)
    finally:
        # cleanup change: the original's bare 'except: raise' was a
        # no-op and its bare 'except: pass' swallowed everything; only
        # ignore filesystem errors from the unlink
        try:
            os.unlink(name)
        except OSError:
            pass
@@ -1,136 +1,136 b''
1 1 # repair.py - functions for repository repair for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 # Copyright 2007 Matt Mackall
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import changegroup, os
10 from node import *
10 from node import nullrev, short
11 11
def _bundle(repo, bases, heads, node, suffix, extranodes=None):
    """Write a backup bundle of the given revisions; return its filename.

    The bundle is stored under .hg/strip-backup, named after *node*
    and *suffix*.
    """
    # the changegroup carries everything that is about to be stripped
    bundle = repo.changegroupsubset(bases, heads, 'strip', extranodes)
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    filename = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
    repo.ui.warn("saving bundle to %s\n" % filename)
    return changegroup.writebundle(bundle, filename, "HG10BZ")
21 21
def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip"""
    # collect every file touched by any changeset from striprev onward
    seen = {}
    for rev in xrange(striprev, repo.changelog.count()):
        for fname in repo.changectx(rev).files():
            seen[fname] = 1

    names = seen.keys()
    names.sort()
    return names
35 35
def _collectextranodes(repo, files, link):
    """return the nodes that have to be saved before the strip

    Returns a dict mapping 1 (the manifest) or a filename to a list of
    (node, linknode) pairs whose revlog entries sit past the truncation
    point but are linked to changesets that survive the strip.
    """
    def collectone(revlog):
        extra = []
        startrev = count = revlog.count()
        # find the truncation point of the revlog
        for i in xrange(0, count):
            node = revlog.node(i)
            lrev = revlog.linkrev(node)
            if lrev >= link:
                startrev = i + 1
                break

        # see if any revision after that point has a linkrev less than link
        # (we have to manually save these guys)
        for i in xrange(startrev, count):
            node = revlog.node(i)
            lrev = revlog.linkrev(node)
            if lrev < link:
                # cl is the changelog, bound in the enclosing scope below
                extra.append((node, cl.node(lrev)))

        return extra

    extranodes = {}
    cl = repo.changelog
    # the manifest has no filename, so key it by 1
    extra = collectone(repo.manifest)
    if extra:
        extranodes[1] = extra
    for fname in files:
        f = repo.file(fname)
        extra = collectone(f)
        if extra:
            extranodes[fname] = extra

    return extranodes
71 71
def strip(ui, repo, node, backup="all"):
    """Remove changeset *node* and all its descendants from the repo.

    Revisions above the strip point that do NOT descend from it are
    saved to a temporary bundle and re-added after truncation.  backup
    controls bundles: "all" also writes a full backup bundle; anything
    but "strip" removes the temporary bundle when done.
    """
    cl = repo.changelog
    # TODO delete the undo files, and handle undo of merge sets
    # (removed unused 'pp = cl.parents(node)' -- flagged by pyflakes)
    striprev = cl.rev(node)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = {striprev: 1}
    saveheads = {}
    savebases = []
    for r in xrange(striprev + 1, cl.count()):
        parents = cl.parentrevs(r)
        if parents[0] in tostrip or parents[1] in tostrip:
            # r is a descendant of striprev
            tostrip[r] = 1
            # if this is a merge and one of the parents does not descend
            # from striprev, mark that parent as a savehead.
            if parents[1] != nullrev:
                for p in parents:
                    if p not in tostrip and p > striprev:
                        saveheads[p] = 1
        else:
            # if no parents of this revision will be stripped, mark it as
            # a savebase
            if parents[0] < striprev and parents[1] < striprev:
                savebases.append(cl.node(r))

            for p in parents:
                if p in saveheads:
                    del saveheads[p]
            saveheads[r] = 1

    saveheads = [cl.node(r) for r in saveheads]
    files = _collectfiles(repo, striprev)

    extranodes = _collectextranodes(repo, files, striprev)

    # create a changegroup for all the branches we need to keep
    if backup == "all":
        _bundle(repo, [node], cl.heads(), node, 'backup')
    if saveheads or extranodes:
        chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
                            extranodes)

    # truncate the changelog, the manifest, and every affected filelog
    cl.strip(striprev)
    repo.manifest.strip(striprev)
    for name in files:
        f = repo.file(name)
        f.strip(striprev)

    if saveheads or extranodes:
        # restore the revisions we saved above
        ui.status("adding branch\n")
        f = open(chgrpfile, "rb")
        gen = changegroup.readbundle(f, chgrpfile)
        repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
        f.close()
        if backup != "strip":
            os.unlink(chgrpfile)
136 136
@@ -1,1319 +1,1319 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 from node import *
13 from node import bin, hex, nullid, nullrev, short
14 14 from i18n import _
15 15 import binascii, changegroup, errno, ancestor, mdiff, os
16 16 import sha, struct, util, zlib
17 17
# local aliases for heavily-used library functions (cheaper lookups)
_pack = struct.pack
_unpack = struct.unpack
_compress = zlib.compress
_decompress = zlib.decompress
_sha = sha.new

# revlog flags
REVLOGV0 = 0                              # original (v0) index format
REVLOGNG = 1                              # current ("ng") index format
REVLOGNGINLINEDATA = (1 << 16)            # data interleaved with the index
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
31 31
class RevlogError(Exception):
    """Base class for errors raised by the revlog layer."""
34 34
class LookupError(RevlogError):
    """Raised when a node/identifier cannot be found in a revlog.

    The failing identifier is kept on self.name for callers.
    """
    def __init__(self, name, message=None):
        # default message is localized; callers may pass their own
        if message is None:
            message = _('not found: %s') % name
        RevlogError.__init__(self, message)
        self.name = name
41 41
def getoffset(q):
    """Return the data offset packed in the high bits of index field *q*."""
    offset = q >> 16
    return int(offset)
44 44
def gettype(q):
    """Return the entry type stored in the low 16 bits of index field *q*."""
    low = q & 0xFFFF
    return int(low)
47 47
def offset_type(offset, type):
    """Pack an offset and a 16-bit type into a single index field."""
    shifted = long(offset) << 16
    return long(shifted | type)
50 50
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # sort the parents so the hash is order-independent
    parents = [p1, p2]
    parents.sort()
    s = _sha(parents[0])
    s.update(parents[1])
    s.update(text)
    return s.digest()
64 64
def compress(text):
    """ generate a possibly-compressed representation of text

    Returns a (header, data) pair: header is 'u' when the text is
    stored uncompressed and does not start with NUL, '' otherwise.
    """
    if not text:
        return ("", text)
    size = len(text)
    packed = None
    if size < 44:
        # too small for compression to pay off
        pass
    elif size > 1000000:
        # zlib makes an internal copy, thus doubling memory usage for
        # large files, so lets do this in pieces
        z = zlib.compressobj()
        pieces = []
        offset = 0
        while offset < size:
            stop = offset + 2**20
            pieces.append(z.compress(text[offset:stop]))
            offset = stop
        pieces.append(z.flush())
        if sum(map(len, pieces)) < size:
            packed = "".join(pieces)
    else:
        packed = _compress(text)
    if packed is None or len(packed) > size:
        # compression did not help; store plain, flagging with 'u'
        # unless the text already starts with NUL
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    return ("", packed)
93 93
def decompress(bin):
    """ decompress the given input

    Dispatches on the one-byte marker written by compress().
    """
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        # stored plain, text itself begins with NUL
        return bin
    elif marker == 'x':
        # zlib stream (its magic byte)
        return _decompress(bin)
    elif marker == 'u':
        # stored uncompressed behind a 'u' flag byte
        return bin[1:]
    raise RevlogError(_("unknown compression type %r") % marker)
106 106
class lazyparser(object):
    """
    this class avoids the need to parse the entirety of large indices
    """

    # lazyparser is not safe to use on windows if win32 extensions not
    # available. it keeps file handle open, which make it not possible
    # to break hardlinks on local cloned repos.

    def __init__(self, dataf, size):
        # dataf: open index file; size: its length in bytes
        self.dataf = dataf
        self.s = struct.calcsize(indexformatng)   # bytes per index entry
        self.datasize = size
        self.l = size/self.s                      # number of entries
        self.index = [None] * self.l              # raw entries, lazily filled
        self.map = {nullid: nullrev}              # node -> rev
        self.allmap = 0                           # whole node map loaded?
        self.all = 0                              # whole index loaded?
        self.mapfind_count = 0                    # findnode() call counter

    def loadmap(self):
        """
        during a commit, we need to make sure the rev being added is
        not a duplicate. This requires loading the entire index,
        which is fairly slow. loadmap can load up just the node map,
        which takes much less time.
        """
        if self.allmap:
            return
        end = self.datasize
        self.allmap = 1
        cur = 0
        count = 0
        blocksize = self.s * 256
        self.dataf.seek(0)
        while cur < end:
            data = self.dataf.read(blocksize)
            off = 0
            for x in xrange(256):
                # pull out only the 20-byte node of each entry
                n = data[off + ngshaoffset:off + ngshaoffset + 20]
                self.map[n] = count
                count += 1
                if count >= self.l:
                    break
                off += self.s
            cur += blocksize

    def loadblock(self, blockstart, blocksize, data=None):
        # parse one block of raw index data into self.index/self.map
        if self.all:
            return
        if data is None:
            self.dataf.seek(blockstart)
            if blockstart + blocksize > self.datasize:
                # the revlog may have grown since we've started running,
                # but we don't have space in self.index for more entries.
                # limit blocksize so that we don't get too much data.
                blocksize = max(self.datasize - blockstart, 0)
            data = self.dataf.read(blocksize)
        lend = len(data) / self.s
        i = blockstart / self.s
        off = 0
        # lazyindex supports __delitem__
        if lend > len(self.index) - i:
            lend = len(self.index) - i
        for x in xrange(lend):
            if self.index[i + x] == None:
                b = data[off : off + self.s]
                self.index[i + x] = b
                n = b[ngshaoffset:ngshaoffset + 20]
                self.map[n] = i + x
            off += self.s

    def findnode(self, node):
        """search backwards through the index file for a specific node"""
        if self.allmap:
            return None

        # hg log will cause many many searches for the manifest
        # nodes. After we get called a few times, just load the whole
        # thing.
        if self.mapfind_count > 8:
            self.loadmap()
            if node in self.map:
                return node
            return None
        self.mapfind_count += 1
        # skip entries already loaded at the tail of the index
        last = self.l - 1
        while self.index[last] != None:
            if last == 0:
                self.all = 1
                self.allmap = 1
                return None
            last -= 1
        end = (last + 1) * self.s
        blocksize = self.s * 256
        while end >= 0:
            start = max(end - blocksize, 0)
            self.dataf.seek(start)
            data = self.dataf.read(end - start)
            findend = end - start
            while True:
                # we're searching backwards, so we have to make sure
                # we don't find a changeset where this node is a parent
                off = data.find(node, 0, findend)
                findend = off
                if off >= 0:
                    i = off / self.s
                    off = i * self.s
                    n = data[off + ngshaoffset:off + ngshaoffset + 20]
                    if n == node:
                        # found it aligned at an entry's node field
                        self.map[n] = i + start / self.s
                        return node
                else:
                    break
            end -= blocksize
        return None

    def loadindex(self, i=None, end=None):
        # load entry i (or the range i..end, or everything when i is None)
        if self.all:
            return
        all = False
        if i == None:
            blockstart = 0
            blocksize = (65536 / self.s) * self.s
            end = self.datasize
            all = True
        else:
            if end:
                blockstart = i * self.s
                end = end * self.s
                blocksize = end - blockstart
            else:
                # round down to a 1024-entry block boundary
                blockstart = (i & ~1023) * self.s
                blocksize = self.s * 1024
                end = blockstart + blocksize
        while blockstart < end:
            self.loadblock(blockstart, blocksize)
            blockstart += blocksize
        if all:
            self.all = True
247 247
class lazyindex(object):
    """a lazy version of the index array

    Entries live in parser.index as raw strings (or None when not yet
    read); they are unpacked on access and repacked on assignment.
    """
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def load(self, pos):
        # support negative indexing, like a real list
        if pos < 0:
            pos += len(self.p.index)
        self.p.loadindex(pos)
        return self.p.index[pos]
    def __getitem__(self, pos):
        # a false entry means "not loaded yet"; pull it in on demand
        raw = self.p.index[pos] or self.load(pos)
        return _unpack(indexformatng, raw)
    def __setitem__(self, pos, item):
        self.p.index[pos] = _pack(indexformatng, *item)
    def __delitem__(self, pos):
        del self.p.index[pos]
    def insert(self, pos, e):
        self.p.index.insert(pos, _pack(indexformatng, *e))
    def append(self, e):
        self.p.index.append(_pack(indexformatng, *e))
269 269
class lazymap(object):
    """a lazy version of the node map

    Lookups fall back to searching the index file via the parser,
    which populates parser.map as a side effect.
    """
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        # findnode records the hit in self.p.map; None means absent
        n = self.p.findnode(key)
        if n is None:
            raise KeyError(key)
    def __contains__(self, key):
        if key in self.p.map:
            return True
        self.p.loadmap()
        return key in self.p.map
    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            ret = self.p.index[i]
            if not ret:
                # entry not parsed yet; load it from the index file
                self.p.loadindex(i)
                ret = self.p.index[i]
            if isinstance(ret, str):
                ret = _unpack(indexformatng, ret)
            yield ret[7]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
    def __delitem__(self, key):
        del self.p.map[key]
306 306
# original (version 0) index format: offset, size, base and linkrev as
# 32-bit big-endian ints, then 20-byte parent 1, parent 2 and node hashes
indexformatv0 = ">4l20s20s20s"
# byte offset of the nodeid within a packed v0 index entry
v0shaoffset = 56
309 309
class revlogoldio(object):
    """read/write support for the original (version 0) revlog format"""
    def __init__(self):
        # v0 index entries are fixed-size records
        self.size = struct.calcsize(indexformatv0)

    def parseindex(self, fp, inline):
        """parse fp into an (index, nodemap, chunkcache) triple"""
        entrysize = self.size
        index = []
        nodemap = {nullid: nullrev}
        data = fp.read()
        datalen = len(data)
        rev = 0
        pos = 0
        while pos + entrysize <= datalen:
            e = _unpack(indexformatv0, data[pos:pos + entrysize])
            pos += entrysize
            # transform to revlogv1 format: parents are stored as
            # revision numbers and the uncompressed length is unknown (-1)
            entry = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
                     nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev),
                     e[6])
            index.append(entry)
            nodemap[e[6]] = rev
            rev += 1

        # v0 never stores data inline, so there is no chunk cache
        return index, nodemap, None

    def packentry(self, entry, node, version, rev):
        """pack a v1-style entry tuple back into the v0 wire format"""
        # map parent revisions back to nodes; drop the ng-only fields
        e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
              node(entry[5]), node(entry[6]), entry[7])
        return _pack(indexformatv0, *e2)
338 338
# index ng:
# 6 bytes offset
# 2 bytes flags
# 4 bytes compressed length
# 4 bytes uncompressed length
# 4 bytes: base rev
# 4 bytes link rev
# 4 bytes parent 1 rev
# 4 bytes parent 2 rev
# 32 bytes: nodeid
indexformatng = ">Qiiiiii20s12x"
# byte offset of the nodeid within a packed ng index entry
ngshaoffset = 32
# the first entry's offset field doubles as the revlog version header
versionformat = ">I"
352 352
class revlogio(object):
    """read/write support for the revlogng (version 1) index format"""
    def __init__(self):
        # fixed size of one packed revlogng index entry
        self.size = struct.calcsize(indexformatng)

    def parseindex(self, fp, inline):
        """parse fp into an (index, nodemap, chunkcache) triple

        Large non-inline indexes are parsed lazily via lazyparser.
        """
        try:
            size = util.fstat(fp).st_size
        except AttributeError:
            # not a real file object; fall through to eager parsing
            size = 0

        if util.openhardlinks() and not inline and size > 1000000:
            # big index, let's parse it on demand
            parser = lazyparser(fp, size)
            index = lazyindex(parser)
            nodemap = lazymap(parser)
            # the first entry's offset field holds the version; clear it
            e = list(index[0])
            type = gettype(e[0])
            e[0] = offset_type(0, type)
            index[0] = e
            return index, nodemap, None

        s = self.size
        cache = None
        index = []
        nodemap = {nullid: nullrev}
        n = off = 0
        # if we're not using lazymap, always read the whole index
        data = fp.read()
        l = len(data) - s
        append = index.append
        if inline:
            cache = (0, data)
            while off <= l:
                e = _unpack(indexformatng, data[off:off + s])
                nodemap[e[7]] = n
                append(e)
                n += 1
                if e[1] < 0:
                    break
                # inline: each entry is followed by its compressed data
                off += e[1] + s
        else:
            while off <= l:
                e = _unpack(indexformatng, data[off:off + s])
                nodemap[e[7]] = n
                append(e)
                n += 1
                off += s

        # the first entry's offset field holds the version; clear it
        e = list(index[0])
        type = gettype(e[0])
        e[0] = offset_type(0, type)
        index[0] = e

        return index, nodemap, cache

    def packentry(self, entry, node, version, rev):
        """pack an index entry; rev 0 carries the version header"""
        p = _pack(indexformatng, *entry)
        if rev == 0:
            p = _pack(versionformat, version) + p[4:]
        return p
413 413
414 414 class revlog(object):
415 415 """
416 416 the underlying revision storage object
417 417
418 418 A revlog consists of two parts, an index and the revision data.
419 419
420 420 The index is a file with a fixed record size containing
421 421 information on each revision, includings its nodeid (hash), the
422 422 nodeids of its parents, the position and offset of its data within
423 423 the data file, and the revision it's based on. Finally, each entry
424 424 contains a linkrev entry that can serve as a pointer to external
425 425 data.
426 426
427 427 The revision data itself is a linear collection of data chunks.
428 428 Each chunk represents a revision and is usually represented as a
429 429 delta against the previous chunk. To bound lookup time, runs of
430 430 deltas are limited to about 2 times the length of the original
431 431 version data. This makes retrieval of a version proportional to
432 432 its size, or O(1) relative to the number of revisions.
433 433
434 434 Both pieces of the revlog are written to in an append-only
435 435 fashion, which means we never need to rewrite a file to insert or
436 436 remove data, and can use some simple techniques to avoid the need
437 437 for locking while reading.
438 438 """
    def __init__(self, opener, indexfile):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        # the data file lives next to the index: foo.i -> foo.d
        self.datafile = indexfile[:-2] + ".d"
        self.opener = opener
        # most recently decoded revision as (node, rev, text)
        self._cache = None
        # window of raw data as (offset, data), or None
        self._chunkcache = None
        self.nodemap = {nullid: nullrev}
        self.index = []

        v = REVLOG_DEFAULT_VERSION
        if hasattr(opener, "defversion"):
            v = opener.defversion
        if v & REVLOGNG:
            v |= REVLOGNGINLINEDATA

        i = ""
        try:
            f = self.opener(self.indexfile)
            i = f.read(4)
            f.seek(0)
            if len(i) > 0:
                # an existing index determines the version, overriding
                # the opener's default
                v = struct.unpack(versionformat, i)[0]
        except IOError, inst:
            # a missing index just means an empty revlog
            if inst.errno != errno.ENOENT:
                raise

        self.version = v
        self._inline = v & REVLOGNGINLINEDATA
        flags = v & ~0xFFFF
        fmt = v & 0xFFFF
        if fmt == REVLOGV0 and flags:
            raise RevlogError(_("index %s unknown flags %#04x for format v0")
                              % (self.indexfile, flags >> 16))
        elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
            raise RevlogError(_("index %s unknown flags %#04x for revlogng")
                              % (self.indexfile, flags >> 16))
        elif fmt > REVLOGNG:
            raise RevlogError(_("index %s unknown format %d")
                              % (self.indexfile, fmt))

        self._io = revlogio()
        if self.version == REVLOGV0:
            self._io = revlogoldio()
        if i:
            d = self._io.parseindex(f, self._inline)
            self.index, self.nodemap, self._chunkcache = d

        # add the magic null revision at -1
        self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
494 494
495 495 def _loadindex(self, start, end):
496 496 """load a block of indexes all at once from the lazy parser"""
497 497 if isinstance(self.index, lazyindex):
498 498 self.index.p.loadindex(start, end)
499 499
500 500 def _loadindexmap(self):
501 501 """loads both the map and the index from the lazy parser"""
502 502 if isinstance(self.index, lazyindex):
503 503 p = self.index.p
504 504 p.loadindex()
505 505 self.nodemap = p.map
506 506
507 507 def _loadmap(self):
508 508 """loads the map from the lazy parser"""
509 509 if isinstance(self.nodemap, lazymap):
510 510 self.nodemap.p.loadmap()
511 511 self.nodemap = self.nodemap.p.map
512 512
513 513 def tip(self):
514 514 return self.node(len(self.index) - 2)
515 515 def count(self):
516 516 return len(self.index) - 1
517 517
518 518 def rev(self, node):
519 519 try:
520 520 return self.nodemap[node]
521 521 except KeyError:
522 522 raise LookupError(hex(node), _('%s: no node %s') % (self.indexfile, hex(node)))
523 523 def node(self, rev):
524 524 return self.index[rev][7]
525 525 def linkrev(self, node):
526 526 return self.index[self.rev(node)][4]
527 527 def parents(self, node):
528 528 d = self.index[self.rev(node)][5:7]
529 529 return (self.node(d[0]), self.node(d[1]))
530 530 def parentrevs(self, rev):
531 531 return self.index[rev][5:7]
532 532 def start(self, rev):
533 533 return int(self.index[rev][0] >> 16)
534 534 def end(self, rev):
535 535 return self.start(rev) + self.length(rev)
536 536 def length(self, rev):
537 537 return self.index[rev][1]
538 538 def base(self, rev):
539 539 return self.index[rev][3]
540 540
541 541 def size(self, rev):
542 542 """return the length of the uncompressed text for a given revision"""
543 543 l = self.index[rev][2]
544 544 if l >= 0:
545 545 return l
546 546
547 547 t = self.revision(self.node(rev))
548 548 return len(t)
549 549
550 550 # alternate implementation, The advantage to this code is it
551 551 # will be faster for a single revision. But, the results are not
552 552 # cached, so finding the size of every revision will be slower.
553 553 """
554 554 if self.cache and self.cache[1] == rev:
555 555 return len(self.cache[2])
556 556
557 557 base = self.base(rev)
558 558 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
559 559 base = self.cache[1]
560 560 text = self.cache[2]
561 561 else:
562 562 text = self.revision(self.node(base))
563 563
564 564 l = len(text)
565 565 for x in xrange(base + 1, rev + 1):
566 566 l = mdiff.patchedsize(l, self.chunk(x))
567 567 return l
568 568 """
569 569
570 570 def reachable(self, node, stop=None):
571 571 """return a hash of all nodes ancestral to a given node, including
572 572 the node itself, stopping when stop is matched"""
573 573 reachable = {}
574 574 visit = [node]
575 575 reachable[node] = 1
576 576 if stop:
577 577 stopn = self.rev(stop)
578 578 else:
579 579 stopn = 0
580 580 while visit:
581 581 n = visit.pop(0)
582 582 if n == stop:
583 583 continue
584 584 if n == nullid:
585 585 continue
586 586 for p in self.parents(n):
587 587 if self.rev(p) < stopn:
588 588 continue
589 589 if p not in reachable:
590 590 reachable[p] = 1
591 591 visit.append(p)
592 592 return reachable
593 593
    def nodesbetween(self, roots=None, heads=None):
        """Return a tuple containing three elements. Elements 1 and 2 contain
        a final list bases and heads after all the unreachable ones have been
        pruned.  Element 0 contains a topologically sorted list of all

        nodes that satisfy these constraints:
        1. All nodes must be descended from a node in roots (the nodes on
           roots are considered descended from themselves).
        2. All nodes must also be ancestors of a node in heads (the nodes in
           heads are considered to be their own ancestors).

        If roots is unspecified, nullid is assumed as the only root.
        If heads is unspecified, it is taken to be the output of the
        heads method (i.e. a list of all nodes in the repository that
        have no children)."""
        nonodes = ([], [], [])
        if roots is not None:
            roots = list(roots)
            if not roots:
                return nonodes
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [nullid] # Everybody's a descendent of nullid
            lowestrev = nullrev
        if (lowestrev == nullrev) and (heads is None):
            # We want _all_ the nodes!
            return ([self.node(r) for r in xrange(0, self.count())],
                    [nullid], list(self.heads()))
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = self.count() - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            heads = list(heads)
            if not heads:
                return nonodes
            ancestors = {}
            # Turn heads into a dictionary so we can remove 'fake' heads.
            # Also, later we will be using it to filter out the heads we can't
            # find from roots.
            heads = dict.fromkeys(heads, 0)
            # Start at the top and keep marking parents until we're done.
            nodestotag = heads.keys()
            # Remember where the top was so we can use it as a limit later.
            highestrev = max([self.rev(n) for n in nodestotag])
            while nodestotag:
                # grab a node to tag
                n = nodestotag.pop()
                # Never tag nullid
                if n == nullid:
                    continue
                # A node's revision number represents its place in a
                # topologically sorted list of nodes.
                r = self.rev(n)
                if r >= lowestrev:
                    if n not in ancestors:
                        # If we are possibly a descendent of one of the roots
                        # and we haven't already been marked as an ancestor
                        ancestors[n] = 1 # Mark as ancestor
                        # Add non-nullid parents to list of nodes to tag.
                        nodestotag.extend([p for p in self.parents(n) if
                                           p != nullid])
                    elif n in heads: # We've seen it before, is it a fake head?
                        # So it is, real heads should not be the ancestors of
                        # any other heads.
                        heads.pop(n)
            if not ancestors:
                return nonodes
            # Now that we have our set of ancestors, we want to remove any
            # roots that are not ancestors.

            # If one of the roots was nullid, everything is included anyway.
            if lowestrev > nullrev:
                # But, since we weren't, let's recompute the lowest rev to not
                # include roots that aren't ancestors.

                # Filter out roots that aren't ancestors of heads
                roots = [n for n in roots if n in ancestors]
                # Recompute the lowest revision
                if roots:
                    lowestrev = min([self.rev(n) for n in roots])
                else:
                    # No more roots?  Return empty list
                    return nonodes
            else:
                # We are descending from nullid, and don't need to care about
                # any other roots.
                lowestrev = nullrev
                roots = [nullid]
        # Transform our roots list into a 'set' (i.e. a dictionary where the
        # values don't matter.
        descendents = dict.fromkeys(roots, 1)
        # Also, keep the original roots so we can filter out roots that aren't
        # 'real' roots (i.e. are descended from other roots).
        roots = descendents.copy()
        # Our topologically sorted list of output nodes.
        orderedout = []
        # Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descedents, empty parents will look like
        # they're descendents.
        for r in xrange(max(lowestrev, 0), highestrev + 1):
            n = self.node(r)
            isdescendent = False
            if lowestrev == nullrev:  # Everybody is a descendent of nullid
                isdescendent = True
            elif n in descendents:
                # n is already a descendent
                isdescendent = True
                # This check only needs to be done here because all the roots
                # will start being marked is descendents before the loop.
                if n in roots:
                    # If n was a root, check if it's a 'real' root.
                    p = tuple(self.parents(n))
                    # If any of its parents are descendents, it's not a root.
                    if (p[0] in descendents) or (p[1] in descendents):
                        roots.pop(n)
            else:
                p = tuple(self.parents(n))
                # A node is a descendent if either of its parents are
                # descendents.  (We seeded the dependents list with the roots
                # up there, remember?)
                if (p[0] in descendents) or (p[1] in descendents):
                    descendents[n] = 1
                    isdescendent = True
            if isdescendent and ((ancestors is None) or (n in ancestors)):
                # Only include nodes that are both descendents and ancestors.
                orderedout.append(n)
                if (ancestors is not None) and (n in heads):
                    # We're trying to figure out which heads are reachable
                    # from roots.
                    # Mark this head as having been reached
                    heads[n] = 1
                elif ancestors is None:
                    # Otherwise, we're trying to discover the heads.
                    # Assume this is a head because if it isn't, the next step
                    # will eventually remove it.
                    heads[n] = 1
                    # But, obviously its parents aren't.
                    for p in self.parents(n):
                        heads.pop(p, None)
        heads = [n for n in heads.iterkeys() if heads[n] != 0]
        roots = roots.keys()
        assert orderedout
        assert roots
        assert heads
        return (orderedout, roots, heads)
744 744
745 745 def heads(self, start=None, stop=None):
746 746 """return the list of all nodes that have no children
747 747
748 748 if start is specified, only heads that are descendants of
749 749 start will be returned
750 750 if stop is specified, it will consider all the revs from stop
751 751 as if they had no children
752 752 """
753 753 if start is None and stop is None:
754 754 count = self.count()
755 755 if not count:
756 756 return [nullid]
757 757 ishead = [1] * (count + 1)
758 758 index = self.index
759 759 for r in xrange(count):
760 760 e = index[r]
761 761 ishead[e[5]] = ishead[e[6]] = 0
762 762 return [self.node(r) for r in xrange(count) if ishead[r]]
763 763
764 764 if start is None:
765 765 start = nullid
766 766 if stop is None:
767 767 stop = []
768 768 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
769 769 startrev = self.rev(start)
770 770 reachable = {startrev: 1}
771 771 heads = {startrev: 1}
772 772
773 773 parentrevs = self.parentrevs
774 774 for r in xrange(startrev + 1, self.count()):
775 775 for p in parentrevs(r):
776 776 if p in reachable:
777 777 if r not in stoprevs:
778 778 reachable[r] = 1
779 779 heads[r] = 1
780 780 if p in heads and p not in stoprevs:
781 781 del heads[p]
782 782
783 783 return [self.node(r) for r in heads]
784 784
785 785 def children(self, node):
786 786 """find the children of a given node"""
787 787 c = []
788 788 p = self.rev(node)
789 789 for r in range(p + 1, self.count()):
790 790 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
791 791 if prevs:
792 792 for pr in prevs:
793 793 if pr == p:
794 794 c.append(self.node(r))
795 795 elif p == nullrev:
796 796 c.append(self.node(r))
797 797 return c
798 798
799 799 def _match(self, id):
800 800 if isinstance(id, (long, int)):
801 801 # rev
802 802 return self.node(id)
803 803 if len(id) == 20:
804 804 # possibly a binary node
805 805 # odds of a binary node being all hex in ASCII are 1 in 10**25
806 806 try:
807 807 node = id
808 808 r = self.rev(node) # quick search the index
809 809 return node
810 810 except LookupError:
811 811 pass # may be partial hex id
812 812 try:
813 813 # str(rev)
814 814 rev = int(id)
815 815 if str(rev) != id:
816 816 raise ValueError
817 817 if rev < 0:
818 818 rev = self.count() + rev
819 819 if rev < 0 or rev >= self.count():
820 820 raise ValueError
821 821 return self.node(rev)
822 822 except (ValueError, OverflowError):
823 823 pass
824 824 if len(id) == 40:
825 825 try:
826 826 # a full hex nodeid?
827 827 node = bin(id)
828 828 r = self.rev(node)
829 829 return node
830 830 except TypeError:
831 831 pass
832 832
833 833 def _partialmatch(self, id):
834 834 if len(id) < 40:
835 835 try:
836 836 # hex(node)[:...]
837 837 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
838 838 node = None
839 839 for n in self.nodemap:
840 840 if n.startswith(bin_id) and hex(n).startswith(id):
841 841 if node is not None:
842 842 raise LookupError(hex(node),
843 843 _("Ambiguous identifier"))
844 844 node = n
845 845 if node is not None:
846 846 return node
847 847 except TypeError:
848 848 pass
849 849
850 850 def lookup(self, id):
851 851 """locate a node based on:
852 852 - revision number or str(revision number)
853 853 - nodeid or subset of hex nodeid
854 854 """
855 855 n = self._match(id)
856 856 if n is not None:
857 857 return n
858 858 n = self._partialmatch(id)
859 859 if n:
860 860 return n
861 861
862 862 raise LookupError(id, _("No match found"))
863 863
864 864 def cmp(self, node, text):
865 865 """compare text with a given file revision"""
866 866 p1, p2 = self.parents(node)
867 867 return hash(text, p1, p2) != node
868 868
    def chunk(self, rev, df=None):
        """return the decompressed data chunk for a revision

        df, when given, is an already-open data (or inline index) file.
        """
        def loadcache(df):
            # refill self._chunkcache with a window starting at `start`
            # of `cache_length` bytes (both closed over from below)
            if not df:
                if self._inline:
                    df = self.opener(self.indexfile)
                else:
                    df = self.opener(self.datafile)
            df.seek(start)
            self._chunkcache = (start, df.read(cache_length))

        start, length = self.start(rev), self.length(rev)
        if self._inline:
            # inline revlogs interleave an index entry before each chunk
            start += (rev + 1) * self._io.size
        end = start + length

        offset = 0
        if not self._chunkcache:
            cache_length = max(65536, length)
            loadcache(df)
        else:
            cache_start = self._chunkcache[0]
            cache_length = len(self._chunkcache[1])
            cache_end = cache_start + cache_length
            if start >= cache_start and end <= cache_end:
                # it is cached
                offset = start - cache_start
            else:
                cache_length = max(65536, length)
                loadcache(df)

        # avoid copying large chunks
        c = self._chunkcache[1]
        if cache_length != length:
            c = c[offset:offset + length]

        return decompress(c)
905 905
906 906 def delta(self, node):
907 907 """return or calculate a delta between a node and its predecessor"""
908 908 r = self.rev(node)
909 909 return self.revdiff(r - 1, r)
910 910
911 911 def revdiff(self, rev1, rev2):
912 912 """return or calculate a delta between two revisions"""
913 913 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
914 914 return self.chunk(rev2)
915 915
916 916 return mdiff.textdiff(self.revision(self.node(rev1)),
917 917 self.revision(self.node(rev2)))
918 918
    def revision(self, node):
        """return the uncompressed text of a given node"""
        if node == nullid:
            return ""
        if self._cache and self._cache[0] == node:
            return str(self._cache[2])

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)

        # check rev flags
        if self.index[rev][0] & 0xFFFF:
            raise RevlogError(_('incompatible revision flag %x') %
                              (self.index[rev][0] & 0xFFFF))

        df = None

        # do we have useful data cached?
        if self._cache and self._cache[1] >= base and self._cache[1] < rev:
            # start patching from the cached text instead of the chain base
            base = self._cache[1]
            text = str(self._cache[2])
            self._loadindex(base, rev + 1)
            if not self._inline and rev > base + 1:
                df = self.opener(self.datafile)
        else:
            self._loadindex(base, rev + 1)
            if not self._inline and rev > base:
                df = self.opener(self.datafile)
            text = self.chunk(base, df=df)

        # apply the chain of deltas on top of the base text
        bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
        text = mdiff.patches(text, bins)
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self._cache = (node, rev, text)
        return text
960 960
961 961 def checkinlinesize(self, tr, fp=None):
962 962 if not self._inline:
963 963 return
964 964 if not fp:
965 965 fp = self.opener(self.indexfile, 'r')
966 966 fp.seek(0, 2)
967 967 size = fp.tell()
968 968 if size < 131072:
969 969 return
970 970 trinfo = tr.find(self.indexfile)
971 971 if trinfo == None:
972 972 raise RevlogError(_("%s not found in the transaction")
973 973 % self.indexfile)
974 974
975 975 trindex = trinfo[2]
976 976 dataoff = self.start(trindex)
977 977
978 978 tr.add(self.datafile, dataoff)
979 979 df = self.opener(self.datafile, 'w')
980 980 calc = self._io.size
981 981 for r in xrange(self.count()):
982 982 start = self.start(r) + (r + 1) * calc
983 983 length = self.length(r)
984 984 fp.seek(start)
985 985 d = fp.read(length)
986 986 df.write(d)
987 987 fp.close()
988 988 df.close()
989 989 fp = self.opener(self.indexfile, 'w', atomictemp=True)
990 990 self.version &= ~(REVLOGNGINLINEDATA)
991 991 self._inline = False
992 992 for i in xrange(self.count()):
993 993 e = self._io.packentry(self.index[i], self.node, self.version, i)
994 994 fp.write(e)
995 995
996 996 # if we don't call rename, the temp file will never replace the
997 997 # real index
998 998 fp.rename()
999 999
1000 1000 tr.replace(self.indexfile, trindex * calc)
1001 1001 self._chunkcache = None
1002 1002
1003 1003 def addrevision(self, text, transaction, link, p1, p2, d=None):
1004 1004 """add a revision to the log
1005 1005
1006 1006 text - the revision data to add
1007 1007 transaction - the transaction object used for rollback
1008 1008 link - the linkrev data to add
1009 1009 p1, p2 - the parent nodeids of the revision
1010 1010 d - an optional precomputed delta
1011 1011 """
1012 1012 dfh = None
1013 1013 if not self._inline:
1014 1014 dfh = self.opener(self.datafile, "a")
1015 1015 ifh = self.opener(self.indexfile, "a+")
1016 1016 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1017 1017
    def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
        """internal worker for addrevision/addgroup

        ifh and dfh are already-open index and data file handles; dfh
        is None for inline revlogs.  Returns the node of the revision.
        """
        node = hash(text, p1, p2)
        if node in self.nodemap:
            # already stored; adding the same revision is a no-op
            return node

        curr = self.count()
        prev = curr - 1
        base = self.base(prev)
        offset = self.end(prev)

        if curr:
            if not d:
                ptext = self.revision(self.node(prev))
                d = mdiff.textdiff(ptext, text)
            data = compress(d)
            l = len(data[1]) + len(data[0])
            dist = l + offset - self.start(base)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not curr or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = curr

        e = (offset_type(offset, 0), l, len(text),
             base, link, self.rev(p1), self.rev(p2), node)
        # insert before the magic null entry at the end of the index
        self.index.insert(-1, e)
        self.nodemap[node] = curr

        entry = self._io.packentry(e, self.node, self.version, curr)
        if not self._inline:
            transaction.add(self.datafile, offset)
            transaction.add(self.indexfile, curr * len(entry))
            if data[0]:
                dfh.write(data[0])
            dfh.write(data[1])
            dfh.flush()
            ifh.write(entry)
        else:
            # inline: index entry and data are interleaved in one file
            offset += curr * self._io.size
            transaction.add(self.indexfile, offset, curr)
            ifh.write(entry)
            ifh.write(data[0])
            ifh.write(data[1])
            self.checkinlinesize(transaction, ifh)

        self._cache = (node, curr, text)
        return node
1067 1067
1068 1068 def ancestor(self, a, b):
1069 1069 """calculate the least common ancestor of nodes a and b"""
1070 1070
1071 1071 def parents(rev):
1072 1072 return [p for p in self.parentrevs(rev) if p != nullrev]
1073 1073
1074 1074 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1075 1075 if c is None:
1076 1076 return nullid
1077 1077
1078 1078 return self.node(c)
1079 1079
    def group(self, nodelist, lookup, infocollect=None):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. the first delta is
        parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        have this parent as it has all history before these
        changesets. parent is parent[0]
        """
        revs = [self.rev(n) for n in nodelist]

        # if we don't have any revisions touched by these changesets, bail
        if not revs:
            yield changegroup.closechunk()
            return

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # build deltas
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            nb = self.node(b)

            if infocollect is not None:
                infocollect(nb)

            p = self.parents(nb)
            meta = nb + p[0] + p[1] + lookup(nb)
            if a == -1:
                # no common base: ship the full text behind a trivial
                # diff header
                d = self.revision(nb)
                meta += mdiff.trivialdiffheader(len(d))
            else:
                d = self.revdiff(a, b)
            yield changegroup.chunkheader(len(meta) + len(d))
            yield meta
            if len(d) > 2**20:
                # stream very large deltas in 256k pieces
                pos = 0
                while pos < len(d):
                    pos2 = pos + 2 ** 18
                    yield d[pos:pos2]
                    pos = pos2
            else:
                yield d

        yield changegroup.closechunk()
1127 1127
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.

        revs: iterable of changegroup chunks; each chunk starts with an
          80-byte header (node, p1, p2, cs - four 20-byte hashes) followed
          by the delta payload
        linkmapper: maps the changeset node (cs) to its link revision
        transaction: journal that records file growth so a failed pull
          can be rolled back
        unique: historical flag; duplicate nodes are currently skipped
          silently (see the commented-out check below)

        Returns the node of the last revision processed (None if revs
        is empty).
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1
        node = None

        base = prev = nullrev
        start = end = textlen = 0
        if r:
            end = self.end(t)

        # index is opened read/append; data file only when not inline
        ifh = self.opener(self.indexfile, "a+")
        isize = r * self._io.size
        if self._inline:
            transaction.add(self.indexfile, end + isize, r)
            dfh = None
        else:
            transaction.add(self.indexfile, isize, r)
            transaction.add(self.datafile, end)
            dfh = self.opener(self.datafile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #    raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            # buffer() gives a zero-copy view of the delta payload
            delta = buffer(chunk, 80)
            del chunk

            for p in (p1, p2):
                if not p in self.nodemap:
                    raise LookupError(hex(p), _("unknown parent %s") % short(p))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise LookupError(hex(chain), _("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)
                cdeltalen = len(cdelta[0]) + len(cdelta[1])
                textlen = mdiff.patchedsize(textlen, delta)

            if chain != prev or (end - start + cdeltalen) > textlen * 2:
                # flush our writes here so we can read it in revision
                if dfh:
                    dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                if len(text) == 0:
                    # skip over trivial delta header
                    text = buffer(delta, 12)
                else:
                    text = mdiff.patches(text, [delta])
                del delta
                chk = self._addrevision(text, transaction, link, p1, p2, None,
                                        ifh, dfh)
                if not dfh and not self._inline:
                    # addrevision switched from inline to conventional
                    # reopen the index
                    dfh = self.opener(self.datafile, "a")
                    ifh = self.opener(self.indexfile, "a")
                if chk != node:
                    raise RevlogError(_("consistency error adding group"))
                textlen = len(text)
            else:
                # cheap path: store the compressed delta directly and
                # append the index entry ourselves
                e = (offset_type(end, 0), cdeltalen, textlen, base,
                     link, self.rev(p1), self.rev(p2), node)
                # insert before the trailing sentinel entry of the index
                self.index.insert(-1, e)
                self.nodemap[node] = r
                entry = self._io.packentry(e, self.node, self.version, r)
                if self._inline:
                    ifh.write(entry)
                    ifh.write(cdelta[0])
                    ifh.write(cdelta[1])
                    # may convert this revlog to non-inline if it grew too big
                    self.checkinlinesize(transaction, ifh)
                    if not self._inline:
                        dfh = self.opener(self.datafile, "a")
                        ifh = self.opener(self.indexfile, "a")
                else:
                    dfh.write(cdelta[0])
                    dfh.write(cdelta[1])
                    ifh.write(entry)

            # advance: the revision just added becomes the new chain tip
            t, r, chain, prev = r, r + 1, node, node
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)

        return node
1239 1239
    def strip(self, minlink):
        """truncate the revlog on the first revision with a linkrev >= minlink

        This function is called when we're stripping revision minlink and
        its descendants from the repository.

        We have to remove all revisions with linkrev >= minlink, because
        the equivalent changelog revisions will be renumbered after the
        strip.

        So we truncate the revlog on the first of these revisions, and
        trust that the caller has saved the revisions that shouldn't be
        removed and that it'll readd them after this truncation.
        """
        if self.count() == 0:
            return

        # lazyindex cannot be sliced/deleted; force a full load first
        if isinstance(self.index, lazyindex):
            self._loadindexmap()

        # find the first revision to drop (index field 4 is the linkrev)
        for rev in xrange(0, self.count()):
            if self.index[rev][4] >= minlink:
                break
        else:
            return

        # first truncate the files on disk
        end = self.start(rev)
        if not self._inline:
            df = self.opener(self.datafile, "a")
            df.truncate(end)
            end = rev * self._io.size
        else:
            # inline: index entries and data are interleaved in one file
            end += rev * self._io.size

        indexf = self.opener(self.indexfile, "a")
        indexf.truncate(end)

        # then reset internal state in memory to forget those revisions
        self._cache = None
        self._chunkcache = None
        for x in xrange(rev, self.count()):
            del self.nodemap[self.node(x)]

        # slice stops at -1 to keep the trailing sentinel entry of the
        # index (cf. addgroup's insert(-1, ...))
        del self.index[rev:-1]
1285 1285
1286 1286 def checksize(self):
1287 1287 expected = 0
1288 1288 if self.count():
1289 1289 expected = max(0, self.end(self.count() - 1))
1290 1290
1291 1291 try:
1292 1292 f = self.opener(self.datafile)
1293 1293 f.seek(0, 2)
1294 1294 actual = f.tell()
1295 1295 dd = actual - expected
1296 1296 except IOError, inst:
1297 1297 if inst.errno != errno.ENOENT:
1298 1298 raise
1299 1299 dd = 0
1300 1300
1301 1301 try:
1302 1302 f = self.opener(self.indexfile)
1303 1303 f.seek(0, 2)
1304 1304 actual = f.tell()
1305 1305 s = self._io.size
1306 1306 i = max(0, actual / s)
1307 1307 di = actual - (i * s)
1308 1308 if self._inline:
1309 1309 databytes = 0
1310 1310 for r in xrange(self.count()):
1311 1311 databytes += max(0, self.length(r))
1312 1312 dd = 0
1313 1313 di = actual - self.count() * s - databytes
1314 1314 except IOError, inst:
1315 1315 if inst.errno != errno.ENOENT:
1316 1316 raise
1317 1317 di = 0
1318 1318
1319 1319 return (dd, di)
@@ -1,238 +1,238 b''
1 1 # sshrepo.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
9 from remoterepo import *
8 from node import bin, hex
9 from remoterepo import remotelock, remoterepository
10 10 from i18n import _
11 11 import repo, os, re, stat, util
12 12
class sshrepository(remoterepository):
    """Proxy repository that talks to a remote hg over an ssh pipe.

    The remote side runs "hg -R <path> serve --stdio"; commands and
    replies travel over the child process's stdin/stdout, with stderr
    relayed to the local ui.
    """

    def __init__(self, ui, path, create=0):
        # path is an ssh:// URL; create=1 runs "hg init" remotely first
        self._url = path
        self.ui = ui

        m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
        if not m:
            self.raise_(repo.RepoError(_("couldn't parse location %s") % path))

        self.user = m.group(2)          # optional "user@" component
        self.host = m.group(3)
        self.port = m.group(5)          # optional ":port" component
        self.path = m.group(7) or "."   # default to remote home directory

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")

        args = util.sshargs(sshcmd, self.host, self.user, self.port)

        if create:
            cmd = '%s %s "%s init %s"'
            cmd = cmd % (sshcmd, args, remotecmd, self.path)

            ui.note('running %s\n' % cmd)
            res = util.system(cmd)
            if res != 0:
                self.raise_(repo.RepoError(_("could not create remote repo")))

        self.validate_repo(ui, sshcmd, args, remotecmd)

    def url(self):
        """Return the ssh:// URL this repository was opened with."""
        return self._url

    def validate_repo(self, ui, sshcmd, args, remotecmd):
        """Spawn the remote serve process and verify it speaks the protocol.

        Also collects the remote's advertised capability set into
        self.capabilities.  Raises RepoError via self.raise_ when no
        sensible response arrives.
        """
        # cleanup up previous run
        self.cleanup()

        cmd = '%s %s "%s -R %s serve --stdio"'
        cmd = cmd % (sshcmd, args, remotecmd, self.path)

        cmd = util.quotecommand(cmd)
        ui.note('running %s\n' % cmd)
        self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')

        # skip any noise generated by remote shell
        self.do_cmd("hello")
        r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
        lines = ["", "dummy"]
        max_noise = 500
        while lines[-1] and max_noise:
            l = r.readline()
            self.readerr()
            # "1\n" then a blank line is the expected "between" reply
            if lines[-1] == "1\n" and l == "\n":
                break
            if l:
                ui.debug(_("remote: "), l)
            lines.append(l)
            max_noise -= 1
        else:
            self.raise_(repo.RepoError(_("no suitable response from remote hg")))

        self.capabilities = util.set()
        # scan from the most recent line backwards for the capability list
        lines.reverse()
        for l in lines:
            if l.startswith("capabilities:"):
                self.capabilities.update(l[:-1].split(":")[1].split())
                break

    def readerr(self):
        """Drain any pending output on the remote's stderr to the local ui."""
        while 1:
            size = util.fstat(self.pipee).st_size
            if size == 0: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status(_("remote: "), l)

    def raise_(self, exception):
        """Tear down the connection, then raise *exception*."""
        self.cleanup()
        raise exception

    def cleanup(self):
        """Close all pipes to the remote process; errors are ignored."""
        try:
            self.pipeo.close()
            self.pipei.close()
            # read the error descriptor until EOF
            for l in self.pipee:
                self.ui.status(_("remote: "), l)
            self.pipee.close()
        except:
            pass

    # make sure the pipes are closed when the proxy is garbage-collected
    __del__ = cleanup

    def do_cmd(self, cmd, **args):
        """Send *cmd* with length-prefixed keyword arguments.

        Returns the input pipe so the caller can read the reply.
        """
        self.ui.debug(_("sending %s command\n") % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            # each argument is "<name> <length>\n" followed by raw bytes
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        """Send *cmd* and return its length-prefixed response body."""
        self.do_cmd(cmd, **args)
        return self._recv()

    def _recv(self):
        """Read one length-prefixed response frame from the remote."""
        l = self.pipei.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
        return self.pipei.read(l)

    def _send(self, data, flush=False):
        """Write one length-prefixed frame to the remote."""
        self.pipeo.write("%d\n" % len(data))
        if data:
            self.pipeo.write(data)
        if flush:
            self.pipeo.flush()
        self.readerr()

    def lock(self):
        """DEPRECATED remote locking; returns a remotelock handle."""
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        """DEPRECATED counterpart of lock()."""
        self.call("unlock")

    def lookup(self, key):
        """Resolve *key* to a binary node on the remote side."""
        self.requirecap('lookup', _('look up remote revision'))
        d = self.call("lookup", key=key)
        # reply is "<success> <hex-node-or-error>\n"
        success, data = d[:-1].split(" ", 1)
        if int(success):
            return bin(data)
        else:
            self.raise_(repo.RepoError(data))

    def heads(self):
        """Return the list of remote head nodes (binary)."""
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))

    def branches(self, nodes):
        """Return remote branch tuples for the given binary *nodes*."""
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))

    def between(self, pairs):
        """Return the nodes between each (top, bottom) pair of nodes."""
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))

    def changegroup(self, nodes, kind):
        """Return a stream of changegroup data rooted at *nodes*."""
        n = " ".join(map(hex, nodes))
        return self.do_cmd("changegroup", roots=n)

    def changegroupsubset(self, bases, heads, kind):
        """Return a changegroup stream limited to bases..heads."""
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = " ".join(map(hex, bases))
        heads = " ".join(map(hex, heads))
        return self.do_cmd("changegroupsubset", bases=bases, heads=heads)

    def unbundle(self, cg, heads, source):
        """Push changegroup stream *cg* to the remote with head checking."""
        d = self.call("unbundle", heads=' '.join(map(hex, heads)))
        if d:
            # remote may send "unsynced changes"
            self.raise_(repo.RepoError(_("push refused: %s") % d))

        # stream the bundle in length-prefixed 4k frames
        while 1:
            d = cg.read(4096)
            if not d:
                break
            self._send(d)

        # zero-length frame terminates the stream
        self._send("", flush=True)

        r = self._recv()
        if r:
            # remote may send "unsynced changes"
            self.raise_(repo.RepoError(_("push failed: %s") % r))

        r = self._recv()
        try:
            return int(r)
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))

    def addchangegroup(self, cg, source, url):
        """DEPRECATED push path: stream *cg* raw (requires remote lock)."""
        d = self.call("addchangegroup")
        if d:
            self.raise_(repo.RepoError(_("push refused: %s") % d))
        while 1:
            d = cg.read(4096)
            if not d:
                break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        r = self._recv()
        if not r:
            return 1
        try:
            return int(r)
        except:
            self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))

    def stream_out(self):
        """Return the remote's raw streaming-clone data pipe."""
        return self.do_cmd('stream_out')
237 237
# entry point used by hg.repository() to instantiate this scheme's class
instance = sshrepository
@@ -1,205 +1,205 b''
1 1 # sshserver.py - ssh protocol server support for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 from node import *
10 from node import bin, hex
11 11 import os, streamclone, sys, tempfile, util, hook
12 12
class sshserver(object):
    """Server side of the ssh wire protocol.

    Reads commands from stdin and writes length-prefixed replies to
    stdout; all diagnostic output is redirected to stderr so it cannot
    corrupt the protocol stream.
    """

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo
        self.lock = None
        self.fin = sys.stdin
        self.fout = sys.stdout

        # route hook output to stderr and keep stdout protocol-clean
        hook.redirect(True)
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(self.fin)
        util.set_binary(self.fout)

    def getarg(self):
        """Read one "<name> <length>\\n<bytes>" argument from the client."""
        argline = self.fin.readline()[:-1]
        arg, l = argline.split()
        val = self.fin.read(int(l))
        return arg, val

    def respond(self, v):
        """Send a length-prefixed reply frame to the client."""
        self.fout.write("%d\n" % len(v))
        self.fout.write(v)
        self.fout.flush()

    def serve_forever(self):
        """Process commands until the client closes the stream, then exit."""
        while self.serve_one(): pass
        sys.exit(0)

    def serve_one(self):
        """Dispatch one command line to its do_* handler.

        Unknown commands get an empty reply.  Returns False once the
        input stream is exhausted.
        """
        cmd = self.fin.readline()[:-1]
        if cmd:
            impl = getattr(self, 'do_' + cmd, None)
            if impl: impl()
            else: self.respond("")
        return cmd != ''

    def do_lookup(self):
        """Resolve a revision key; reply "<success> <hex-or-error>\\n"."""
        arg, key = self.getarg()
        assert arg == 'key'
        try:
            r = hex(self.repo.lookup(key))
            success = 1
        except Exception,inst:
            r = str(inst)
            success = 0
        self.respond("%s %s\n" % (success, r))

    def do_heads(self):
        """Reply with the space-separated hex heads of the repository."""
        h = self.repo.heads()
        self.respond(" ".join(map(hex, h)) + "\n")

    def do_hello(self):
        '''the hello command returns a set of lines describing various
        interesting things about the server, in an RFC822-like format.
        Currently the only one defined is "capabilities", which
        consists of a line in the form:

        capabilities: space separated list of tokens
        '''

        caps = ['unbundle', 'lookup', 'changegroupsubset']
        if self.ui.configbool('server', 'uncompressed'):
            caps.append('stream=%d' % self.repo.changelog.version)
        self.respond("capabilities: %s\n" % (' '.join(caps),))

    def do_lock(self):
        '''DEPRECATED - allowing remote client to lock repo is not safe'''

        self.lock = self.repo.lock()
        self.respond("")

    def do_unlock(self):
        '''DEPRECATED'''

        if self.lock:
            self.lock.release()
        self.lock = None
        self.respond("")

    def do_branches(self):
        """Reply with branch tuples for the client-supplied nodes."""
        arg, nodes = self.getarg()
        nodes = map(bin, nodes.split(" "))
        r = []
        for b in self.repo.branches(nodes):
            r.append(" ".join(map(hex, b)) + "\n")
        self.respond("".join(r))

    def do_between(self):
        """Reply with the nodes between each client-supplied node pair."""
        arg, pairs = self.getarg()
        pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
        r = []
        for b in self.repo.between(pairs):
            r.append(" ".join(map(hex, b)) + "\n")
        self.respond("".join(r))

    def do_changegroup(self):
        """Stream a changegroup for the client-supplied roots."""
        nodes = []
        arg, roots = self.getarg()
        nodes = map(bin, roots.split(" "))

        cg = self.repo.changegroup(nodes, 'serve')
        while True:
            d = cg.read(4096)
            if not d:
                break
            self.fout.write(d)

        self.fout.flush()

    def do_changegroupsubset(self):
        """Stream a changegroup limited to the given bases and heads."""
        bases = []
        heads = []
        argmap = dict([self.getarg(), self.getarg()])
        bases = [bin(n) for n in argmap['bases'].split(' ')]
        heads = [bin(n) for n in argmap['heads'].split(' ')]

        cg = self.repo.changegroupsubset(bases, heads, 'serve')
        while True:
            d = cg.read(4096)
            if not d:
                break
            self.fout.write(d)

        self.fout.flush()

    def do_addchangegroup(self):
        '''DEPRECATED'''

        # requires the client to have taken the (deprecated) remote lock
        if not self.lock:
            self.respond("not locked")
            return

        self.respond("")
        r = self.repo.addchangegroup(self.fin, 'serve', self.client_url())
        self.respond(str(r))

    def client_url(self):
        """Build a "remote:ssh:<ip>" source URL from SSH_CLIENT."""
        client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
        return 'remote:ssh:' + client

    def do_unbundle(self):
        """Receive a pushed bundle, with a two-phase unsynced-heads check."""
        their_heads = self.getarg()[1].split()

        def check_heads():
            heads = map(hex, self.repo.heads())
            return their_heads == [hex('force')] or their_heads == heads

        # fail early if possible
        if not check_heads():
            self.respond(_('unsynced changes'))
            return

        self.respond('')

        # write bundle data to temporary file because it can be big

        try:
            fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
            fp = os.fdopen(fd, 'wb+')

            count = int(self.fin.readline())
            while count:
                fp.write(self.fin.read(count))
                count = int(self.fin.readline())

            was_locked = self.lock is not None
            if not was_locked:
                self.lock = self.repo.lock()
            try:
                if not check_heads():
                    # someone else committed/pushed/unbundled while we
                    # were transferring data
                    self.respond(_('unsynced changes'))
                    return
                self.respond('')

                # push can proceed

                fp.seek(0)
                r = self.repo.addchangegroup(fp, 'serve', self.client_url())
                self.respond(str(r))
            finally:
                if not was_locked:
                    self.lock.release()
                    self.lock = None
        finally:
            fp.close()
            os.unlink(tempname)

    def do_stream_out(self):
        """Stream a raw clone of the repository to the client."""
        streamclone.stream_out(self.repo, self.fout)
@@ -1,274 +1,274 b''
1 1 # verify.py - repository integrity checking for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 from node import *
8 from node import nullid, short
9 9 from i18n import _
10 10 import revlog
11 11
def verify(repo):
    """Check the integrity of *repo* while holding the repository lock."""
    repo_lock = repo.lock()
    try:
        return _verify(repo)
    finally:
        del repo_lock
18 18
def _verify(repo):
    """Walk changelog, manifest and filelogs, reporting inconsistencies.

    Errors and warnings go to repo.ui; the counters below are closed
    over by the nested helpers (lists are used so the closures can
    mutate them).  Always returns 1.
    """
    filelinkrevs = {}       # filename -> list of changelog revs touching it
    filenodes = {}          # filename -> {filenode: manifest node}
    changesets = revisions = files = 0
    firstbad = [None]       # lowest damaged linkrev seen so far
    errors = [0]
    warnings = [0]
    neededmanifests = {}    # manifest node -> first changeset referencing it

    def err(linkrev, msg, filename=None):
        # report an error; track the smallest damaged linkrev
        if linkrev != None:
            if firstbad[0] != None:
                firstbad[0] = min(firstbad[0], linkrev)
            else:
                firstbad[0] = linkrev
        else:
            linkrev = "?"
        msg = "%s: %s" % (linkrev, msg)
        if filename:
            msg = "%s@%s" % (filename, msg)
        repo.ui.warn(" " + msg + "\n")
        errors[0] += 1

    def warn(msg):
        repo.ui.warn(msg + "\n")
        warnings[0] += 1

    def checksize(obj, name):
        # compare a revlog's on-disk sizes against its index
        d = obj.checksize()
        if d[0]:
            err(None, _("data length off by %d bytes") % d[0], name)
        if d[1]:
            err(None, _("index contains %d extra bytes") % d[1], name)

    def checkversion(obj, name):
        # warn when a revlog's format differs from the changelog's
        if obj.version != revlog.REVLOGV0:
            if not revlogv1:
                warn(_("warning: `%s' uses revlog format 1") % name)
        elif revlogv1:
            warn(_("warning: `%s' uses revlog format 0") % name)

    revlogv1 = repo.changelog.version != revlog.REVLOGV0
    if repo.ui.verbose or not revlogv1:
        repo.ui.status(_("repository uses revlog format %d\n") %
                       (revlogv1 and 1 or 0))

    # pass 1: changelog
    havecl = havemf = 1
    seen = {}
    repo.ui.status(_("checking changesets\n"))
    if repo.changelog.count() == 0 and repo.manifest.count() > 1:
        havecl = 0
        err(0, _("empty or missing 00changelog.i"))
    else:
        checksize(repo.changelog, "changelog")

    for i in xrange(repo.changelog.count()):
        changesets += 1
        n = repo.changelog.node(i)
        l = repo.changelog.linkrev(n)
        if l != i:
            err(i, _("incorrect link (%d) for changeset") %(l))
        if n in seen:
            err(i, _("duplicates changeset at revision %d") % seen[n])
        seen[n] = i

        for p in repo.changelog.parents(n):
            if p not in repo.changelog.nodemap:
                err(i, _("changeset has unknown parent %s") % short(p))
        try:
            changes = repo.changelog.read(n)
        except KeyboardInterrupt:
            repo.ui.warn(_("interrupted"))
            raise
        except Exception, inst:
            err(i, _("unpacking changeset: %s") % inst)
            continue

        # changes[0] is the manifest node; changes[3] the touched files
        if changes[0] not in neededmanifests:
            neededmanifests[changes[0]] = i

        for f in changes[3]:
            filelinkrevs.setdefault(f, []).append(i)

    # pass 2: manifest
    seen = {}
    repo.ui.status(_("checking manifests\n"))
    if repo.changelog.count() > 0 and repo.manifest.count() == 0:
        havemf = 0
        err(0, _("empty or missing 00manifest.i"))
    else:
        checkversion(repo.manifest, "manifest")
        checksize(repo.manifest, "manifest")

    for i in xrange(repo.manifest.count()):
        n = repo.manifest.node(i)
        l = repo.manifest.linkrev(n)

        if l < 0 or (havecl and l >= repo.changelog.count()):
            err(None, _("bad link (%d) at manifest revision %d") % (l, i))

        if n in neededmanifests:
            del neededmanifests[n]

        if n in seen:
            err(l, _("duplicates manifest from %d") % seen[n])

        seen[n] = l

        for p in repo.manifest.parents(n):
            if p not in repo.manifest.nodemap:
                err(l, _("manifest has unknown parent %s") % short(p))

        try:
            # record each file's nodes and the manifest that introduced them
            for f, fn in repo.manifest.readdelta(n).iteritems():
                fns = filenodes.setdefault(f, {})
                if fn not in fns:
                    fns[fn] = n
        except KeyboardInterrupt:
            repo.ui.warn(_("interrupted"))
            raise
        except Exception, inst:
            err(l, _("reading manifest delta: %s") % inst)
            continue

    # pass 3: cross-check changelog vs. manifest
    repo.ui.status(_("crosschecking files in changesets and manifests\n"))

    if havemf > 0:
        # any manifests still "needed" were referenced but never seen
        nm = [(c, m) for m, c in neededmanifests.items()]
        nm.sort()
        for c, m in nm:
            err(c, _("changeset refers to unknown manifest %s") % short(m))
        del neededmanifests, nm

    if havecl:
        fl = filenodes.keys()
        fl.sort()
        for f in fl:
            if f not in filelinkrevs:
                lrs = [repo.manifest.linkrev(n) for n in filenodes[f]]
                lrs.sort()
                err(lrs[0], _("in manifest but not in changeset"), f)
        del fl

    if havemf:
        fl = filelinkrevs.keys()
        fl.sort()
        for f in fl:
            if f not in filenodes:
                lr = filelinkrevs[f][0]
                err(lr, _("in changeset but not in manifest"), f)
        del fl

    # pass 4: filelogs
    repo.ui.status(_("checking files\n"))
    ff = dict.fromkeys(filenodes.keys() + filelinkrevs.keys()).keys()
    ff.sort()
    for f in ff:
        if f == "/dev/null":
            continue
        files += 1
        if not f:
            lr = filelinkrevs[f][0]
            err(lr, _("file without name in manifest"))
            continue
        fl = repo.file(f)
        checkversion(fl, f)
        checksize(fl, f)

        if fl.count() == 0:
            err(filelinkrevs[f][0], _("empty or missing revlog"), f)
            continue

        seen = {}
        nodes = {nullid: 1}
        for i in xrange(fl.count()):
            revisions += 1
            n = fl.node(i)
            flr = fl.linkrev(n)

            if flr < 0 or (havecl and flr not in filelinkrevs.get(f, [])):
                if flr < 0 or flr >= repo.changelog.count():
                    err(None, _("rev %d point to nonexistent changeset %d")
                        % (i, flr), f)
                else:
                    err(None, _("rev %d points to unexpected changeset %d")
                        % (i, flr), f)
                if f in filelinkrevs:
                    warn(_(" (expected %s)") % filelinkrevs[f][0])
                flr = None # can't be trusted
            else:
                if havecl:
                    filelinkrevs[f].remove(flr)

            if n in seen:
                err(flr, _("duplicate revision %d") % i, f)
            if f in filenodes:
                if havemf and n not in filenodes[f]:
                    err(flr, _("%s not in manifests") % (short(n)), f)
                else:
                    # matched: remove so leftovers are reported below
                    del filenodes[f][n]

            # verify contents
            try:
                t = fl.read(n)
            except KeyboardInterrupt:
                repo.ui.warn(_("interrupted"))
                raise
            except Exception, inst:
                err(flr, _("unpacking %s: %s") % (short(n), inst), f)

            # verify parents
            try:
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    err(flr, _("unknown parent 1 %s of %s") %
                        (short(p1), short(n)), f)
                if p2 not in nodes:
                    err(flr, _("unknown parent 2 %s of %s") %
                        (short(p2), short(p1)), f)
            except KeyboardInterrupt:
                repo.ui.warn(_("interrupted"))
                raise
            except Exception, inst:
                err(flr, _("checking parents of %s: %s") % (short(n), inst), f)
            nodes[n] = 1

            # check renames
            try:
                rp = fl.renamed(n)
                if rp:
                    fl2 = repo.file(rp[0])
                    rev = fl2.rev(rp[1])
            except KeyboardInterrupt:
                repo.ui.warn(_("interrupted"))
                raise
            except Exception, inst:
                err(flr, _("checking rename of %s: %s") %
                    (short(n), inst), f)

        # cross-check: nodes left in filenodes[f] never appeared in the filelog
        if f in filenodes:
            fns = [(repo.manifest.linkrev(filenodes[f][n]), n)
                   for n in filenodes[f]]
            fns.sort()
            for lr, node in fns:
                err(lr, _("%s in manifests not found") % short(node), f)

    repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
                   (files, changesets, revisions))

    if warnings[0]:
        repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
    if errors[0]:
        repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
    if firstbad[0]:
        repo.ui.warn(_("(first damaged changeset appears to be %d)\n")
                     % firstbad[0])
    return 1
General Comments 0
You need to be logged in to leave comments. Login now