##// END OF EJS Templates
util: add sort helper
Matt Mackall -
r6762:f67d1468 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,311 +1,307
1 1 # bugzilla.py - bugzilla integration for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to update comments of bugzilla bugs when changesets
9 9 # that refer to bugs by id are seen. this hook does not change bug
10 10 # status, only comments.
11 11 #
12 12 # to configure, add items to '[bugzilla]' section of hgrc.
13 13 #
14 14 # to use, configure bugzilla extension and enable like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.bugzilla =
18 18 #
19 19 # [hooks]
20 20 # # run bugzilla hook on every change pulled or pushed in here
21 21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 22 #
23 23 # config items:
24 24 #
25 25 # section name is 'bugzilla'.
26 26 # [bugzilla]
27 27 #
28 28 # REQUIRED:
29 29 # host = bugzilla # mysql server where bugzilla database lives
30 30 # password = ** # user's password
31 31 # version = 2.16 # version of bugzilla installed
32 32 #
33 33 # OPTIONAL:
34 34 # bzuser = ... # fallback bugzilla user name to record comments with
35 35 # db = bugs # database to connect to
36 36 # notify = ... # command to run to get bugzilla to send mail
37 37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 38 # strip = 0 # number of slashes to strip for url paths
39 39 # style = ... # style file to use when formatting comments
40 40 # template = ... # template to use when formatting comments
41 41 # timeout = 5 # database connection timeout (seconds)
42 42 # user = bugs # user to connect to database as
43 43 # [web]
44 44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 45 #
46 46 # if hg committer names are not same as bugzilla user names, use
47 47 # "usermap" feature to map from committer email to bugzilla user name.
48 48 # usermap can be in hgrc or separate config file.
49 49 #
50 50 # [bugzilla]
51 51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 52 # [usermap]
53 53 # committer_email = bugzilla_user_name
54 54
55 55 from mercurial.i18n import _
56 56 from mercurial.node import short
57 57 from mercurial import cmdutil, templater, util
58 58 import re, time
59 59
60 60 MySQLdb = None
61 61
62 62 def buglist(ids):
63 63 return '(' + ','.join(map(str, ids)) + ')'
64 64
65 65 class bugzilla_2_16(object):
66 66 '''support for bugzilla version 2.16.'''
67 67
68 68 def __init__(self, ui):
69 69 self.ui = ui
70 70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 72 passwd = self.ui.config('bugzilla', 'password')
73 73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 75 usermap = self.ui.config('bugzilla', 'usermap')
76 76 if usermap:
77 77 self.ui.readsections(usermap, 'usermap')
78 78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 79 (host, db, user, '*' * len(passwd)))
80 80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 81 db=db, connect_timeout=timeout)
82 82 self.cursor = self.conn.cursor()
83 83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 84 ids = self.cursor.fetchall()
85 85 if len(ids) != 1:
86 86 raise util.Abort(_('unknown database schema'))
87 87 self.longdesc_id = ids[0][0]
88 88 self.user_ids = {}
89 89
90 90 def run(self, *args, **kwargs):
91 91 '''run a query.'''
92 92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 93 try:
94 94 self.cursor.execute(*args, **kwargs)
95 95 except MySQLdb.MySQLError, err:
96 96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 97 raise
98 98
99 99 def filter_real_bug_ids(self, ids):
100 100 '''filter not-existing bug ids from list.'''
101 101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
103 ids.sort()
104 return ids
102 return util.sort([c[0] for c in self.cursor.fetchall()])
105 103
106 104 def filter_unknown_bug_ids(self, node, ids):
107 105 '''filter bug ids from list that already refer to this changeset.'''
108 106
109 107 self.run('''select bug_id from longdescs where
110 108 bug_id in %s and thetext like "%%%s%%"''' %
111 109 (buglist(ids), short(node)))
112 110 unknown = dict.fromkeys(ids)
113 111 for (id,) in self.cursor.fetchall():
114 112 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 113 (id, short(node)))
116 114 unknown.pop(id, None)
117 ids = unknown.keys()
118 ids.sort()
119 return ids
115 return util.sort(unknown.keys())
120 116
121 117 def notify(self, ids):
122 118 '''tell bugzilla to send mail.'''
123 119
124 120 self.ui.status(_('telling bugzilla to send mail:\n'))
125 121 for id in ids:
126 122 self.ui.status(_(' bug %s\n') % id)
127 123 cmd = self.ui.config('bugzilla', 'notify',
128 124 'cd /var/www/html/bugzilla && '
129 125 './processmail %s nobody@nowhere.com') % id
130 126 fp = util.popen('(%s) 2>&1' % cmd)
131 127 out = fp.read()
132 128 ret = fp.close()
133 129 if ret:
134 130 self.ui.warn(out)
135 131 raise util.Abort(_('bugzilla notify command %s') %
136 132 util.explain_exit(ret)[0])
137 133 self.ui.status(_('done\n'))
138 134
139 135 def get_user_id(self, user):
140 136 '''look up numeric bugzilla user id.'''
141 137 try:
142 138 return self.user_ids[user]
143 139 except KeyError:
144 140 try:
145 141 userid = int(user)
146 142 except ValueError:
147 143 self.ui.note(_('looking up user %s\n') % user)
148 144 self.run('''select userid from profiles
149 145 where login_name like %s''', user)
150 146 all = self.cursor.fetchall()
151 147 if len(all) != 1:
152 148 raise KeyError(user)
153 149 userid = int(all[0][0])
154 150 self.user_ids[user] = userid
155 151 return userid
156 152
157 153 def map_committer(self, user):
158 154 '''map name of committer to bugzilla user name.'''
159 155 for committer, bzuser in self.ui.configitems('usermap'):
160 156 if committer.lower() == user.lower():
161 157 return bzuser
162 158 return user
163 159
164 160 def add_comment(self, bugid, text, committer):
165 161 '''add comment to bug. try adding comment as committer of
166 162 changeset, otherwise as default bugzilla user.'''
167 163 user = self.map_committer(committer)
168 164 try:
169 165 userid = self.get_user_id(user)
170 166 except KeyError:
171 167 try:
172 168 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 169 if not defaultuser:
174 170 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 171 user)
176 172 userid = self.get_user_id(defaultuser)
177 173 except KeyError:
178 174 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 175 (user, defaultuser))
180 176 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 177 self.run('''insert into longdescs
182 178 (bug_id, who, bug_when, thetext)
183 179 values (%s, %s, %s, %s)''',
184 180 (bugid, userid, now, text))
185 181 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 182 values (%s, %s, %s, %s)''',
187 183 (bugid, userid, now, self.longdesc_id))
188 184
189 185 class bugzilla(object):
190 186 # supported versions of bugzilla. different versions have
191 187 # different schemas.
192 188 _versions = {
193 189 '2.16': bugzilla_2_16,
194 190 }
195 191
196 192 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 193 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198 194
199 195 _bz = None
200 196
201 197 def __init__(self, ui, repo):
202 198 self.ui = ui
203 199 self.repo = repo
204 200
205 201 def bz(self):
206 202 '''return object that knows how to talk to bugzilla version in
207 203 use.'''
208 204
209 205 if bugzilla._bz is None:
210 206 bzversion = self.ui.config('bugzilla', 'version')
211 207 try:
212 208 bzclass = bugzilla._versions[bzversion]
213 209 except KeyError:
214 210 raise util.Abort(_('bugzilla version %s not supported') %
215 211 bzversion)
216 212 bugzilla._bz = bzclass(self.ui)
217 213 return bugzilla._bz
218 214
219 215 def __getattr__(self, key):
220 216 return getattr(self.bz(), key)
221 217
222 218 _bug_re = None
223 219 _split_re = None
224 220
225 221 def find_bug_ids(self, ctx):
226 222 '''find valid bug ids that are referred to in changeset
227 223 comments and that do not already have references to this
228 224 changeset.'''
229 225
230 226 if bugzilla._bug_re is None:
231 227 bugzilla._bug_re = re.compile(
232 228 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 229 re.IGNORECASE)
234 230 bugzilla._split_re = re.compile(r'\D+')
235 231 start = 0
236 232 ids = {}
237 233 while True:
238 234 m = bugzilla._bug_re.search(ctx.description(), start)
239 235 if not m:
240 236 break
241 237 start = m.end()
242 238 for id in bugzilla._split_re.split(m.group(1)):
243 239 if not id: continue
244 240 ids[int(id)] = 1
245 241 ids = ids.keys()
246 242 if ids:
247 243 ids = self.filter_real_bug_ids(ids)
248 244 if ids:
249 245 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 246 return ids
251 247
252 248 def update(self, bugid, ctx):
253 249 '''update bugzilla bug with reference to changeset.'''
254 250
255 251 def webroot(root):
256 252 '''strip leading prefix of repo root and turn into
257 253 url-safe path.'''
258 254 count = int(self.ui.config('bugzilla', 'strip', 0))
259 255 root = util.pconvert(root)
260 256 while count > 0:
261 257 c = root.find('/')
262 258 if c == -1:
263 259 break
264 260 root = root[c+1:]
265 261 count -= 1
266 262 return root
267 263
268 264 mapfile = self.ui.config('bugzilla', 'style')
269 265 tmpl = self.ui.config('bugzilla', 'template')
270 266 t = cmdutil.changeset_templater(self.ui, self.repo,
271 267 False, mapfile, False)
272 268 if not mapfile and not tmpl:
273 269 tmpl = _('changeset {node|short} in repo {root} refers '
274 270 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 271 if tmpl:
276 272 tmpl = templater.parsestring(tmpl, quoted=False)
277 273 t.use_template(tmpl)
278 274 self.ui.pushbuffer()
279 275 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 276 bug=str(bugid),
281 277 hgweb=self.ui.config('web', 'baseurl'),
282 278 root=self.repo.root,
283 279 webroot=webroot(self.repo.root))
284 280 data = self.ui.popbuffer()
285 281 self.add_comment(bugid, data, util.email(ctx.user()))
286 282
287 283 def hook(ui, repo, hooktype, node=None, **kwargs):
288 284 '''add comment to bugzilla for each changeset that refers to a
289 285 bugzilla bug id. only add a comment once per bug, so same change
290 286 seen multiple times does not fill bug with duplicate data.'''
291 287 try:
292 288 import MySQLdb as mysql
293 289 global MySQLdb
294 290 MySQLdb = mysql
295 291 except ImportError, err:
296 292 raise util.Abort(_('python mysql support not available: %s') % err)
297 293
298 294 if node is None:
299 295 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 296 hooktype)
301 297 try:
302 298 bz = bugzilla(ui, repo)
303 299 ctx = repo[node]
304 300 ids = bz.find_bug_ids(ctx)
305 301 if ids:
306 302 for id in ids:
307 303 bz.update(id, ctx)
308 304 bz.notify(ids)
309 305 except MySQLdb.MySQLError, err:
310 306 raise util.Abort(_('database error: %s') % err[1])
311 307
@@ -1,121 +1,119
1 1 # churn.py - create a graph showing who changed the most lines
2 2 #
3 3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 '''allow graphing the number of lines changed per contributor'''
8 8
9 9 from mercurial.i18n import gettext as _
10 10 from mercurial import patch, cmdutil, util, node
11 11 import os, sys
12 12
13 13 def get_tty_width():
14 14 if 'COLUMNS' in os.environ:
15 15 try:
16 16 return int(os.environ['COLUMNS'])
17 17 except ValueError:
18 18 pass
19 19 try:
20 20 import termios, array, fcntl
21 21 for dev in (sys.stdout, sys.stdin):
22 22 try:
23 23 fd = dev.fileno()
24 24 if not os.isatty(fd):
25 25 continue
26 26 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
27 27 return array.array('h', arri)[1]
28 28 except ValueError:
29 29 pass
30 30 except ImportError:
31 31 pass
32 32 return 80
33 33
34 34 def countrevs(ui, repo, amap, revs, progress=False):
35 35 stats = {}
36 36 count = pct = 0
37 37 if not revs:
38 38 revs = range(len(repo))
39 39
40 40 for rev in revs:
41 41 ctx2 = repo[rev]
42 42 parents = ctx2.parents()
43 43 if len(parents) > 1:
44 44 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
45 45 continue
46 46
47 47 ctx1 = parents[0]
48 48 lines = 0
49 49 ui.pushbuffer()
50 50 patch.diff(repo, ctx1.node(), ctx2.node())
51 51 diff = ui.popbuffer()
52 52
53 53 for l in diff.split('\n'):
54 54 if (l.startswith("+") and not l.startswith("+++ ") or
55 55 l.startswith("-") and not l.startswith("--- ")):
56 56 lines += 1
57 57
58 58 user = util.email(ctx2.user())
59 59 user = amap.get(user, user) # remap
60 60 stats[user] = stats.get(user, 0) + lines
61 61 ui.debug("rev %d: %d lines by %s\n" % (rev, lines, user))
62 62
63 63 if progress:
64 64 count += 1
65 65 newpct = int(100.0 * count / max(len(revs), 1))
66 66 if pct < newpct:
67 67 pct = newpct
68 68 ui.write("\rGenerating stats: %d%%" % pct)
69 69 sys.stdout.flush()
70 70
71 71 if progress:
72 72 ui.write("\r")
73 73 sys.stdout.flush()
74 74
75 75 return stats
76 76
77 77 def churn(ui, repo, **opts):
78 78 '''graphs the number of lines changed
79 79
80 80 The map file format used to specify aliases is fairly simple:
81 81
82 82 <alias email> <actual email>'''
83 83
84 84 def pad(s, l):
85 85 return (s + " " * l)[:l]
86 86
87 87 amap = {}
88 88 aliases = opts.get('aliases')
89 89 if aliases:
90 90 for l in open(aliases, "r"):
91 91 l = l.strip()
92 92 alias, actual = l.split()
93 93 amap[alias] = actual
94 94
95 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
96 revs.sort()
95 revs = util.sort([int(r) for r in cmdutil.revrange(repo, opts['rev'])])
97 96 stats = countrevs(ui, repo, amap, revs, opts.get('progress'))
98 97 if not stats:
99 98 return
100 99
101 stats = [(-l, u, l) for u,l in stats.items()]
102 stats.sort()
100 stats = util.sort([(-l, u, l) for u,l in stats.items()])
103 101 maxchurn = float(max(1, stats[0][2]))
104 102 maxuser = max([len(u) for k, u, l in stats])
105 103
106 104 ttywidth = get_tty_width()
107 105 ui.debug(_("assuming %i character terminal\n") % ttywidth)
108 106 width = ttywidth - maxuser - 2 - 6 - 2 - 2
109 107
110 108 for k, user, churn in stats:
111 109 print "%s %6d %s" % (pad(user, maxuser), churn,
112 110 "*" * int(churn * width / maxchurn))
113 111
114 112 cmdtable = {
115 113 "churn":
116 114 (churn,
117 115 [('r', 'rev', [], _('limit statistics to the specified revisions')),
118 116 ('', 'aliases', '', _('file with email aliases')),
119 117 ('', 'progress', None, _('show progress'))],
120 118 'hg churn [-r revision range] [-a file] [--progress]'),
121 119 }
@@ -1,355 +1,349
1 1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 2
3 3 import os, locale, re, socket
4 4 from cStringIO import StringIO
5 5 from mercurial import util
6 6 from mercurial.i18n import _
7 7
8 8 from common import NoRepo, commit, converter_source, checktool
9 9 import cvsps
10 10
11 11 class convert_cvs(converter_source):
12 12 def __init__(self, ui, path, rev=None):
13 13 super(convert_cvs, self).__init__(ui, path, rev=rev)
14 14
15 15 cvs = os.path.join(path, "CVS")
16 16 if not os.path.exists(cvs):
17 17 raise NoRepo("%s does not look like a CVS checkout" % path)
18 18
19 19 checktool('cvs')
20 20 self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
21 21 cvspsexe = self.cmd.split(None, 1)[0]
22 22 self.builtin = cvspsexe == 'builtin'
23 23
24 24 if not self.builtin:
25 25 checktool(cvspsexe)
26 26
27 27 self.changeset = {}
28 28 self.files = {}
29 29 self.tags = {}
30 30 self.lastbranch = {}
31 31 self.parent = {}
32 32 self.socket = None
33 33 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
34 34 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
35 35 self.encoding = locale.getpreferredencoding()
36 36
37 37 self._parse(ui)
38 38 self._connect()
39 39
40 40 def _parse(self, ui):
41 41 if self.changeset:
42 42 return
43 43
44 44 maxrev = 0
45 45 cmd = self.cmd
46 46 if self.rev:
47 47 # TODO: handle tags
48 48 try:
49 49 # patchset number?
50 50 maxrev = int(self.rev)
51 51 except ValueError:
52 52 try:
53 53 # date
54 54 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
55 55 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
56 56 except util.Abort:
57 57 raise util.Abort('revision %s is not a patchset number or date' % self.rev)
58 58
59 59 d = os.getcwd()
60 60 try:
61 61 os.chdir(self.path)
62 62 id = None
63 63 state = 0
64 64 filerevids = {}
65 65
66 66 if self.builtin:
67 67 # builtin cvsps code
68 68 ui.status(_('using builtin cvsps\n'))
69 69
70 70 db = cvsps.createlog(ui, cache='update')
71 71 db = cvsps.createchangeset(ui, db,
72 72 fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)),
73 73 mergeto=ui.config('convert', 'cvsps.mergeto', None),
74 74 mergefrom=ui.config('convert', 'cvsps.mergefrom', None))
75 75
76 76 for cs in db:
77 77 if maxrev and cs.id>maxrev:
78 78 break
79 79 id = str(cs.id)
80 80 cs.author = self.recode(cs.author)
81 81 self.lastbranch[cs.branch] = id
82 82 cs.comment = self.recode(cs.comment)
83 83 date = util.datestr(cs.date)
84 84 self.tags.update(dict.fromkeys(cs.tags, id))
85 85
86 86 files = {}
87 87 for f in cs.entries:
88 88 files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
89 89 ['', '(DEAD)'][f.dead])
90 90
91 91 # add current commit to set
92 92 c = commit(author=cs.author, date=date,
93 93 parents=[str(p.id) for p in cs.parents],
94 94 desc=cs.comment, branch=cs.branch or '')
95 95 self.changeset[id] = c
96 96 self.files[id] = files
97 97 else:
98 98 # external cvsps
99 99 for l in util.popen(cmd):
100 100 if state == 0: # header
101 101 if l.startswith("PatchSet"):
102 102 id = l[9:-2]
103 103 if maxrev and int(id) > maxrev:
104 104 # ignore everything
105 105 state = 3
106 106 elif l.startswith("Date"):
107 107 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
108 108 date = util.datestr(date)
109 109 elif l.startswith("Branch"):
110 110 branch = l[8:-1]
111 111 self.parent[id] = self.lastbranch.get(branch, 'bad')
112 112 self.lastbranch[branch] = id
113 113 elif l.startswith("Ancestor branch"):
114 114 ancestor = l[17:-1]
115 115 # figure out the parent later
116 116 self.parent[id] = self.lastbranch[ancestor]
117 117 elif l.startswith("Author"):
118 118 author = self.recode(l[8:-1])
119 119 elif l.startswith("Tag:") or l.startswith("Tags:"):
120 120 t = l[l.index(':')+1:]
121 121 t = [ut.strip() for ut in t.split(',')]
122 122 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
123 123 self.tags.update(dict.fromkeys(t, id))
124 124 elif l.startswith("Log:"):
125 125 # switch to gathering log
126 126 state = 1
127 127 log = ""
128 128 elif state == 1: # log
129 129 if l == "Members: \n":
130 130 # switch to gathering members
131 131 files = {}
132 132 oldrevs = []
133 133 log = self.recode(log[:-1])
134 134 state = 2
135 135 else:
136 136 # gather log
137 137 log += l
138 138 elif state == 2: # members
139 139 if l == "\n": # start of next entry
140 140 state = 0
141 141 p = [self.parent[id]]
142 142 if id == "1":
143 143 p = []
144 144 if branch == "HEAD":
145 145 branch = ""
146 146 if branch:
147 147 latest = None
148 148 # the last changeset that contains a base
149 149 # file is our parent
150 150 for r in oldrevs:
151 151 latest = max(filerevids.get(r, None), latest)
152 152 if latest:
153 153 p = [latest]
154 154
155 155 # add current commit to set
156 156 c = commit(author=author, date=date, parents=p,
157 157 desc=log, branch=branch)
158 158 self.changeset[id] = c
159 159 self.files[id] = files
160 160 else:
161 161 colon = l.rfind(':')
162 162 file = l[1:colon]
163 163 rev = l[colon+1:-2]
164 164 oldrev, rev = rev.split("->")
165 165 files[file] = rev
166 166
167 167 # save some information for identifying branch points
168 168 oldrevs.append("%s:%s" % (oldrev, file))
169 169 filerevids["%s:%s" % (rev, file)] = id
170 170 elif state == 3:
171 171 # swallow all input
172 172 continue
173 173
174 174 self.heads = self.lastbranch.values()
175 175 finally:
176 176 os.chdir(d)
177 177
178 178 def _connect(self):
179 179 root = self.cvsroot
180 180 conntype = None
181 181 user, host = None, None
182 182 cmd = ['cvs', 'server']
183 183
184 184 self.ui.status("connecting to %s\n" % root)
185 185
186 186 if root.startswith(":pserver:"):
187 187 root = root[9:]
188 188 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
189 189 root)
190 190 if m:
191 191 conntype = "pserver"
192 192 user, passw, serv, port, root = m.groups()
193 193 if not user:
194 194 user = "anonymous"
195 195 if not port:
196 196 port = 2401
197 197 else:
198 198 port = int(port)
199 199 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
200 200 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
201 201
202 202 if not passw:
203 203 passw = "A"
204 204 pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
205 205 for line in pf.read().splitlines():
206 206 part1, part2 = line.split(' ', 1)
207 207 if part1 == '/1':
208 208 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
209 209 part1, part2 = part2.split(' ', 1)
210 210 format = format1
211 211 else:
212 212 # :pserver:user@example.com:/cvsroot/foo Ah<Z
213 213 format = format0
214 214 if part1 == format:
215 215 passw = part2
216 216 break
217 217 pf.close()
218 218
219 219 sck = socket.socket()
220 220 sck.connect((serv, port))
221 221 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
222 222 "END AUTH REQUEST", ""]))
223 223 if sck.recv(128) != "I LOVE YOU\n":
224 224 raise util.Abort("CVS pserver authentication failed")
225 225
226 226 self.writep = self.readp = sck.makefile('r+')
227 227
228 228 if not conntype and root.startswith(":local:"):
229 229 conntype = "local"
230 230 root = root[7:]
231 231
232 232 if not conntype:
233 233 # :ext:user@host/home/user/path/to/cvsroot
234 234 if root.startswith(":ext:"):
235 235 root = root[5:]
236 236 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
237 237 # Do not take Windows path "c:\foo\bar" for a connection strings
238 238 if os.path.isdir(root) or not m:
239 239 conntype = "local"
240 240 else:
241 241 conntype = "rsh"
242 242 user, host, root = m.group(1), m.group(2), m.group(3)
243 243
244 244 if conntype != "pserver":
245 245 if conntype == "rsh":
246 246 rsh = os.environ.get("CVS_RSH") or "ssh"
247 247 if user:
248 248 cmd = [rsh, '-l', user, host] + cmd
249 249 else:
250 250 cmd = [rsh, host] + cmd
251 251
252 252 # popen2 does not support argument lists under Windows
253 253 cmd = [util.shellquote(arg) for arg in cmd]
254 254 cmd = util.quotecommand(' '.join(cmd))
255 255 self.writep, self.readp = os.popen2(cmd, 'b')
256 256
257 257 self.realroot = root
258 258
259 259 self.writep.write("Root %s\n" % root)
260 260 self.writep.write("Valid-responses ok error Valid-requests Mode"
261 261 " M Mbinary E Checked-in Created Updated"
262 262 " Merged Removed\n")
263 263 self.writep.write("valid-requests\n")
264 264 self.writep.flush()
265 265 r = self.readp.readline()
266 266 if not r.startswith("Valid-requests"):
267 267 raise util.Abort("server sucks")
268 268 if "UseUnchanged" in r:
269 269 self.writep.write("UseUnchanged\n")
270 270 self.writep.flush()
271 271 r = self.readp.readline()
272 272
273 273 def getheads(self):
274 274 return self.heads
275 275
276 276 def _getfile(self, name, rev):
277 277
278 278 def chunkedread(fp, count):
279 279 # file-objects returned by socked.makefile() do not handle
280 280 # large read() requests very well.
281 281 chunksize = 65536
282 282 output = StringIO()
283 283 while count > 0:
284 284 data = fp.read(min(count, chunksize))
285 285 if not data:
286 286 raise util.Abort("%d bytes missing from remote file" % count)
287 287 count -= len(data)
288 288 output.write(data)
289 289 return output.getvalue()
290 290
291 291 if rev.endswith("(DEAD)"):
292 292 raise IOError
293 293
294 294 args = ("-N -P -kk -r %s --" % rev).split()
295 295 args.append(self.cvsrepo + '/' + name)
296 296 for x in args:
297 297 self.writep.write("Argument %s\n" % x)
298 298 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
299 299 self.writep.flush()
300 300
301 301 data = ""
302 302 while 1:
303 303 line = self.readp.readline()
304 304 if line.startswith("Created ") or line.startswith("Updated "):
305 305 self.readp.readline() # path
306 306 self.readp.readline() # entries
307 307 mode = self.readp.readline()[:-1]
308 308 count = int(self.readp.readline()[:-1])
309 309 data = chunkedread(self.readp, count)
310 310 elif line.startswith(" "):
311 311 data += line[1:]
312 312 elif line.startswith("M "):
313 313 pass
314 314 elif line.startswith("Mbinary "):
315 315 count = int(self.readp.readline()[:-1])
316 316 data = chunkedread(self.readp, count)
317 317 else:
318 318 if line == "ok\n":
319 319 return (data, "x" in mode and "x" or "")
320 320 elif line.startswith("E "):
321 321 self.ui.warn("cvs server: %s\n" % line[2:])
322 322 elif line.startswith("Remove"):
323 323 l = self.readp.readline()
324 324 l = self.readp.readline()
325 325 if l != "ok\n":
326 326 raise util.Abort("unknown CVS response: %s" % l)
327 327 else:
328 328 raise util.Abort("unknown CVS response: %s" % line)
329 329
330 330 def getfile(self, file, rev):
331 331 data, mode = self._getfile(file, rev)
332 332 self.modecache[(file, rev)] = mode
333 333 return data
334 334
335 335 def getmode(self, file, rev):
336 336 return self.modecache[(file, rev)]
337 337
338 338 def getchanges(self, rev):
339 339 self.modecache = {}
340 files = self.files[rev]
341 cl = files.items()
342 cl.sort()
343 return (cl, {})
340 return util.sort(self.files[rev].items()), {}
344 341
345 342 def getcommit(self, rev):
346 343 return self.changeset[rev]
347 344
348 345 def gettags(self):
349 346 return self.tags
350 347
351 348 def getchangedfiles(self, rev, i):
352 files = self.files[rev].keys()
353 files.sort()
354 return files
355
349 return util.sort(self.files[rev].keys())
@@ -1,551 +1,548
1 1 #
2 2 # Mercurial built-in replacement for cvsps.
3 3 #
4 4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 10 import re
11 11 import sys
12 12 import cPickle as pickle
13 13 from mercurial import util
14 14 from mercurial.i18n import _
15 15
def listsort(list, key):
    "helper to sort by key in Python 2.3"
    # list.sort() only grew its key= parameter in Python 2.4; on 2.3 the
    # keyword raises TypeError, so fall back to an equivalent comparison
    # function built from the key extractor.  Sort is in place; returns None.
    try:
        list.sort(key=key)
    except TypeError:
        list.sort(lambda l, r: cmp(key(l), key(r)))
22 22
class logentry(object):
    '''One revision of one file, as reported by CVS (r)log.

    Attributes:
    .author   - author name as CVS knows it
    .branch   - name of branch this revision is on
    .branches - revision tuple of branches starting at this revision
    .comment  - commit message
    .date     - the commit date as a (time, tz) tuple
    .dead     - true if file revision is dead
    .file     - name of file
    .lines    - a tuple (+lines, -lines) or None
    .parent   - previous revision of this entry
    .rcs      - name of file as returned from CVS
    .revision - revision number as tuple
    .tags     - list of tags on the file
    '''
    def __init__(self, **entries):
        # Accept arbitrary keyword arguments and expose each one as an
        # instance attribute.
        for name, value in entries.items():
            setattr(self, name, value)
40 40
class logerror(Exception):
    """Raised when the CVS log cannot be collected or conflicts with the cache."""
43 43
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog

    Runs "cvs rlog" (or "cvs log" when rlog is False) against root /
    directory, parses the output with a line-oriented state machine and
    returns a list of logentry objects sorted by (rcs file, revision).
    When cache is set, previously collected entries are loaded from a
    pickle under ~/.hg.cvsps and merged with the new output.
    '''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('=============================================================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
    re_70 = re.compile('branches: (.+);$')

    prefix = ''   # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = file(os.path.join('CVS','Repository')).read().strip()
            if prefix == ".":
                prefix = ""
            directory = prefix
        except IOError:
            raise logerror('Not a CVS sandbox')

        if prefix and not prefix.endswith('/'):
            prefix += '/'

        # Use the Root file in the sandbox, if it exists
        try:
            root = file(os.path.join('CVS','Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumerics, concatenated in a way that does not mix up the
        # various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

        if cache == 'update':
            try:
                ui.note(_('reading cvs log cache %s\n') % cachefile)
                oldlog = pickle.load(file(cachefile))
                ui.note(_('cache has %d log entries\n') % len(oldlog))
            except Exception, e:
                # best-effort: a missing or corrupt cache just means a
                # full rlog run
                ui.note(_('error reading cache: %r\n') % e)

            if oldlog:
                date = oldlog[-1].date    # last commit date as a (time,tz) tuple
                date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = root.split(':')[-1]
        if not p.endswith('/'):
            p += '/'
        prefix = p + prefix
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    # States:
    #   0: expecting 'RCS file'      1: expecting 'Working file'
    #   2: expecting 'symbolic names' 3: reading tags
    #   4: expecting '-----' separator 5: expecting revision number
    #   6: expecting date/author line  7/8: branches / log message
    tags = {}     # dictionary of revisions on current file with their tags
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note("running %s\n" % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    for line in util.popen(' '.join(cmd)):
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = rcs[:-2]
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise Exception(match.group(1))
            match = re_02.match(line)
            if match:
                raise Exception(match.group(2))
            if re_03.match(line):
                raise Exception(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = match.group(1)
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('Must have at least some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs), file=scache(filename),
                    revision=tuple([int(x) for x in match.group(1).split('.')]),
                    branches=[], parent=None)
            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None
            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                              for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            log.append(e)

            if len(log) % 100 == 0:
                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')

    listsort(log, key=lambda x:(x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            listsort(log, key=lambda x:x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror('Log cache overlaps with new log entries,'
                               ' re-run without cache.')

            log = oldlog + log

        # write the new cachefile
        ui.note(_('writing cvs log cache %s\n') % cachefile)
        pickle.dump(log, file(cachefile, 'w'))
    else:
        log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    return log
348 347
349 348
class changeset(object):
    '''One converted changeset, assembled from grouped log entries.

    Attributes:
    .author  - author name as CVS knows it
    .branch  - name of branch this changeset is on, or None
    .comment - commit message
    .date    - the commit date as a (time,tz) tuple
    .entries - list of logentry objects in this changeset
    .parents - list of one or two parent changesets
    .tags    - list of tags on this changeset
    '''
    def __init__(self, **entries):
        # Accept arbitrary keyword arguments and expose each one as an
        # instance attribute.
        for name, value in entries.items():
            setattr(self, name, value)
362 361
def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log into changesets.

    Consecutive logentry objects are merged into one changeset when
    comment, author and branch match, their dates lie within fuzz
    seconds and no file appears twice.  Parent links are then derived
    from per-file revision history; the mergefrom/mergeto regexps in
    commit messages add extra merge parents.  Returns the list of
    changeset objects, each numbered via its .id attribute.
    '''

    ui.status(_('creating changesets\n'))

    # Merge changesets

    listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))

    changesets = []
    files = {}
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.
        if not (c and
                  e.comment == c.comment and
                  e.author == c.author and
                  e.branch == c.branch and
                  ((c.date[0] + c.date[1]) <=
                   (e.date[0] + e.date[1]) <=
                   (c.date[0] + c.date[1]) + fuzz) and
                  e.file not in files):
            c = changeset(comment=e.comment, author=e.author,
                          branch=e.branch, date=e.date, entries=[])
            changesets.append(c)
            files = {}
            if len(changesets) % 100 == 0:
                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(util.ellipsis(t, 80) + '\n')

        c.entries.append(e)
        files[e.file] = True
        c.date = e.date       # changeset date is date of latest commit in it

    # Sort files in each changeset

    for c in changesets:
        def pathcompare(l, r):
            'Mimic cvsps sorting order'
            l = l.split('/')
            r = r.split('/')
            nl = len(l)
            nr = len(r)
            n = min(nl, nr)
            for i in range(n):
                if i + 1 == nl and nl < nr:
                    return -1
                elif i + 1 == nr and nl > nr:
                    return +1
                elif l[i] < r[i]:
                    return -1
                elif l[i] > r[i]:
                    return +1
            return 0
        def entitycompare(l, r):
            return pathcompare(l.file, r.file)

        c.entries.sort(entitycompare)

    # Sort changesets by date

    def cscmp(l, r):
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        le = {}
        for e in l.entries:
            le[e.rcs] = e.revision
        re = {}
        for e in r.entries:
            re[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if re.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if le.get(e.rcs, None) == e.parent:
                assert not d
                d = -1
                break

        return d

    changesets.sort(cscmp)

    # Collect tags

    globaltags = {}
    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                tags[tag] = True
        # remember tags only if this is the latest changeset to have it
        c.tags = util.sort([tag for tag in tags if globaltags[tag] is c])

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = r'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {}    # changeset index where we saw any particular file version
    branches = {}    # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    while i<n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            for f in c.entries:
                p = max(p, versions.get((f.rcs, f.parent), None))

        c.parents = []
        if p is not None:
            c.parents.append(changesets[p])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == 'HEAD':
                    m = None
                if m in branches and c.branch != m:
                    c.parents.append(changesets[branches[m]])

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                try:
                    m = m.group(1)
                    if m == 'HEAD':
                        m = None
                except:
                    m = None # if no group found then merge to HEAD
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(author=c.author, branch=m, date=c.date,
                            comment='convert-repo: CVS merge from branch %s' % c.branch,
                            entries=[], tags=[], parents=[changesets[branches[m]], c])
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Number changesets

    for i, c in enumerate(changesets):
        c.id = i + 1

    ui.status(_('%d changeset entries\n') % len(changesets))

    return changesets
@@ -1,127 +1,126
1 1 # darcs support for the convert extension
2 2
3 3 from common import NoRepo, checktool, commandline, commit, converter_source
4 4 from mercurial.i18n import _
5 5 from mercurial import util
6 6 import os, shutil, tempfile
7 7
8 8 # The naming drift of ElementTree is fun!
9 9
10 10 try: from xml.etree.cElementTree import ElementTree
11 11 except ImportError:
12 12 try: from xml.etree.ElementTree import ElementTree
13 13 except ImportError:
14 14 try: from elementtree.cElementTree import ElementTree
15 15 except ImportError:
16 16 try: from elementtree.ElementTree import ElementTree
17 17 except ImportError: ElementTree = None
18 18
19 19
class darcs_source(converter_source, commandline):
    '''Convert source for darcs repositories.

    History is read once with "darcs changes" in XML form (before());
    individual revisions are then materialized on demand by pulling
    them one at a time into a scratch repository in self.tmppath.
    '''
    def __init__(self, ui, path, rev=None):
        converter_source.__init__(self, ui, path, rev=rev)
        commandline.__init__(self, ui, 'darcs')

        # check for _darcs, ElementTree, _darcs/inventory so that we can
        # easily skip test-convert-darcs if ElementTree is not around
        if not os.path.exists(os.path.join(path, '_darcs')):
            raise NoRepo("%s does not look like a darcs repo" % path)

        checktool('darcs')

        if ElementTree is None:
            raise util.Abort(_("Python ElementTree module is not available"))

        if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
            raise NoRepo("%s does not look like a darcs repo" % path)

        self.path = os.path.realpath(path)

        self.lastrev = None      # revision last pulled by getchanges()
        self.changes = {}        # patch hash -> XML <patch> element
        self.parents = {}        # patch hash -> list of parent hashes
        self.tags = {}           # tag name -> patch hash

    def before(self):
        # Create the scratch repository and index the source history.
        self.tmppath = tempfile.mkdtemp(
            prefix='convert-' + os.path.basename(self.path) + '-')
        output, status = self.run('init', repodir=self.tmppath)
        self.checkexit(status)

        tree = self.xml('changes', xml_output=True, summary=True,
                        repodir=self.path)
        tagname = None
        child = None
        for elt in tree.findall('patch'):
            node = elt.get('hash')
            name = elt.findtext('name', '')
            if name.startswith('TAG '):
                tagname = name[4:].strip()
            elif tagname is not None:
                self.tags[tagname] = node
                tagname = None
            self.changes[node] = elt
            self.parents[child] = [node]
            child = node
        self.parents[child] = []

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def xml(self, cmd, **kwargs):
        # Run a darcs command and parse its stdout as an XML document.
        etree = ElementTree()
        fp = self._run(cmd, **kwargs)
        etree.parse(fp)
        self.checkexit(fp.close())
        return etree.getroot()

    def getheads(self):
        return self.parents[None]

    def getcommit(self, rev):
        elt = self.changes[rev]
        date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
        desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
        return commit(author=elt.get('author'), date=util.datestr(date),
                      desc=desc.strip(), parents=self.parents[rev])

    def pull(self, rev):
        # Pull exactly one patch into the scratch repository; conflicts
        # are resolved by reverting the working copy.
        output, status = self.run('pull', self.path, all=True,
                                  match='hash %s' % rev,
                                  no_test=True, no_posthook=True,
                                  external_merge='/bin/false',
                                  repodir=self.tmppath)
        if status:
            if output.find('We have conflicts in') == -1:
                self.checkexit(status, output)
            output, status = self.run('revert', all=True, repodir=self.tmppath)
            self.checkexit(status, output)

    def getchanges(self, rev):
        # Returns (sorted (file, rev) pairs, copies) from the patch summary.
        self.pull(rev)
        copies = {}
        changes = []
        for elt in self.changes[rev].find('summary').getchildren():
            if elt.tag in ('add_directory', 'remove_directory'):
                continue
            if elt.tag == 'move':
                changes.append((elt.get('from'), rev))
                copies[elt.get('from')] = elt.get('to')
            else:
                changes.append((elt.text.strip(), rev))
        self.lastrev = rev
        return util.sort(changes), copies

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))
        return open(os.path.join(self.tmppath, name), 'rb').read()

    def getmode(self, name, rev):
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        return (mode & 0111) and 'x' or ''

    def gettags(self):
        return self.tags
@@ -1,301 +1,299
1 1 # GNU Arch support for the convert extension
2 2
3 3 from common import NoRepo, commandline, commit, converter_source
4 4 from mercurial.i18n import _
5 5 from mercurial import util
6 6 import os, shutil, tempfile, stat
7 7
class gnuarch_source(converter_source, commandline):
    '''Convert source for GNU Arch repositories, driven through the
    baz or tla command-line tools.  Revisions are replayed one by one
    into a temporary working tree (self.tmppath).'''

    class gnuarch_rev:
        # Container for everything known about one Arch revision.
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            self.ren_files = {}
            self.ren_dirs = {}

    def __init__(self, ui, path, rev=None):
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repo" % path))

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.find_exe('baz'):
            self.execmd = 'baz'
        else:
            if util.find_exe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}        # rev -> gnuarch_rev
        self.parents = {}        # rev -> list of parent revs
        self.tags = {}
        self.modecache = {}      # (name, rev) -> mode, filled by getfile()

    def before(self):
        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        child = []
        output, status = self.runlines('revisions', self.treeversion)
        self.checkexit(status, 'archive registered?')
        for l in output:
            rev = l.strip()
            self.changes[rev] = self.gnuarch_rev(rev)

            # Read author, date and summary
            catlog = self.runlines0('cat-log', '-d', self.path, rev)
            self._parsecatlog(catlog, rev)

            self.parents[rev] = child
            child = [rev]
            if rev == self.rev:
                break
        self.parents[None] = child

    def after(self):
        self.ui.debug(_('cleaning up %s\n' % self.tmppath))
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def getheads(self):
        return self.parents[None]

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        # Raise IOError if necessary (i.e. deleted files).
        if not os.path.exists(os.path.join(self.tmppath, name)):
            raise IOError

        data, mode = self._getfile(name, rev)
        self.modecache[(name, rev)] = mode

        return data

    def getmode(self, name, rev):
        return self.modecache[(name, rev)]

    def getchanges(self, rev):
        # Returns (sorted (file, rev) pairs, copies) for rev after
        # bringing the temporary tree up to date.
        self.modecache = {}
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[src] = to

        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
            chgs, cps = self._rendirchanges(src, to);
            changes += [(f, rev) for f in chgs]
            for c in cps:
                copies[c] = cps[c]

        self.lastrev = rev
        return util.sort(changes), copies

    def getcommit(self, rev):
        changes = self.changes[rev]
        return commit(author = changes.author, date = changes.date,
                desc = changes.summary, parents = self.parents[rev])

    def gettags(self):
        return self.tags

    def _execute(self, cmd, *args, **kwargs):
        # Run a baz/tla command discarding its output; returns the
        # os.system() exit status.
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        cmdline += ['>', util.nulldev, '2>', util.nulldev]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)

    def _update(self, rev):
        if rev == 'base-0':
            # Initialise 'base-0' revision
            self._obtainrevision(rev)
        else:
            self.ui.debug(_('applying revision %s...\n' % rev))
            revision = '%s--%s' % (self.treeversion, rev)
            changeset, status = self.runlines('replay', '-d', self.tmppath,
                                              revision)
            if status:
                # Something went wrong while merging (baz or tla
                # issue?), get latest revision and try from there
                shutil.rmtree(self.tmppath, ignore_errors=True)
                self._obtainrevision(rev)
            else:
                old_rev = self.parents[rev][0]
                self.ui.debug(_('computing changeset between %s and %s...\n' \
                                    % (old_rev, rev)))
                rev_a = '%s--%s' % (self.treeversion, old_rev)
                rev_b = '%s--%s' % (self.treeversion, rev)
                self._parsechangeset(changeset, rev)

    def _getfile(self, name, rev):
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            mode = mode and 'l' or ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            mode = (mode & 0111) and 'x' or ''
        return data, mode

    def _exclude(self, name):
        # True for GNU Arch bookkeeping files that must not be converted.
        exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
        for exc in exclude:
            if name.find(exc) != -1:
                return True
        return False

    def _readcontents(self, path):
        # Recursively list files under path, skipping Arch internals.
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files

    def _rendirchanges(self, src, dest):
        # Expand a directory rename into per-file changes and copies.
        changes = []
        copies = {}
        files = self._readcontents(os.path.join(self.tmppath, dest))
        for f in files:
            s = os.path.join(src, f)
            d = os.path.join(dest, f)
            changes.append(s)
            changes.append(d)
            copies[s] = d
        return changes, copies

    def _obtainrevision(self, rev):
        # Check out a full copy of rev into the temporary tree.
        self.ui.debug(_('obtaining revision %s...\n' % rev))
        revision = '%s--%s' % (self.treeversion, rev)
        output = self._execute('get', revision, self.tmppath)
        self.checkexit(output)
        self.ui.debug(_('analysing revision %s...\n' % rev))
        files = self._readcontents(self.tmppath)
        self.changes[rev].add_files += files

    def _stripbasepath(self, path):
        if path.startswith('./'):
            return path[2:]
        return path

    def _parsecatlog(self, data, rev):
        # Extract summary, date and author from "cat-log" output.
        summary = []
        for l in data:
            l = l.strip()
            if summary:
                summary.append(l)
            elif l.startswith('Summary:'):
                summary.append(l[len('Summary: '):])
            elif l.startswith('Standard-date:'):
                date = l[len('Standard-date: '):]
                strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S')
                self.changes[rev].date = util.datestr(strdate)
            elif l.startswith('Creator:'):
                self.changes[rev].author = l[len('Creator: '):]
        self.changes[rev].summary = '\n'.join(summary)

    def _parsechangeset(self, data, rev):
        # Classify each line of "replay" output into the add/mod/del/
        # rename buckets of self.changes[rev].
        for l in data:
            l = l.strip()
            # Added file (ignore added directory)
            if l.startswith('A') and not l.startswith('A/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].add_files.append(file)
            # Deleted file (ignore deleted directory)
            elif l.startswith('D') and not l.startswith('D/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].del_files.append(file)
            # Modified binary file
            elif l.startswith('Mb'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified link
            elif l.startswith('M->'):
                file = self._stripbasepath(l[3:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified file
            elif l.startswith('M'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed file (or link)
            elif l.startswith('=>'):
                files = l[2:].strip().split(' ')
                if len(files) == 1:
                    files = l[2:].strip().split('\t')
                src = self._stripbasepath(files[0])
                dst = self._stripbasepath(files[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_files[src] = dst
            # Conversion from file to link or from link to file (modified)
            elif l.startswith('ch'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed directory
            elif l.startswith('/>'):
                dirs = l[2:].strip().split(' ')
                if len(dirs) == 1:
                    dirs = l[2:].strip().split('\t')
                src = self._stripbasepath(dirs[0])
                dst = self._stripbasepath(dirs[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_dirs[src] = dst
@@ -1,289 +1,285
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 18 from mercurial.repo import RepoError
19 19 from mercurial.node import bin, hex, nullid
20 20 from mercurial import hg, revlog, util, context
21 21
22 22 from common import NoRepo, commit, converter_source, converter_sink
23 23
class mercurial_sink(converter_sink):
    """Conversion sink writing revisions into a local Mercurial repository,
    creating the repository when the target path is empty or missing."""
    def __init__(self, ui, path):
        converter_sink.__init__(self, ui, path)
        # behaviour knobs from the [convert] config section
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            # non-empty directory: must already be a local hg repo
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
            except RepoError, err:
                ui.print_exc()
                raise NoRepo(err.args[0])
        else:
            # empty or missing: create a fresh destination repository
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repo') % path)
                self.created.append(path)
            except RepoError, err:
                ui.print_exc()
                raise NoRepo("could not create hg repo %s as sink" % path)
        self.lock = None
        self.wlock = None
        self.filemapmode = False

    def before(self):
        # grab both locks for the whole conversion; released in after()
        self.ui.debug(_('run hg sink pre-conversion action\n'))
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()

    def after(self):
        # dropping the references releases the locks
        self.ui.debug(_('run hg sink post-conversion action\n'))
        self.lock = None
        self.wlock = None

    def revmapfile(self):
        """Path of the source-rev -> sink-rev map file."""
        return os.path.join(self.path, ".hg", "shamap")

    def authorfile(self):
        """Path of the author map file."""
        return os.path.join(self.path, ".hg", "authormap")

    def getheads(self):
        """Return the current changelog heads as hex node ids."""
        h = self.repo.changelog.heads()
        return [ hex(x) for x in h ]

    def setbranch(self, branch, pbranches):
        """Switch self.repo to the per-branch clone for 'branch'
        (hg.clonebranches mode only), pulling missing parent revisions
        from the parent-branch clones first."""
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        pbranch = pbranches and pbranches[0][1] or 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            # release locks on the old clone before switching repos
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except:
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except:
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in missings.iteritems():
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.repository(self.ui, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
            self.before()

    def putcommit(self, files, copies, parents, commit, source):
        """Commit 'commit' into the sink and return its hex node.

        More than two parents are folded into a chain of synthetic
        "(octopus merge fixup)" merge commits.  In filemap mode, a
        single-parent commit whose manifest equals its parent's is
        rolled back and the parent is reused instead.
        """
        files = dict(files)
        def getfilectx(repo, memctx, f):
            # pull data/mode lazily from the conversion source
            v = files[f]
            data = source.getfile(f, v)
            e = source.getmode(f, v)
            return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))

        # de-duplicate parents while preserving order
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        # pad to at least two parents (nullid is "0" * 40)
        if len(parents) < 2: parents.append("0" * 40)
        if len(parents) < 2: parents.append("0" * 40)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        # each iteration merges one more parent into the previous result
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
                                 commit.author, commit.date, extra)
            a = self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            if not man.cmp(m1node, man.revision(mnode)):
                # nothing changed relative to the parent: drop the commit
                self.repo.rollback()
                return parent
        return p2

    def puttags(self, tags):
        """Rewrite .hgtags on self.tagsbranch from the {tag: node} map.
        Returns the new tip node, or None when .hgtags is unchanged."""
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except RepoError, inst:
            # tags branch does not exist yet; start from the null revision
            parentctx = None
            tagparent = nullid

        try:
            oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1))
        except:
            # no parent context or no .hgtags file yet
            oldlines = []

        newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags])

        if newlines == oldlines:
            return None
        data = "".join(newlines)

        def getfilectx(repo, memctx, f):
            return context.memfilectx(f, data, False, False, None)

        self.ui.status("updating tags\n")
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        self.repo.commitctx(ctx)
        return hex(self.repo.changelog.tip())

    def setfilemapmode(self, active):
        # toggles the manifest-comparison shortcut in putcommit()
        self.filemapmode = active
195 192 class mercurial_source(converter_source):
196 193 def __init__(self, ui, path, rev=None):
197 194 converter_source.__init__(self, ui, path, rev)
198 195 self.saverev = ui.configbool('convert', 'hg.saverev', True)
199 196 try:
200 197 self.repo = hg.repository(self.ui, path)
201 198 # try to provoke an exception if this isn't really a hg
202 199 # repo, but some other bogus compatible-looking url
203 200 if not self.repo.local():
204 201 raise RepoError()
205 202 except RepoError:
206 203 ui.print_exc()
207 204 raise NoRepo("%s is not a local Mercurial repo" % path)
208 205 self.lastrev = None
209 206 self.lastctx = None
210 207 self._changescache = None
211 208 self.convertfp = None
212 209
213 210 def changectx(self, rev):
214 211 if self.lastrev != rev:
215 212 self.lastctx = self.repo[rev]
216 213 self.lastrev = rev
217 214 return self.lastctx
218 215
219 216 def getheads(self):
220 217 if self.rev:
221 218 return [hex(self.repo[self.rev].node())]
222 219 else:
223 220 return [hex(node) for node in self.repo.heads()]
224 221
225 222 def getfile(self, name, rev):
226 223 try:
227 224 return self.changectx(rev)[name].data()
228 225 except revlog.LookupError, err:
229 226 raise IOError(err)
230 227
231 228 def getmode(self, name, rev):
232 229 return self.changectx(rev).manifest().flags(name)
233 230
234 231 def getchanges(self, rev):
235 232 ctx = self.changectx(rev)
236 233 if self._changescache and self._changescache[0] == rev:
237 234 m, a, r = self._changescache[1]
238 235 else:
239 236 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
240 237 changes = [(name, rev) for name in m + a + r]
241 changes.sort()
242 return (changes, self.getcopies(ctx, m + a))
238 return util.sort(changes), self.getcopies(ctx, m + a)
243 239
244 240 def getcopies(self, ctx, files):
245 241 copies = {}
246 242 for name in files:
247 243 try:
248 244 copies[name] = ctx.filectx(name).renamed()[0]
249 245 except TypeError:
250 246 pass
251 247 return copies
252 248
253 249 def getcommit(self, rev):
254 250 ctx = self.changectx(rev)
255 251 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
256 252 if self.saverev:
257 253 crev = rev
258 254 else:
259 255 crev = None
260 256 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
261 257 desc=ctx.description(), rev=crev, parents=parents,
262 258 branch=ctx.branch(), extra=ctx.extra())
263 259
264 260 def gettags(self):
265 261 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
266 262 return dict([(name, hex(node)) for name, node in tags])
267 263
268 264 def getchangedfiles(self, rev, i):
269 265 ctx = self.changectx(rev)
270 266 i = i or 0
271 267 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
272 268
273 269 if i == 0:
274 270 self._changescache = (rev, changes)
275 271
276 272 return changes[0] + changes[1] + changes[2]
277 273
278 274 def converted(self, rev, destrev):
279 275 if self.convertfp is None:
280 276 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
281 277 'a')
282 278 self.convertfp.write('%s %s\n' % (destrev, rev))
283 279 self.convertfp.flush()
284 280
285 281 def before(self):
286 282 self.ui.debug(_('run hg source pre-conversion action\n'))
287 283
288 284 def after(self):
289 285 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,1144 +1,1140
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4 #
5 5 # Configuration options:
6 6 #
7 7 # convert.svn.trunk
8 8 # Relative path to the trunk (default: "trunk")
9 9 # convert.svn.branches
10 10 # Relative path to tree of branches (default: "branches")
11 11 # convert.svn.tags
12 12 # Relative path to tree of tags (default: "tags")
13 13 #
14 14 # Set these in a hgrc, or on the command line as follows:
15 15 #
16 16 # hg convert --config convert.svn.trunk=wackoname [...]
17 17
18 18 import locale
19 19 import os
20 20 import re
21 21 import sys
22 22 import cPickle as pickle
23 23 import tempfile
24 24
25 25 from mercurial import strutil, util
26 26 from mercurial.i18n import _
27 27
28 28 # Subversion stuff. Works best with very recent Python SVN bindings
29 29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 30 # these bindings.
31 31
32 32 from cStringIO import StringIO
33 33
34 34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
35 35 from common import commandline, converter_sink, mapfile
36 36
37 37 try:
38 38 from svn.core import SubversionException, Pool
39 39 import svn
40 40 import svn.client
41 41 import svn.core
42 42 import svn.ra
43 43 import svn.delta
44 44 import transport
45 45 except ImportError:
46 46 pass
47 47
def geturl(path):
    """Best-effort conversion of a path or URL into a Subversion URL."""
    try:
        # Let the client library canonicalize working copies and URLs.
        canonical = svn.core.svn_path_canonicalize(path)
        return svn.client.url_from_path(canonical)
    except SubversionException:
        pass
    if not os.path.isdir(path):
        return path
    # Local directory: build a file:// URL by hand.
    abspath = os.path.normpath(os.path.abspath(path))
    if os.name == 'nt':
        abspath = '/' + util.normpath(abspath)
    return 'file://%s' % abspath
59 59
def optrev(number):
    """Wrap a plain revision number into an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
65 65
class changedpath(object):
    """Picklable copy of the changed-path fields we care about."""
    def __init__(self, p):
        # Snapshot the attributes off the (unpicklable) SWIG object.
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
71 71
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    # Child-process half of get_log(): stream svn log entries to the
    # parent over fp as a sequence of pickles, then hard-exit.
    protocol = -1  # pickle protocol: newest available
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Replace opaque changed-path objects with picklable copies
        # before shipping the entry to the parent.
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        # Forward the svn error number; logstream re-raises it parent-side.
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        # None is the end-of-stream sentinel expected by logstream.
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
102 102
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # The pipes carry pickled data: force binary mode on Windows.
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    get_log_child(sys.stdout, *decodeargs(sys.stdin.read()))
111 111
class logstream:
    """Interruptible revision log iterator."""

    def __init__(self, stdout):
        # Pipe delivering pickled entries from the child process.
        self._stdout = stdout

    def __iter__(self):
        while True:
            entry = pickle.load(self._stdout)
            if entry is None:
                # Sentinel written by the child: clean end of stream.
                break
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                # Anything that is not a 5-tuple is an error code
                # forwarded by the child process.
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        pipe = self._stdout
        if pipe:
            pipe.close()
            self._stdout = None
132 132
def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Spawn an 'hg debugsvnlog' child and return a logstream over the
    pickled entries it writes back."""
    arg = encodeargs([url, paths, start, end, limit, discover_changed_paths,
                      strict_node_history])
    cmd = '%s debugsvnlog' % util.shellquote(util.hgexecutable())
    stdin, stdout = os.popen2(cmd, 'b')
    stdin.write(arg)
    stdin.close()
    return logstream(stdout)
144 144
145 145 # SVN conversion code stolen from bzr-svn and tailor
146 146 #
147 147 # Subversion looks like a versioned filesystem, branches structures
148 148 # are defined by conventions and not enforced by the tool. First,
149 149 # we define the potential branches (modules) as "trunk" and "branches"
150 150 # children directories. Revisions are then identified by their
151 151 # module and revision number (and a repository identifier).
152 152 #
153 153 # The revision graph is really a tree (or a forest). By default, a
154 154 # revision parent is the previous revision in the same module. If the
155 155 # module directory is copied/moved from another module then the
156 156 # revision is the module root and its parent the source revision in
157 157 # the parent module. A revision has at most one parent.
158 158 #
159 159 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Open url as a Subversion conversion source.

        Raises NoRepo when the bindings are missing or url does not
        look like a Subversion repository, util.Abort on bad revision
        arguments or when no revision is found in the module.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        # Fail early if the optional svn bindings did not import.
        try:
            SubversionException
        except NameError:
            raise NoRepo('Subversion python bindings could not be loaded')

        self.encoding = locale.getpreferredencoding()
        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at+1:])
                url = url[:at]
        except ValueError, e:
            # trailing part was not a number: keep url untouched
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.base = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = self.url[len(self.base):]
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
        except SubversionException, e:
            ui.print_exc()
            raise NoRepo("%s does not look like a Subversion repo" % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort('svn: revision %s is not an integer' % rev)

        # Optional lower bound on converted revisions; 0 means "all".
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        # blacklist.txt is optional: ignore a missing file
        try:
            self.get_blacklist()
        except IOError, e:
            pass

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s') %
                             self.module.encode(self.encoding))
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # Remember the working copy path, if any, for converted()
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
232 232
233 233 def setrevmap(self, revmap):
234 234 lastrevs = {}
235 235 for revid in revmap.iterkeys():
236 236 uuid, module, revnum = self.revsplit(revid)
237 237 lastrevnum = lastrevs.setdefault(module, revnum)
238 238 if revnum > lastrevnum:
239 239 lastrevs[module] = revnum
240 240 self.lastrevs = lastrevs
241 241
242 242 def exists(self, path, optrev):
243 243 try:
244 244 svn.client.ls(self.url.rstrip('/') + '/' + path,
245 245 optrev, False, self.ctx)
246 246 return True
247 247 except SubversionException, err:
248 248 return False
249 249
250 250 def getheads(self):
251 251
252 252 def isdir(path, revnum):
253 253 kind = svn.ra.check_path(self.ra, path, revnum)
254 254 return kind == svn.core.svn_node_dir
255 255
256 256 def getcfgpath(name, rev):
257 257 cfgpath = self.ui.config('convert', 'svn.' + name)
258 258 if cfgpath is not None and cfgpath.strip() == '':
259 259 return None
260 260 path = (cfgpath or name).strip('/')
261 261 if not self.exists(path, rev):
262 262 if cfgpath:
263 263 raise util.Abort(_('expected %s to be at %r, but not found')
264 264 % (name, path))
265 265 return None
266 266 self.ui.note(_('found %s at %r\n') % (name, path))
267 267 return path
268 268
269 269 rev = optrev(self.last_changed)
270 270 oldmodule = ''
271 271 trunk = getcfgpath('trunk', rev)
272 272 self.tags = getcfgpath('tags', rev)
273 273 branches = getcfgpath('branches', rev)
274 274
275 275 # If the project has a trunk or branches, we will extract heads
276 276 # from them. We keep the project root otherwise.
277 277 if trunk:
278 278 oldmodule = self.module or ''
279 279 self.module += '/' + trunk
280 280 self.head = self.latest(self.module, self.last_changed)
281 281 if not self.head:
282 282 raise util.Abort(_('no revision found in module %s') %
283 283 self.module.encode(self.encoding))
284 284
285 285 # First head in the list is the module's head
286 286 self.heads = [self.head]
287 287 if self.tags is not None:
288 288 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
289 289
290 290 # Check if branches bring a few more heads to the list
291 291 if branches:
292 292 rpath = self.url.strip('/')
293 293 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
294 294 self.ctx)
295 295 for branch in branchnames.keys():
296 296 module = '%s/%s/%s' % (oldmodule, branches, branch)
297 297 if not isdir(module, self.last_changed):
298 298 continue
299 299 brevid = self.latest(module, self.last_changed)
300 300 if not brevid:
301 301 self.ui.note(_('ignoring empty branch %s\n') %
302 302 branch.encode(self.encoding))
303 303 continue
304 304 self.ui.note('found branch %s at %d\n' %
305 305 (branch, self.revnum(brevid)))
306 306 self.heads.append(brevid)
307 307
308 308 if self.startrev and self.heads:
309 309 if len(self.heads) > 1:
310 310 raise util.Abort(_('svn: start revision is not supported with '
311 311 'with more than one branch'))
312 312 revnum = self.revnum(self.heads[0])
313 313 if revnum < self.startrev:
314 314 raise util.Abort(_('svn: no revision found after start revision %d')
315 315 % self.startrev)
316 316
317 317 return self.heads
318 318
319 319 def getfile(self, file, rev):
320 320 data, mode = self._getfile(file, rev)
321 321 self.modecache[(file, rev)] = mode
322 322 return data
323 323
324 324 def getmode(self, file, rev):
325 325 return self.modecache[(file, rev)]
326 326
327 327 def getchanges(self, rev):
328 328 if self._changescache and self._changescache[0] == rev:
329 329 return self._changescache[1]
330 330 self._changescache = None
331 331 self.modecache = {}
332 332 (paths, parents) = self.paths[rev]
333 333 if parents:
334 334 files, copies = self.expandpaths(rev, paths, parents)
335 335 else:
336 336 # Perform a full checkout on roots
337 337 uuid, module, revnum = self.revsplit(rev)
338 338 entries = svn.client.ls(self.base + module, optrev(revnum),
339 339 True, self.ctx)
340 340 files = [n for n,e in entries.iteritems()
341 341 if e.kind == svn.core.svn_node_file]
342 342 copies = {}
343 343
344 344 files.sort()
345 345 files = zip(files, [rev] * len(files))
346 346
347 347 # caller caches the result, so free it here to release memory
348 348 del self.paths[rev]
349 349 return (files, copies)
350 350
351 351 def getchangedfiles(self, rev, i):
352 352 changes = self.getchanges(rev)
353 353 self._changescache = (rev, changes)
354 354 return [f[0] for f in changes[0]]
355 355
356 356 def getcommit(self, rev):
357 357 if rev not in self.commits:
358 358 uuid, module, revnum = self.revsplit(rev)
359 359 self.module = module
360 360 self.reparent(module)
361 361 # We assume that:
362 362 # - requests for revisions after "stop" come from the
363 363 # revision graph backward traversal. Cache all of them
364 364 # down to stop, they will be used eventually.
365 365 # - requests for revisions before "stop" come to get
366 366 # isolated branches parents. Just fetch what is needed.
367 367 stop = self.lastrevs.get(module, 0)
368 368 if revnum < stop:
369 369 stop = revnum + 1
370 370 self._fetch_revisions(revnum, stop)
371 371 commit = self.commits[rev]
372 372 # caller caches the result, so free it here to release memory
373 373 del self.commits[rev]
374 374 return commit
375 375
    def gettags(self):
        """Return a {tagname: revid} map built by walking the history
        of the tags directory backwards."""
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        try:
            for entry in get_log(self.url, [self.tags], start, self.startrev):
                origpaths, revnum, author, date, message = entry
                # (source, source rev, destination) for each copied path
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                copies.sort()
                # Apply moves/copies from more specific to general
                copies.reverse()

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            # Known pending tag copied again: remember
                            # its earlier location instead.
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest.split('/')[-1]])

                # Tell tag renamings from tag creations
                remainings = []
                for source, sourcerev, tagname in pendings:
                    if source.startswith(srctagspath):
                        remainings.append([source, sourcerev, tagname])
                        continue
                    # From revision may be fake, get one with changes
                    tagid = self.latest(source, sourcerev)
                    if tagid:
                        tags[tagname] = tagid
                pendings = remainings
                tagspath = srctagspath

        except SubversionException, (inst, num):
            self.ui.note('no tags found at revision %d\n' % start)
        return tags
436 436
437 437 def converted(self, rev, destrev):
438 438 if not self.wc:
439 439 return
440 440 if self.convertfp is None:
441 441 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
442 442 'a')
443 443 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
444 444 self.convertfp.flush()
445 445
446 446 # -- helper functions --
447 447
448 448 def revid(self, revnum, module=None):
449 449 if not module:
450 450 module = self.module
451 451 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
452 452 revnum)
453 453
454 454 def revnum(self, rev):
455 455 return int(rev.split('@')[-1])
456 456
457 457 def revsplit(self, rev):
458 458 url, revnum = rev.encode(self.encoding).split('@', 1)
459 459 revnum = int(revnum)
460 460 parts = url.split('/', 1)
461 461 uuid = parts.pop(0)[4:]
462 462 mod = ''
463 463 if parts:
464 464 mod = '/' + parts[0]
465 465 return uuid, mod, revnum
466 466
    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug(_('ignoring foreign branch %r\n') % path)
            return None

        if not stop:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() must be issued relative to the repository root
            self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(self.module)
        except SubversionException:
            dirent = None
        if not dirent:
            raise util.Abort('%s not found up to revision %d' % (path, stop))

        # stat() gives us the previous revision on this line of development, but
        # it might be in *another module*. Fetch the log and detect renames down
        # to the latest revision.
        stream = get_log(self.url, [path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    break

                # Follow the copy source of any path that prefixes ours
                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug("branch renamed from %s to %s at %d\n" %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            # always terminate the child log process
            stream.close()

        if not path.startswith(self.rootmodule):
            self.ui.debug(_('ignoring foreign branch %r\n') % path)
            return None
        return self.revid(dirent.created_rev, path)
514 514
515 515 def get_blacklist(self):
516 516 """Avoid certain revision numbers.
517 517 It is not uncommon for two nearby revisions to cancel each other
518 518 out, e.g. 'I copied trunk into a subdirectory of itself instead
519 519 of making a branch'. The converted repository is significantly
520 520 smaller if we ignore such revisions."""
521 521 self.blacklist = util.set()
522 522 blacklist = self.blacklist
523 523 for line in file("blacklist.txt", "r"):
524 524 if not line.startswith("#"):
525 525 try:
526 526 svn_rev = int(line.strip())
527 527 blacklist.add(svn_rev)
528 528 except ValueError, e:
529 529 pass # not an integer or a comment
530 530
531 531 def is_blacklisted(self, svn_rev):
532 532 return svn_rev in self.blacklist
533 533
534 534 def reparent(self, module):
535 535 svn_url = self.base + module
536 536 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
537 537 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
538 538
539 539 def expandpaths(self, rev, paths, parents):
540 540 entries = []
541 541 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
542 542 copies = {}
543 543
544 544 new_module, revnum = self.revsplit(rev)[1:]
545 545 if new_module != self.module:
546 546 self.module = new_module
547 547 self.reparent(self.module)
548 548
549 549 for path, ent in paths:
550 550 entrypath = self.getrelpath(path)
551 551 entry = entrypath.decode(self.encoding)
552 552
553 553 kind = svn.ra.check_path(self.ra, entrypath, revnum)
554 554 if kind == svn.core.svn_node_file:
555 555 entries.append(self.recode(entry))
556 556 if not ent.copyfrom_path or not parents:
557 557 continue
558 558 # Copy sources not in parent revisions cannot be represented,
559 559 # ignore their origin for now
560 560 pmodule, prevnum = self.revsplit(parents[0])[1:]
561 561 if ent.copyfrom_rev < prevnum:
562 562 continue
563 563 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
564 564 if not copyfrom_path:
565 565 continue
566 566 self.ui.debug("copied to %s from %s@%s\n" %
567 567 (entrypath, copyfrom_path, ent.copyfrom_rev))
568 568 copies[self.recode(entry)] = self.recode(copyfrom_path)
569 569 elif kind == 0: # gone, but had better be a deleted *file*
570 570 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
571 571
572 572 # if a branch is created but entries are removed in the same
573 573 # changeset, get the right fromrev
574 574 # parents cannot be empty here, you cannot remove things from
575 575 # a root revision.
576 576 uuid, old_module, fromrev = self.revsplit(parents[0])
577 577
578 578 basepath = old_module + "/" + self.getrelpath(path)
579 579 entrypath = basepath
580 580
581 581 def lookup_parts(p):
582 582 rc = None
583 583 parts = p.split("/")
584 584 for i in range(len(parts)):
585 585 part = "/".join(parts[:i])
586 586 info = part, copyfrom.get(part, None)
587 587 if info[1] is not None:
588 588 self.ui.debug("Found parent directory %s\n" % info[1])
589 589 rc = info
590 590 return rc
591 591
592 592 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
593 593
594 594 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
595 595
596 596 # need to remove fragment from lookup_parts and replace with copyfrom_path
597 597 if frompath is not None:
598 598 self.ui.debug("munge-o-matic\n")
599 599 self.ui.debug(entrypath + '\n')
600 600 self.ui.debug(entrypath[len(frompath):] + '\n')
601 601 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
602 602 fromrev = froment.copyfrom_rev
603 603 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
604 604
605 605 # We can avoid the reparent calls if the module has not changed
606 606 # but it probably does not worth the pain.
607 607 self.reparent('')
608 608 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
609 609 self.reparent(self.module)
610 610
611 611 if fromkind == svn.core.svn_node_file: # a deleted file
612 612 entries.append(self.recode(entry))
613 613 elif fromkind == svn.core.svn_node_dir:
614 614 # print "Deleted/moved non-file:", revnum, path, ent
615 615 # children = self._find_children(path, revnum - 1)
616 616 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
617 617 # Sometimes this is tricky. For example: in
618 618 # The Subversion Repository revision 6940 a dir
619 619 # was copied and one of its files was deleted
620 620 # from the new location in the same commit. This
621 621 # code can't deal with that yet.
622 622 if ent.action == 'C':
623 623 children = self._find_children(path, fromrev)
624 624 else:
625 625 oroot = entrypath.strip('/')
626 626 nroot = path.strip('/')
627 627 children = self._find_children(oroot, fromrev)
628 628 children = [s.replace(oroot,nroot) for s in children]
629 629 # Mark all [files, not directories] as deleted.
630 630 for child in children:
631 631 # Can we move a child directory and its
632 632 # parent in the same commit? (probably can). Could
633 633 # cause problems if instead of revnum -1,
634 634 # we have to look in (copyfrom_path, revnum - 1)
635 635 entrypath = self.getrelpath("/" + child, module=old_module)
636 636 if entrypath:
637 637 entry = self.recode(entrypath.decode(self.encoding))
638 638 if entry in copies:
639 639 # deleted file within a copy
640 640 del copies[entry]
641 641 else:
642 642 entries.append(entry)
643 643 else:
644 644 self.ui.debug('unknown path in revision %d: %s\n' % \
645 645 (revnum, path))
646 646 elif kind == svn.core.svn_node_dir:
647 647 # Should probably synthesize normal file entries
648 648 # and handle as above to clean up copy/rename handling.
649 649
650 650 # If the directory just had a prop change,
651 651 # then we shouldn't need to look for its children.
652 652 if ent.action == 'M':
653 653 continue
654 654
655 655 # Also this could create duplicate entries. Not sure
656 656 # whether this will matter. Maybe should make entries a set.
657 657 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
658 658 # This will fail if a directory was copied
659 659 # from another branch and then some of its files
660 660 # were deleted in the same transaction.
661 children = self._find_children(path, revnum)
662 children.sort()
661 children = util.sort(self._find_children(path, revnum))
663 662 for child in children:
664 663 # Can we move a child directory and its
665 664 # parent in the same commit? (probably can). Could
666 665 # cause problems if instead of revnum -1,
667 666 # we have to look in (copyfrom_path, revnum - 1)
668 667 entrypath = self.getrelpath("/" + child)
669 668 # print child, self.module, entrypath
670 669 if entrypath:
671 670 # Need to filter out directories here...
672 671 kind = svn.ra.check_path(self.ra, entrypath, revnum)
673 672 if kind != svn.core.svn_node_dir:
674 673 entries.append(self.recode(entrypath))
675 674
676 675 # Copies here (must copy all from source)
677 676 # Probably not a real problem for us if
678 677 # source does not exist
679 678 if not ent.copyfrom_path or not parents:
680 679 continue
681 680 # Copy sources not in parent revisions cannot be represented,
682 681 # ignore their origin for now
683 682 pmodule, prevnum = self.revsplit(parents[0])[1:]
684 683 if ent.copyfrom_rev < prevnum:
685 684 continue
686 685 copyfrompath = ent.copyfrom_path.decode(self.encoding)
687 686 copyfrompath = self.getrelpath(copyfrompath, pmodule)
688 687 if not copyfrompath:
689 688 continue
690 689 copyfrom[path] = ent
691 690 self.ui.debug("mark %s came from %s:%d\n"
692 691 % (path, copyfrompath, ent.copyfrom_rev))
693 692 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
694 693 children.sort()
695 694 for child in children:
696 695 entrypath = self.getrelpath("/" + child, pmodule)
697 696 if not entrypath:
698 697 continue
699 698 entry = entrypath.decode(self.encoding)
700 699 copytopath = path + entry[len(copyfrompath):]
701 700 copytopath = self.getrelpath(copytopath)
702 701 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
703 702
704 703 return (util.unique(entries), copies)
705 704
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Walk the svn log from from_revnum down to to_revnum and
        populate self.commits/self.paths with one commit object per
        relevant revision.

        Iteration stops early when a revision below self.startrev or a
        branch root is reached.  Raises util.Abort if the branch has no
        revision at to_revnum.
        """
        if from_revnum < to_revnum:
            # normalize so we always iterate from the newer revision down
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def isdescendantof(parent, child):
            # True when child is strictly below parent in the svn tree
            if not child or not parent or not child.startswith(parent):
                return False
            subpath = child[len(parent):]
            return len(subpath) > 1 and subpath[0] == '/'

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if (rev in self.commits or revnum < to_revnum):
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = util.sort(orig_paths.items())
            root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    # If dir was moved while one of its file was removed
                    # the log may look like:
                    # A /dir (from /dir:x)
                    # A /dir/a (from /dir/a:y)
                    # A /dir/b (from /dir/b:z)
                    # ...
                    # for all remaining children.
                    # Let's take the highest child element from rev as source.
                    copies = [(p,e) for p,e in orig_paths[:-1]
                              if isdescendantof(ent.copyfrom_path, e.copyfrom_path)]
                    fromrev = max([e.copyfrom_rev for p,e in copies] + [ent.copyfrom_rev])
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, fromrev)
                    if previd is not None:
                        prevmodule, prevnum = self.revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note('found parent of branch %s at %d: %s\n' %
                                         (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("No copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                # last path component names the branch; svn's 'trunk'
                # maps to hg's default (empty) branch
                branch = self.module.split("/")[-1]
                if branch == 'trunk':
                    branch = ''
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev.encode('utf-8'))

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                # link the previously-parsed (newer) commit to this one
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note('fetching revision log for "%s" from %d to %d\n' %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = get_log(self.url, [self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        # past the user-requested starting revision
                        lastonbranch = True
                        break
                    if self.is_blacklisted(revnum):
                        self.ui.note('skipping blacklisted revision %d\n'
                                     % revnum)
                        continue
                    if paths is None:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except util.Abort:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort('svn: branch has no revision %s' % to_revnum)
            raise
847 845
    def _getfile(self, file, rev):
        """Fetch file contents at rev from the svn repository.

        Returns (data, mode) where mode is 'x' for executables, 'l' for
        symlinks (with the svn "link " prefix stripped from data) or ''.
        Raises IOError when the file does not exist at that revision.
        """
        io = StringIO()
        # TODO: ra.get_file transmits the whole file instead of diffs.
        mode = ''
        try:
            new_module, revnum = self.revsplit(rev)[1:]
            if self.module != new_module:
                # revision lives in a different module: retarget the RA session
                self.module = new_module
                self.reparent(self.module)
            info = svn.ra.get_file(self.ra, file, revnum, io)
            if isinstance(info, list):
                info = info[-1]
            # derive the hg mode flag from svn properties
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        data = io.getvalue()
        if mode == 'l':
            # svn stores symlink targets as "link <target>"
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
874 872
875 873 def _find_children(self, path, revnum):
876 874 path = path.strip('/')
877 875 pool = Pool()
878 876 rpath = '/'.join([self.base, path]).strip('/')
879 877 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
880 878
881 879 def getrelpath(self, path, module=None):
882 880 if module is None:
883 881 module = self.module
884 882 # Given the repository url of this wc, say
885 883 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
886 884 # extract the "entry" portion (a relative path) from what
887 885 # svn log --xml says, ie
888 886 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
889 887 # that is to say "tests/PloneTestCase.py"
890 888 if path.startswith(module):
891 889 relative = path.rstrip('/')[len(module):]
892 890 if relative.startswith('/'):
893 891 return relative[1:]
894 892 elif relative == '':
895 893 return relative
896 894
897 895 # The path is outside our tracked tree...
898 896 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
899 897 return None
900 898
# Shell hook installed as pre-revprop-change into repositories this
# sink creates: it allows only the revision-property changes the
# conversion itself performs (svn:log edits and the hg:convert-*
# markers) and rejects all others.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
916 914
class svn_sink(converter_sink, commandline):
    """Conversion sink that writes changesets into a Subversion
    repository.

    Changes are applied to a private svn working copy and committed
    with the command-line client; hg:convert-rev/hg:convert-branch
    revision properties record the originating changeset.
    """

    # matches the revision number in "svn commit" output
    commit_re = re.compile(r'Committed revision (\d+).', re.M)

    def prerun(self):
        # run every svn command from inside the working copy
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        # return to the directory we started from
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        """Return the path of name inside the wc's .svn admin area."""
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        # hg -> svn revision map, stored in the .svn admin area
        return self.join('hg-shamap')

    def authorfile(self):
        # author map, stored in the .svn admin area
        return self.join('hg-authormap')

    def __init__(self, ui, path):
        """Open (creating if needed, via svnadmin) the repository at
        path and set up a working copy for it."""
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []        # files pending 'svn delete'
        self.setexec = []       # files gaining svn:executable
        self.delexec = []       # files losing svn:executable
        self.copies = []        # [source, dest] pairs pending 'svn copy'
        self.wc = None          # working copy root once known
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path already is a working copy: just freshen it
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    # no repository there yet: create one
                    ui.status(_('initializing svn repo %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # is_exec stays None on filesystems without an execute bit
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            # permit the revprop changes putcommit() makes
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, "x")

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)

    def wjoin(self, *names):
        """Return a path inside the working copy."""
        return os.path.join(self.wc, *names)

    def putfile(self, filename, flags, data):
        """Write data to filename in the wc, honoring the 'l' (symlink)
        and 'x' (executable) flags, and record exec-bit changes."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # replace a previous symlink with a regular file
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)

            if self.is_exec:
                was_exec = self.is_exec(self.wjoin(filename))
            else:
                # On filesystems not supporting execute-bit, there is no way
                # to know if it is set but asking subversion. Setting it
                # systematically is just as expensive and much simpler.
                was_exec = 'x' not in flags

            util.set_flags(self.wjoin(filename), flags)
            if was_exec:
                if 'x' not in flags:
                    self.delexec.append(filename)
            else:
                if 'x' in flags:
                    self.setexec.append(filename)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.exists(wdest)
        if exists:
            # move the existing destination out of the way...
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            if exists:
                # ...and restore its contents over svn's copy result
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of every ancestor directory of files, plus
        those of files that are themselves directories in the wc."""
        dirs = util.set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        """Schedule not-yet-versioned parent directories for addition;
        return the list of directories added."""
        add_dirs = [d for d in util.sort(self.dirs_of(files))
                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
        if add_dirs:
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        """Schedule files for addition; return them unchanged."""
        if files:
            self.xargs(files, 'add', quiet=True)
        return files

    def tidy_dirs(self, names):
        """Delete directories left holding only their .svn admin area,
        deepest first; return the list of deleted directories."""
        dirs = util.sort(self.dirs_of(names))
        dirs.reverse()
        deleted = []
        for d in dirs:
            wd = self.wjoin(d)
            if os.listdir(wd) == '.svn':
                self.run0('delete', d)
                deleted.append(d)
        return deleted

    def addchild(self, parent, child):
        # remember which svn revision was committed for an hg parent
        self.childmap[parent] = child

    def revid(self, rev):
        """Return the sink-side revision id for svn revision rev."""
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source):
        """Apply one changeset to the working copy and commit it.

        Returns the new revision id; if a parent was already converted
        (present in childmap) its revision id is reused instead.
        """
        # Apply changes to working copy
        for f, v in files:
            try:
                data = source.getfile(f, v)
            except IOError, inst:
                # file is gone in this revision
                self.delete.append(f)
            else:
                e = source.getmode(f, v)
                self.putfile(f, e, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        # schedule adds/copies/deletes with svn, tracking touched entries
        entries = util.set(self.delete)
        files = util.frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # commit with the changeset description in a temporary file
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                # record the source changeset id as a revision property
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        # tag conversion to svn is not implemented
        self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,327 +1,326
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7 '''show revision graphs in terminal windows'''
8 8
9 9 import os
10 10 import sys
11 11 from mercurial.cmdutil import revrange, show_changeset
12 12 from mercurial.commands import templateopts
13 13 from mercurial.i18n import _
14 14 from mercurial.node import nullrev
15 15 from mercurial.util import Abort, canonpath
16 from mercurial import util
16 17
def revision_grapher(repo, start_rev, stop_rev):
    """incremental revision grapher

    This generator function walks through the revision history from
    revision start_rev to revision stop_rev (which must be less than
    or equal to start_rev) and for each revision emits tuples with the
    following elements:

      - Current revision.
      - Current node.
      - Column of the current node in the set of ongoing edges.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    assert start_rev >= stop_rev
    rev = start_rev
    columns = []
    while rev >= stop_rev:
        node = repo.changelog.node(rev)

        # Make sure this revision owns a column.
        if rev not in columns:
            # New head.
            columns.append(rev)
        index = columns.index(rev)
        next_columns = columns[:]

        # Splice the not-yet-seen parents in where this revision was.
        parents = get_rev_parents(repo, rev)
        fresh = [p for p in parents if p not in next_columns]
        next_columns[index:index + 1] = util.sort(fresh)

        edges = [(index, next_columns.index(p)) for p in parents]

        width_delta = len(next_columns) - len(columns)
        yield (rev, node, index, edges, len(columns), width_delta)

        columns = next_columns
        rev -= 1
68 68
def filelog_grapher(repo, path, start_rev, stop_rev):
    """incremental file log grapher

    This generator function walks through the revision history of a
    single file from revision start_rev to revision stop_rev (which must
    be less than or equal to start_rev) and for each revision emits
    tuples with the following elements:

      - Current revision.
      - Current node.
      - Column of the current node in the set of ongoing edges.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    assert start_rev >= stop_rev
    # NOTE(review): curr_rev appears to be unused below — confirm.
    curr_rev = start_rev
    revs = []
    # walk file revisions from newest to oldest
    filerev = len(repo.file(path)) - 1
    while filerev >= 0:
        fctx = repo.filectx(path, fileid=filerev)

        # Compute revs and next_revs.
        if filerev not in revs:
            revs.append(filerev)
        rev_index = revs.index(filerev)
        next_revs = revs[:]

        # Add parents to next_revs.
        # parents that renamed the file (different path) are excluded
        parents = [f.filerev() for f in fctx.parents() if f.path() == path]
        parents_to_add = []
        for parent in parents:
            if parent not in next_revs:
                parents_to_add.append(parent)
        next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)

        edges = []
        for parent in parents:
            edges.append((rev_index, next_revs.index(parent)))

        # map the file revision back to its changelog revision and only
        # emit it when it falls inside the requested window
        changerev = fctx.linkrev()
        if changerev <= start_rev:
            node = repo.changelog.node(changerev)
            n_columns_diff = len(next_revs) - len(revs)
            yield (changerev, node, rev_index, edges, len(revs), n_columns_diff)
        if changerev <= stop_rev:
            break
        revs = next_revs
        filerev -= 1
124 123
def get_rev_parents(repo, rev):
    """Return rev's real parent revisions, dropping null parents."""
    parents = []
    for p in repo.changelog.parentrevs(rev):
        if p != nullrev:
            parents.append(p)
    return parents
127 126
def fix_long_right_edges(edges):
    """Stretch every right-leaning edge one extra column, in place."""
    for idx, (left, right) in enumerate(edges):
        if left < right:
            edges[idx] = (left, right + 1)
132 131
def draw_edges(edges, nodeline, interline):
    """Render edge characters into nodeline and interline (both are
    mutated in place); each cell occupies two character slots."""
    for col, nextcol in edges:
        if col == nextcol + 1:
            # parent sits one column to the left
            interline[2 * nextcol + 1] = "/"
        elif col == nextcol - 1:
            # parent sits one column to the right
            interline[2 * col + 1] = "\\"
        elif col == nextcol:
            # straight vertical edge
            interline[2 * col] = "|"
        else:
            # long horizontal edge: junction at the parent column,
            # dashes in between (never overwriting a junction)
            nodeline[2 * nextcol] = "+"
            lo, hi = min(col, nextcol), max(col, nextcol)
            for i in range(2 * lo + 1, 2 * hi):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
148 147
149 148 def format_line(line, level, logstr):
150 149 text = "%-*s %s" % (2 * level, "".join(line), logstr)
151 150 return "%s\n" % text.rstrip()
152 151
def get_nodeline_edges_tail(
    node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Return the cells drawn to the right of the node character."""
    same_slant = fix_tail and n_columns_diff == p_diff and n_columns_diff != 0
    if not same_slant:
        # plain vertical continuation
        return ["|", " "] * (n_columns - node_index - 1)
    if n_columns_diff != -1:
        # widening: everything leans right
        return ["\\", " "] * (n_columns - node_index - 1)
    # shrinking: verticals up to `first`, then left-leaning edges
    first = max(node_index + 1, p_node_index)
    return (["|", " "] * (first - node_index - 1) +
            ["/", " "] * (n_columns - first))
166 165
def get_padding_line(ni, n_columns, edges):
    """Build the extra interline inserted below a merge row.

    The cell at column ni keeps a "|" only when an edge ends on that
    column or the one to its left:

        (ni, ni - 1)      (ni, ni)
        | | | |           | | | |
        +---o |           | o---+
        | | c |           | | c |
        | |/ /            | |/ /
        | | |             | | |
    """
    keep = (ni, ni - 1) in edges or (ni, ni) in edges
    mid = keep and "|" or " "
    return ["|", " "] * ni + [mid, " "] + ["|", " "] * (n_columns - ni - 1)
183 182
def get_limit(limit_opt):
    """Parse the --limit option value into a positive int; an unset
    option means "no limit" (sys.maxint). Raises Abort on bad input."""
    if not limit_opt:
        return sys.maxint
    try:
        limit = int(limit_opt)
    except ValueError:
        raise Abort(_("limit must be a positive integer"))
    if limit <= 0:
        raise Abort(_("limit must be positive"))
    return limit
195 194
def get_revs(repo, rev_opt):
    """Return (newest, oldest) of the requested revisions, defaulting
    to the repository's whole range when no -r option was given."""
    if not rev_opt:
        return (len(repo) - 1, 0)
    revs = revrange(repo, rev_opt)
    return (max(revs), min(revs))
202 201
def graphlog(ui, repo, path=None, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    limit = get_limit(opts["limit"])
    (start_rev, stop_rev) = get_revs(repo, opts["rev"])
    # never walk further back than --limit revisions
    stop_rev = max(stop_rev, start_rev - limit + 1)
    if start_rev == nullrev:
        # empty repository: nothing to draw
        return
    cs_printer = show_changeset(ui, repo, opts)
    if path:
        cpath = canonpath(repo.root, os.getcwd(), path)
        grapher = filelog_grapher(repo, cpath, start_rev, stop_rev)
    else:
        grapher = revision_grapher(repo, start_rev, stop_rev)
    repo_parents = repo.dirstate.parents()
    prev_n_columns_diff = 0
    prev_node_index = 0

    for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
        # log_strings is the list of all log strings to draw alongside
        # the graph.
        ui.pushbuffer()
        cs_printer.show(rev, node)
        log_strings = ui.popbuffer().split("\n")[:-1]

        if n_columns_diff == -1:
            # Transform
            #
            #     | | |        | | |
            #     o | |  into  o---+
            #     |X /         |/ /
            #     | |          | |
            fix_long_right_edges(edges)

        # add_padding_line says whether to rewrite
        #
        #     | | | |        | | | |
        #     | o---+  into  | o---+
        #     |  / /         |   | |  # <--- padding line
        #     o | |          |  / /
        #                    o | |
        add_padding_line = (len(log_strings) > 2 and
                            n_columns_diff == -1 and
                            [x for (x, y) in edges if x + 1 < y])

        # fix_nodeline_tail says whether to rewrite
        #
        #     | | o | |        | | o | |
        #     | | |/ /         | | |/ /
        #     | o | |    into  | o / /   # <--- fixed nodeline tail
        #     | |/ /           | |/ /
        #     o | |            o | |
        fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line

        # nodeline is the line containing the node character (@ or o).
        nodeline = ["|", " "] * node_index
        if node in repo_parents:
            # parent of the working directory
            node_ch = "@"
        else:
            node_ch = "o"
        nodeline.extend([node_ch, " "])

        nodeline.extend(
            get_nodeline_edges_tail(
                node_index, prev_node_index, n_columns, n_columns_diff,
                prev_n_columns_diff, fix_nodeline_tail))

        # shift_interline is the line containing the non-vertical
        # edges between this entry and the next.
        shift_interline = ["|", " "] * node_index
        if n_columns_diff == -1:
            n_spaces = 1
            edge_ch = "/"
        elif n_columns_diff == 0:
            n_spaces = 2
            edge_ch = "|"
        else:
            n_spaces = 3
            edge_ch = "\\"
        shift_interline.extend(n_spaces * [" "])
        shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))

        # Draw edges from the current node to its parents.
        draw_edges(edges, nodeline, shift_interline)

        # lines is the list of all graph lines to print.
        lines = [nodeline]
        if add_padding_line:
            lines.append(get_padding_line(node_index, n_columns, edges))
        lines.append(shift_interline)

        # Make sure that there are as many graph lines as there are
        # log strings.
        while len(log_strings) < len(lines):
            log_strings.append("")
        if len(lines) < len(log_strings):
            extra_interline = ["|", " "] * (n_columns + n_columns_diff)
            while len(lines) < len(log_strings):
                lines.append(extra_interline)

        # Print lines.
        indentation_level = max(n_columns, n_columns + n_columns_diff)
        for (line, logstr) in zip(lines, log_strings):
            ui.write(format_line(line, indentation_level, logstr))

        # ...and start over.
        prev_node_index = node_index
        prev_n_columns_diff = n_columns_diff
318 317
# command table wiring "hg glog" to graphlog() with its options
cmdtable = {
    "glog":
        (graphlog,
         [('l', 'limit', '', _('limit number of changes displayed')),
          ('p', 'patch', False, _('show patch')),
          ('r', 'rev', [], _('show the specified revision or range')),
         ] + templateopts,
         _('hg glog [OPTION]... [FILE]')),
}
@@ -1,717 +1,715
1 1 # server.py - inotify status server
2 2 #
3 3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from mercurial.i18n import gettext as _
10 10 from mercurial import osutil, ui, util
11 11 import common
12 12 import errno, os, select, socket, stat, struct, sys, time
13 13
14 14 try:
15 15 import hgext.inotify.linux as inotify
16 16 from hgext.inotify.linux import watcher
17 17 except ImportError:
18 18 print >> sys.stderr, '*** native support is required for this extension'
19 19 raise
20 20
21 21 class AlreadyStartedException(Exception): pass
22 22
23 23 def join(a, b):
24 24 if a:
25 25 if a[-1] == '/':
26 26 return a + b
27 27 return a + '/' + b
28 28 return b
29 29
30 30 walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG)
31 31
32 32 def walkrepodirs(repo):
33 33 '''Iterate over all subdirectories of this repo.
34 34 Exclude the .hg directory, any nested repos, and ignored dirs.'''
35 35 rootslash = repo.root + os.sep
36 36 def walkit(dirname, top):
37 37 hginside = False
38 38 try:
39 39 for name, kind in osutil.listdir(rootslash + dirname):
40 40 if kind == stat.S_IFDIR:
41 41 if name == '.hg':
42 42 hginside = True
43 43 if not top: break
44 44 else:
45 45 d = join(dirname, name)
46 46 if repo.dirstate._ignore(d):
47 47 continue
48 48 for subdir, hginsub in walkit(d, False):
49 49 if not hginsub:
50 50 yield subdir, False
51 51 except OSError, err:
52 52 if err.errno not in walk_ignored_errors:
53 53 raise
54 54 yield rootslash + dirname, hginside
55 55 for dirname, hginside in walkit('', True):
56 56 yield dirname
57 57
58 58 def walk(repo, root):
59 59 '''Like os.walk, but only yields regular files.'''
60 60
61 61 # This function is critical to performance during startup.
62 62
63 63 reporoot = root == ''
64 64 rootslash = repo.root + os.sep
65 65
66 66 def walkit(root, reporoot):
67 67 files, dirs = [], []
68 68 hginside = False
69 69
70 70 try:
71 71 fullpath = rootslash + root
72 72 for name, kind in osutil.listdir(fullpath):
73 73 if kind == stat.S_IFDIR:
74 74 if name == '.hg':
75 75 hginside = True
76 76 if reporoot:
77 77 continue
78 78 else:
79 79 break
80 80 dirs.append(name)
81 81 elif kind in (stat.S_IFREG, stat.S_IFLNK):
82 82 path = join(root, name)
83 83 files.append((name, kind))
84 84
85 85 yield hginside, fullpath, dirs, files
86 86
87 87 for subdir in dirs:
88 88 path = join(root, subdir)
89 89 if repo.dirstate._ignore(path):
90 90 continue
91 91 for result in walkit(path, False):
92 92 if not result[0]:
93 93 yield result
94 94 except OSError, err:
95 95 if err.errno not in walk_ignored_errors:
96 96 raise
97 97 for result in walkit(root, reporoot):
98 98 yield result[1:]
99 99
100 100 def _explain_watch_limit(ui, repo, count):
101 101 path = '/proc/sys/fs/inotify/max_user_watches'
102 102 try:
103 103 limit = int(file(path).read())
104 104 except IOError, err:
105 105 if err.errno != errno.ENOENT:
106 106 raise
107 107 raise util.Abort(_('this system does not seem to '
108 108 'support inotify'))
109 109 ui.warn(_('*** the current per-user limit on the number '
110 110 'of inotify watches is %s\n') % limit)
111 111 ui.warn(_('*** this limit is too low to watch every '
112 112 'directory in this repository\n'))
113 113 ui.warn(_('*** counting directories: '))
114 114 ndirs = len(list(walkrepodirs(repo)))
115 115 ui.warn(_('found %d\n') % ndirs)
116 116 newlimit = min(limit, 1024)
117 117 while newlimit < ((limit + ndirs) * 1.1):
118 118 newlimit *= 2
119 119 ui.warn(_('*** to raise the limit from %d to %d (run as root):\n') %
120 120 (limit, newlimit))
121 121 ui.warn(_('*** echo %d > %s\n') % (newlimit, path))
122 122 raise util.Abort(_('cannot watch %s until inotify watch limit is raised')
123 123 % repo.root)
124 124
125 125 class Watcher(object):
126 126 poll_events = select.POLLIN
127 127 statuskeys = 'almr!?'
128 128
129 129 def __init__(self, ui, repo, master):
130 130 self.ui = ui
131 131 self.repo = repo
132 132 self.wprefix = self.repo.wjoin('')
133 133 self.timeout = None
134 134 self.master = master
135 135 self.mask = (
136 136 inotify.IN_ATTRIB |
137 137 inotify.IN_CREATE |
138 138 inotify.IN_DELETE |
139 139 inotify.IN_DELETE_SELF |
140 140 inotify.IN_MODIFY |
141 141 inotify.IN_MOVED_FROM |
142 142 inotify.IN_MOVED_TO |
143 143 inotify.IN_MOVE_SELF |
144 144 inotify.IN_ONLYDIR |
145 145 inotify.IN_UNMOUNT |
146 146 0)
147 147 try:
148 148 self.watcher = watcher.Watcher()
149 149 except OSError, err:
150 150 raise util.Abort(_('inotify service not available: %s') %
151 151 err.strerror)
152 152 self.threshold = watcher.Threshold(self.watcher)
153 153 self.registered = True
154 154 self.fileno = self.watcher.fileno
155 155
156 156 self.repo.dirstate.__class__.inotifyserver = True
157 157
158 158 self.tree = {}
159 159 self.statcache = {}
160 160 self.statustrees = dict([(s, {}) for s in self.statuskeys])
161 161
162 162 self.watches = 0
163 163 self.last_event = None
164 164
165 165 self.eventq = {}
166 166 self.deferred = 0
167 167
168 168 self.ds_info = self.dirstate_info()
169 169 self.scan()
170 170
171 171 def event_time(self):
172 172 last = self.last_event
173 173 now = time.time()
174 174 self.last_event = now
175 175
176 176 if last is None:
177 177 return 'start'
178 178 delta = now - last
179 179 if delta < 5:
180 180 return '+%.3f' % delta
181 181 if delta < 50:
182 182 return '+%.2f' % delta
183 183 return '+%.1f' % delta
184 184
185 185 def dirstate_info(self):
186 186 try:
187 187 st = os.lstat(self.repo.join('dirstate'))
188 188 return st.st_mtime, st.st_ino
189 189 except OSError, err:
190 190 if err.errno != errno.ENOENT:
191 191 raise
192 192 return 0, 0
193 193
194 194 def add_watch(self, path, mask):
195 195 if not path:
196 196 return
197 197 if self.watcher.path(path) is None:
198 198 if self.ui.debugflag:
199 199 self.ui.note(_('watching %r\n') % path[len(self.wprefix):])
200 200 try:
201 201 self.watcher.add(path, mask)
202 202 self.watches += 1
203 203 except OSError, err:
204 204 if err.errno in (errno.ENOENT, errno.ENOTDIR):
205 205 return
206 206 if err.errno != errno.ENOSPC:
207 207 raise
208 208 _explain_watch_limit(self.ui, self.repo, self.watches)
209 209
210 210 def setup(self):
211 211 self.ui.note(_('watching directories under %r\n') % self.repo.root)
212 212 self.add_watch(self.repo.path, inotify.IN_DELETE)
213 213 self.check_dirstate()
214 214
215 215 def wpath(self, evt):
216 216 path = evt.fullpath
217 217 if path == self.repo.root:
218 218 return ''
219 219 if path.startswith(self.wprefix):
220 220 return path[len(self.wprefix):]
221 221 raise 'wtf? ' + path
222 222
223 223 def dir(self, tree, path):
224 224 if path:
225 225 for name in path.split('/'):
226 226 tree.setdefault(name, {})
227 227 tree = tree[name]
228 228 return tree
229 229
230 230 def lookup(self, path, tree):
231 231 if path:
232 232 try:
233 233 for name in path.split('/'):
234 234 tree = tree[name]
235 235 except KeyError:
236 236 return 'x'
237 237 except TypeError:
238 238 return 'd'
239 239 return tree
240 240
241 241 def split(self, path):
242 242 c = path.rfind('/')
243 243 if c == -1:
244 244 return '', path
245 245 return path[:c], path[c+1:]
246 246
247 247 def filestatus(self, fn, st):
248 248 try:
249 249 type_, mode, size, time = self.repo.dirstate._map[fn][:4]
250 250 except KeyError:
251 251 type_ = '?'
252 252 if type_ == 'n':
253 253 if not st:
254 254 return '!'
255 255 st_mode, st_size, st_mtime = st
256 256 if size and (size != st_size or (mode ^ st_mode) & 0100):
257 257 return 'm'
258 258 if time != int(st_mtime):
259 259 return 'l'
260 260 return 'n'
261 261 if type_ in 'ma' and not st:
262 262 return '!'
263 263 if type_ == '?' and self.repo.dirstate._ignore(fn):
264 264 return 'i'
265 265 return type_
266 266
267 267 def updatestatus(self, wfn, st=None, status=None, oldstatus=None):
268 268 if st:
269 269 status = self.filestatus(wfn, st)
270 270 else:
271 271 self.statcache.pop(wfn, None)
272 272 root, fn = self.split(wfn)
273 273 d = self.dir(self.tree, root)
274 274 if oldstatus is None:
275 275 oldstatus = d.get(fn)
276 276 isdir = False
277 277 if oldstatus:
278 278 try:
279 279 if not status:
280 280 if oldstatus in 'almn':
281 281 status = '!'
282 282 elif oldstatus == 'r':
283 283 status = 'r'
284 284 except TypeError:
285 285 # oldstatus may be a dict left behind by a deleted
286 286 # directory
287 287 isdir = True
288 288 else:
289 289 if oldstatus in self.statuskeys and oldstatus != status:
290 290 del self.dir(self.statustrees[oldstatus], root)[fn]
291 291 if self.ui.debugflag and oldstatus != status:
292 292 if isdir:
293 293 self.ui.note('status: %r dir(%d) -> %s\n' %
294 294 (wfn, len(oldstatus), status))
295 295 else:
296 296 self.ui.note('status: %r %s -> %s\n' %
297 297 (wfn, oldstatus, status))
298 298 if not isdir:
299 299 if status and status != 'i':
300 300 d[fn] = status
301 301 if status in self.statuskeys:
302 302 dd = self.dir(self.statustrees[status], root)
303 303 if oldstatus != status or fn not in dd:
304 304 dd[fn] = status
305 305 else:
306 306 d.pop(fn, None)
307 307
308 308 def check_deleted(self, key):
309 309 # Files that had been deleted but were present in the dirstate
310 310 # may have vanished from the dirstate; we must clean them up.
311 311 nuke = []
312 312 for wfn, ignore in self.walk(key, self.statustrees[key]):
313 313 if wfn not in self.repo.dirstate:
314 314 nuke.append(wfn)
315 315 for wfn in nuke:
316 316 root, fn = self.split(wfn)
317 317 del self.dir(self.statustrees[key], root)[fn]
318 318 del self.dir(self.tree, root)[fn]
319 319
320 320 def scan(self, topdir=''):
321 321 self.handle_timeout()
322 322 ds = self.repo.dirstate._map.copy()
323 323 self.add_watch(join(self.repo.root, topdir), self.mask)
324 324 for root, dirs, entries in walk(self.repo, topdir):
325 325 for d in dirs:
326 326 self.add_watch(join(root, d), self.mask)
327 327 wroot = root[len(self.wprefix):]
328 328 d = self.dir(self.tree, wroot)
329 329 for fn, kind in entries:
330 330 wfn = join(wroot, fn)
331 331 self.updatestatus(wfn, self.getstat(wfn))
332 332 ds.pop(wfn, None)
333 333 wtopdir = topdir
334 334 if wtopdir and wtopdir[-1] != '/':
335 335 wtopdir += '/'
336 336 for wfn, state in ds.iteritems():
337 337 if not wfn.startswith(wtopdir):
338 338 continue
339 339 status = state[0]
340 340 st = self.getstat(wfn)
341 341 if status == 'r' and not st:
342 342 self.updatestatus(wfn, st, status=status)
343 343 else:
344 344 self.updatestatus(wfn, st, oldstatus=status)
345 345 self.check_deleted('!')
346 346 self.check_deleted('r')
347 347
348 348 def check_dirstate(self):
349 349 ds_info = self.dirstate_info()
350 350 if ds_info == self.ds_info:
351 351 return
352 352 self.ds_info = ds_info
353 353 if not self.ui.debugflag:
354 354 self.last_event = None
355 355 self.ui.note(_('%s dirstate reload\n') % self.event_time())
356 356 self.repo.dirstate.invalidate()
357 357 self.scan()
358 358 self.ui.note(_('%s end dirstate reload\n') % self.event_time())
359 359
360 360 def walk(self, states, tree, prefix=''):
361 361 # This is the "inner loop" when talking to the client.
362 362
363 363 for name, val in tree.iteritems():
364 364 path = join(prefix, name)
365 365 try:
366 366 if val in states:
367 367 yield path, val
368 368 except TypeError:
369 369 for p in self.walk(states, val, path):
370 370 yield p
371 371
372 372 def update_hgignore(self):
373 373 # An update of the ignore file can potentially change the
374 374 # states of all unknown and ignored files.
375 375
376 376 # XXX If the user has other ignore files outside the repo, or
377 377 # changes their list of ignore files at run time, we'll
378 378 # potentially never see changes to them. We could get the
379 379 # client to report to us what ignore data they're using.
380 380 # But it's easier to do nothing than to open that can of
381 381 # worms.
382 382
383 383 if self.repo.dirstate.ignorefunc is not None:
384 384 self.repo.dirstate.ignorefunc = None
385 385 self.ui.note('rescanning due to .hgignore change\n')
386 386 self.scan()
387 387
388 388 def getstat(self, wpath):
389 389 try:
390 390 return self.statcache[wpath]
391 391 except KeyError:
392 392 try:
393 393 return self.stat(wpath)
394 394 except OSError, err:
395 395 if err.errno != errno.ENOENT:
396 396 raise
397 397
398 398 def stat(self, wpath):
399 399 try:
400 400 st = os.lstat(join(self.wprefix, wpath))
401 401 ret = st.st_mode, st.st_size, st.st_mtime
402 402 self.statcache[wpath] = ret
403 403 return ret
404 404 except OSError, err:
405 405 self.statcache.pop(wpath, None)
406 406 raise
407 407
408 408 def created(self, wpath):
409 409 if wpath == '.hgignore':
410 410 self.update_hgignore()
411 411 try:
412 412 st = self.stat(wpath)
413 413 if stat.S_ISREG(st[0]):
414 414 self.updatestatus(wpath, st)
415 415 except OSError, err:
416 416 pass
417 417
418 418 def modified(self, wpath):
419 419 if wpath == '.hgignore':
420 420 self.update_hgignore()
421 421 try:
422 422 st = self.stat(wpath)
423 423 if stat.S_ISREG(st[0]):
424 424 if self.repo.dirstate[wpath] in 'lmn':
425 425 self.updatestatus(wpath, st)
426 426 except OSError:
427 427 pass
428 428
429 429 def deleted(self, wpath):
430 430 if wpath == '.hgignore':
431 431 self.update_hgignore()
432 432 elif wpath.startswith('.hg/'):
433 433 if wpath == '.hg/wlock':
434 434 self.check_dirstate()
435 435 return
436 436
437 437 self.updatestatus(wpath, None)
438 438
439 439 def schedule_work(self, wpath, evt):
440 440 self.eventq.setdefault(wpath, [])
441 441 prev = self.eventq[wpath]
442 442 try:
443 443 if prev and evt == 'm' and prev[-1] in 'cm':
444 444 return
445 445 self.eventq[wpath].append(evt)
446 446 finally:
447 447 self.deferred += 1
448 448 self.timeout = 250
449 449
450 450 def deferred_event(self, wpath, evt):
451 451 if evt == 'c':
452 452 self.created(wpath)
453 453 elif evt == 'm':
454 454 self.modified(wpath)
455 455 elif evt == 'd':
456 456 self.deleted(wpath)
457 457
458 458 def process_create(self, wpath, evt):
459 459 if self.ui.debugflag:
460 460 self.ui.note(_('%s event: created %s\n') %
461 461 (self.event_time(), wpath))
462 462
463 463 if evt.mask & inotify.IN_ISDIR:
464 464 self.scan(wpath)
465 465 else:
466 466 self.schedule_work(wpath, 'c')
467 467
468 468 def process_delete(self, wpath, evt):
469 469 if self.ui.debugflag:
470 470 self.ui.note(('%s event: deleted %s\n') %
471 471 (self.event_time(), wpath))
472 472
473 473 if evt.mask & inotify.IN_ISDIR:
474 474 self.scan(wpath)
475 475 else:
476 476 self.schedule_work(wpath, 'd')
477 477
478 478 def process_modify(self, wpath, evt):
479 479 if self.ui.debugflag:
480 480 self.ui.note(_('%s event: modified %s\n') %
481 481 (self.event_time(), wpath))
482 482
483 483 if not (evt.mask & inotify.IN_ISDIR):
484 484 self.schedule_work(wpath, 'm')
485 485
486 486 def process_unmount(self, evt):
487 487 self.ui.warn(_('filesystem containing %s was unmounted\n') %
488 488 evt.fullpath)
489 489 sys.exit(0)
490 490
491 491 def handle_event(self, fd, event):
492 492 if self.ui.debugflag:
493 493 self.ui.note('%s readable: %d bytes\n' %
494 494 (self.event_time(), self.threshold.readable()))
495 495 if not self.threshold():
496 496 if self.registered:
497 497 if self.ui.debugflag:
498 498 self.ui.note('%s below threshold - unhooking\n' %
499 499 (self.event_time()))
500 500 self.master.poll.unregister(fd)
501 501 self.registered = False
502 502 self.timeout = 250
503 503 else:
504 504 self.read_events()
505 505
506 506 def read_events(self, bufsize=None):
507 507 events = self.watcher.read(bufsize)
508 508 if self.ui.debugflag:
509 509 self.ui.note('%s reading %d events\n' %
510 510 (self.event_time(), len(events)))
511 511 for evt in events:
512 512 wpath = self.wpath(evt)
513 513 if evt.mask & inotify.IN_UNMOUNT:
514 514 self.process_unmount(wpath, evt)
515 515 elif evt.mask & (inotify.IN_MODIFY | inotify.IN_ATTRIB):
516 516 self.process_modify(wpath, evt)
517 517 elif evt.mask & (inotify.IN_DELETE | inotify.IN_DELETE_SELF |
518 518 inotify.IN_MOVED_FROM):
519 519 self.process_delete(wpath, evt)
520 520 elif evt.mask & (inotify.IN_CREATE | inotify.IN_MOVED_TO):
521 521 self.process_create(wpath, evt)
522 522
523 523 def handle_timeout(self):
524 524 if not self.registered:
525 525 if self.ui.debugflag:
526 526 self.ui.note('%s hooking back up with %d bytes readable\n' %
527 527 (self.event_time(), self.threshold.readable()))
528 528 self.read_events(0)
529 529 self.master.poll.register(self, select.POLLIN)
530 530 self.registered = True
531 531
532 532 if self.eventq:
533 533 if self.ui.debugflag:
534 534 self.ui.note('%s processing %d deferred events as %d\n' %
535 535 (self.event_time(), self.deferred,
536 536 len(self.eventq)))
537 eventq = self.eventq.items()
538 eventq.sort()
539 for wpath, evts in eventq:
537 for wpath, evts in util.sort(self.eventq.items()):
540 538 for evt in evts:
541 539 self.deferred_event(wpath, evt)
542 540 self.eventq.clear()
543 541 self.deferred = 0
544 542 self.timeout = None
545 543
546 544 def shutdown(self):
547 545 self.watcher.close()
548 546
549 547 class Server(object):
550 548 poll_events = select.POLLIN
551 549
552 550 def __init__(self, ui, repo, watcher, timeout):
553 551 self.ui = ui
554 552 self.repo = repo
555 553 self.watcher = watcher
556 554 self.timeout = timeout
557 555 self.sock = socket.socket(socket.AF_UNIX)
558 556 self.sockpath = self.repo.join('inotify.sock')
559 557 try:
560 558 self.sock.bind(self.sockpath)
561 559 except socket.error, err:
562 560 if err[0] == errno.EADDRINUSE:
563 561 raise AlreadyStartedException(_('could not start server: %s') \
564 562 % err[1])
565 563 raise
566 564 self.sock.listen(5)
567 565 self.fileno = self.sock.fileno
568 566
569 567 def handle_timeout(self):
570 568 pass
571 569
572 570 def handle_event(self, fd, event):
573 571 sock, addr = self.sock.accept()
574 572
575 573 cs = common.recvcs(sock)
576 574 version = ord(cs.read(1))
577 575
578 576 sock.sendall(chr(common.version))
579 577
580 578 if version != common.version:
581 579 self.ui.warn(_('received query from incompatible client '
582 580 'version %d\n') % version)
583 581 return
584 582
585 583 names = cs.read().split('\0')
586 584
587 585 states = names.pop()
588 586
589 587 self.ui.note(_('answering query for %r\n') % states)
590 588
591 589 if self.watcher.timeout:
592 590 # We got a query while a rescan is pending. Make sure we
593 591 # rescan before responding, or we could give back a wrong
594 592 # answer.
595 593 self.watcher.handle_timeout()
596 594
597 595 if not names:
598 596 def genresult(states, tree):
599 597 for fn, state in self.watcher.walk(states, tree):
600 598 yield fn
601 599 else:
602 600 def genresult(states, tree):
603 601 for fn in names:
604 602 l = self.watcher.lookup(fn, tree)
605 603 try:
606 604 if l in states:
607 605 yield fn
608 606 except TypeError:
609 607 for f, s in self.watcher.walk(states, l, fn):
610 608 yield f
611 609
612 610 results = ['\0'.join(r) for r in [
613 611 genresult('l', self.watcher.statustrees['l']),
614 612 genresult('m', self.watcher.statustrees['m']),
615 613 genresult('a', self.watcher.statustrees['a']),
616 614 genresult('r', self.watcher.statustrees['r']),
617 615 genresult('!', self.watcher.statustrees['!']),
618 616 '?' in states and genresult('?', self.watcher.statustrees['?']) or [],
619 617 [],
620 618 'c' in states and genresult('n', self.watcher.tree) or [],
621 619 ]]
622 620
623 621 try:
624 622 try:
625 623 sock.sendall(struct.pack(common.resphdrfmt,
626 624 *map(len, results)))
627 625 sock.sendall(''.join(results))
628 626 finally:
629 627 sock.shutdown(socket.SHUT_WR)
630 628 except socket.error, err:
631 629 if err[0] != errno.EPIPE:
632 630 raise
633 631
634 632 def shutdown(self):
635 633 self.sock.close()
636 634 try:
637 635 os.unlink(self.sockpath)
638 636 except OSError, err:
639 637 if err.errno != errno.ENOENT:
640 638 raise
641 639
642 640 class Master(object):
643 641 def __init__(self, ui, repo, timeout=None):
644 642 self.ui = ui
645 643 self.repo = repo
646 644 self.poll = select.poll()
647 645 self.watcher = Watcher(ui, repo, self)
648 646 self.server = Server(ui, repo, self.watcher, timeout)
649 647 self.table = {}
650 648 for obj in (self.watcher, self.server):
651 649 fd = obj.fileno()
652 650 self.table[fd] = obj
653 651 self.poll.register(fd, obj.poll_events)
654 652
655 653 def register(self, fd, mask):
656 654 self.poll.register(fd, mask)
657 655
658 656 def shutdown(self):
659 657 for obj in self.table.itervalues():
660 658 obj.shutdown()
661 659
662 660 def run(self):
663 661 self.watcher.setup()
664 662 self.ui.note(_('finished setup\n'))
665 663 if os.getenv('TIME_STARTUP'):
666 664 sys.exit(0)
667 665 while True:
668 666 timeout = None
669 667 timeobj = None
670 668 for obj in self.table.itervalues():
671 669 if obj.timeout is not None and (timeout is None or obj.timeout < timeout):
672 670 timeout, timeobj = obj.timeout, obj
673 671 try:
674 672 if self.ui.debugflag:
675 673 if timeout is None:
676 674 self.ui.note('polling: no timeout\n')
677 675 else:
678 676 self.ui.note('polling: %sms timeout\n' % timeout)
679 677 events = self.poll.poll(timeout)
680 678 except select.error, err:
681 679 if err[0] == errno.EINTR:
682 680 continue
683 681 raise
684 682 if events:
685 683 for fd, event in events:
686 684 self.table[fd].handle_event(fd, event)
687 685 elif timeobj:
688 686 timeobj.handle_timeout()
689 687
690 688 def start(ui, repo):
691 689 m = Master(ui, repo)
692 690 sys.stdout.flush()
693 691 sys.stderr.flush()
694 692
695 693 pid = os.fork()
696 694 if pid:
697 695 return pid
698 696
699 697 os.setsid()
700 698
701 699 fd = os.open('/dev/null', os.O_RDONLY)
702 700 os.dup2(fd, 0)
703 701 if fd > 0:
704 702 os.close(fd)
705 703
706 704 fd = os.open(ui.config('inotify', 'log', '/dev/null'),
707 705 os.O_RDWR | os.O_CREAT | os.O_TRUNC)
708 706 os.dup2(fd, 1)
709 707 os.dup2(fd, 2)
710 708 if fd > 2:
711 709 os.close(fd)
712 710
713 711 try:
714 712 m.run()
715 713 finally:
716 714 m.shutdown()
717 715 os._exit(0)
@@ -1,564 +1,562
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an audience
15 15 # not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 38 in tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in
41 41 the change history. The mechanism can be regarded as a convenience
42 42 for the current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections
45 45 of hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repos.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 63 with customized keywords and templates.
64 64 Again, run "hg kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertedly storing expanded keywords in the change history.
68 68
69 69 To force expansion after enabling it, or a configuration change, run
70 70 "hg kwexpand".
71 71
72 72 Also, when committing with the record extension or using mq's qrecord, be aware
73 73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 74 question to update keyword expansions after all changes have been checked in.
75 75
76 76 Expansions spanning more than one line and incremental expansions,
77 77 like CVS' $Log$, are not supported. A keyword template map
78 78 "Log = {desc}" expands to the first line of the changeset description.
79 79 '''
80 80
81 81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
82 82 from mercurial import patch, localrepo, templater, templatefilters, util
83 83 from mercurial.hgweb import webcommands
84 84 from mercurial.node import nullid, hex
85 85 from mercurial.i18n import _
86 86 import re, shutil, tempfile, time
87 87
88 88 commands.optionalrepo += ' kwdemo'
89 89
90 90 # hg commands that do not act on keywords
91 91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
92 92 ' log outgoing push rename rollback tip'
93 93 ' convert email glog')
94 94
95 95 # hg commands that trigger expansion only when writing to working dir,
96 96 # not when reading filelog, and unexpand when reading from working dir
97 97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98 98
99 99 def utcdate(date):
100 100 '''Returns hgdate in cvs-like UTC format.'''
101 101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102 102
103 103 # make keyword tools accessible
104 104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
105 105
106 106
107 107 class kwtemplater(object):
108 108 '''
109 109 Sets up keyword templates, corresponding keyword regex, and
110 110 provides keyword substitution functions.
111 111 '''
112 112 templates = {
113 113 'Revision': '{node|short}',
114 114 'Author': '{author|user}',
115 115 'Date': '{date|utcdate}',
116 116 'RCSFile': '{file|basename},v',
117 117 'Source': '{root}/{file},v',
118 118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
119 119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
120 120 }
121 121
122 122 def __init__(self, ui, repo):
123 123 self.ui = ui
124 124 self.repo = repo
125 125 self.matcher = util.matcher(repo.root,
126 126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
127 127 self.restrict = kwtools['hgcmd'] in restricted.split()
128 128
129 129 kwmaps = self.ui.configitems('keywordmaps')
130 130 if kwmaps: # override default templates
131 131 kwmaps = [(k, templater.parsestring(v, False))
132 132 for (k, v) in kwmaps]
133 133 self.templates = dict(kwmaps)
134 134 escaped = map(re.escape, self.templates.keys())
135 135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
136 136 self.re_kw = re.compile(kwpat)
137 137
138 138 templatefilters.filters['utcdate'] = utcdate
139 139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
140 140 False, '', False)
141 141
142 142 def getnode(self, path, fnode):
143 143 '''Derives changenode from file path and filenode.'''
144 144 # used by kwfilelog.read and kwexpand
145 145 c = self.repo.filectx(path, fileid=fnode)
146 146 return c.node()
147 147
148 148 def substitute(self, data, path, node, subfunc):
149 149 '''Replaces keywords in data with expanded template.'''
150 150 def kwsub(mobj):
151 151 kw = mobj.group(1)
152 152 self.ct.use_template(self.templates[kw])
153 153 self.ui.pushbuffer()
154 154 self.ct.show(changenode=node, root=self.repo.root, file=path)
155 155 ekw = templatefilters.firstline(self.ui.popbuffer())
156 156 return '$%s: %s $' % (kw, ekw)
157 157 return subfunc(kwsub, data)
158 158
159 159 def expand(self, path, node, data):
160 160 '''Returns data with keywords expanded.'''
161 161 if not self.restrict and self.matcher(path) and not util.binary(data):
162 162 changenode = self.getnode(path, node)
163 163 return self.substitute(data, path, changenode, self.re_kw.sub)
164 164 return data
165 165
166 166 def iskwfile(self, path, flagfunc):
167 167 '''Returns true if path matches [keyword] pattern
168 168 and is not a symbolic link.
169 169 Caveat: localrepository._link fails on Windows.'''
170 170 return self.matcher(path) and not 'l' in flagfunc(path)
171 171
172 172 def overwrite(self, node, expand, files):
173 173 '''Overwrites selected files expanding/shrinking keywords.'''
174 174 if node is not None: # commit
175 175 ctx = self.repo[node]
176 176 mf = ctx.manifest()
177 177 files = [f for f in ctx.files() if f in mf]
178 178 notify = self.ui.debug
179 179 else: # kwexpand/kwshrink
180 180 ctx = self.repo['.']
181 181 mf = ctx.manifest()
182 182 notify = self.ui.note
183 183 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
184 184 if candidates:
185 185 self.restrict = True # do not expand when reading
186 candidates.sort()
187 186 action = expand and 'expanding' or 'shrinking'
188 187 for f in candidates:
189 188 fp = self.repo.file(f)
190 189 data = fp.read(mf[f])
191 190 if util.binary(data):
192 191 continue
193 192 if expand:
194 193 changenode = node or self.getnode(f, mf[f])
195 194 data, found = self.substitute(data, f, changenode,
196 195 self.re_kw.subn)
197 196 else:
198 197 found = self.re_kw.search(data)
199 198 if found:
200 199 notify(_('overwriting %s %s keywords\n') % (f, action))
201 200 self.repo.wwrite(f, data, mf.flags(f))
202 201 self.repo.dirstate.normal(f)
203 202 self.restrict = False
204 203
205 204 def shrinktext(self, text):
206 205 '''Unconditionally removes all keyword substitutions from text.'''
207 206 return self.re_kw.sub(r'$\1$', text)
208 207
209 208 def shrink(self, fname, text):
210 209 '''Returns text with all keyword substitutions removed.'''
211 210 if self.matcher(fname) and not util.binary(text):
212 211 return self.shrinktext(text)
213 212 return text
214 213
215 214 def shrinklines(self, fname, lines):
216 215 '''Returns lines with keyword substitutions removed.'''
217 216 if self.matcher(fname):
218 217 text = ''.join(lines)
219 218 if not util.binary(text):
220 219 return self.shrinktext(text).splitlines(True)
221 220 return lines
222 221
223 222 def wread(self, fname, data):
224 223 '''If in restricted mode returns data read from wdir with
225 224 keyword substitutions removed.'''
226 225 return self.restrict and self.shrink(fname, data) or data
227 226
228 227 class kwfilelog(filelog.filelog):
229 228 '''
230 229 Subclass of filelog to hook into its read, add, cmp methods.
231 230 Keywords are "stored" unexpanded, and processed on reading.
232 231 '''
233 232 def __init__(self, opener, kwt, path):
234 233 super(kwfilelog, self).__init__(opener, path)
235 234 self.kwt = kwt
236 235 self.path = path
237 236
238 237 def read(self, node):
239 238 '''Expands keywords when reading filelog.'''
240 239 data = super(kwfilelog, self).read(node)
241 240 return self.kwt.expand(self.path, node, data)
242 241
243 242 def add(self, text, meta, tr, link, p1=None, p2=None):
244 243 '''Removes keyword substitutions when adding to filelog.'''
245 244 text = self.kwt.shrink(self.path, text)
246 245 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
247 246
248 247 def cmp(self, node, text):
249 248 '''Removes keyword substitutions for comparison.'''
250 249 text = self.kwt.shrink(self.path, text)
251 250 if self.renamed(node):
252 251 t2 = super(kwfilelog, self).read(node)
253 252 return t2 != text
254 253 return revlog.revlog.cmp(self, node, text)
255 254
256 255 def _status(ui, repo, kwt, unknown, *pats, **opts):
257 256 '''Bails out if [keyword] configuration is not active.
258 257 Returns status of working directory.'''
259 258 if kwt:
260 259 matcher = cmdutil.match(repo, pats, opts)
261 260 return repo.status(match=matcher, unknown=unknown, clean=True)
262 261 if ui.configitems('keyword'):
263 262 raise util.Abort(_('[keyword] patterns cannot match'))
264 263 raise util.Abort(_('no [keyword] patterns configured'))
265 264
266 265 def _kwfwrite(ui, repo, expand, *pats, **opts):
267 266 '''Selects files and passes them to kwtemplater.overwrite.'''
268 267 if repo.dirstate.parents()[1] != nullid:
269 268 raise util.Abort(_('outstanding uncommitted merge'))
270 269 kwt = kwtools['templater']
271 270 status = _status(ui, repo, kwt, False, *pats, **opts)
272 271 modified, added, removed, deleted = status[:4]
273 272 if modified or added or removed or deleted:
274 273 raise util.Abort(_('outstanding uncommitted changes'))
275 274 wlock = lock = None
276 275 try:
277 276 wlock = repo.wlock()
278 277 lock = repo.lock()
279 278 kwt.overwrite(None, expand, status[6])
280 279 finally:
281 280 del wlock, lock
282 281
283 282
284 283 def demo(ui, repo, *args, **opts):
285 284 '''print [keywordmaps] configuration and an expansion example
286 285
287 286 Show current, custom, or default keyword template maps
288 287 and their expansion.
289 288
290 289 Extend current configuration by specifying maps as arguments
291 290 and optionally by reading from an additional hgrc file.
292 291
293 292 Override current keyword template maps with "default" option.
294 293 '''
295 294 def demostatus(stat):
296 295 ui.status(_('\n\t%s\n') % stat)
297 296
298 297 def demoitems(section, items):
299 298 ui.write('[%s]\n' % section)
300 299 for k, v in items:
301 300 ui.write('%s = %s\n' % (k, v))
302 301
303 302 msg = 'hg keyword config and expansion example'
304 303 kwstatus = 'current'
305 304 fn = 'demo.txt'
306 305 branchname = 'demobranch'
307 306 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
308 307 ui.note(_('creating temporary repo at %s\n') % tmpdir)
309 308 repo = localrepo.localrepository(ui, tmpdir, True)
310 309 ui.setconfig('keyword', fn, '')
311 310 if args or opts.get('rcfile'):
312 311 kwstatus = 'custom'
313 312 if opts.get('rcfile'):
314 313 ui.readconfig(opts.get('rcfile'))
315 314 if opts.get('default'):
316 315 kwstatus = 'default'
317 316 kwmaps = kwtemplater.templates
318 317 if ui.configitems('keywordmaps'):
319 318 # override maps from optional rcfile
320 319 for k, v in kwmaps.iteritems():
321 320 ui.setconfig('keywordmaps', k, v)
322 321 elif args:
323 322 # simulate hgrc parsing
324 323 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
325 324 fp = repo.opener('hgrc', 'w')
326 325 fp.writelines(rcmaps)
327 326 fp.close()
328 327 ui.readconfig(repo.join('hgrc'))
329 328 if not opts.get('default'):
330 329 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
331 330 uisetup(ui)
332 331 reposetup(ui, repo)
333 332 for k, v in ui.configitems('extensions'):
334 333 if k.endswith('keyword'):
335 334 extension = '%s = %s' % (k, v)
336 335 break
337 336 demostatus('config using %s keyword template maps' % kwstatus)
338 337 ui.write('[extensions]\n%s\n' % extension)
339 338 demoitems('keyword', ui.configitems('keyword'))
340 339 demoitems('keywordmaps', kwmaps.iteritems())
341 340 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
342 341 repo.wopener(fn, 'w').write(keywords)
343 342 repo.add([fn])
344 343 path = repo.wjoin(fn)
345 344 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
346 345 ui.note(keywords)
347 346 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
348 347 # silence branch command if not verbose
349 348 quiet = ui.quiet
350 349 ui.quiet = not ui.verbose
351 350 commands.branch(ui, repo, branchname)
352 351 ui.quiet = quiet
353 352 for name, cmd in ui.configitems('hooks'):
354 353 if name.split('.', 1)[0].find('commit') > -1:
355 354 repo.ui.setconfig('hooks', name, '')
356 355 ui.note(_('unhooked all commit hooks\n'))
357 356 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
358 357 repo.commit(text=msg)
359 358 format = ui.verbose and ' in %s' % path or ''
360 359 demostatus('%s keywords expanded%s' % (kwstatus, format))
361 360 ui.write(repo.wread(fn))
362 361 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
363 362 shutil.rmtree(tmpdir, ignore_errors=True)
364 363
365 364 def expand(ui, repo, *pats, **opts):
366 365 '''expand keywords in working directory
367 366
368 367 Run after (re)enabling keyword expansion.
369 368
370 369 kwexpand refuses to run if given files contain local changes.
371 370 '''
372 371 # 3rd argument sets expansion to True
373 372 _kwfwrite(ui, repo, True, *pats, **opts)
374 373
375 374 def files(ui, repo, *pats, **opts):
376 375 '''print files currently configured for keyword expansion
377 376
378 377 Crosscheck which files in working directory are potential targets for
379 378 keyword expansion.
380 379 That is, files matched by [keyword] config patterns but not symlinks.
381 380 '''
382 381 kwt = kwtools['templater']
383 382 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
384 383 modified, added, removed, deleted, unknown, ignored, clean = status
385 files = modified + added + clean + unknown
386 files.sort()
384 files = util.sort(modified + added + clean + unknown)
387 385 wctx = repo[None]
388 386 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
389 387 cwd = pats and repo.getcwd() or ''
390 388 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
391 389 if opts.get('all') or opts.get('ignore'):
392 390 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
393 391 for char, filenames in kwfstats:
394 392 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
395 393 for f in filenames:
396 394 ui.write(format % repo.pathto(f, cwd))
397 395
398 396 def shrink(ui, repo, *pats, **opts):
399 397 '''revert expanded keywords in working directory
400 398
401 399 Run before changing/disabling active keywords
402 400 or if you experience problems with "hg import" or "hg merge".
403 401
404 402 kwshrink refuses to run if given files contain local changes.
405 403 '''
406 404 # 3rd argument sets expansion to False
407 405 _kwfwrite(ui, repo, False, *pats, **opts)
408 406
409 407
410 408 def uisetup(ui):
411 409 '''Collects [keyword] config in kwtools.
412 410 Monkeypatches dispatch._parse if needed.'''
413 411
414 412 for pat, opt in ui.configitems('keyword'):
415 413 if opt != 'ignore':
416 414 kwtools['inc'].append(pat)
417 415 else:
418 416 kwtools['exc'].append(pat)
419 417
420 418 if kwtools['inc']:
421 419 def kwdispatch_parse(ui, args):
422 420 '''Monkeypatch dispatch._parse to obtain running hg command.'''
423 421 cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
424 422 kwtools['hgcmd'] = cmd
425 423 return cmd, func, args, options, cmdoptions
426 424
427 425 dispatch_parse = dispatch._parse
428 426 dispatch._parse = kwdispatch_parse
429 427
430 428 def reposetup(ui, repo):
431 429 '''Sets up repo as kwrepo for keyword substitution.
432 430 Overrides file method to return kwfilelog instead of filelog
433 431 if file matches user configuration.
434 432 Wraps commit to overwrite configured files with updated
435 433 keyword substitutions.
436 434 Monkeypatches patch and webcommands.'''
437 435
438 436 try:
439 437 if (not repo.local() or not kwtools['inc']
440 438 or kwtools['hgcmd'] in nokwcommands.split()
441 439 or '.hg' in util.splitpath(repo.root)
442 440 or repo._url.startswith('bundle:')):
443 441 return
444 442 except AttributeError:
445 443 pass
446 444
447 445 kwtools['templater'] = kwt = kwtemplater(ui, repo)
448 446
449 447 class kwrepo(repo.__class__):
450 448 def file(self, f):
451 449 if f[0] == '/':
452 450 f = f[1:]
453 451 return kwfilelog(self.sopener, kwt, f)
454 452
455 453 def wread(self, filename):
456 454 data = super(kwrepo, self).wread(filename)
457 455 return kwt.wread(filename, data)
458 456
459 457 def commit(self, files=None, text='', user=None, date=None,
460 458 match=None, force=False, force_editor=False,
461 459 p1=None, p2=None, extra={}, empty_ok=False):
462 460 wlock = lock = None
463 461 _p1 = _p2 = None
464 462 try:
465 463 wlock = self.wlock()
466 464 lock = self.lock()
467 465 # store and postpone commit hooks
468 466 commithooks = {}
469 467 for name, cmd in ui.configitems('hooks'):
470 468 if name.split('.', 1)[0] == 'commit':
471 469 commithooks[name] = cmd
472 470 ui.setconfig('hooks', name, None)
473 471 if commithooks:
474 472 # store parents for commit hook environment
475 473 if p1 is None:
476 474 _p1, _p2 = repo.dirstate.parents()
477 475 else:
478 476 _p1, _p2 = p1, p2 or nullid
479 477 _p1 = hex(_p1)
480 478 if _p2 == nullid:
481 479 _p2 = ''
482 480 else:
483 481 _p2 = hex(_p2)
484 482
485 483 n = super(kwrepo, self).commit(files, text, user, date, match,
486 484 force, force_editor, p1, p2,
487 485 extra, empty_ok)
488 486
489 487 # restore commit hooks
490 488 for name, cmd in commithooks.iteritems():
491 489 ui.setconfig('hooks', name, cmd)
492 490 if n is not None:
493 491 kwt.overwrite(n, True, None)
494 492 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
495 493 return n
496 494 finally:
497 495 del wlock, lock
498 496
499 497 # monkeypatches
500 498 def kwpatchfile_init(self, ui, fname, missing=False):
501 499 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
502 500 rejects or conflicts due to expanded keywords in working dir.'''
503 501 patchfile_init(self, ui, fname, missing)
504 502 # shrink keywords read from working dir
505 503 self.lines = kwt.shrinklines(self.fname, self.lines)
506 504
507 505 def kw_diff(repo, node1=None, node2=None, match=None,
508 506 fp=None, changes=None, opts=None):
509 507 '''Monkeypatch patch.diff to avoid expansion except when
510 508 comparing against working dir.'''
511 509 if node2 is not None:
512 510 kwt.matcher = util.never
513 511 elif node1 is not None and node1 != repo['.'].node():
514 512 kwt.restrict = True
515 513 patch_diff(repo, node1, node2, match, fp, changes, opts)
516 514
517 515 def kwweb_annotate(web, req, tmpl):
518 516 '''Wraps webcommands.annotate turning off keyword expansion.'''
519 517 kwt.matcher = util.never
520 518 return webcommands_annotate(web, req, tmpl)
521 519
522 520 def kwweb_changeset(web, req, tmpl):
523 521 '''Wraps webcommands.changeset turning off keyword expansion.'''
524 522 kwt.matcher = util.never
525 523 return webcommands_changeset(web, req, tmpl)
526 524
527 525 def kwweb_filediff(web, req, tmpl):
528 526 '''Wraps webcommands.filediff turning off keyword expansion.'''
529 527 kwt.matcher = util.never
530 528 return webcommands_filediff(web, req, tmpl)
531 529
532 530 repo.__class__ = kwrepo
533 531
534 532 patchfile_init = patch.patchfile.__init__
535 533 patch_diff = patch.diff
536 534 webcommands_annotate = webcommands.annotate
537 535 webcommands_changeset = webcommands.changeset
538 536 webcommands_filediff = webcommands.filediff
539 537
540 538 patch.patchfile.__init__ = kwpatchfile_init
541 539 patch.diff = kw_diff
542 540 webcommands.annotate = kwweb_annotate
543 541 webcommands.changeset = webcommands.rev = kwweb_changeset
544 542 webcommands.filediff = webcommands.diff = kwweb_filediff
545 543
546 544
547 545 cmdtable = {
548 546 'kwdemo':
549 547 (demo,
550 548 [('d', 'default', None, _('show default keyword template maps')),
551 549 ('f', 'rcfile', [], _('read maps from rcfile'))],
552 550 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
553 551 'kwexpand': (expand, commands.walkopts,
554 552 _('hg kwexpand [OPTION]... [FILE]...')),
555 553 'kwfiles':
556 554 (files,
557 555 [('a', 'all', None, _('show keyword status flags of all files')),
558 556 ('i', 'ignore', None, _('show files excluded from expansion')),
559 557 ('u', 'untracked', None, _('additionally show untracked files')),
560 558 ] + commands.walkopts,
561 559 _('hg kwfiles [OPTION]... [FILE]...')),
562 560 'kwshrink': (shrink, commands.walkopts,
563 561 _('hg kwshrink [OPTION]... [FILE]...')),
564 562 }
@@ -1,2458 +1,2455
1 1 # mq.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial.node import bin, hex, short
34 34 from mercurial.repo import RepoError
35 35 from mercurial import commands, cmdutil, hg, patch, revlog, util
36 36 from mercurial import repair
37 37 import os, sys, re, errno
38 38
39 39 commands.norepo += " qclone"
40 40
41 41 # Patch names looks like unix-file names.
42 42 # They must be joinable with queue directory and result in the patch path.
43 43 normname = util.normpath
44 44
45 45 class statusentry:
46 46 def __init__(self, rev, name=None):
47 47 if not name:
48 48 fields = rev.split(':', 1)
49 49 if len(fields) == 2:
50 50 self.rev, self.name = fields
51 51 else:
52 52 self.rev, self.name = None, None
53 53 else:
54 54 self.rev, self.name = rev, name
55 55
56 56 def __str__(self):
57 57 return self.rev + ':' + self.name
58 58
59 59 class queue:
60 60 def __init__(self, ui, path, patchdir=None):
61 61 self.basepath = path
62 62 self.path = patchdir or os.path.join(path, "patches")
63 63 self.opener = util.opener(self.path)
64 64 self.ui = ui
65 65 self.applied = []
66 66 self.full_series = []
67 67 self.applied_dirty = 0
68 68 self.series_dirty = 0
69 69 self.series_path = "series"
70 70 self.status_path = "status"
71 71 self.guards_path = "guards"
72 72 self.active_guards = None
73 73 self.guards_dirty = False
74 74 self._diffopts = None
75 75
76 76 if os.path.exists(self.join(self.series_path)):
77 77 self.full_series = self.opener(self.series_path).read().splitlines()
78 78 self.parse_series()
79 79
80 80 if os.path.exists(self.join(self.status_path)):
81 81 lines = self.opener(self.status_path).read().splitlines()
82 82 self.applied = [statusentry(l) for l in lines]
83 83
84 84 def diffopts(self):
85 85 if self._diffopts is None:
86 86 self._diffopts = patch.diffopts(self.ui)
87 87 return self._diffopts
88 88
89 89 def join(self, *p):
90 90 return os.path.join(self.path, *p)
91 91
92 92 def find_series(self, patch):
93 93 pre = re.compile("(\s*)([^#]+)")
94 94 index = 0
95 95 for l in self.full_series:
96 96 m = pre.match(l)
97 97 if m:
98 98 s = m.group(2)
99 99 s = s.rstrip()
100 100 if s == patch:
101 101 return index
102 102 index += 1
103 103 return None
104 104
105 105 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
106 106
107 107 def parse_series(self):
108 108 self.series = []
109 109 self.series_guards = []
110 110 for l in self.full_series:
111 111 h = l.find('#')
112 112 if h == -1:
113 113 patch = l
114 114 comment = ''
115 115 elif h == 0:
116 116 continue
117 117 else:
118 118 patch = l[:h]
119 119 comment = l[h:]
120 120 patch = patch.strip()
121 121 if patch:
122 122 if patch in self.series:
123 123 raise util.Abort(_('%s appears more than once in %s') %
124 124 (patch, self.join(self.series_path)))
125 125 self.series.append(patch)
126 126 self.series_guards.append(self.guard_re.findall(comment))
127 127
128 128 def check_guard(self, guard):
129 129 if not guard:
130 130 return _('guard cannot be an empty string')
131 131 bad_chars = '# \t\r\n\f'
132 132 first = guard[0]
133 133 for c in '-+':
134 134 if first == c:
135 135 return (_('guard %r starts with invalid character: %r') %
136 136 (guard, c))
137 137 for c in bad_chars:
138 138 if c in guard:
139 139 return _('invalid character in guard %r: %r') % (guard, c)
140 140
141 141 def set_active(self, guards):
142 142 for guard in guards:
143 143 bad = self.check_guard(guard)
144 144 if bad:
145 145 raise util.Abort(bad)
146 guards = dict.fromkeys(guards).keys()
147 guards.sort()
146 guards = util.sort(util.unique(guards))
148 147 self.ui.debug('active guards: %s\n' % ' '.join(guards))
149 148 self.active_guards = guards
150 149 self.guards_dirty = True
151 150
152 151 def active(self):
153 152 if self.active_guards is None:
154 153 self.active_guards = []
155 154 try:
156 155 guards = self.opener(self.guards_path).read().split()
157 156 except IOError, err:
158 157 if err.errno != errno.ENOENT: raise
159 158 guards = []
160 159 for i, guard in enumerate(guards):
161 160 bad = self.check_guard(guard)
162 161 if bad:
163 162 self.ui.warn('%s:%d: %s\n' %
164 163 (self.join(self.guards_path), i + 1, bad))
165 164 else:
166 165 self.active_guards.append(guard)
167 166 return self.active_guards
168 167
169 168 def set_guards(self, idx, guards):
170 169 for g in guards:
171 170 if len(g) < 2:
172 171 raise util.Abort(_('guard %r too short') % g)
173 172 if g[0] not in '-+':
174 173 raise util.Abort(_('guard %r starts with invalid char') % g)
175 174 bad = self.check_guard(g[1:])
176 175 if bad:
177 176 raise util.Abort(bad)
178 177 drop = self.guard_re.sub('', self.full_series[idx])
179 178 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
180 179 self.parse_series()
181 180 self.series_dirty = True
182 181
183 182 def pushable(self, idx):
184 183 if isinstance(idx, str):
185 184 idx = self.series.index(idx)
186 185 patchguards = self.series_guards[idx]
187 186 if not patchguards:
188 187 return True, None
189 188 default = False
190 189 guards = self.active()
191 190 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
192 191 if exactneg:
193 192 return False, exactneg[0]
194 193 pos = [g for g in patchguards if g[0] == '+']
195 194 exactpos = [g for g in pos if g[1:] in guards]
196 195 if pos:
197 196 if exactpos:
198 197 return True, exactpos[0]
199 198 return False, pos
200 199 return True, ''
201 200
202 201 def explain_pushable(self, idx, all_patches=False):
203 202 write = all_patches and self.ui.write or self.ui.warn
204 203 if all_patches or self.ui.verbose:
205 204 if isinstance(idx, str):
206 205 idx = self.series.index(idx)
207 206 pushable, why = self.pushable(idx)
208 207 if all_patches and pushable:
209 208 if why is None:
210 209 write(_('allowing %s - no guards in effect\n') %
211 210 self.series[idx])
212 211 else:
213 212 if not why:
214 213 write(_('allowing %s - no matching negative guards\n') %
215 214 self.series[idx])
216 215 else:
217 216 write(_('allowing %s - guarded by %r\n') %
218 217 (self.series[idx], why))
219 218 if not pushable:
220 219 if why:
221 220 write(_('skipping %s - guarded by %r\n') %
222 221 (self.series[idx], why))
223 222 else:
224 223 write(_('skipping %s - no matching guards\n') %
225 224 self.series[idx])
226 225
227 226 def save_dirty(self):
228 227 def write_list(items, path):
229 228 fp = self.opener(path, 'w')
230 229 for i in items:
231 230 fp.write("%s\n" % i)
232 231 fp.close()
233 232 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
234 233 if self.series_dirty: write_list(self.full_series, self.series_path)
235 234 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
236 235
237 236 def readheaders(self, patch):
238 237 def eatdiff(lines):
239 238 while lines:
240 239 l = lines[-1]
241 240 if (l.startswith("diff -") or
242 241 l.startswith("Index:") or
243 242 l.startswith("===========")):
244 243 del lines[-1]
245 244 else:
246 245 break
247 246 def eatempty(lines):
248 247 while lines:
249 248 l = lines[-1]
250 249 if re.match('\s*$', l):
251 250 del lines[-1]
252 251 else:
253 252 break
254 253
255 254 pf = self.join(patch)
256 255 message = []
257 256 comments = []
258 257 user = None
259 258 date = None
260 259 format = None
261 260 subject = None
262 261 diffstart = 0
263 262
264 263 for line in file(pf):
265 264 line = line.rstrip()
266 265 if line.startswith('diff --git'):
267 266 diffstart = 2
268 267 break
269 268 if diffstart:
270 269 if line.startswith('+++ '):
271 270 diffstart = 2
272 271 break
273 272 if line.startswith("--- "):
274 273 diffstart = 1
275 274 continue
276 275 elif format == "hgpatch":
277 276 # parse values when importing the result of an hg export
278 277 if line.startswith("# User "):
279 278 user = line[7:]
280 279 elif line.startswith("# Date "):
281 280 date = line[7:]
282 281 elif not line.startswith("# ") and line:
283 282 message.append(line)
284 283 format = None
285 284 elif line == '# HG changeset patch':
286 285 format = "hgpatch"
287 286 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 287 line.startswith("subject: "))):
289 288 subject = line[9:]
290 289 format = "tag"
291 290 elif (format != "tagdone" and (line.startswith("From: ") or
292 291 line.startswith("from: "))):
293 292 user = line[6:]
294 293 format = "tag"
295 294 elif format == "tag" and line == "":
296 295 # when looking for tags (subject: from: etc) they
297 296 # end once you find a blank line in the source
298 297 format = "tagdone"
299 298 elif message or line:
300 299 message.append(line)
301 300 comments.append(line)
302 301
303 302 eatdiff(message)
304 303 eatdiff(comments)
305 304 eatempty(message)
306 305 eatempty(comments)
307 306
308 307 # make sure message isn't empty
309 308 if format and format.startswith("tag") and subject:
310 309 message.insert(0, "")
311 310 message.insert(0, subject)
312 311 return (message, comments, user, date, diffstart > 1)
313 312
314 313 def removeundo(self, repo):
315 314 undo = repo.sjoin('undo')
316 315 if not os.path.exists(undo):
317 316 return
318 317 try:
319 318 os.unlink(undo)
320 319 except OSError, inst:
321 320 self.ui.warn('error removing undo: %s\n' % str(inst))
322 321
323 322 def printdiff(self, repo, node1, node2=None, files=None,
324 323 fp=None, changes=None, opts={}):
325 324 m = cmdutil.match(repo, files, opts)
326 325 patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
327 326
328 327 def mergeone(self, repo, mergeq, head, patch, rev):
329 328 # first try just applying the patch
330 329 (err, n) = self.apply(repo, [ patch ], update_status=False,
331 330 strict=True, merge=rev)
332 331
333 332 if err == 0:
334 333 return (err, n)
335 334
336 335 if n is None:
337 336 raise util.Abort(_("apply failed for patch %s") % patch)
338 337
339 338 self.ui.warn("patch didn't work out, merging %s\n" % patch)
340 339
341 340 # apply failed, strip away that rev and merge.
342 341 hg.clean(repo, head)
343 342 self.strip(repo, n, update=False, backup='strip')
344 343
345 344 ctx = repo[rev]
346 345 ret = hg.merge(repo, rev)
347 346 if ret:
348 347 raise util.Abort(_("update returned %d") % ret)
349 348 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
350 349 if n == None:
351 350 raise util.Abort(_("repo commit failed"))
352 351 try:
353 352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
354 353 except:
355 354 raise util.Abort(_("unable to read %s") % patch)
356 355
357 356 patchf = self.opener(patch, "w")
358 357 if comments:
359 358 comments = "\n".join(comments) + '\n\n'
360 359 patchf.write(comments)
361 360 self.printdiff(repo, head, n, fp=patchf)
362 361 patchf.close()
363 362 self.removeundo(repo)
364 363 return (0, n)
365 364
366 365 def qparents(self, repo, rev=None):
367 366 if rev is None:
368 367 (p1, p2) = repo.dirstate.parents()
369 368 if p2 == revlog.nullid:
370 369 return p1
371 370 if len(self.applied) == 0:
372 371 return None
373 372 return revlog.bin(self.applied[-1].rev)
374 373 pp = repo.changelog.parents(rev)
375 374 if pp[1] != revlog.nullid:
376 375 arevs = [ x.rev for x in self.applied ]
377 376 p0 = revlog.hex(pp[0])
378 377 p1 = revlog.hex(pp[1])
379 378 if p0 in arevs:
380 379 return pp[0]
381 380 if p1 in arevs:
382 381 return pp[1]
383 382 return pp[0]
384 383
385 384 def mergepatch(self, repo, mergeq, series):
386 385 if len(self.applied) == 0:
387 386 # each of the patches merged in will have two parents. This
388 387 # can confuse the qrefresh, qdiff, and strip code because it
389 388 # needs to know which parent is actually in the patch queue.
390 389 # so, we insert a merge marker with only one parent. This way
391 390 # the first patch in the queue is never a merge patch
392 391 #
393 392 pname = ".hg.patches.merge.marker"
394 393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
395 394 self.removeundo(repo)
396 395 self.applied.append(statusentry(revlog.hex(n), pname))
397 396 self.applied_dirty = 1
398 397
399 398 head = self.qparents(repo)
400 399
401 400 for patch in series:
402 401 patch = mergeq.lookup(patch, strict=True)
403 402 if not patch:
404 403 self.ui.warn("patch %s does not exist\n" % patch)
405 404 return (1, None)
406 405 pushable, reason = self.pushable(patch)
407 406 if not pushable:
408 407 self.explain_pushable(patch, all_patches=True)
409 408 continue
410 409 info = mergeq.isapplied(patch)
411 410 if not info:
412 411 self.ui.warn("patch %s is not applied\n" % patch)
413 412 return (1, None)
414 413 rev = revlog.bin(info[1])
415 414 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
416 415 if head:
417 416 self.applied.append(statusentry(revlog.hex(head), patch))
418 417 self.applied_dirty = 1
419 418 if err:
420 419 return (err, head)
421 420 self.save_dirty()
422 421 return (0, head)
423 422
424 423 def patch(self, repo, patchfile):
425 424 '''Apply patchfile to the working directory.
426 425 patchfile: file name of patch'''
427 426 files = {}
428 427 try:
429 428 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 429 files=files)
431 430 except Exception, inst:
432 431 self.ui.note(str(inst) + '\n')
433 432 if not self.ui.verbose:
434 433 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 434 return (False, files, False)
436 435
437 436 return (True, files, fuzz)
438 437
439 438 def apply(self, repo, series, list=False, update_status=True,
440 439 strict=False, patchdir=None, merge=None, all_files={}):
441 440 wlock = lock = tr = None
442 441 try:
443 442 wlock = repo.wlock()
444 443 lock = repo.lock()
445 444 tr = repo.transaction()
446 445 try:
447 446 ret = self._apply(repo, series, list, update_status,
448 447 strict, patchdir, merge, all_files=all_files)
449 448 tr.close()
450 449 self.save_dirty()
451 450 return ret
452 451 except:
453 452 try:
454 453 tr.abort()
455 454 finally:
456 455 repo.invalidate()
457 456 repo.dirstate.invalidate()
458 457 raise
459 458 finally:
460 459 del tr, lock, wlock
461 460 self.removeundo(repo)
462 461
463 462 def _apply(self, repo, series, list=False, update_status=True,
464 463 strict=False, patchdir=None, merge=None, all_files={}):
465 464 # TODO unify with commands.py
466 465 if not patchdir:
467 466 patchdir = self.path
468 467 err = 0
469 468 n = None
470 469 for patchname in series:
471 470 pushable, reason = self.pushable(patchname)
472 471 if not pushable:
473 472 self.explain_pushable(patchname, all_patches=True)
474 473 continue
475 474 self.ui.warn("applying %s\n" % patchname)
476 475 pf = os.path.join(patchdir, patchname)
477 476
478 477 try:
479 478 message, comments, user, date, patchfound = self.readheaders(patchname)
480 479 except:
481 480 self.ui.warn("Unable to read %s\n" % patchname)
482 481 err = 1
483 482 break
484 483
485 484 if not message:
486 485 message = "imported patch %s\n" % patchname
487 486 else:
488 487 if list:
489 488 message.append("\nimported patch %s" % patchname)
490 489 message = '\n'.join(message)
491 490
492 491 (patcherr, files, fuzz) = self.patch(repo, pf)
493 492 all_files.update(files)
494 493 patcherr = not patcherr
495 494
496 495 if merge and files:
497 496 # Mark as removed/merged and update dirstate parent info
498 497 removed = []
499 498 merged = []
500 499 for f in files:
501 500 if os.path.exists(repo.wjoin(f)):
502 501 merged.append(f)
503 502 else:
504 503 removed.append(f)
505 504 for f in removed:
506 505 repo.dirstate.remove(f)
507 506 for f in merged:
508 507 repo.dirstate.merge(f)
509 508 p1, p2 = repo.dirstate.parents()
510 509 repo.dirstate.setparents(p1, merge)
511 510
512 511 files = patch.updatedir(self.ui, repo, files)
513 512 match = cmdutil.matchfiles(repo, files or [])
514 513 n = repo.commit(files, message, user, date, match=match,
515 514 force=True)
516 515
517 516 if n == None:
518 517 raise util.Abort(_("repo commit failed"))
519 518
520 519 if update_status:
521 520 self.applied.append(statusentry(revlog.hex(n), patchname))
522 521
523 522 if patcherr:
524 523 if not patchfound:
525 524 self.ui.warn("patch %s is empty\n" % patchname)
526 525 err = 0
527 526 else:
528 527 self.ui.warn("patch failed, rejects left in working dir\n")
529 528 err = 1
530 529 break
531 530
532 531 if fuzz and strict:
533 532 self.ui.warn("fuzz found when applying patch, stopping\n")
534 533 err = 1
535 534 break
536 535 return (err, n)
537 536
538 537 def _clean_series(self, patches):
539 indices = [self.find_series(p) for p in patches]
540 indices.sort()
538 indices = util.sort([self.find_series(p) for p in patches])
541 539 for i in indices[-1::-1]:
542 540 del self.full_series[i]
543 541 self.parse_series()
544 542 self.series_dirty = 1
545 543
546 544 def finish(self, repo, revs):
547 545 revs.sort()
548 firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev))
546 firstrev = repo[self.applied[0].rev].rev()
549 547 appliedbase = 0
550 548 patches = []
551 for rev in revs:
549 for rev in util.sort(revs):
552 550 if rev < firstrev:
553 551 raise util.Abort(_('revision %d is not managed') % rev)
554 552 base = revlog.bin(self.applied[appliedbase].rev)
555 553 node = repo.changelog.node(rev)
556 554 if node != base:
557 555 raise util.Abort(_('cannot delete revision %d above '
558 556 'applied patches') % rev)
559 557 patches.append(self.applied[appliedbase].name)
560 558 appliedbase += 1
561 559
562 560 r = self.qrepo()
563 561 if r:
564 562 r.remove(patches, True)
565 563 else:
566 564 for p in patches:
567 565 os.unlink(self.join(p))
568 566
569 567 del self.applied[:appliedbase]
570 568 self.applied_dirty = 1
571 569 self._clean_series(patches)
572 570
573 571 def delete(self, repo, patches, opts):
574 572 if not patches and not opts.get('rev'):
575 573 raise util.Abort(_('qdelete requires at least one revision or '
576 574 'patch name'))
577 575
578 576 realpatches = []
579 577 for patch in patches:
580 578 patch = self.lookup(patch, strict=True)
581 579 info = self.isapplied(patch)
582 580 if info:
583 581 raise util.Abort(_("cannot delete applied patch %s") % patch)
584 582 if patch not in self.series:
585 583 raise util.Abort(_("patch %s not in series file") % patch)
586 584 realpatches.append(patch)
587 585
588 586 appliedbase = 0
589 587 if opts.get('rev'):
590 588 if not self.applied:
591 589 raise util.Abort(_('no patches applied'))
592 590 revs = cmdutil.revrange(repo, opts['rev'])
593 591 if len(revs) > 1 and revs[0] > revs[1]:
594 592 revs.reverse()
595 593 for rev in revs:
596 594 if appliedbase >= len(self.applied):
597 595 raise util.Abort(_("revision %d is not managed") % rev)
598 596
599 597 base = revlog.bin(self.applied[appliedbase].rev)
600 598 node = repo.changelog.node(rev)
601 599 if node != base:
602 600 raise util.Abort(_("cannot delete revision %d above "
603 601 "applied patches") % rev)
604 602 realpatches.append(self.applied[appliedbase].name)
605 603 appliedbase += 1
606 604
607 605 if not opts.get('keep'):
608 606 r = self.qrepo()
609 607 if r:
610 608 r.remove(realpatches, True)
611 609 else:
612 610 for p in realpatches:
613 611 os.unlink(self.join(p))
614 612
615 613 if appliedbase:
616 614 del self.applied[:appliedbase]
617 615 self.applied_dirty = 1
618 616 self._clean_series(realpatches)
619 617
620 618 def check_toppatch(self, repo):
621 619 if len(self.applied) > 0:
622 620 top = revlog.bin(self.applied[-1].rev)
623 621 pp = repo.dirstate.parents()
624 622 if top not in pp:
625 623 raise util.Abort(_("working directory revision is not qtip"))
626 624 return top
627 625 return None
628 626 def check_localchanges(self, repo, force=False, refresh=True):
629 627 m, a, r, d = repo.status()[:4]
630 628 if m or a or r or d:
631 629 if not force:
632 630 if refresh:
633 631 raise util.Abort(_("local changes found, refresh first"))
634 632 else:
635 633 raise util.Abort(_("local changes found"))
636 634 return m, a, r, d
637 635
638 636 _reserved = ('series', 'status', 'guards')
639 637 def check_reserved_name(self, name):
640 638 if (name in self._reserved or name.startswith('.hg')
641 639 or name.startswith('.mq')):
642 640 raise util.Abort(_('"%s" cannot be used as the name of a patch')
643 641 % name)
644 642
645 643 def new(self, repo, patch, *pats, **opts):
646 644 msg = opts.get('msg')
647 645 force = opts.get('force')
648 646 user = opts.get('user')
649 647 date = opts.get('date')
650 648 if date:
651 649 date = util.parsedate(date)
652 650 self.check_reserved_name(patch)
653 651 if os.path.exists(self.join(patch)):
654 652 raise util.Abort(_('patch "%s" already exists') % patch)
655 653 if opts.get('include') or opts.get('exclude') or pats:
656 654 match = cmdutil.match(repo, pats, opts)
657 655 m, a, r, d = repo.status(match=match)[:4]
658 656 else:
659 657 m, a, r, d = self.check_localchanges(repo, force)
660 658 match = cmdutil.match(repo, m + a + r)
661 659 commitfiles = m + a + r
662 660 self.check_toppatch(repo)
663 661 wlock = repo.wlock()
664 662 try:
665 663 insert = self.full_series_end()
666 664 commitmsg = msg and msg or ("[mq]: %s" % patch)
667 665 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
668 666 if n == None:
669 667 raise util.Abort(_("repo commit failed"))
670 668 self.full_series[insert:insert] = [patch]
671 669 self.applied.append(statusentry(revlog.hex(n), patch))
672 670 self.parse_series()
673 671 self.series_dirty = 1
674 672 self.applied_dirty = 1
675 673 p = self.opener(patch, "w")
676 674 if date:
677 675 p.write("# HG changeset patch\n")
678 676 if user:
679 677 p.write("# User " + user + "\n")
680 678 p.write("# Date %d %d\n" % date)
681 679 p.write("\n")
682 680 elif user:
683 681 p.write("From: " + user + "\n")
684 682 p.write("\n")
685 683 if msg:
686 684 msg = msg + "\n"
687 685 p.write(msg)
688 686 p.close()
689 687 wlock = None
690 688 r = self.qrepo()
691 689 if r: r.add([patch])
692 690 if commitfiles:
693 691 self.refresh(repo, short=True, git=opts.get('git'))
694 692 self.removeundo(repo)
695 693 finally:
696 694 del wlock
697 695
698 696 def strip(self, repo, rev, update=True, backup="all", force=None):
699 697 wlock = lock = None
700 698 try:
701 699 wlock = repo.wlock()
702 700 lock = repo.lock()
703 701
704 702 if update:
705 703 self.check_localchanges(repo, force=force, refresh=False)
706 704 urev = self.qparents(repo, rev)
707 705 hg.clean(repo, urev)
708 706 repo.dirstate.write()
709 707
710 708 self.removeundo(repo)
711 709 repair.strip(self.ui, repo, rev, backup)
712 710 # strip may have unbundled a set of backed up revisions after
713 711 # the actual strip
714 712 self.removeundo(repo)
715 713 finally:
716 714 del lock, wlock
717 715
718 716 def isapplied(self, patch):
719 717 """returns (index, rev, patch)"""
720 718 for i in xrange(len(self.applied)):
721 719 a = self.applied[i]
722 720 if a.name == patch:
723 721 return (i, a.rev, a.name)
724 722 return None
725 723
726 724 # if the exact patch name does not exist, we try a few
727 725 # variations. If strict is passed, we try only #1
728 726 #
729 727 # 1) a number to indicate an offset in the series file
730 728 # 2) a unique substring of the patch name was given
731 729 # 3) patchname[-+]num to indicate an offset in the series file
732 730 def lookup(self, patch, strict=False):
733 731 patch = patch and str(patch)
734 732
735 733 def partial_name(s):
736 734 if s in self.series:
737 735 return s
738 736 matches = [x for x in self.series if s in x]
739 737 if len(matches) > 1:
740 738 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
741 739 for m in matches:
742 740 self.ui.warn(' %s\n' % m)
743 741 return None
744 742 if matches:
745 743 return matches[0]
746 744 if len(self.series) > 0 and len(self.applied) > 0:
747 745 if s == 'qtip':
748 746 return self.series[self.series_end(True)-1]
749 747 if s == 'qbase':
750 748 return self.series[0]
751 749 return None
752 750 if patch == None:
753 751 return None
754 752
755 753 # we don't want to return a partial match until we make
756 754 # sure the file name passed in does not exist (checked below)
757 755 res = partial_name(patch)
758 756 if res and res == patch:
759 757 return res
760 758
761 759 if not os.path.isfile(self.join(patch)):
762 760 try:
763 761 sno = int(patch)
764 762 except(ValueError, OverflowError):
765 763 pass
766 764 else:
767 765 if sno < len(self.series):
768 766 return self.series[sno]
769 767 if not strict:
770 768 # return any partial match made above
771 769 if res:
772 770 return res
773 771 minus = patch.rfind('-')
774 772 if minus >= 0:
775 773 res = partial_name(patch[:minus])
776 774 if res:
777 775 i = self.series.index(res)
778 776 try:
779 777 off = int(patch[minus+1:] or 1)
780 778 except(ValueError, OverflowError):
781 779 pass
782 780 else:
783 781 if i - off >= 0:
784 782 return self.series[i - off]
785 783 plus = patch.rfind('+')
786 784 if plus >= 0:
787 785 res = partial_name(patch[:plus])
788 786 if res:
789 787 i = self.series.index(res)
790 788 try:
791 789 off = int(patch[plus+1:] or 1)
792 790 except(ValueError, OverflowError):
793 791 pass
794 792 else:
795 793 if i + off < len(self.series):
796 794 return self.series[i + off]
797 795 raise util.Abort(_("patch %s not in series") % patch)
798 796
799 797 def push(self, repo, patch=None, force=False, list=False,
800 798 mergeq=None):
801 799 wlock = repo.wlock()
802 800 if repo.dirstate.parents()[0] != repo.changelog.tip():
803 801 self.ui.status(_("(working directory not at tip)\n"))
804 802
805 803 try:
806 804 patch = self.lookup(patch)
807 805 # Suppose our series file is: A B C and the current 'top'
808 806 # patch is B. qpush C should be performed (moving forward)
809 807 # qpush B is a NOP (no change) qpush A is an error (can't
810 808 # go backwards with qpush)
811 809 if patch:
812 810 info = self.isapplied(patch)
813 811 if info:
814 812 if info[0] < len(self.applied) - 1:
815 813 raise util.Abort(
816 814 _("cannot push to a previous patch: %s") % patch)
817 815 if info[0] < len(self.series) - 1:
818 816 self.ui.warn(
819 817 _('qpush: %s is already at the top\n') % patch)
820 818 else:
821 819 self.ui.warn(_('all patches are currently applied\n'))
822 820 return
823 821
824 822 # Following the above example, starting at 'top' of B:
825 823 # qpush should be performed (pushes C), but a subsequent
826 824 # qpush without an argument is an error (nothing to
827 825 # apply). This allows a loop of "...while hg qpush..." to
828 826 # work as it detects an error when done
829 827 if self.series_end() == len(self.series):
830 828 self.ui.warn(_('patch series already fully applied\n'))
831 829 return 1
832 830 if not force:
833 831 self.check_localchanges(repo)
834 832
835 833 self.applied_dirty = 1;
836 834 start = self.series_end()
837 835 if start > 0:
838 836 self.check_toppatch(repo)
839 837 if not patch:
840 838 patch = self.series[start]
841 839 end = start + 1
842 840 else:
843 841 end = self.series.index(patch, start) + 1
844 842 s = self.series[start:end]
845 843 all_files = {}
846 844 try:
847 845 if mergeq:
848 846 ret = self.mergepatch(repo, mergeq, s)
849 847 else:
850 848 ret = self.apply(repo, s, list, all_files=all_files)
851 849 except:
852 850 self.ui.warn(_('cleaning up working directory...'))
853 851 node = repo.dirstate.parents()[0]
854 852 hg.revert(repo, node, None)
855 853 unknown = repo.status(unknown=True)[4]
856 854 # only remove unknown files that we know we touched or
857 855 # created while patching
858 856 for f in unknown:
859 857 if f in all_files:
860 858 util.unlink(repo.wjoin(f))
861 859 self.ui.warn(_('done\n'))
862 860 raise
863 861 top = self.applied[-1].name
864 862 if ret[0]:
865 863 self.ui.write(
866 864 "Errors during apply, please fix and refresh %s\n" % top)
867 865 else:
868 866 self.ui.write("Now at: %s\n" % top)
869 867 return ret[0]
870 868 finally:
871 869 del wlock
872 870
873 871 def pop(self, repo, patch=None, force=False, update=True, all=False):
874 872 def getfile(f, rev, flags):
875 873 t = repo.file(f).read(rev)
876 874 repo.wwrite(f, t, flags)
877 875
878 876 wlock = repo.wlock()
879 877 try:
880 878 if patch:
881 879 # index, rev, patch
882 880 info = self.isapplied(patch)
883 881 if not info:
884 882 patch = self.lookup(patch)
885 883 info = self.isapplied(patch)
886 884 if not info:
887 885 raise util.Abort(_("patch %s is not applied") % patch)
888 886
889 887 if len(self.applied) == 0:
890 888 # Allow qpop -a to work repeatedly,
891 889 # but not qpop without an argument
892 890 self.ui.warn(_("no patches applied\n"))
893 891 return not all
894 892
895 893 if not update:
896 894 parents = repo.dirstate.parents()
897 895 rr = [ revlog.bin(x.rev) for x in self.applied ]
898 896 for p in parents:
899 897 if p in rr:
900 898 self.ui.warn("qpop: forcing dirstate update\n")
901 899 update = True
902 900
903 901 if not force and update:
904 902 self.check_localchanges(repo)
905 903
906 904 self.applied_dirty = 1;
907 905 end = len(self.applied)
908 906 if not patch:
909 907 if all:
910 908 popi = 0
911 909 else:
912 910 popi = len(self.applied) - 1
913 911 else:
914 912 popi = info[0] + 1
915 913 if popi >= end:
916 914 self.ui.warn("qpop: %s is already at the top\n" % patch)
917 915 return
918 916 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
919 917
920 918 start = info[0]
921 919 rev = revlog.bin(info[1])
922 920
923 921 if update:
924 922 top = self.check_toppatch(repo)
925 923
926 924 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
927 925 raise util.Abort("popping would remove a revision not "
928 926 "managed by this patch queue")
929 927
930 928 # we know there are no local changes, so we can make a simplified
931 929 # form of hg.update.
932 930 if update:
933 931 qp = self.qparents(repo, rev)
934 932 changes = repo.changelog.read(qp)
935 933 mmap = repo.manifest.read(changes[0])
936 934 m, a, r, d = repo.status(qp, top)[:4]
937 935 if d:
938 936 raise util.Abort("deletions found between repo revs")
939 937 for f in m:
940 938 getfile(f, mmap[f], mmap.flags(f))
941 939 for f in r:
942 940 getfile(f, mmap[f], mmap.flags(f))
943 941 for f in m + r:
944 942 repo.dirstate.normal(f)
945 943 for f in a:
946 944 try:
947 945 os.unlink(repo.wjoin(f))
948 946 except OSError, e:
949 947 if e.errno != errno.ENOENT:
950 948 raise
951 949 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
952 950 except: pass
953 951 repo.dirstate.forget(f)
954 952 repo.dirstate.setparents(qp, revlog.nullid)
955 953 del self.applied[start:end]
956 954 self.strip(repo, rev, update=False, backup='strip')
957 955 if len(self.applied):
958 956 self.ui.write("Now at: %s\n" % self.applied[-1].name)
959 957 else:
960 958 self.ui.write("Patch queue now empty\n")
961 959 finally:
962 960 del wlock
963 961
964 962 def diff(self, repo, pats, opts):
965 963 top = self.check_toppatch(repo)
966 964 if not top:
967 965 self.ui.write("No patches applied\n")
968 966 return
969 967 qp = self.qparents(repo, top)
970 968 self._diffopts = patch.diffopts(self.ui, opts)
971 969 self.printdiff(repo, qp, files=pats, opts=opts)
972 970
973 971 def refresh(self, repo, pats=None, **opts):
974 972 if len(self.applied) == 0:
975 973 self.ui.write("No patches applied\n")
976 974 return 1
977 975 newdate = opts.get('date')
978 976 if newdate:
979 977 newdate = '%d %d' % util.parsedate(newdate)
980 978 wlock = repo.wlock()
981 979 try:
982 980 self.check_toppatch(repo)
983 981 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
984 982 top = revlog.bin(top)
985 983 if repo.changelog.heads(top) != [top]:
986 984 raise util.Abort("cannot refresh a revision with children")
987 985 cparents = repo.changelog.parents(top)
988 986 patchparent = self.qparents(repo, top)
989 987 message, comments, user, date, patchfound = self.readheaders(patchfn)
990 988
991 989 patchf = self.opener(patchfn, 'r+')
992 990
993 991 # if the patch was a git patch, refresh it as a git patch
994 992 for line in patchf:
995 993 if line.startswith('diff --git'):
996 994 self.diffopts().git = True
997 995 break
998 996
999 997 msg = opts.get('msg', '').rstrip()
1000 998 if msg and comments:
1001 999 # Remove existing message, keeping the rest of the comments
1002 1000 # fields.
1003 1001 # If comments contains 'subject: ', message will prepend
1004 1002 # the field and a blank line.
1005 1003 if message:
1006 1004 subj = 'subject: ' + message[0].lower()
1007 1005 for i in xrange(len(comments)):
1008 1006 if subj == comments[i].lower():
1009 1007 del comments[i]
1010 1008 message = message[2:]
1011 1009 break
1012 1010 ci = 0
1013 1011 for mi in xrange(len(message)):
1014 1012 while message[mi] != comments[ci]:
1015 1013 ci += 1
1016 1014 del comments[ci]
1017 1015
1018 1016 def setheaderfield(comments, prefixes, new):
1019 1017 # Update all references to a field in the patch header.
1020 1018 # If none found, add it email style.
1021 1019 res = False
1022 1020 for prefix in prefixes:
1023 1021 for i in xrange(len(comments)):
1024 1022 if comments[i].startswith(prefix):
1025 1023 comments[i] = prefix + new
1026 1024 res = True
1027 1025 break
1028 1026 return res
1029 1027
1030 1028 newuser = opts.get('user')
1031 1029 if newuser:
1032 1030 if not setheaderfield(comments, ['From: ', '# User '], newuser):
1033 1031 try:
1034 1032 patchheaderat = comments.index('# HG changeset patch')
1035 1033 comments.insert(patchheaderat + 1,'# User ' + newuser)
1036 1034 except ValueError:
1037 1035 comments = ['From: ' + newuser, ''] + comments
1038 1036 user = newuser
1039 1037
1040 1038 if newdate:
1041 1039 if setheaderfield(comments, ['# Date '], newdate):
1042 1040 date = newdate
1043 1041
1044 1042 if msg:
1045 1043 comments.append(msg)
1046 1044
1047 1045 patchf.seek(0)
1048 1046 patchf.truncate()
1049 1047
1050 1048 if comments:
1051 1049 comments = "\n".join(comments) + '\n\n'
1052 1050 patchf.write(comments)
1053 1051
1054 1052 if opts.get('git'):
1055 1053 self.diffopts().git = True
1056 1054 matchfn = cmdutil.match(repo, pats, opts)
1057 1055 tip = repo.changelog.tip()
1058 1056 if top == tip:
1059 1057 # if the top of our patch queue is also the tip, there is an
1060 1058 # optimization here. We update the dirstate in place and strip
1061 1059 # off the tip commit. Then just commit the current directory
1062 1060 # tree. We can also send repo.commit the list of files
1063 1061 # changed to speed up the diff
1064 1062 #
1065 1063 # in short mode, we only diff the files included in the
1066 1064 # patch already
1067 1065 #
1068 1066 # this should really read:
1069 1067 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1070 1068 # but we do it backwards to take advantage of manifest/chlog
1071 1069 # caching against the next repo.status call
1072 1070 #
1073 1071 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1074 1072 changes = repo.changelog.read(tip)
1075 1073 man = repo.manifest.read(changes[0])
1076 1074 aaa = aa[:]
1077 1075 if opts.get('short'):
1078 1076 match = cmdutil.matchfiles(repo, mm + aa + dd)
1079 1077 else:
1080 1078 match = cmdutil.matchall(repo)
1081 1079 m, a, r, d = repo.status(match=match)[:4]
1082 1080
1083 1081 # we might end up with files that were added between
1084 1082 # tip and the dirstate parent, but then changed in the
1085 1083 # local dirstate. in this case, we want them to only
1086 1084 # show up in the added section
1087 1085 for x in m:
1088 1086 if x not in aa:
1089 1087 mm.append(x)
1090 1088 # we might end up with files added by the local dirstate that
1091 1089 # were deleted by the patch. In this case, they should only
1092 1090 # show up in the changed section.
1093 1091 for x in a:
1094 1092 if x in dd:
1095 1093 del dd[dd.index(x)]
1096 1094 mm.append(x)
1097 1095 else:
1098 1096 aa.append(x)
1099 1097 # make sure any files deleted in the local dirstate
1100 1098 # are not in the add or change column of the patch
1101 1099 forget = []
1102 1100 for x in d + r:
1103 1101 if x in aa:
1104 1102 del aa[aa.index(x)]
1105 1103 forget.append(x)
1106 1104 continue
1107 1105 elif x in mm:
1108 1106 del mm[mm.index(x)]
1109 1107 dd.append(x)
1110 1108
1111 1109 m = util.unique(mm)
1112 1110 r = util.unique(dd)
1113 1111 a = util.unique(aa)
1114 1112 c = [filter(matchfn, l) for l in (m, a, r)]
1115 1113 match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
1116 1114 patch.diff(repo, patchparent, match=match,
1117 1115 fp=patchf, changes=c, opts=self.diffopts())
1118 1116 patchf.close()
1119 1117
1120 1118 repo.dirstate.setparents(*cparents)
1121 1119 copies = {}
1122 1120 for dst in a:
1123 1121 src = repo.dirstate.copied(dst)
1124 1122 if src is not None:
1125 1123 copies.setdefault(src, []).append(dst)
1126 1124 repo.dirstate.add(dst)
1127 1125 # remember the copies between patchparent and tip
1128 1126 # this may be slow, so don't do it if we're not tracking copies
1129 1127 if self.diffopts().git:
1130 1128 for dst in aaa:
1131 1129 f = repo.file(dst)
1132 1130 src = f.renamed(man[dst])
1133 1131 if src:
1134 1132 copies[src[0]] = copies.get(dst, [])
1135 1133 if dst in a:
1136 1134 copies[src[0]].append(dst)
1137 1135 # we can't copy a file created by the patch itself
1138 1136 if dst in copies:
1139 1137 del copies[dst]
1140 1138 for src, dsts in copies.iteritems():
1141 1139 for dst in dsts:
1142 1140 repo.dirstate.copy(src, dst)
1143 1141 for f in r:
1144 1142 repo.dirstate.remove(f)
1145 1143 # if the patch excludes a modified file, mark that
1146 1144 # file with mtime=0 so status can see it.
1147 1145 mm = []
1148 1146 for i in xrange(len(m)-1, -1, -1):
1149 1147 if not matchfn(m[i]):
1150 1148 mm.append(m[i])
1151 1149 del m[i]
1152 1150 for f in m:
1153 1151 repo.dirstate.normal(f)
1154 1152 for f in mm:
1155 1153 repo.dirstate.normallookup(f)
1156 1154 for f in forget:
1157 1155 repo.dirstate.forget(f)
1158 1156
1159 1157 if not msg:
1160 1158 if not message:
1161 1159 message = "[mq]: %s\n" % patchfn
1162 1160 else:
1163 1161 message = "\n".join(message)
1164 1162 else:
1165 1163 message = msg
1166 1164
1167 1165 if not user:
1168 1166 user = changes[1]
1169 1167
1170 1168 self.applied.pop()
1171 1169 self.applied_dirty = 1
1172 1170 self.strip(repo, top, update=False,
1173 1171 backup='strip')
1174 1172 n = repo.commit(match.files(), message, user, date, match=match,
1175 1173 force=1)
1176 1174 self.applied.append(statusentry(revlog.hex(n), patchfn))
1177 1175 self.removeundo(repo)
1178 1176 else:
1179 1177 self.printdiff(repo, patchparent, fp=patchf)
1180 1178 patchf.close()
1181 1179 added = repo.status()[1]
1182 1180 for a in added:
1183 1181 f = repo.wjoin(a)
1184 1182 try:
1185 1183 os.unlink(f)
1186 1184 except OSError, e:
1187 1185 if e.errno != errno.ENOENT:
1188 1186 raise
1189 1187 try: os.removedirs(os.path.dirname(f))
1190 1188 except: pass
1191 1189 # forget the file copies in the dirstate
1192 1190 # push should readd the files later on
1193 1191 repo.dirstate.forget(a)
1194 1192 self.pop(repo, force=True)
1195 1193 self.push(repo, force=True)
1196 1194 finally:
1197 1195 del wlock
1198 1196
1199 1197 def init(self, repo, create=False):
1200 1198 if not create and os.path.isdir(self.path):
1201 1199 raise util.Abort(_("patch queue directory already exists"))
1202 1200 try:
1203 1201 os.mkdir(self.path)
1204 1202 except OSError, inst:
1205 1203 if inst.errno != errno.EEXIST or not create:
1206 1204 raise
1207 1205 if create:
1208 1206 return self.qrepo(create=True)
1209 1207
1210 1208 def unapplied(self, repo, patch=None):
1211 1209 if patch and patch not in self.series:
1212 1210 raise util.Abort(_("patch %s is not in series file") % patch)
1213 1211 if not patch:
1214 1212 start = self.series_end()
1215 1213 else:
1216 1214 start = self.series.index(patch) + 1
1217 1215 unapplied = []
1218 1216 for i in xrange(start, len(self.series)):
1219 1217 pushable, reason = self.pushable(i)
1220 1218 if pushable:
1221 1219 unapplied.append((i, self.series[i]))
1222 1220 self.explain_pushable(i)
1223 1221 return unapplied
1224 1222
1225 1223 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1226 1224 summary=False):
1227 1225 def displayname(patchname):
1228 1226 if summary:
1229 1227 msg = self.readheaders(patchname)[0]
1230 1228 msg = msg and ': ' + msg[0] or ': '
1231 1229 else:
1232 1230 msg = ''
1233 1231 return '%s%s' % (patchname, msg)
1234 1232
1235 1233 applied = dict.fromkeys([p.name for p in self.applied])
1236 1234 if length is None:
1237 1235 length = len(self.series) - start
1238 1236 if not missing:
1239 1237 for i in xrange(start, start+length):
1240 1238 patch = self.series[i]
1241 1239 if patch in applied:
1242 1240 stat = 'A'
1243 1241 elif self.pushable(i)[0]:
1244 1242 stat = 'U'
1245 1243 else:
1246 1244 stat = 'G'
1247 1245 pfx = ''
1248 1246 if self.ui.verbose:
1249 1247 pfx = '%d %s ' % (i, stat)
1250 1248 elif status and status != stat:
1251 1249 continue
1252 1250 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1253 1251 else:
1254 1252 msng_list = []
1255 1253 for root, dirs, files in os.walk(self.path):
1256 1254 d = root[len(self.path) + 1:]
1257 1255 for f in files:
1258 1256 fl = os.path.join(d, f)
1259 1257 if (fl not in self.series and
1260 1258 fl not in (self.status_path, self.series_path,
1261 1259 self.guards_path)
1262 1260 and not fl.startswith('.')):
1263 1261 msng_list.append(fl)
1264 msng_list.sort()
1265 for x in msng_list:
1262 for x in util.sort(msng_list):
1266 1263 pfx = self.ui.verbose and ('D ') or ''
1267 1264 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1268 1265
1269 1266 def issaveline(self, l):
1270 1267 if l.name == '.hg.patches.save.line':
1271 1268 return True
1272 1269
1273 1270 def qrepo(self, create=False):
1274 1271 if create or os.path.isdir(self.join(".hg")):
1275 1272 return hg.repository(self.ui, path=self.path, create=create)
1276 1273
1277 1274 def restore(self, repo, rev, delete=None, qupdate=None):
1278 1275 c = repo.changelog.read(rev)
1279 1276 desc = c[4].strip()
1280 1277 lines = desc.splitlines()
1281 1278 i = 0
1282 1279 datastart = None
1283 1280 series = []
1284 1281 applied = []
1285 1282 qpp = None
1286 1283 for i in xrange(0, len(lines)):
1287 1284 if lines[i] == 'Patch Data:':
1288 1285 datastart = i + 1
1289 1286 elif lines[i].startswith('Dirstate:'):
1290 1287 l = lines[i].rstrip()
1291 1288 l = l[10:].split(' ')
1292 1289 qpp = [ bin(x) for x in l ]
1293 1290 elif datastart != None:
1294 1291 l = lines[i].rstrip()
1295 1292 se = statusentry(l)
1296 1293 file_ = se.name
1297 1294 if se.rev:
1298 1295 applied.append(se)
1299 1296 else:
1300 1297 series.append(file_)
1301 1298 if datastart == None:
1302 1299 self.ui.warn("No saved patch data found\n")
1303 1300 return 1
1304 1301 self.ui.warn("restoring status: %s\n" % lines[0])
1305 1302 self.full_series = series
1306 1303 self.applied = applied
1307 1304 self.parse_series()
1308 1305 self.series_dirty = 1
1309 1306 self.applied_dirty = 1
1310 1307 heads = repo.changelog.heads()
1311 1308 if delete:
1312 1309 if rev not in heads:
1313 1310 self.ui.warn("save entry has children, leaving it alone\n")
1314 1311 else:
1315 1312 self.ui.warn("removing save entry %s\n" % short(rev))
1316 1313 pp = repo.dirstate.parents()
1317 1314 if rev in pp:
1318 1315 update = True
1319 1316 else:
1320 1317 update = False
1321 1318 self.strip(repo, rev, update=update, backup='strip')
1322 1319 if qpp:
1323 1320 self.ui.warn("saved queue repository parents: %s %s\n" %
1324 1321 (short(qpp[0]), short(qpp[1])))
1325 1322 if qupdate:
1326 1323 self.ui.status(_("queue directory updating\n"))
1327 1324 r = self.qrepo()
1328 1325 if not r:
1329 1326 self.ui.warn("Unable to load queue repository\n")
1330 1327 return 1
1331 1328 hg.clean(r, qpp[0])
1332 1329
1333 1330 def save(self, repo, msg=None):
1334 1331 if len(self.applied) == 0:
1335 1332 self.ui.warn("save: no patches applied, exiting\n")
1336 1333 return 1
1337 1334 if self.issaveline(self.applied[-1]):
1338 1335 self.ui.warn("status is already saved\n")
1339 1336 return 1
1340 1337
1341 1338 ar = [ ':' + x for x in self.full_series ]
1342 1339 if not msg:
1343 1340 msg = "hg patches saved state"
1344 1341 else:
1345 1342 msg = "hg patches: " + msg.rstrip('\r\n')
1346 1343 r = self.qrepo()
1347 1344 if r:
1348 1345 pp = r.dirstate.parents()
1349 1346 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1350 1347 msg += "\n\nPatch Data:\n"
1351 1348 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1352 1349 "\n".join(ar) + '\n' or "")
1353 1350 n = repo.commit(None, text, user=None, force=1)
1354 1351 if not n:
1355 1352 self.ui.warn("repo commit failed\n")
1356 1353 return 1
1357 1354 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1358 1355 self.applied_dirty = 1
1359 1356 self.removeundo(repo)
1360 1357
1361 1358 def full_series_end(self):
1362 1359 if len(self.applied) > 0:
1363 1360 p = self.applied[-1].name
1364 1361 end = self.find_series(p)
1365 1362 if end == None:
1366 1363 return len(self.full_series)
1367 1364 return end + 1
1368 1365 return 0
1369 1366
1370 1367 def series_end(self, all_patches=False):
1371 1368 """If all_patches is False, return the index of the next pushable patch
1372 1369 in the series, or the series length. If all_patches is True, return the
1373 1370 index of the first patch past the last applied one.
1374 1371 """
1375 1372 end = 0
1376 1373 def next(start):
1377 1374 if all_patches:
1378 1375 return start
1379 1376 i = start
1380 1377 while i < len(self.series):
1381 1378 p, reason = self.pushable(i)
1382 1379 if p:
1383 1380 break
1384 1381 self.explain_pushable(i)
1385 1382 i += 1
1386 1383 return i
1387 1384 if len(self.applied) > 0:
1388 1385 p = self.applied[-1].name
1389 1386 try:
1390 1387 end = self.series.index(p)
1391 1388 except ValueError:
1392 1389 return 0
1393 1390 return next(end + 1)
1394 1391 return next(end)
1395 1392
1396 1393 def appliedname(self, index):
1397 1394 pname = self.applied[index].name
1398 1395 if not self.ui.verbose:
1399 1396 p = pname
1400 1397 else:
1401 1398 p = str(self.series.index(pname)) + " " + pname
1402 1399 return p
1403 1400
1404 1401 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1405 1402 force=None, git=False):
1406 1403 def checkseries(patchname):
1407 1404 if patchname in self.series:
1408 1405 raise util.Abort(_('patch %s is already in the series file')
1409 1406 % patchname)
1410 1407 def checkfile(patchname):
1411 1408 if not force and os.path.exists(self.join(patchname)):
1412 1409 raise util.Abort(_('patch "%s" already exists')
1413 1410 % patchname)
1414 1411
1415 1412 if rev:
1416 1413 if files:
1417 1414 raise util.Abort(_('option "-r" not valid when importing '
1418 1415 'files'))
1419 1416 rev = cmdutil.revrange(repo, rev)
1420 1417 rev.sort(lambda x, y: cmp(y, x))
1421 1418 if (len(files) > 1 or len(rev) > 1) and patchname:
1422 1419 raise util.Abort(_('option "-n" not valid when importing multiple '
1423 1420 'patches'))
1424 1421 i = 0
1425 1422 added = []
1426 1423 if rev:
1427 1424 # If mq patches are applied, we can only import revisions
1428 1425 # that form a linear path to qbase.
1429 1426 # Otherwise, they should form a linear path to a head.
1430 1427 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1431 1428 if len(heads) > 1:
1432 1429 raise util.Abort(_('revision %d is the root of more than one '
1433 1430 'branch') % rev[-1])
1434 1431 if self.applied:
1435 1432 base = revlog.hex(repo.changelog.node(rev[0]))
1436 1433 if base in [n.rev for n in self.applied]:
1437 1434 raise util.Abort(_('revision %d is already managed')
1438 1435 % rev[0])
1439 1436 if heads != [revlog.bin(self.applied[-1].rev)]:
1440 1437 raise util.Abort(_('revision %d is not the parent of '
1441 1438 'the queue') % rev[0])
1442 1439 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1443 1440 lastparent = repo.changelog.parentrevs(base)[0]
1444 1441 else:
1445 1442 if heads != [repo.changelog.node(rev[0])]:
1446 1443 raise util.Abort(_('revision %d has unmanaged children')
1447 1444 % rev[0])
1448 1445 lastparent = None
1449 1446
1450 1447 if git:
1451 1448 self.diffopts().git = True
1452 1449
1453 1450 for r in rev:
1454 1451 p1, p2 = repo.changelog.parentrevs(r)
1455 1452 n = repo.changelog.node(r)
1456 1453 if p2 != revlog.nullrev:
1457 1454 raise util.Abort(_('cannot import merge revision %d') % r)
1458 1455 if lastparent and lastparent != r:
1459 1456 raise util.Abort(_('revision %d is not the parent of %d')
1460 1457 % (r, lastparent))
1461 1458 lastparent = p1
1462 1459
1463 1460 if not patchname:
1464 1461 patchname = normname('%d.diff' % r)
1465 1462 self.check_reserved_name(patchname)
1466 1463 checkseries(patchname)
1467 1464 checkfile(patchname)
1468 1465 self.full_series.insert(0, patchname)
1469 1466
1470 1467 patchf = self.opener(patchname, "w")
1471 1468 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1472 1469 patchf.close()
1473 1470
1474 1471 se = statusentry(revlog.hex(n), patchname)
1475 1472 self.applied.insert(0, se)
1476 1473
1477 1474 added.append(patchname)
1478 1475 patchname = None
1479 1476 self.parse_series()
1480 1477 self.applied_dirty = 1
1481 1478
1482 1479 for filename in files:
1483 1480 if existing:
1484 1481 if filename == '-':
1485 1482 raise util.Abort(_('-e is incompatible with import from -'))
1486 1483 if not patchname:
1487 1484 patchname = normname(filename)
1488 1485 self.check_reserved_name(patchname)
1489 1486 if not os.path.isfile(self.join(patchname)):
1490 1487 raise util.Abort(_("patch %s does not exist") % patchname)
1491 1488 else:
1492 1489 try:
1493 1490 if filename == '-':
1494 1491 if not patchname:
1495 1492 raise util.Abort(_('need --name to import a patch from -'))
1496 1493 text = sys.stdin.read()
1497 1494 else:
1498 1495 text = file(filename, 'rb').read()
1499 1496 except IOError:
1500 1497 raise util.Abort(_("unable to read %s") % patchname)
1501 1498 if not patchname:
1502 1499 patchname = normname(os.path.basename(filename))
1503 1500 self.check_reserved_name(patchname)
1504 1501 checkfile(patchname)
1505 1502 patchf = self.opener(patchname, "w")
1506 1503 patchf.write(text)
1507 1504 checkseries(patchname)
1508 1505 index = self.full_series_end() + i
1509 1506 self.full_series[index:index] = [patchname]
1510 1507 self.parse_series()
1511 1508 self.ui.warn("adding %s to series file\n" % patchname)
1512 1509 i += 1
1513 1510 added.append(patchname)
1514 1511 patchname = None
1515 1512 self.series_dirty = 1
1516 1513 qrepo = self.qrepo()
1517 1514 if qrepo:
1518 1515 qrepo.add(added)
1519 1516
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The qfinish command should be
    used as an alternative for qdel -r, as the latter option is deprecated.

    With --keep, the patch files are preserved in the patch directory."""
    # Delegate entirely to the queue object, then persist its state.
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1535 1532
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    mq = repo.mq
    if not patch:
        # no explicit patch: list everything up to the last applied one
        end = mq.series_end(True)
    else:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    return mq.qseries(repo, length=end, status='A',
                      summary=opts.get('summary'))
1546 1543
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    mq = repo.mq
    if not patch:
        # default: everything after the last applied patch
        start = mq.series_end(True)
    else:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = mq.series.index(patch) + 1
    mq.qseries(repo, start=start, status='U',
               summary=opts.get('summary'))
1557 1554
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch is inserted into the series after the last applied patch.
    If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    mq = repo.mq
    mq.qimport(repo, filename,
               patchname=opts['name'],
               existing=opts['existing'],
               force=opts['force'],
               rev=opts['rev'],
               git=opts['git'])
    mq.save_dirty()
    return 0
1584 1581
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    q = repo.mq
    # q.init returns the nested patch repo only when -c was given
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if r:
        # seed the patch repo with an ignore file so mq bookkeeping
        # files (status, guards) and nested metadata stay untracked
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wopener('.hgignore', 'w')
            fp.write('^\\.hg\n')
            fp.write('^\\.mq\n')
            fp.write('syntax: glob\n')
            fp.write('status\n')
            fp.write('guards\n')
            fp.close()
        # make sure an (empty) series file exists and is tracked
        if not os.path.exists(r.wjoin('series')):
            r.wopener('series', 'w').close()
        r.add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0
1610 1607
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    def patchdir(repo):
        # default location of the nested patch repository
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchespath = opts['patches'] or patchdir(sr)
    try:
        pr = hg.repository(ui, patchespath)
    except RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    # qbase: first applied changeset in the source (if any); destrev:
    # explicit head list used to clone only the unpatched history when the
    # destination is remote and cannot be stripped afterwards.
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                # remote dest: exclude heads descending from qbase, but
                # keep qbase's parent so pre-patch history is complete
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: best-effort lookup of the qbase tag
        try:
            qbase = sr.lookup('qbase')
        except RepoError:
            pass
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
                        pull=opts['pull'], update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # local dest: remove the applied-patch changesets entirely
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1673 1670
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        # wrap the message in _() for i18n, consistent with every other
        # util.Abort in this file (was an untranslated one-liner)
        raise util.Abort(_('no queue repository'))
    commands.commit(r.ui, r, *pats, **opts)
1680 1677
def series(ui, repo, **opts):
    """print the entire series file"""
    mq = repo.mq
    mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1685 1682
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    # index just past the topmost applied patch (0 when nothing applied)
    if q.applied:
        end = q.series_end(True)
    else:
        end = 0
    if not end:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=end - 1, length=1, status='A',
                     summary=opts.get('summary'))
1696 1693
def next(ui, repo, **opts):
    """print the name of the next patch"""
    mq = repo.mq
    pos = mq.series_end()
    if pos == len(mq.series):
        # nothing left to push
        ui.write("All patches applied\n")
        return 1
    return mq.qseries(repo, start=pos, length=1,
                      summary=opts.get('summary'))
1705 1702
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    mq = repo.mq
    napplied = len(mq.applied)
    if not napplied:
        ui.write("No patches applied\n")
        return 1
    if napplied == 1:
        ui.write("Only one patch applied\n")
        return 1
    return mq.qseries(repo, start=napplied - 2, length=1, status='A',
                      summary=opts.get('summary'))
1718 1715
def setupheaderopts(ui, opts):
    """Fill in 'user'/'date' opts from --currentuser/--currentdate."""
    def fill(key, value):
        # only when the 'current...' flag was given and no explicit value
        if not opts[key] and opts['current' + key]:
            opts[key] = value
    fill('user', ui.username())
    fill('date', "%d %d" % util.makedate())
1725 1722
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    mq = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        msg = ui.edit(msg, ui.username())
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    mq.new(repo, patch, *args, **opts)
    mq.save_dirty()
    return 0
1748 1745
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if not q.applied:
            ui.write(_("No patches applied\n"))
            return 1
        # -e conflicts with an explicit message from -m/-l
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # edit the existing header of the topmost patch
        patch = q.applied[-1].name
        (message, comment, user, date, hasdiff) = q.readheaders(patch)
        message = ui.edit('\n'.join(message), user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret
1774 1771
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any changes which
    have been made in the working directory since the last refresh (thus
    showing what the current patch would become after a qrefresh).

    Use 'hg diff' if you only want to see the changes made since the last
    qrefresh, or 'hg export qtip' if you want to see changes made by the
    current patch without including changes made since the qrefresh.
    """
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
1788 1785
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        # applied patches (including qtip itself) can never be folded
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        # Bug fix: the original warned about a duplicate but appended it
        # anyway, so the same patch was folded twice (the second
        # application would then fail).  Skip duplicates as the warning
        # promises; also add the missing trailing newline to the warning.
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
            continue
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's header for concatenation below
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # build the combined header: parent's header, then each folded
        # patch's header separated by '* * *'
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1847 1844
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    mq = repo.mq
    target = mq.lookup(patch)
    force = opts['force']
    if mq.isapplied(target):
        # target already applied: pop down to it
        ret = mq.pop(repo, target, force=force)
    else:
        # target not applied yet: push up to it
        ret = mq.push(repo, target, force=force)
    mq.save_dirty()
    return ret
1858 1855
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
        hg qguard -- -foo

    To set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print one series entry with its guards (or 'unguarded')
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no patch name given, or first arg already looks like a guard
    # (+foo/-foo): default to the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # setting mode: replace the patch's guards (empty list for --none)
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        # query mode: show the guards of the selected patch
        status(q.series.index(q.lookup(patch)))
1906 1903
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            # wrapped in _() for i18n, consistent with sibling commands
            ui.write(_('No patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    # use the local alias (was repo.mq.readheaders — same object, clearer)
    message = q.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')
1921 1918
def lastsavename(path):
    """Return (filename, index) of the highest-numbered save file.

    Save files live next to *path* and are named "<base>.N" for a
    decimal N.  Returns (None, None) when no save file exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # re.escape the base so metacharacters in the name cannot match
    # unrelated entries, and make the '.' separator literal (the old
    # pattern "%s.(...)" let the dot match any character).
    namere = re.compile(r"%s\.([0-9]+)" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            # 'is None' instead of '== None' (identity test for None)
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1938 1935
def savename(path):
    """Return the next unused save-file name for *path*."""
    last, index = lastsavename(path)
    if last is None:
        index = 0
    return "%s.%d" % (path, index + 1)
1945 1942
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When --force is applied, all local changes in patched files will be lost.
    """
    q = repo.mq
    mergeq = None

    if opts['all']:
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        # push up to the last patch in the series
        patch = q.series[-1]
    if opts['merge']:
        # merge against a previously saved queue (-n names it; otherwise
        # use the most recent save found next to the current queue)
        if opts['name']:
            newpath = repo.join(opts['name'])
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq)
    return ret
1972 1969
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch name,
    keeps popping off patches until the named patch is at the top of the stack.
    """
    if opts['name']:
        # operate on a named (saved) queue; no working-dir update then
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
1990 1987
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # single-argument form: rename the current (top) patch to 'patch'
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    # renaming into an existing directory: keep the original basename
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    # rewrite the series entry, preserving any '#guard' annotations
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is applied, update the status entry to the new name
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    # mirror the rename in the versioned patch repo, if there is one
    r = q.qrepo()
    if r:
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # never committed: just retarget the pending add
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                # track as a copy + removal so history follows the rename
                if r.dirstate[name] == 'r':
                    r.undelete([name])
                r.copy(patch, name)
                r.remove([patch], False)
        finally:
            # release the wlock by dropping the last reference
            del wlock

    q.save_dirty()
2050 2047
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, node,
               delete=opts['delete'],
               qupdate=opts['update'])
    mq.save_dirty()
    return 0
2059 2056
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        # best-effort removal of the status file; narrowed from a bare
        # 'except:' which also swallowed KeyboardInterrupt/SystemExit
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            pass
    return 0
2089 2086
def strip(ui, repo, rev, **opts):
    """strip a revision and all its descendants from the repository

    If one of the working dir's parent revisions is stripped, the working
    directory will be updated to the parent of the stripped revision.
    """
    # backup mode: 'all' (default), 'strip' (--backup) or 'none' (--nobackup)
    backup = 'all'
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'

    rev = repo.lookup(rev)
    p = repo.dirstate.parents()
    cl = repo.changelog
    # only update the working dir when the stripped rev is an ancestor of
    # (one of) the current dirstate parents
    update = True
    if p[0] == revlog.nullid:
        update = False
    elif p[1] == revlog.nullid and rev != cl.ancestor(p[0], rev):
        update = False
    elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
        update = False

    repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
    return 0
2115 2112
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # setting mode: change the active guard set, then report how the
        # push/skip status of patches changed as a result
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # -s: tally how many series entries use each guard
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # query mode: just print the active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top patch name before any popping, so --reapply
    # can push back up to it afterwards
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just below the first guarded applied patch
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
        if popped:
            try:
                if reapply:
                    ui.status(_('reapplying unguarded patches\n'))
                    q.push(repo, reapply)
            finally:
                q.save_dirty()
2217 2214
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied patches) by
    moving them out of mq control into regular repository history.

    Accepts a revision range or the --all option. If --all is specified, all
    applied mq revisions are removed from mq control. Otherwise, the given
    revisions must be at the base of the stack of applied patches.

    This can be especially useful if your changes have been applied to an
    upstream repository, or if you are about to push your changes to upstream.
    """
    # --applied means "everything from qbase through qtip" plus any
    # explicitly listed revisions; otherwise some revisions are required
    if opts['applied']:
        revrange = ('qbase:qtip',) + revrange
    elif not revrange:
        raise util.Abort(_('no revisions specified'))

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = cmdutil.revrange(repo, revrange)
    q.finish(repo, revs)
    q.save_dirty()
    return 0
2245 2242
def reposetup(ui, repo):
    # Subclass the repo on the fly so mq can veto operations that would
    # corrupt the patch stack and expose qtip/qbase/qparent as tags.
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # refuse when the working dir parent is an applied mq patch
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (6th arg) or as a keyword
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # pushing applied patches would publish mutable history
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # one tag per applied patch, plus qtip/qbase/qparent markers
            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]

            if mqtags[-1][0] not in self.changelog.nodemap:
                # stale status file (e.g. after a strip outside mq)
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(mqtags[-1][0]))
                return tagscache

            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                # real tags win over mq patch names of the same name
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self, partial, lrev):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = revlog.bin(q.applied[0].rev)
            if qbasenode not in cl.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, len(cl))

            return partial

    if repo.local():
        # attach the mq machinery only to local repositories
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2329 2326
# option shared by the qseries-style commands: -s/--summary
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# options consumed by setupheaderopts() to stamp user/date into patch headers
headeropts = [
    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
    ('u', 'user', '', _('add "From: <given user>" to patch')),
    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
    ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2337 2334
2338 2335 cmdtable = {
2339 2336 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2340 2337 "qclone":
2341 2338 (clone,
2342 2339 [('', 'pull', None, _('use pull protocol to copy metadata')),
2343 2340 ('U', 'noupdate', None, _('do not update the new working directories')),
2344 2341 ('', 'uncompressed', None,
2345 2342 _('use uncompressed transfer (fast over LAN)')),
2346 2343 ('p', 'patches', '', _('location of source patch repo')),
2347 2344 ] + commands.remoteopts,
2348 2345 _('hg qclone [OPTION]... SOURCE [DEST]')),
2349 2346 "qcommit|qci":
2350 2347 (commit,
2351 2348 commands.table["^commit|ci"][1],
2352 2349 _('hg qcommit [OPTION]... [FILE]...')),
2353 2350 "^qdiff":
2354 2351 (diff,
2355 2352 commands.diffopts + commands.diffopts2 + commands.walkopts,
2356 2353 _('hg qdiff [OPTION]... [FILE]...')),
2357 2354 "qdelete|qremove|qrm":
2358 2355 (delete,
2359 2356 [('k', 'keep', None, _('keep patch file')),
2360 2357 ('r', 'rev', [], _('stop managing a revision'))],
2361 2358 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2362 2359 'qfold':
2363 2360 (fold,
2364 2361 [('e', 'edit', None, _('edit patch header')),
2365 2362 ('k', 'keep', None, _('keep folded patch files')),
2366 2363 ] + commands.commitopts,
2367 2364 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2368 2365 'qgoto':
2369 2366 (goto,
2370 2367 [('f', 'force', None, _('overwrite any local changes'))],
2371 2368 _('hg qgoto [OPTION]... PATCH')),
2372 2369 'qguard':
2373 2370 (guard,
2374 2371 [('l', 'list', None, _('list all patches and guards')),
2375 2372 ('n', 'none', None, _('drop all guards'))],
2376 2373 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2377 2374 'qheader': (header, [], _('hg qheader [PATCH]')),
2378 2375 "^qimport":
2379 2376 (qimport,
2380 2377 [('e', 'existing', None, 'import file in patch dir'),
2381 2378 ('n', 'name', '', 'patch file name'),
2382 2379 ('f', 'force', None, 'overwrite existing files'),
2383 2380 ('r', 'rev', [], 'place existing revisions under mq control'),
2384 2381 ('g', 'git', None, _('use git extended diff format'))],
2385 2382 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2386 2383 "^qinit":
2387 2384 (init,
2388 2385 [('c', 'create-repo', None, 'create queue repository')],
2389 2386 _('hg qinit [-c]')),
2390 2387 "qnew":
2391 2388 (new,
2392 2389 [('e', 'edit', None, _('edit commit message')),
2393 2390 ('f', 'force', None, _('import uncommitted changes into patch')),
2394 2391 ('g', 'git', None, _('use git extended diff format')),
2395 2392 ] + commands.walkopts + commands.commitopts + headeropts,
2396 2393 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2397 2394 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2398 2395 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2399 2396 "^qpop":
2400 2397 (pop,
2401 2398 [('a', 'all', None, _('pop all patches')),
2402 2399 ('n', 'name', '', _('queue name to pop')),
2403 2400 ('f', 'force', None, _('forget any local changes'))],
2404 2401 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2405 2402 "^qpush":
2406 2403 (push,
2407 2404 [('f', 'force', None, _('apply if the patch has rejects')),
2408 2405 ('l', 'list', None, _('list patch name in commit text')),
2409 2406 ('a', 'all', None, _('apply all patches')),
2410 2407 ('m', 'merge', None, _('merge from another queue')),
2411 2408 ('n', 'name', '', _('merge queue name'))],
2412 2409 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2413 2410 "^qrefresh":
2414 2411 (refresh,
2415 2412 [('e', 'edit', None, _('edit commit message')),
2416 2413 ('g', 'git', None, _('use git extended diff format')),
2417 2414 ('s', 'short', None, _('refresh only files already in the patch')),
2418 2415 ] + commands.walkopts + commands.commitopts + headeropts,
2419 2416 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2420 2417 'qrename|qmv':
2421 2418 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2422 2419 "qrestore":
2423 2420 (restore,
2424 2421 [('d', 'delete', None, _('delete save entry')),
2425 2422 ('u', 'update', None, _('update queue working dir'))],
2426 2423 _('hg qrestore [-d] [-u] REV')),
2427 2424 "qsave":
2428 2425 (save,
2429 2426 [('c', 'copy', None, _('copy patch directory')),
2430 2427 ('n', 'name', '', _('copy directory name')),
2431 2428 ('e', 'empty', None, _('clear queue status file')),
2432 2429 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2433 2430 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2434 2431 "qselect":
2435 2432 (select,
2436 2433 [('n', 'none', None, _('disable all guards')),
2437 2434 ('s', 'series', None, _('list all guards in series file')),
2438 2435 ('', 'pop', None, _('pop to before first guarded applied patch')),
2439 2436 ('', 'reapply', None, _('pop, then reapply patches'))],
2440 2437 _('hg qselect [OPTION]... [GUARD]...')),
2441 2438 "qseries":
2442 2439 (series,
2443 2440 [('m', 'missing', None, _('print patches not in series')),
2444 2441 ] + seriesopts,
2445 2442 _('hg qseries [-ms]')),
2446 2443 "^strip":
2447 2444 (strip,
2448 2445 [('f', 'force', None, _('force removal with local changes')),
2449 2446 ('b', 'backup', None, _('bundle unrelated changesets')),
2450 2447 ('n', 'nobackup', None, _('no backups'))],
2451 2448 _('hg strip [-f] [-b] [-n] REV')),
2452 2449 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2453 2450 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2454 2451 "qfinish":
2455 2452 (finish,
2456 2453 [('a', 'applied', None, _('finish all applied changesets'))],
2457 2454 _('hg qfinish [-a] [REV...]')),
2458 2455 }
@@ -1,283 +1,281
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets incoming at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # diffstat = True # add a diffstat before the diff content
44 44 # sources = serve # notify if source of incoming changes in this list
45 45 # # (serve == ssh or http, push, pull, bundle)
46 46 # [email]
47 47 # from = user@host.com # email address to send as if none given
48 48 # [web]
49 49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 50 #
51 51 # notify config file has same format as regular hgrc. it has two
52 52 # sections so you can express subscriptions in whatever way is handier
53 53 # for you.
54 54 #
55 55 # [usersubs]
56 56 # # key is subscriber email, value is ","-separated list of glob patterns
57 57 # user@host = pattern
58 58 #
59 59 # [reposubs]
60 60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 61 # pattern = user@host
62 62 #
63 63 # glob patterns are matched against path to repo root.
64 64 #
65 65 # if you like, you can put notify config file in repo that users can
66 66 # push changes to, they can manage their own subscriptions.
67 67
68 68 from mercurial.i18n import _
69 69 from mercurial.node import bin, short
70 70 from mercurial import patch, cmdutil, templater, util, mail
71 71 import email.Parser, fnmatch, socket, time
72 72
73 73 # template for single changeset can include email headers.
74 74 single_template = '''
75 75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 76 From: {author}
77 77
78 78 changeset {node|short} in {root}
79 79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 80 description:
81 81 \t{desc|tabindent|strip}
82 82 '''.lstrip()
83 83
84 84 # template for multiple changesets should not contain email headers,
85 85 # because only first set of headers will be used and result will look
86 86 # strange.
87 87 multiple_template = '''
88 88 changeset {node|short} in {root}
89 89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 90 summary: {desc|firstline}
91 91 '''
92 92
93 93 deftemplates = {
94 94 'changegroup': multiple_template,
95 95 }
96 96
97 97 class notifier(object):
98 98 '''email notification class.'''
99 99
100 100 def __init__(self, ui, repo, hooktype):
101 101 self.ui = ui
102 102 cfg = self.ui.config('notify', 'config')
103 103 if cfg:
104 104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
105 105 self.repo = repo
106 106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 107 self.root = self.strip(self.repo.root)
108 108 self.domain = self.ui.config('notify', 'domain')
109 109 self.subs = self.subscribers()
110 110
111 111 mapfile = self.ui.config('notify', 'style')
112 112 template = (self.ui.config('notify', hooktype) or
113 113 self.ui.config('notify', 'template'))
114 114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 115 False, mapfile, False)
116 116 if not mapfile and not template:
117 117 template = deftemplates.get(hooktype) or single_template
118 118 if template:
119 119 template = templater.parsestring(template, quoted=False)
120 120 self.t.use_template(template)
121 121
122 122 def strip(self, path):
123 123 '''strip leading slashes from local path, turn into web-safe path.'''
124 124
125 125 path = util.pconvert(path)
126 126 count = self.stripcount
127 127 while count > 0:
128 128 c = path.find('/')
129 129 if c == -1:
130 130 break
131 131 path = path[c+1:]
132 132 count -= 1
133 133 return path
134 134
135 135 def fixmail(self, addr):
136 136 '''try to clean up email addresses.'''
137 137
138 138 addr = util.email(addr.strip())
139 139 if self.domain:
140 140 a = addr.find('@localhost')
141 141 if a != -1:
142 142 addr = addr[:a]
143 143 if '@' not in addr:
144 144 return addr + '@' + self.domain
145 145 return addr
146 146
147 147 def subscribers(self):
148 148 '''return list of email addresses of subscribers to this repo.'''
149 149
150 150 subs = {}
151 151 for user, pats in self.ui.configitems('usersubs'):
152 152 for pat in pats.split(','):
153 153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 154 subs[self.fixmail(user)] = 1
155 155 for pat, users in self.ui.configitems('reposubs'):
156 156 if fnmatch.fnmatch(self.repo.root, pat):
157 157 for user in users.split(','):
158 158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
160 subs.sort()
161 return subs
159 return util.sort(subs)
162 160
163 161 def url(self, path=None):
164 162 return self.ui.config('web', 'baseurl') + (path or self.root)
165 163
166 164 def node(self, node):
167 165 '''format one changeset.'''
168 166
169 167 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 168 baseurl=self.ui.config('web', 'baseurl'),
171 169 root=self.repo.root,
172 170 webroot=self.root)
173 171
174 172 def skipsource(self, source):
175 173 '''true if incoming changes from this source should be skipped.'''
176 174 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 175 return source not in ok_sources
178 176
179 177 def send(self, node, count, data):
180 178 '''send message.'''
181 179
182 180 p = email.Parser.Parser()
183 181 msg = p.parsestr(data)
184 182
185 183 def fix_subject():
186 184 '''try to make subject line exist and be useful.'''
187 185
188 186 subject = msg['Subject']
189 187 if not subject:
190 188 if count > 1:
191 189 subject = _('%s: %d new changesets') % (self.root, count)
192 190 else:
193 191 changes = self.repo.changelog.read(node)
194 192 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 193 subject = '%s: %s' % (self.root, s)
196 194 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 195 if maxsubject and len(subject) > maxsubject:
198 196 subject = subject[:maxsubject-3] + '...'
199 197 del msg['Subject']
200 198 msg['Subject'] = subject
201 199
202 200 def fix_sender():
203 201 '''try to make message have proper sender.'''
204 202
205 203 sender = msg['From']
206 204 if not sender:
207 205 sender = self.ui.config('email', 'from') or self.ui.username()
208 206 if '@' not in sender or '@localhost' in sender:
209 207 sender = self.fixmail(sender)
210 208 del msg['From']
211 209 msg['From'] = sender
212 210
213 211 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
214 212 fix_subject()
215 213 fix_sender()
216 214
217 215 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 216 if not msg['Message-Id']:
219 217 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 218 (short(node), int(time.time()),
221 219 hash(self.repo.root), socket.getfqdn()))
222 220 msg['To'] = ', '.join(self.subs)
223 221
224 222 msgtext = msg.as_string(0)
225 223 if self.ui.configbool('notify', 'test', True):
226 224 self.ui.write(msgtext)
227 225 if not msgtext.endswith('\n'):
228 226 self.ui.write('\n')
229 227 else:
230 228 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 229 (len(self.subs), count))
232 230 mail.sendmail(self.ui, util.email(msg['From']),
233 231 self.subs, msgtext)
234 232
235 233 def diff(self, node, ref):
236 234 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 235 prev = self.repo.changelog.parents(node)[0]
238 236 self.ui.pushbuffer()
239 237 patch.diff(self.repo, prev, ref)
240 238 difflines = self.ui.popbuffer().splitlines(1)
241 239 if self.ui.configbool('notify', 'diffstat', True):
242 240 s = patch.diffstat(difflines)
243 241 # s may be nil, don't include the header if it is
244 242 if s:
245 243 self.ui.write('\ndiffstat:\n\n%s' % s)
246 244 if maxdiff == 0:
247 245 return
248 246 if maxdiff > 0 and len(difflines) > maxdiff:
249 247 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
250 248 (len(difflines), maxdiff))
251 249 difflines = difflines[:maxdiff]
252 250 elif difflines:
253 251 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
254 252 self.ui.write(*difflines)
255 253
256 254 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
257 255 '''send email notifications to interested subscribers.
258 256
259 257 if used as changegroup hook, send one email for all changesets in
260 258 changegroup. else send one email per changeset.'''
261 259 n = notifier(ui, repo, hooktype)
262 260 if not n.subs:
263 261 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
264 262 return
265 263 if n.skipsource(source):
266 264 ui.debug(_('notify: changes have source "%s" - skipping\n') %
267 265 source)
268 266 return
269 267 node = bin(node)
270 268 ui.pushbuffer()
271 269 if hooktype == 'changegroup':
272 270 start = repo[node].rev()
273 271 end = len(repo)
274 272 count = end - start
275 273 for rev in xrange(start, end):
276 274 n.node(repo[node].rev())
277 275 n.diff(node, repo.changelog.tip())
278 276 else:
279 277 count = 1
280 278 n.node(node)
281 279 n.diff(node, node)
282 280 data = ui.popbuffer()
283 281 n.send(node, count, data)
@@ -1,103 +1,100
1 1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 2 #
3 3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 4 # that removes files not known to mercurial
5 5 #
6 6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 7 # (http://www.red-bean.com/cvsutils/).
8 8 #
9 9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 10 # [extensions]
11 11 # hgext.purge =
12 12 #
13 13 # For help on the usage of "hg purge" use:
14 14 # hg help purge
15 15 #
16 16 # This program is free software; you can redistribute it and/or modify
17 17 # it under the terms of the GNU General Public License as published by
18 18 # the Free Software Foundation; either version 2 of the License, or
19 19 # (at your option) any later version.
20 20 #
21 21 # This program is distributed in the hope that it will be useful,
22 22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 24 # GNU General Public License for more details.
25 25 #
26 26 # You should have received a copy of the GNU General Public License
27 27 # along with this program; if not, write to the Free Software
28 28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29 29
30 30 from mercurial import util, commands, cmdutil
31 31 from mercurial.i18n import _
32 32 import os
33 33
34 34 def purge(ui, repo, *dirs, **opts):
35 35 '''removes files not tracked by mercurial
36 36
37 37 Delete files not known to mercurial, this is useful to test local and
38 38 uncommitted changes in the otherwise clean source tree.
39 39
40 40 This means that purge will delete:
41 41 - Unknown files: files marked with "?" by "hg status"
42 42 - Ignored files: files usually ignored by Mercurial because they match
43 43 a pattern in a ".hgignore" file
44 44 - Empty directories: in fact Mercurial ignores directories unless they
45 45 contain files under source control management
46 46 But it will leave untouched:
47 47 - Unmodified tracked files
48 48 - Modified tracked files
49 49 - New files added to the repository (with "hg add")
50 50
51 51 If directories are given on the command line, only files in these
52 52 directories are considered.
53 53
54 54 Be careful with purge, you could irreversibly delete some files you
55 55 forgot to add to the repository. If you only want to print the list of
56 56 files that this program would delete use the --print option.
57 57 '''
58 58 act = not opts['print']
59 59 eol = '\n'
60 60 if opts['print0']:
61 61 eol = '\0'
62 62 act = False # --print0 implies --print
63 63
64 64 def remove(remove_func, name):
65 65 if act:
66 66 try:
67 67 remove_func(os.path.join(repo.root, name))
68 68 except OSError, e:
69 69 m = _('%s cannot be removed') % name
70 70 if opts['abort_on_err']:
71 71 raise util.Abort(m)
72 72 ui.warn(_('warning: %s\n') % m)
73 73 else:
74 74 ui.write('%s%s' % (name, eol))
75 75
76 76 directories = []
77 77 match = cmdutil.match(repo, dirs, opts)
78 78 match.dir = directories.append
79 79 status = repo.status(match=match, ignored=opts['all'], unknown=True)
80 files = status[4] + status[5]
81 files.sort()
82 directories.sort()
83 80
84 for f in files:
81 for f in util.sort(status[4] + status[5]):
85 82 ui.note(_('Removing file %s\n') % f)
86 83 remove(os.remove, f)
87 84
88 for f in directories[::-1]:
85 for f in util.sort(directories)[::-1]:
89 86 if match(f) and not os.listdir(repo.wjoin(f)):
90 87 ui.note(_('Removing directory %s\n') % f)
91 88 remove(os.rmdir, f)
92 89
93 90 cmdtable = {
94 91 'purge|clean':
95 92 (purge,
96 93 [('a', 'abort-on-err', None, _('abort if an error occurs')),
97 94 ('', 'all', None, _('purge ignored files too')),
98 95 ('p', 'print', None, _('print the file names instead of deleting them')),
99 96 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
100 97 ' (implies -p)')),
101 98 ] + commands.walkopts,
102 99 _('hg purge [OPTION]... [DIR]...'))
103 100 }
@@ -1,597 +1,589
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 import os, tempfile
10 10 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge
11 11 from mercurial import patch, revlog, util
12 12
13 13 '''patch transplanting tool
14 14
15 15 This extension allows you to transplant patches from another branch.
16 16
17 17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 18 from a changeset hash to its hash in the source repository.
19 19 '''
20 20
21 21 class transplantentry:
22 22 def __init__(self, lnode, rnode):
23 23 self.lnode = lnode
24 24 self.rnode = rnode
25 25
26 26 class transplants:
27 27 def __init__(self, path=None, transplantfile=None, opener=None):
28 28 self.path = path
29 29 self.transplantfile = transplantfile
30 30 self.opener = opener
31 31
32 32 if not opener:
33 33 self.opener = util.opener(self.path)
34 34 self.transplants = []
35 35 self.dirty = False
36 36 self.read()
37 37
38 38 def read(self):
39 39 abspath = os.path.join(self.path, self.transplantfile)
40 40 if self.transplantfile and os.path.exists(abspath):
41 41 for line in self.opener(self.transplantfile).read().splitlines():
42 42 lnode, rnode = map(revlog.bin, line.split(':'))
43 43 self.transplants.append(transplantentry(lnode, rnode))
44 44
45 45 def write(self):
46 46 if self.dirty and self.transplantfile:
47 47 if not os.path.isdir(self.path):
48 48 os.mkdir(self.path)
49 49 fp = self.opener(self.transplantfile, 'w')
50 50 for c in self.transplants:
51 51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 52 fp.write(l + ':' + r + '\n')
53 53 fp.close()
54 54 self.dirty = False
55 55
56 56 def get(self, rnode):
57 57 return [t for t in self.transplants if t.rnode == rnode]
58 58
59 59 def set(self, lnode, rnode):
60 60 self.transplants.append(transplantentry(lnode, rnode))
61 61 self.dirty = True
62 62
63 63 def remove(self, transplant):
64 64 del self.transplants[self.transplants.index(transplant)]
65 65 self.dirty = True
66 66
67 67 class transplanter:
68 68 def __init__(self, ui, repo):
69 69 self.ui = ui
70 70 self.path = repo.join('transplant')
71 71 self.opener = util.opener(self.path)
72 72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73 73
74 74 def applied(self, repo, node, parent):
75 75 '''returns True if a node is already an ancestor of parent
76 76 or has already been transplanted'''
77 77 if hasnode(repo, node):
78 78 if node in repo.changelog.reachable(parent, stop=node):
79 79 return True
80 80 for t in self.transplants.get(node):
81 81 # it might have been stripped
82 82 if not hasnode(repo, t.lnode):
83 83 self.transplants.remove(t)
84 84 return False
85 85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 86 return True
87 87 return False
88 88
89 89 def apply(self, repo, source, revmap, merges, opts={}):
90 90 '''apply the revisions in revmap one by one in revision order'''
91 revs = revmap.keys()
92 revs.sort()
93
91 revs = util.sort(revmap)
94 92 p1, p2 = repo.dirstate.parents()
95 93 pulls = []
96 94 diffopts = patch.diffopts(self.ui, opts)
97 95 diffopts.git = True
98 96
99 97 lock = wlock = None
100 98 try:
101 99 wlock = repo.wlock()
102 100 lock = repo.lock()
103 101 for rev in revs:
104 102 node = revmap[rev]
105 103 revstr = '%s:%s' % (rev, revlog.short(node))
106 104
107 105 if self.applied(repo, node, p1):
108 106 self.ui.warn(_('skipping already applied revision %s\n') %
109 107 revstr)
110 108 continue
111 109
112 110 parents = source.changelog.parents(node)
113 111 if not opts.get('filter'):
114 112 # If the changeset parent is the same as the wdir's parent,
115 113 # just pull it.
116 114 if parents[0] == p1:
117 115 pulls.append(node)
118 116 p1 = node
119 117 continue
120 118 if pulls:
121 119 if source != repo:
122 120 repo.pull(source, heads=pulls)
123 121 merge.update(repo, pulls[-1], False, False, None)
124 122 p1, p2 = repo.dirstate.parents()
125 123 pulls = []
126 124
127 125 domerge = False
128 126 if node in merges:
129 127 # pulling all the merge revs at once would mean we couldn't
130 128 # transplant after the latest even if transplants before them
131 129 # fail.
132 130 domerge = True
133 131 if not hasnode(repo, node):
134 132 repo.pull(source, heads=[node])
135 133
136 134 if parents[1] != revlog.nullid:
137 135 self.ui.note(_('skipping merge changeset %s:%s\n')
138 136 % (rev, revlog.short(node)))
139 137 patchfile = None
140 138 else:
141 139 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
142 140 fp = os.fdopen(fd, 'w')
143 141 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
144 142 fp.close()
145 143
146 144 del revmap[rev]
147 145 if patchfile or domerge:
148 146 try:
149 147 n = self.applyone(repo, node,
150 148 source.changelog.read(node),
151 149 patchfile, merge=domerge,
152 150 log=opts.get('log'),
153 151 filter=opts.get('filter'))
154 152 if n and domerge:
155 153 self.ui.status(_('%s merged at %s\n') % (revstr,
156 154 revlog.short(n)))
157 155 elif n:
158 156 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
159 157 revlog.short(n)))
160 158 finally:
161 159 if patchfile:
162 160 os.unlink(patchfile)
163 161 if pulls:
164 162 repo.pull(source, heads=pulls)
165 163 merge.update(repo, pulls[-1], False, False, None)
166 164 finally:
167 165 self.saveseries(revmap, merges)
168 166 self.transplants.write()
169 167 del lock, wlock
170 168
171 169 def filter(self, filter, changelog, patchfile):
172 170 '''arbitrarily rewrite changeset before applying it'''
173 171
174 172 self.ui.status('filtering %s\n' % patchfile)
175 173 user, date, msg = (changelog[1], changelog[2], changelog[4])
176 174
177 175 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
178 176 fp = os.fdopen(fd, 'w')
179 177 fp.write("# HG changeset patch\n")
180 178 fp.write("# User %s\n" % user)
181 179 fp.write("# Date %d %d\n" % date)
182 180 fp.write(changelog[4])
183 181 fp.close()
184 182
185 183 try:
186 184 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
187 185 util.shellquote(patchfile)),
188 186 environ={'HGUSER': changelog[1]},
189 187 onerr=util.Abort, errprefix=_('filter failed'))
190 188 user, date, msg = self.parselog(file(headerfile))[1:4]
191 189 finally:
192 190 os.unlink(headerfile)
193 191
194 192 return (user, date, msg)
195 193
196 194 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
197 195 filter=None):
198 196 '''apply the patch in patchfile to the repository as a transplant'''
199 197 (manifest, user, (time, timezone), files, message) = cl[:5]
200 198 date = "%d %d" % (time, timezone)
201 199 extra = {'transplant_source': node}
202 200 if filter:
203 201 (user, date, message) = self.filter(filter, cl, patchfile)
204 202
205 203 if log:
206 204 message += '\n(transplanted from %s)' % revlog.hex(node)
207 205
208 206 self.ui.status(_('applying %s\n') % revlog.short(node))
209 207 self.ui.note('%s %s\n%s\n' % (user, date, message))
210 208
211 209 if not patchfile and not merge:
212 210 raise util.Abort(_('can only omit patchfile if merging'))
213 211 if patchfile:
214 212 try:
215 213 files = {}
216 214 try:
217 215 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
218 216 files=files)
219 217 if not files:
220 218 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
221 219 return None
222 220 finally:
223 221 files = patch.updatedir(self.ui, repo, files)
224 222 except Exception, inst:
225 223 if filter:
226 224 os.unlink(patchfile)
227 225 seriespath = os.path.join(self.path, 'series')
228 226 if os.path.exists(seriespath):
229 227 os.unlink(seriespath)
230 228 p1 = repo.dirstate.parents()[0]
231 229 p2 = node
232 230 self.log(user, date, message, p1, p2, merge=merge)
233 231 self.ui.write(str(inst) + '\n')
234 232 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
235 233 else:
236 234 files = None
237 235 if merge:
238 236 p1, p2 = repo.dirstate.parents()
239 237 repo.dirstate.setparents(p1, node)
240 238
241 239 n = repo.commit(files, message, user, date, extra=extra)
242 240 if not merge:
243 241 self.transplants.set(n, node)
244 242
245 243 return n
246 244
247 245 def resume(self, repo, source, opts=None):
248 246 '''recover last transaction and apply remaining changesets'''
249 247 if os.path.exists(os.path.join(self.path, 'journal')):
250 248 n, node = self.recover(repo)
251 249 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
252 250 revlog.short(n)))
253 251 seriespath = os.path.join(self.path, 'series')
254 252 if not os.path.exists(seriespath):
255 253 self.transplants.write()
256 254 return
257 255 nodes, merges = self.readseries()
258 256 revmap = {}
259 257 for n in nodes:
260 258 revmap[source.changelog.rev(n)] = n
261 259 os.unlink(seriespath)
262 260
263 261 self.apply(repo, source, revmap, merges, opts)
264 262
265 263 def recover(self, repo):
266 264 '''commit working directory using journal metadata'''
267 265 node, user, date, message, parents = self.readlog()
268 266 merge = len(parents) == 2
269 267
270 268 if not user or not date or not message or not parents[0]:
271 269 raise util.Abort(_('transplant log file is corrupt'))
272 270
273 271 extra = {'transplant_source': node}
274 272 wlock = repo.wlock()
275 273 try:
276 274 p1, p2 = repo.dirstate.parents()
277 275 if p1 != parents[0]:
278 276 raise util.Abort(
279 277 _('working dir not at transplant parent %s') %
280 278 revlog.hex(parents[0]))
281 279 if merge:
282 280 repo.dirstate.setparents(p1, parents[1])
283 281 n = repo.commit(None, message, user, date, extra=extra)
284 282 if not n:
285 283 raise util.Abort(_('commit failed'))
286 284 if not merge:
287 285 self.transplants.set(n, node)
288 286 self.unlog()
289 287
290 288 return n, node
291 289 finally:
292 290 del wlock
293 291
294 292 def readseries(self):
295 293 nodes = []
296 294 merges = []
297 295 cur = nodes
298 296 for line in self.opener('series').read().splitlines():
299 297 if line.startswith('# Merges'):
300 298 cur = merges
301 299 continue
302 300 cur.append(revlog.bin(line))
303 301
304 302 return (nodes, merges)
305 303
306 304 def saveseries(self, revmap, merges):
307 305 if not revmap:
308 306 return
309 307
310 308 if not os.path.isdir(self.path):
311 309 os.mkdir(self.path)
312 310 series = self.opener('series', 'w')
313 revs = revmap.keys()
314 revs.sort()
315 for rev in revs:
311 for rev in util.sort(revmap):
316 312 series.write(revlog.hex(revmap[rev]) + '\n')
317 313 if merges:
318 314 series.write('# Merges\n')
319 315 for m in merges:
320 316 series.write(revlog.hex(m) + '\n')
321 317 series.close()
322 318
323 319 def parselog(self, fp):
324 320 parents = []
325 321 message = []
326 322 node = revlog.nullid
327 323 inmsg = False
328 324 for line in fp.read().splitlines():
329 325 if inmsg:
330 326 message.append(line)
331 327 elif line.startswith('# User '):
332 328 user = line[7:]
333 329 elif line.startswith('# Date '):
334 330 date = line[7:]
335 331 elif line.startswith('# Node ID '):
336 332 node = revlog.bin(line[10:])
337 333 elif line.startswith('# Parent '):
338 334 parents.append(revlog.bin(line[9:]))
339 335 elif not line.startswith('#'):
340 336 inmsg = True
341 337 message.append(line)
342 338 return (node, user, date, '\n'.join(message), parents)
343 339
344 340 def log(self, user, date, message, p1, p2, merge=False):
345 341 '''journal changelog metadata for later recover'''
346 342
347 343 if not os.path.isdir(self.path):
348 344 os.mkdir(self.path)
349 345 fp = self.opener('journal', 'w')
350 346 fp.write('# User %s\n' % user)
351 347 fp.write('# Date %s\n' % date)
352 348 fp.write('# Node ID %s\n' % revlog.hex(p2))
353 349 fp.write('# Parent ' + revlog.hex(p1) + '\n')
354 350 if merge:
355 351 fp.write('# Parent ' + revlog.hex(p2) + '\n')
356 352 fp.write(message.rstrip() + '\n')
357 353 fp.close()
358 354
359 355 def readlog(self):
360 356 return self.parselog(self.opener('journal'))
361 357
362 358 def unlog(self):
363 359 '''remove changelog journal'''
364 360 absdst = os.path.join(self.path, 'journal')
365 361 if os.path.exists(absdst):
366 362 os.unlink(absdst)
367 363
    def transplantfilter(self, repo, source, root):
        '''Return a matcher rejecting changesets that should not be
        transplanted on top of root.'''
        def matchfn(node):
            if self.applied(repo, node, root):
                # already applied on this branch
                return False
            if source.changelog.parents(node)[1] != revlog.nullid:
                # merges are never transplanted
                return False
            extra = source.changelog.read(node)[5]
            cnode = extra.get('transplant_source')
            if cnode and self.applied(repo, cnode, root):
                # this changeset's recorded transplant source is already here
                return False
            return True

        return matchfn
381 377
def hasnode(repo, node):
    '''Return True if repo's changelog contains node, False otherwise.'''
    try:
        # identity comparison with None ("is not") instead of "!= None"
        return repo.changelog.rev(node) is not None
    except revlog.RevlogError:
        # unknown node
        return False
387 383
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets'''
    def browsehelp(ui):
        # one-line summary of each prompt response
        ui.write('y: transplant this changeset\n'
                 'n: skip this changeset\n'
                 'm: merge at this changeset\n'
                 'p: show patch\n'
                 'c: commit selected changesets\n'
                 'q: cancel transplant\n'
                 '?: show this help\n')

    displayer = cmdutil.show_changeset(ui, repo, opts)
    transplants = []
    merges = []
    for node in nodes:
        displayer.show(changenode=node)
        action = None
        # loop until the user gives a definitive answer for this node
        while not action:
            action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
            if action == '?':
                browsehelp(ui)
                action = None
            elif action == 'p':
                # show the patch, then re-prompt for a real decision
                parent = repo.changelog.parents(node)[0]
                patch.diff(repo, parent, node)
                action = None
            elif action not in ('y', 'n', 'm', 'c', 'q'):
                ui.write('no such option\n')
                action = None
        if action == 'y':
            transplants.append(node)
        elif action == 'm':
            merges.append(node)
        elif action == 'c':
            # commit what has been selected so far
            break
        elif action == 'q':
            # cancel: discard everything selected so far
            transplants = ()
            merges = ()
            break
    return (transplants, merges)
428 424
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. If --log is
    specified, log messages will have a comment appended of the form:

    (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message
    as $1 and the patch as $2.

    If --source is specified, selects changesets from the named
    repository. If --branch is specified, selects changesets from the
    branch holding the named revision, up to that revision. If --all
    is specified, all changesets on the branch will be transplanted,
    otherwise you will be prompted to select the changesets you want.

    hg transplant --branch REVISION --all will rebase the selected branch
    (up to the named revision) onto your current working directory.

    You can optionally mark selected transplanted changesets as
    merge changesets. You will not be prompted to transplant any
    ancestors of a merged transplant, and you can merge descendants
    of them normally instead of transplanting them.

    If no merges or revisions are provided, hg transplant will start
    an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand and
    then resume where you left off by calling hg transplant --continue.
    '''
    def getoneitem(opts, item, errmsg):
        # fetch a single-valued option, aborting if it was given twice
        val = opts.get(item)
        if val:
            if len(val) > 1:
                raise util.Abort(errmsg)
            else:
                return val[0]

    def getremotechanges(repo, url):
        # find changesets at url missing from repo; if the source is not
        # local, pull them into a temporary bundle repository
        sourcerepo = ui.expandpath(url)
        source = hg.repository(ui, sourcerepo)
        incoming = repo.findincoming(source, force=True)
        if not incoming:
            return (source, None, None)

        bundle = None
        if not source.local():
            cg = source.changegroup(incoming, 'incoming')
            bundle = changegroup.writebundle(cg, None, 'HG10UN')
            source = bundlerepo.bundlerepository(ui, repo.root, bundle)

        return (source, incoming, bundle)

    def incwalk(repo, incoming, branches, match=util.always):
        # yield incoming changesets limited to branches, filtered by match
        if not branches:
            branches=None
        for node in repo.changelog.nodesbetween(incoming, branches)[0]:
            if match(node):
                yield node

    def transplantwalk(repo, root, branches, match=util.always):
        # yield changesets between root's ancestors and the branch heads
        if not branches:
            branches = repo.heads()
        ancestors = []
        for branch in branches:
            ancestors.append(repo.changelog.ancestor(root, branch))
        for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # validate mutually-exclusive option combinations up front
        if opts.get('continue'):
            if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
                raise util.Abort(_('--continue is incompatible with branch, all or merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise util.Abort(_('no source URL, branch tag or revision list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise util.Abort(_('--all requires a branch revision'))
            if revs:
                raise util.Abort(_('--all is incompatible with a revision list'))

    checkopts(opts, revs)

    # fall back to hgrc [transplant] settings for log/filter
    if not opts.get('log'):
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo)

    p1, p2 = repo.dirstate.parents()
    if p1 == revlog.nullid:
        raise util.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        # a fresh run requires a clean working directory
        if p2 != revlog.nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise util.Abort(_('outstanding local changes'))

    bundle = None
    source = opts.get('source')
    if source:
        (source, incoming, bundle) = getremotechanges(repo, source)
    else:
        source = repo

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf=tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = [source.lookup(r)
                     for r in cmdutil.revrange(source, opts.get('prune'))]
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        branches = map(source.lookup, opts.get('branch', ()))
        merges = map(source.lookup, opts.get('merge', ()))
        # revmap: source revision number -> node to transplant
        revmap = {}
        if revs:
            for r in cmdutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, incoming, branches, match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, branches, match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
        for r in revs:
            revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        # clean up any temporary bundle repository
        if bundle:
            source.close()
            os.unlink(bundle)
584 576
# command table consumed by the extension loader:
# name -> (function, option list, synopsis)
cmdtable = {
    "transplant":
        (transplant,
         [('s', 'source', '', _('pull patches from REPOSITORY')),
          ('b', 'branch', [], _('pull patches from branch BRANCH')),
          ('a', 'all', None, _('pull all changesets up to BRANCH')),
          ('p', 'prune', [], _('skip over REV')),
          ('m', 'merge', [], _('merge at REV')),
          ('', 'log', None, _('append transplant info to log message')),
          ('c', 'continue', None, _('continue last transplant session after repair')),
          ('', 'filter', '', _('filter changesets through FILTER'))],
         _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
}
@@ -1,195 +1,192
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid
9 9 from revlog import revlog
10 10 import util
11 11
12 12 def _string_escape(text):
13 13 """
14 14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 16 >>> s
17 17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 18 >>> res = _string_escape(s)
19 19 >>> s == res.decode('string_escape')
20 20 True
21 21 """
22 22 # subset of the string_escape codec
23 23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 24 return text.replace('\0', '\\0')
25 25
class appender:
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''
    def __init__(self, fp, buf):
        # buf: list of pending string chunks not yet written to disk
        self.data = buf
        self.fp = fp
        self.offset = fp.tell()
        # size of the real on-disk file; offsets beyond it live in self.data
        self.size = util.fstat(fp).st_size

    def end(self):
        # virtual end-of-file: real size plus all buffered data
        return self.size + len("".join(self.data))
    def tell(self):
        return self.offset
    def flush(self):
        # buffered data is flushed by the owner, not here
        pass
    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the real file need a real seek
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = ""
        if self.offset < self.size:
            # read as much as possible from the real file first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue from the in-memory buffer; collapse it into a
            # single chunk so it can be sliced directly
            doff = self.offset - self.size
            self.data.insert(0, "".join(self.data))
            del self.data[1:]
            s = self.data[0][doff:doff+count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(str(s))
        self.offset += len(s)
76 76
class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i")

    def delayupdate(self):
        "delay visibility of index updates to other readers"
        self._realopener = self.opener
        self.opener = self._delayopener
        # number of revisions present before the delay started
        self._delaycount = len(self)
        self._delaybuf = []
        self._delayname = None

    def finalize(self, tr):
        "finalize index updates"
        self.opener = self._realopener
        # move redirected index data back into place
        if self._delayname:
            util.rename(self._delayname + ".a", self._delayname)
        elif self._delaybuf:
            fp = self.opener(self.indexfile, 'a')
            fp.write("".join(self._delaybuf))
            fp.close()
        del self._delaybuf
        # split when we're done
        self.checkinlinesize(tr)

    def _delayopener(self, name, mode='r'):
        # opener wrapper installed by delayupdate()
        fp = self._realopener(name, mode)
        # only divert the index
        if not name == self.indexfile:
            return fp
        # if we're doing an initial clone, divert to another file
        if self._delaycount == 0:
            self._delayname = fp.name
            if not len(self):
                # make sure to truncate the file
                mode = mode.replace('a', 'w')
            return self._realopener(name + ".a", mode)
        # otherwise, divert to memory
        return appender(fp, self._delaybuf)

    def checkinlinesize(self, tr, fp=None):
        # postpone inline-to-split conversion while writes are delayed
        if self.opener == self._delayopener:
            return
        return revlog.checkinlinesize(self, tr, fp)

    def decode_extra(self, text):
        # parse NUL-separated, string-escaped "key:value" metadata
        extra = {}
        for l in text.split('\0'):
            if l:
                k, v = l.decode('string_escape').split(':', 1)
                extra[k] = v
        return extra

    def encode_extra(self, d):
        # keys must be sorted to produce a deterministic changelog entry
        items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)]
        return "\0".join(items)

    def read(self, node):
        """
        format used:
        nodeid\n        : manifest node in ascii
        user\n          : user, no \n or \r allowed
        time tz extra\n : date (time is int or float, timezone is int)
                        : extra is metadatas, encoded and separated by '\0'
                        : older versions ignore it
        files\n\n       : files modified by the cset, no \n or \r allowed
        (.*)            : comment (free text, ideally utf-8)

        changelog v0 doesn't use extra
        """
        text = self.revision(node)
        if not text:
            return (nullid, "", (0, 0), [], "", {'branch': 'default'})
        last = text.index("\n\n")
        desc = util.tolocal(text[last + 2:])
        l = text[:last].split('\n')
        manifest = bin(l[0])
        user = util.tolocal(l[1])

        extra_data = l[2].split(' ', 2)
        if len(extra_data) != 3:
            # changelog v0: no timezone/extra on the date line
            time = float(extra_data.pop(0))
            try:
                # various tools did silly things with the time zone field.
                timezone = int(extra_data[0])
            except:
                timezone = 0
            extra = {}
        else:
            time, timezone, extra = extra_data
            time, timezone = float(time), int(timezone)
            extra = self.decode_extra(extra)
        if not extra.get('branch'):
            extra['branch'] = 'default'
        files = l[3:]
        return (manifest, user, (time, timezone), files, desc, extra)

    def add(self, manifest, files, desc, transaction, p1=None, p2=None,
            user=None, date=None, extra={}):
        # NOTE(review): mutable default for extra, and the caller's dict
        # is mutated below (del extra["branch"]) -- pre-existing behavior
        user, desc = util.fromlocal(user), util.fromlocal(desc)

        if date:
            parseddate = "%d %d" % util.parsedate(date)
        else:
            parseddate = "%d %d" % util.makedate()
        if extra and extra.get("branch") in ("default", ""):
            # the default branch is implicit; don't record it
            del extra["branch"]
        if extra:
            extra = self.encode_extra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, len(self), p1, p2)
@@ -1,1192 +1,1183
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12 import match as _match
13 13
# separator used in "start:end" revision range specifications
revrangesep = ':'

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20 20
def findpossible(ui, cmd, table):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for entry in table.keys():
        # table keys look like "^name|alias1|alias2"
        aliases = entry.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            # unless strict, allow unambiguous-prefix matching
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    if not choice and debugchoice:
        choice = debugchoice

    return choice
49 49
def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if cmd prefix-matches several commands and
    UnknownCommand if it matches none.
    """
    choice = findpossible(ui, cmd, table)

    if cmd in choice:
        # an exact match wins over prefix matches
        return choice[cmd]

    if len(choice) > 1:
        # report candidates in sorted order; use the util.sort helper
        # for consistency with the rest of the codebase
        clist = util.sort(choice)
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
66 66
def bail_if_changed(repo):
    '''abort if the working directory has an uncommitted merge or changes'''
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    m, a, r, d = repo.status()[:4]
    if m or a or r or d:
        raise util.Abort(_("outstanding uncommitted changes"))
73 73
74 74 def logmessage(opts):
75 75 """ get the log message according to -m and -l option """
76 76 message = opts['message']
77 77 logfile = opts['logfile']
78 78
79 79 if message and logfile:
80 80 raise util.Abort(_('options --message and --logfile are mutually '
81 81 'exclusive'))
82 82 if not message and logfile:
83 83 try:
84 84 if logfile == '-':
85 85 message = sys.stdin.read()
86 86 else:
87 87 message = open(logfile).read()
88 88 except IOError, inst:
89 89 raise util.Abort(_("can't read commit message '%s': %s") %
90 90 (logfile, inst.strerror))
91 91 return message
92 92
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no limit requested: effectively unlimited
        return sys.maxint
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
105 105
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key)
        if value:
            ui.setconfig("ui", key, value)
112 112
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # substitute defval for an empty spec (but keep an explicit 0)
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # default: first working-directory parent vs. working dir
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            # "a:b" -- both ends given in a single spec
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
140 140
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec (but not an explicit 0) falls back to defval
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = {}
    result = []

    def record(rev):
        # keep only the first occurrence, preserving order
        if rev not in seen:
            seen[rev] = 1
            result.append(rev)

    for spec in revs:
        if revrangesep in spec:
            first, last = spec.split(revrangesep, 1)
            first = revfix(repo, first, 0)
            last = revfix(repo, last, len(repo) - 1)
            step = first > last and -1 or 1
            for rev in xrange(first, last + step, step):
                record(rev)
        else:
            record(revfix(repo, spec, None))

    return result
169 169
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    '''Expand %-escapes in an output file name pattern.

    Escapes: %% literal percent, %b repo basename; %H/%h/%R full node,
    short node and revision (when node is given); %r zero-padded
    revision; %N total, %n zero-padded sequence number; %s/%d/%p
    basename, dirname and full pathname.  Aborts on unknown escapes.
    '''
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # zero-pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the escape character and expand it
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
214 214
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    '''resolve pat to an open file (or pass through a file-like object)'''
    writing = 'w' in mode
    if not pat or pat == '-':
        # no pattern: use the standard streams
        return writing and sys.stdout or sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
226 226
def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    '''build a file matcher from patterns/options, warning on bad specs'''
    if not globbed and default == 'relpath':
        pats = util.expand_glob(pats or [])
    matcher = _match.match(repo.root, repo.getcwd(), pats,
                           opts.get('include'), opts.get('exclude'), default)

    def badfn(f, msg):
        # report unmatchable files instead of aborting
        repo.ui.warn("%s: %s\n" % (matcher.rel(f), msg))
        return False

    matcher.bad = badfn
    return matcher
237 237
def matchall(repo):
    '''return a matcher that matches every file in the repository'''
    return _match.always(repo.root, repo.getcwd())
240 240
def matchfiles(repo, files):
    '''return a matcher that matches exactly the given file list'''
    return _match.exact(repo.root, repo.getcwd(), files)
243 243
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo['.']
    for a in added:
        aa = repo.wread(a)
        # only candidates at least as similar as threshold are kept
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                # score = fraction of matching bytes across both files
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
271 271
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    optionally recording similar add/remove pairs as renames.'''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    # mapping: abs path -> (relative path, exact-match flag), for messages
    mapping = {}
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except:
            # paths failing the audit are treated as missing/removable
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, m.exact(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        # pair up likely renames among the adds and removes
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
314 314
def copy(ui, repo, pats, opts, rename=False):
    '''Copy (or move, when rename=True) files matching pats to the last
    element of pats; returns the number of failed copies.'''
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    # targets: abs target -> abs source, used to detect collisions
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        # expand a single source pattern into (abs, rel, exact) entries
        srcs = []
        m = match(repo, [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in '?r':
                # only complain about unmanaged/removed files named exactly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy one file and record the copy in the dirstate;
        # returns True to report a failure
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            action = rename and "moving" or "copying"
            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a':
                # source was only added; no copy data can be recorded
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], not after)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors
517 517
518 518 def service(opts, parentfn=None, initfn=None, runfn=None):
519 519 '''Run a command as a service.'''
520 520
521 521 if opts['daemon'] and not opts['daemon_pipefds']:
522 522 rfd, wfd = os.pipe()
523 523 args = sys.argv[:]
524 524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
525 525 # Don't pass --cwd to the child process, because we've already
526 526 # changed directory.
527 527 for i in xrange(1,len(args)):
528 528 if args[i].startswith('--cwd='):
529 529 del args[i]
530 530 break
531 531 elif args[i].startswith('--cwd'):
532 532 del args[i:i+2]
533 533 break
534 534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
535 535 args[0], args)
536 536 os.close(wfd)
537 537 os.read(rfd, 1)
538 538 if parentfn:
539 539 return parentfn(pid)
540 540 else:
541 541 os._exit(0)
542 542
543 543 if initfn:
544 544 initfn()
545 545
546 546 if opts['pid_file']:
547 547 fp = open(opts['pid_file'], 'w')
548 548 fp.write(str(os.getpid()) + '\n')
549 549 fp.close()
550 550
551 551 if opts['daemon_pipefds']:
552 552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
553 553 os.close(rfd)
554 554 try:
555 555 os.setsid()
556 556 except AttributeError:
557 557 pass
558 558 os.write(wfd, 'y')
559 559 os.close(wfd)
560 560 sys.stdout.flush()
561 561 sys.stderr.flush()
562 562 fd = os.open(util.nulldev, os.O_RDWR)
563 563 if fd != 0: os.dup2(fd, 0)
564 564 if fd != 1: os.dup2(fd, 1)
565 565 if fd != 2: os.dup2(fd, 2)
566 566 if fd not in (0, 1, 2): os.close(fd)
567 567
568 568 if runfn:
569 569 return runfn()
570 570
571 571 class changeset_printer(object):
572 572 '''show changeset information when templating not requested.'''
573 573
574 574 def __init__(self, ui, repo, patch, buffered):
575 575 self.ui = ui
576 576 self.repo = repo
577 577 self.buffered = buffered
578 578 self.patch = patch
579 579 self.header = {}
580 580 self.hunk = {}
581 581 self.lastheader = None
582 582
583 583 def flush(self, rev):
584 584 if rev in self.header:
585 585 h = self.header[rev]
586 586 if h != self.lastheader:
587 587 self.lastheader = h
588 588 self.ui.write(h)
589 589 del self.header[rev]
590 590 if rev in self.hunk:
591 591 self.ui.write(self.hunk[rev])
592 592 del self.hunk[rev]
593 593 return 1
594 594 return 0
595 595
596 596 def show(self, rev=0, changenode=None, copies=(), **props):
597 597 if self.buffered:
598 598 self.ui.pushbuffer()
599 599 self._show(rev, changenode, copies, props)
600 600 self.hunk[rev] = self.ui.popbuffer()
601 601 else:
602 602 self._show(rev, changenode, copies, props)
603 603
604 604 def _show(self, rev, changenode, copies, props):
605 605 '''show a single changeset or file revision'''
606 606 log = self.repo.changelog
607 607 if changenode is None:
608 608 changenode = log.node(rev)
609 609 elif not rev:
610 610 rev = log.rev(changenode)
611 611
612 612 if self.ui.quiet:
613 613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
614 614 return
615 615
616 616 changes = log.read(changenode)
617 617 date = util.datestr(changes[2])
618 618 extra = changes[5]
619 619 branch = extra.get("branch")
620 620
621 621 hexfunc = self.ui.debugflag and hex or short
622 622
623 623 parents = [(p, hexfunc(log.node(p)))
624 624 for p in self._meaningful_parentrevs(log, rev)]
625 625
626 626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
627 627
628 628 # don't show the default branch name
629 629 if branch != 'default':
630 630 branch = util.tolocal(branch)
631 631 self.ui.write(_("branch: %s\n") % branch)
632 632 for tag in self.repo.nodetags(changenode):
633 633 self.ui.write(_("tag: %s\n") % tag)
634 634 for parent in parents:
635 635 self.ui.write(_("parent: %d:%s\n") % parent)
636 636
637 637 if self.ui.debugflag:
638 638 self.ui.write(_("manifest: %d:%s\n") %
639 639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
640 640 self.ui.write(_("user: %s\n") % changes[1])
641 641 self.ui.write(_("date: %s\n") % date)
642 642
643 643 if self.ui.debugflag:
644 644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
645 645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
646 646 files):
647 647 if value:
648 648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
649 649 elif changes[3] and self.ui.verbose:
650 650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
651 651 if copies and self.ui.verbose:
652 652 copies = ['%s (%s)' % c for c in copies]
653 653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
654 654
655 655 if extra and self.ui.debugflag:
656 extraitems = extra.items()
657 extraitems.sort()
658 for key, value in extraitems:
656 for key, value in util.sort(extra.items()):
659 657 self.ui.write(_("extra: %s=%s\n")
660 658 % (key, value.encode('string_escape')))
661 659
662 660 description = changes[4].strip()
663 661 if description:
664 662 if self.ui.verbose:
665 663 self.ui.write(_("description:\n"))
666 664 self.ui.write(description)
667 665 self.ui.write("\n\n")
668 666 else:
669 667 self.ui.write(_("summary: %s\n") %
670 668 description.splitlines()[0])
671 669 self.ui.write("\n")
672 670
673 671 self.showpatch(changenode)
674 672
675 673 def showpatch(self, node):
676 674 if self.patch:
677 675 prev = self.repo.changelog.parents(node)[0]
678 676 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
679 677 opts=patch.diffopts(self.ui))
680 678 self.ui.write("\n")
681 679
682 680 def _meaningful_parentrevs(self, log, rev):
683 681 """Return list of meaningful (or all if debug) parentrevs for rev.
684 682
685 683 For merges (two non-nullrev revisions) both parents are meaningful.
686 684 Otherwise the first parent revision is considered meaningful if it
687 685 is not the preceding revision.
688 686 """
689 687 parents = log.parentrevs(rev)
690 688 if not self.ui.debugflag and parents[1] == nullrev:
691 689 if parents[0] >= rev - 1:
692 690 parents = []
693 691 else:
694 692 parents = [parents[0]]
695 693 return parents
696 694
697 695
698 696 class changeset_templater(changeset_printer):
699 697 '''format changeset information.'''
700 698
701 699 def __init__(self, ui, repo, patch, mapfile, buffered):
702 700 changeset_printer.__init__(self, ui, repo, patch, buffered)
703 701 filters = templatefilters.filters.copy()
704 702 filters['formatnode'] = (ui.debugflag and (lambda x: x)
705 703 or (lambda x: x[:12]))
706 704 self.t = templater.templater(mapfile, filters,
707 705 cache={
708 706 'parent': '{rev}:{node|formatnode} ',
709 707 'manifest': '{rev}:{node|formatnode}',
710 708 'filecopy': '{name} ({source})'})
711 709
712 710 def use_template(self, t):
713 711 '''set template string to use'''
714 712 self.t.cache['changeset'] = t
715 713
716 714 def _show(self, rev, changenode, copies, props):
717 715 '''show a single changeset or file revision'''
718 716 log = self.repo.changelog
719 717 if changenode is None:
720 718 changenode = log.node(rev)
721 719 elif not rev:
722 720 rev = log.rev(changenode)
723 721
724 722 changes = log.read(changenode)
725 723
726 724 def showlist(name, values, plural=None, **args):
727 725 '''expand set of values.
728 726 name is name of key in template map.
729 727 values is list of strings or dicts.
730 728 plural is plural of name, if not simply name + 's'.
731 729
732 730 expansion works like this, given name 'foo'.
733 731
734 732 if values is empty, expand 'no_foos'.
735 733
736 734 if 'foo' not in template map, return values as a string,
737 735 joined by space.
738 736
739 737 expand 'start_foos'.
740 738
741 739 for each value, expand 'foo'. if 'last_foo' in template
742 740 map, expand it instead of 'foo' for last key.
743 741
744 742 expand 'end_foos'.
745 743 '''
746 744 if plural: names = plural
747 745 else: names = name + 's'
748 746 if not values:
749 747 noname = 'no_' + names
750 748 if noname in self.t:
751 749 yield self.t(noname, **args)
752 750 return
753 751 if name not in self.t:
754 752 if isinstance(values[0], str):
755 753 yield ' '.join(values)
756 754 else:
757 755 for v in values:
758 756 yield dict(v, **args)
759 757 return
760 758 startname = 'start_' + names
761 759 if startname in self.t:
762 760 yield self.t(startname, **args)
763 761 vargs = args.copy()
764 762 def one(v, tag=name):
765 763 try:
766 764 vargs.update(v)
767 765 except (AttributeError, ValueError):
768 766 try:
769 767 for a, b in v:
770 768 vargs[a] = b
771 769 except ValueError:
772 770 vargs[name] = v
773 771 return self.t(tag, **vargs)
774 772 lastname = 'last_' + name
775 773 if lastname in self.t:
776 774 last = values.pop()
777 775 else:
778 776 last = None
779 777 for v in values:
780 778 yield one(v)
781 779 if last is not None:
782 780 yield one(last, tag=lastname)
783 781 endname = 'end_' + names
784 782 if endname in self.t:
785 783 yield self.t(endname, **args)
786 784
787 785 def showbranches(**args):
788 786 branch = changes[5].get("branch")
789 787 if branch != 'default':
790 788 branch = util.tolocal(branch)
791 789 return showlist('branch', [branch], plural='branches', **args)
792 790
793 791 def showparents(**args):
794 792 parents = [[('rev', p), ('node', hex(log.node(p)))]
795 793 for p in self._meaningful_parentrevs(log, rev)]
796 794 return showlist('parent', parents, **args)
797 795
798 796 def showtags(**args):
799 797 return showlist('tag', self.repo.nodetags(changenode), **args)
800 798
801 799 def showextras(**args):
802 extras = changes[5].items()
803 extras.sort()
804 for key, value in extras:
800 for key, value in util.sort(changes[5].items()):
805 801 args = args.copy()
806 802 args.update(dict(key=key, value=value))
807 803 yield self.t('extra', **args)
808 804
809 805 def showcopies(**args):
810 806 c = [{'name': x[0], 'source': x[1]} for x in copies]
811 807 return showlist('file_copy', c, plural='file_copies', **args)
812 808
813 809 files = []
814 810 def getfiles():
815 811 if not files:
816 812 files[:] = self.repo.status(
817 813 log.parents(changenode)[0], changenode)[:3]
818 814 return files
819 815 def showfiles(**args):
820 816 return showlist('file', changes[3], **args)
821 817 def showmods(**args):
822 818 return showlist('file_mod', getfiles()[0], **args)
823 819 def showadds(**args):
824 820 return showlist('file_add', getfiles()[1], **args)
825 821 def showdels(**args):
826 822 return showlist('file_del', getfiles()[2], **args)
827 823 def showmanifest(**args):
828 824 args = args.copy()
829 825 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
830 826 node=hex(changes[0])))
831 827 return self.t('manifest', **args)
832 828
833 829 defprops = {
834 830 'author': changes[1],
835 831 'branches': showbranches,
836 832 'date': changes[2],
837 833 'desc': changes[4].strip(),
838 834 'file_adds': showadds,
839 835 'file_dels': showdels,
840 836 'file_mods': showmods,
841 837 'files': showfiles,
842 838 'file_copies': showcopies,
843 839 'manifest': showmanifest,
844 840 'node': hex(changenode),
845 841 'parents': showparents,
846 842 'rev': rev,
847 843 'tags': showtags,
848 844 'extras': showextras,
849 845 }
850 846 props = props.copy()
851 847 props.update(defprops)
852 848
853 849 try:
854 850 if self.ui.debugflag and 'header_debug' in self.t:
855 851 key = 'header_debug'
856 852 elif self.ui.quiet and 'header_quiet' in self.t:
857 853 key = 'header_quiet'
858 854 elif self.ui.verbose and 'header_verbose' in self.t:
859 855 key = 'header_verbose'
860 856 elif 'header' in self.t:
861 857 key = 'header'
862 858 else:
863 859 key = ''
864 860 if key:
865 861 h = templater.stringify(self.t(key, **props))
866 862 if self.buffered:
867 863 self.header[rev] = h
868 864 else:
869 865 self.ui.write(h)
870 866 if self.ui.debugflag and 'changeset_debug' in self.t:
871 867 key = 'changeset_debug'
872 868 elif self.ui.quiet and 'changeset_quiet' in self.t:
873 869 key = 'changeset_quiet'
874 870 elif self.ui.verbose and 'changeset_verbose' in self.t:
875 871 key = 'changeset_verbose'
876 872 else:
877 873 key = 'changeset'
878 874 self.ui.write(templater.stringify(self.t(key, **props)))
879 875 self.showpatch(changenode)
880 876 except KeyError, inst:
881 877 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
882 878 inst.args[0]))
883 879 except SyntaxError, inst:
884 880 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
885 881
886 882 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
887 883 """show one changeset using template or regular display.
888 884
889 885 Display format will be the first non-empty hit of:
890 886 1. option 'template'
891 887 2. option 'style'
892 888 3. [ui] setting 'logtemplate'
893 889 4. [ui] setting 'style'
894 890 If all of these values are either the unset or the empty string,
895 891 regular display via changeset_printer() is done.
896 892 """
897 893 # options
898 894 patch = False
899 895 if opts.get('patch'):
900 896 patch = matchfn or matchall(repo)
901 897
902 898 tmpl = opts.get('template')
903 899 mapfile = None
904 900 if tmpl:
905 901 tmpl = templater.parsestring(tmpl, quoted=False)
906 902 else:
907 903 mapfile = opts.get('style')
908 904 # ui settings
909 905 if not mapfile:
910 906 tmpl = ui.config('ui', 'logtemplate')
911 907 if tmpl:
912 908 tmpl = templater.parsestring(tmpl)
913 909 else:
914 910 mapfile = ui.config('ui', 'style')
915 911
916 912 if tmpl or mapfile:
917 913 if mapfile:
918 914 if not os.path.split(mapfile)[0]:
919 915 mapname = (templater.templatepath('map-cmdline.' + mapfile)
920 916 or templater.templatepath(mapfile))
921 917 if mapname: mapfile = mapname
922 918 try:
923 919 t = changeset_templater(ui, repo, patch, mapfile, buffered)
924 920 except SyntaxError, inst:
925 921 raise util.Abort(inst.args[0])
926 922 if tmpl: t.use_template(tmpl)
927 923 return t
928 924 return changeset_printer(ui, repo, patch, buffered)
929 925
930 926 def finddate(ui, repo, date):
931 927 """Find the tipmost changeset that matches the given date spec"""
932 928 df = util.matchdate(date)
933 929 get = util.cachefunc(lambda r: repo[r].changeset())
934 930 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
935 931 results = {}
936 932 for st, rev, fns in changeiter:
937 933 if st == 'add':
938 934 d = get(rev)[2]
939 935 if df(d[0]):
940 936 results[rev] = d
941 937 elif st == 'iter':
942 938 if rev in results:
943 939 ui.status("Found revision %s from %s\n" %
944 940 (rev, util.datestr(results[rev])))
945 941 return str(rev)
946 942
947 943 raise util.Abort(_("revision matching date not found"))
948 944
949 945 def walkchangerevs(ui, repo, pats, change, opts):
950 946 '''Iterate over files and the revs they changed in.
951 947
952 948 Callers most commonly need to iterate backwards over the history
953 949 it is interested in. Doing so has awful (quadratic-looking)
954 950 performance, so we use iterators in a "windowed" way.
955 951
956 952 We walk a window of revisions in the desired order. Within the
957 953 window, we first walk forwards to gather data, then in the desired
958 954 order (usually backwards) to display it.
959 955
960 956 This function returns an (iterator, matchfn) tuple. The iterator
961 957 yields 3-tuples. They will be of one of the following forms:
962 958
963 959 "window", incrementing, lastrev: stepping through a window,
964 960 positive if walking forwards through revs, last rev in the
965 961 sequence iterated over - use to reset state for the current window
966 962
967 963 "add", rev, fns: out-of-order traversal of the given file names
968 964 fns, which changed during revision rev - use to gather data for
969 965 possible display
970 966
971 967 "iter", rev, None: in-order traversal of the revs earlier iterated
972 968 over with "add" - use to display data'''
973 969
974 970 def increasing_windows(start, end, windowsize=8, sizelimit=512):
975 971 if start < end:
976 972 while start < end:
977 973 yield start, min(windowsize, end-start)
978 974 start += windowsize
979 975 if windowsize < sizelimit:
980 976 windowsize *= 2
981 977 else:
982 978 while start > end:
983 979 yield start, min(windowsize, start-end-1)
984 980 start -= windowsize
985 981 if windowsize < sizelimit:
986 982 windowsize *= 2
987 983
988 984 m = match(repo, pats, opts)
989 985 follow = opts.get('follow') or opts.get('follow_first')
990 986
991 987 if not len(repo):
992 988 return [], m
993 989
994 990 if follow:
995 991 defrange = '%s:0' % repo['.'].rev()
996 992 else:
997 993 defrange = '-1:0'
998 994 revs = revrange(repo, opts['rev'] or [defrange])
999 995 wanted = {}
1000 996 slowpath = m.anypats() or opts.get('removed')
1001 997 fncache = {}
1002 998
1003 999 if not slowpath and not m.files():
1004 1000 # No files, no patterns. Display all revs.
1005 1001 wanted = dict.fromkeys(revs)
1006 1002 copies = []
1007 1003 if not slowpath:
1008 1004 # Only files, no patterns. Check the history of each file.
1009 1005 def filerevgen(filelog, node):
1010 1006 cl_count = len(repo)
1011 1007 if node is None:
1012 1008 last = len(filelog) - 1
1013 1009 else:
1014 1010 last = filelog.rev(node)
1015 1011 for i, window in increasing_windows(last, nullrev):
1016 1012 revs = []
1017 1013 for j in xrange(i - window, i + 1):
1018 1014 n = filelog.node(j)
1019 1015 revs.append((filelog.linkrev(n),
1020 1016 follow and filelog.renamed(n)))
1021 1017 revs.reverse()
1022 1018 for rev in revs:
1023 1019 # only yield rev for which we have the changelog, it can
1024 1020 # happen while doing "hg log" during a pull or commit
1025 1021 if rev[0] < cl_count:
1026 1022 yield rev
1027 1023 def iterfiles():
1028 1024 for filename in m.files():
1029 1025 yield filename, None
1030 1026 for filename_node in copies:
1031 1027 yield filename_node
1032 1028 minrev, maxrev = min(revs), max(revs)
1033 1029 for file_, node in iterfiles():
1034 1030 filelog = repo.file(file_)
1035 1031 if not len(filelog):
1036 1032 if node is None:
1037 1033 # A zero count may be a directory or deleted file, so
1038 1034 # try to find matching entries on the slow path.
1039 1035 slowpath = True
1040 1036 break
1041 1037 else:
1042 1038 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1043 1039 % (file_, short(node)))
1044 1040 continue
1045 1041 for rev, copied in filerevgen(filelog, node):
1046 1042 if rev <= maxrev:
1047 1043 if rev < minrev:
1048 1044 break
1049 1045 fncache.setdefault(rev, [])
1050 1046 fncache[rev].append(file_)
1051 1047 wanted[rev] = 1
1052 1048 if follow and copied:
1053 1049 copies.append(copied)
1054 1050 if slowpath:
1055 1051 if follow:
1056 1052 raise util.Abort(_('can only follow copies/renames for explicit '
1057 1053 'file names'))
1058 1054
1059 1055 # The slow path checks files modified in every changeset.
1060 1056 def changerevgen():
1061 1057 for i, window in increasing_windows(len(repo) - 1, nullrev):
1062 1058 for j in xrange(i - window, i + 1):
1063 1059 yield j, change(j)[3]
1064 1060
1065 1061 for rev, changefiles in changerevgen():
1066 1062 matches = filter(m, changefiles)
1067 1063 if matches:
1068 1064 fncache[rev] = matches
1069 1065 wanted[rev] = 1
1070 1066
1071 1067 class followfilter:
1072 1068 def __init__(self, onlyfirst=False):
1073 1069 self.startrev = nullrev
1074 1070 self.roots = []
1075 1071 self.onlyfirst = onlyfirst
1076 1072
1077 1073 def match(self, rev):
1078 1074 def realparents(rev):
1079 1075 if self.onlyfirst:
1080 1076 return repo.changelog.parentrevs(rev)[0:1]
1081 1077 else:
1082 1078 return filter(lambda x: x != nullrev,
1083 1079 repo.changelog.parentrevs(rev))
1084 1080
1085 1081 if self.startrev == nullrev:
1086 1082 self.startrev = rev
1087 1083 return True
1088 1084
1089 1085 if rev > self.startrev:
1090 1086 # forward: all descendants
1091 1087 if not self.roots:
1092 1088 self.roots.append(self.startrev)
1093 1089 for parent in realparents(rev):
1094 1090 if parent in self.roots:
1095 1091 self.roots.append(rev)
1096 1092 return True
1097 1093 else:
1098 1094 # backwards: all parents
1099 1095 if not self.roots:
1100 1096 self.roots.extend(realparents(self.startrev))
1101 1097 if rev in self.roots:
1102 1098 self.roots.remove(rev)
1103 1099 self.roots.extend(realparents(rev))
1104 1100 return True
1105 1101
1106 1102 return False
1107 1103
1108 1104 # it might be worthwhile to do this in the iterator if the rev range
1109 1105 # is descending and the prune args are all within that range
1110 1106 for rev in opts.get('prune', ()):
1111 1107 rev = repo.changelog.rev(repo.lookup(rev))
1112 1108 ff = followfilter()
1113 1109 stop = min(revs[0], revs[-1])
1114 1110 for x in xrange(rev, stop-1, -1):
1115 1111 if ff.match(x) and x in wanted:
1116 1112 del wanted[x]
1117 1113
1118 1114 def iterate():
1119 1115 if follow and not m.files():
1120 1116 ff = followfilter(onlyfirst=opts.get('follow_first'))
1121 1117 def want(rev):
1122 1118 if ff.match(rev) and rev in wanted:
1123 1119 return True
1124 1120 return False
1125 1121 else:
1126 1122 def want(rev):
1127 1123 return rev in wanted
1128 1124
1129 1125 for i, window in increasing_windows(0, len(revs)):
1130 1126 yield 'window', revs[0] < revs[-1], revs[-1]
1131 1127 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1132 srevs = list(nrevs)
1133 srevs.sort()
1134 for rev in srevs:
1128 for rev in util.sort(list(nrevs)):
1135 1129 fns = fncache.get(rev)
1136 1130 if not fns:
1137 1131 def fns_generator():
1138 1132 for f in change(rev)[3]:
1139 1133 if m(f):
1140 1134 yield f
1141 1135 fns = fns_generator()
1142 1136 yield 'add', rev, fns
1143 1137 for rev in nrevs:
1144 1138 yield 'iter', rev, None
1145 1139 return iterate(), m
1146 1140
1147 1141 def commit(ui, repo, commitfunc, pats, opts):
1148 1142 '''commit the specified files or all outstanding changes'''
1149 1143 date = opts.get('date')
1150 1144 if date:
1151 1145 opts['date'] = util.parsedate(date)
1152 1146 message = logmessage(opts)
1153 1147
1154 1148 # extract addremove carefully -- this function can be called from a command
1155 1149 # that doesn't support addremove
1156 1150 if opts.get('addremove'):
1157 1151 addremove(repo, pats, opts)
1158 1152
1159 1153 m = match(repo, pats, opts)
1160 1154 if pats:
1161 1155 modified, added, removed = repo.status(match=m)[:3]
1162 files = modified + added + removed
1156 files = util.sort(modified + added + removed)
1163 1157 slist = None
1164 1158 for f in m.files():
1165 1159 if f == '.':
1166 1160 continue
1167 1161 if f not in files:
1168 1162 rf = repo.wjoin(f)
1169 1163 rel = repo.pathto(f)
1170 1164 try:
1171 1165 mode = os.lstat(rf)[stat.ST_MODE]
1172 1166 except OSError:
1173 1167 raise util.Abort(_("file %s not found!") % rel)
1174 1168 if stat.S_ISDIR(mode):
1175 1169 name = f + '/'
1176 if slist is None:
1177 slist = list(files)
1178 slist.sort()
1179 i = bisect.bisect(slist, name)
1180 if i >= len(slist) or not slist[i].startswith(name):
1170 i = bisect.bisect(files, name)
1171 if i >= len(files) or not files[i].startswith(name):
1181 1172 raise util.Abort(_("no match under directory %s!")
1182 1173 % rel)
1183 1174 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1184 1175 raise util.Abort(_("can't commit %s: "
1185 1176 "unsupported file type!") % rel)
1186 1177 elif f not in repo.dirstate:
1187 1178 raise util.Abort(_("file %s not tracked!") % rel)
1188 1179 m = matchfiles(repo, files)
1189 1180 try:
1190 1181 return commitfunc(ui, repo, message, m, opts)
1191 1182 except ValueError, inst:
1192 1183 raise util.Abort(str(inst))
@@ -1,3315 +1,3300
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from repo import RepoError, NoCapability
10 10 from i18n import _
11 11 import os, re, sys, urllib
12 12 import hg, util, revlog, bundlerepo, extensions, copies
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
20 20 def add(ui, repo, *pats, **opts):
21 21 """add the specified files on the next commit
22 22
23 23 Schedule files to be version controlled and added to the repository.
24 24
25 25 The files will be added to the repository at the next commit. To
26 26 undo an add before that, see hg revert.
27 27
28 28 If no names are given, add all files in the repository.
29 29 """
30 30
31 31 rejected = None
32 32 exacts = {}
33 33 names = []
34 34 m = cmdutil.match(repo, pats, opts)
35 35 m.bad = lambda x,y: True
36 36 for abs in repo.walk(m):
37 37 if m.exact(abs):
38 38 if ui.verbose:
39 39 ui.status(_('adding %s\n') % m.rel(abs))
40 40 names.append(abs)
41 41 exacts[abs] = 1
42 42 elif abs not in repo.dirstate:
43 43 ui.status(_('adding %s\n') % m.rel(abs))
44 44 names.append(abs)
45 45 if not opts.get('dry_run'):
46 46 rejected = repo.add(names)
47 47 rejected = [p for p in rejected if p in exacts]
48 48 return rejected and 1 or 0
49 49
50 50 def addremove(ui, repo, *pats, **opts):
51 51 """add all new files, delete all missing files
52 52
53 53 Add all new files and remove all missing files from the repository.
54 54
55 55 New files are ignored if they match any of the patterns in .hgignore. As
56 56 with add, these changes take effect at the next commit.
57 57
58 58 Use the -s option to detect renamed files. With a parameter > 0,
59 59 this compares every removed file with every added file and records
60 60 those similar enough as renames. This option takes a percentage
61 61 between 0 (disabled) and 100 (files must be identical) as its
62 62 parameter. Detecting renamed files this way can be expensive.
63 63 """
64 64 try:
65 65 sim = float(opts.get('similarity') or 0)
66 66 except ValueError:
67 67 raise util.Abort(_('similarity must be a number'))
68 68 if sim < 0 or sim > 100:
69 69 raise util.Abort(_('similarity must be between 0 and 100'))
70 70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71 71
72 72 def annotate(ui, repo, *pats, **opts):
73 73 """show changeset information per file line
74 74
75 75 List changes in files, showing the revision id responsible for each line
76 76
77 77 This command is useful to discover who did a change or when a change took
78 78 place.
79 79
80 80 Without the -a option, annotate will avoid processing files it
81 81 detects as binary. With -a, annotate will generate an annotation
82 82 anyway, probably with undesirable results.
83 83 """
84 84 datefunc = ui.quiet and util.shortdate or util.datestr
85 85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
86 86
87 87 if not pats:
88 88 raise util.Abort(_('at least one file name or pattern required'))
89 89
90 90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
91 91 ('number', lambda x: str(x[0].rev())),
92 92 ('changeset', lambda x: short(x[0].node())),
93 93 ('date', getdate),
94 94 ('follow', lambda x: x[0].path()),
95 95 ]
96 96
97 97 if (not opts['user'] and not opts['changeset'] and not opts['date']
98 98 and not opts['follow']):
99 99 opts['number'] = 1
100 100
101 101 linenumber = opts.get('line_number') is not None
102 102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
103 103 raise util.Abort(_('at least one of -n/-c is required for -l'))
104 104
105 105 funcmap = [func for op, func in opmap if opts.get(op)]
106 106 if linenumber:
107 107 lastfunc = funcmap[-1]
108 108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
109 109
110 110 ctx = repo[opts['rev']]
111 111
112 112 m = cmdutil.match(repo, pats, opts)
113 113 for abs in repo.walk(m, ctx.node()):
114 114 fctx = ctx.filectx(abs)
115 115 if not opts['text'] and util.binary(fctx.data()):
116 116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
117 117 continue
118 118
119 119 lines = fctx.annotate(follow=opts.get('follow'),
120 120 linenumber=linenumber)
121 121 pieces = []
122 122
123 123 for f in funcmap:
124 124 l = [f(n) for n, dummy in lines]
125 125 if l:
126 126 m = max(map(len, l))
127 127 pieces.append(["%*s" % (m, x) for x in l])
128 128
129 129 if pieces:
130 130 for p, l in zip(zip(*pieces), lines):
131 131 ui.write("%s: %s" % (" ".join(p), l[1]))
132 132
133 133 def archive(ui, repo, dest, **opts):
134 134 '''create unversioned archive of a repository revision
135 135
136 136 By default, the revision used is the parent of the working
137 137 directory; use "-r" to specify a different revision.
138 138
139 139 To specify the type of archive to create, use "-t". Valid
140 140 types are:
141 141
142 142 "files" (default): a directory full of files
143 143 "tar": tar archive, uncompressed
144 144 "tbz2": tar archive, compressed using bzip2
145 145 "tgz": tar archive, compressed using gzip
146 146 "uzip": zip archive, uncompressed
147 147 "zip": zip archive, compressed using deflate
148 148
149 149 The exact name of the destination archive or directory is given
150 150 using a format string; see "hg help export" for details.
151 151
152 152 Each member added to an archive file has a directory prefix
153 153 prepended. Use "-p" to specify a format string for the prefix.
154 154 The default is the basename of the archive, with suffixes removed.
155 155 '''
156 156
157 157 ctx = repo[opts['rev']]
158 158 if not ctx:
159 159 raise util.Abort(_('repository has no revisions'))
160 160 node = ctx.node()
161 161 dest = cmdutil.make_filename(repo, dest, node)
162 162 if os.path.realpath(dest) == repo.root:
163 163 raise util.Abort(_('repository root cannot be destination'))
164 164 matchfn = cmdutil.match(repo, [], opts)
165 165 kind = opts.get('type') or 'files'
166 166 prefix = opts['prefix']
167 167 if dest == '-':
168 168 if kind == 'files':
169 169 raise util.Abort(_('cannot archive plain files to stdout'))
170 170 dest = sys.stdout
171 171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
172 172 prefix = cmdutil.make_filename(repo, prefix, node)
173 173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
174 174 matchfn, prefix)
175 175
176 176 def backout(ui, repo, node=None, rev=None, **opts):
177 177 '''reverse effect of earlier changeset
178 178
179 179 Commit the backed out changes as a new changeset. The new
180 180 changeset is a child of the backed out changeset.
181 181
182 182 If you back out a changeset other than the tip, a new head is
183 183 created. This head will be the new tip and you should merge this
184 184 backout changeset with another head (current one by default).
185 185
186 186 The --merge option remembers the parent of the working directory
187 187 before starting the backout, then merges the new head with that
188 188 changeset afterwards. This saves you from doing the merge by
189 189 hand. The result of this merge is not committed, as for a normal
190 190 merge.
191 191
192 192 See \'hg help dates\' for a list of formats valid for -d/--date.
193 193 '''
194 194 if rev and node:
195 195 raise util.Abort(_("please specify just one revision"))
196 196
197 197 if not rev:
198 198 rev = node
199 199
200 200 if not rev:
201 201 raise util.Abort(_("please specify a revision to backout"))
202 202
203 203 date = opts.get('date')
204 204 if date:
205 205 opts['date'] = util.parsedate(date)
206 206
207 207 cmdutil.bail_if_changed(repo)
208 208 node = repo.lookup(rev)
209 209
210 210 op1, op2 = repo.dirstate.parents()
211 211 a = repo.changelog.ancestor(op1, node)
212 212 if a != node:
213 213 raise util.Abort(_('cannot back out change on a different branch'))
214 214
215 215 p1, p2 = repo.changelog.parents(node)
216 216 if p1 == nullid:
217 217 raise util.Abort(_('cannot back out a change with no parents'))
218 218 if p2 != nullid:
219 219 if not opts['parent']:
220 220 raise util.Abort(_('cannot back out a merge changeset without '
221 221 '--parent'))
222 222 p = repo.lookup(opts['parent'])
223 223 if p not in (p1, p2):
224 224 raise util.Abort(_('%s is not a parent of %s') %
225 225 (short(p), short(node)))
226 226 parent = p
227 227 else:
228 228 if opts['parent']:
229 229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
230 230 parent = p1
231 231
232 232 # the backout should appear on the same branch
233 233 branch = repo.dirstate.branch()
234 234 hg.clean(repo, node, show_stats=False)
235 235 repo.dirstate.setbranch(branch)
236 236 revert_opts = opts.copy()
237 237 revert_opts['date'] = None
238 238 revert_opts['all'] = True
239 239 revert_opts['rev'] = hex(parent)
240 240 revert_opts['no_backup'] = None
241 241 revert(ui, repo, **revert_opts)
242 242 commit_opts = opts.copy()
243 243 commit_opts['addremove'] = False
244 244 if not commit_opts['message'] and not commit_opts['logfile']:
245 245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
246 246 commit_opts['force_editor'] = True
247 247 commit(ui, repo, **commit_opts)
248 248 def nice(node):
249 249 return '%d:%s' % (repo.changelog.rev(node), short(node))
250 250 ui.status(_('changeset %s backs out changeset %s\n') %
251 251 (nice(repo.changelog.tip()), nice(node)))
252 252 if op1 != node:
253 253 hg.clean(repo, op1, show_stats=False)
254 254 if opts['merge']:
255 255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
256 256 hg.merge(repo, hex(repo.changelog.tip()))
257 257 else:
258 258 ui.status(_('the backout changeset is a new head - '
259 259 'do not forget to merge\n'))
260 260 ui.status(_('(use "backout --merge" '
261 261 'if you want to auto-merge)\n'))
262 262
263 263 def bisect(ui, repo, rev=None, extra=None,
264 264 reset=None, good=None, bad=None, skip=None, noupdate=None):
265 265 """subdivision search of changesets
266 266
267 267 This command helps to find changesets which introduce problems.
268 268 To use, mark the earliest changeset you know exhibits the problem
269 269 as bad, then mark the latest changeset which is free from the
270 270 problem as good. Bisect will update your working directory to a
271 271 revision for testing. Once you have performed tests, mark the
272 272 working directory as bad or good and bisect will either update to
273 273 another candidate changeset or announce that it has found the bad
274 274 revision.
275 275 """
276 276 # backward compatibility
277 277 if rev in "good bad reset init".split():
278 278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
279 279 cmd, rev, extra = rev, extra, None
280 280 if cmd == "good":
281 281 good = True
282 282 elif cmd == "bad":
283 283 bad = True
284 284 else:
285 285 reset = True
286 286 elif extra or good + bad + skip + reset > 1:
287 287 raise util.Abort("Incompatible arguments")
288 288
289 289 if reset:
290 290 p = repo.join("bisect.state")
291 291 if os.path.exists(p):
292 292 os.unlink(p)
293 293 return
294 294
295 295 # load state
296 296 state = {'good': [], 'bad': [], 'skip': []}
297 297 if os.path.exists(repo.join("bisect.state")):
298 298 for l in repo.opener("bisect.state"):
299 299 kind, node = l[:-1].split()
300 300 node = repo.lookup(node)
301 301 if kind not in state:
302 302 raise util.Abort(_("unknown bisect kind %s") % kind)
303 303 state[kind].append(node)
304 304
305 305 # update state
306 306 node = repo.lookup(rev or '.')
307 307 if good:
308 308 state['good'].append(node)
309 309 elif bad:
310 310 state['bad'].append(node)
311 311 elif skip:
312 312 state['skip'].append(node)
313 313
314 314 # save state
315 315 f = repo.opener("bisect.state", "w", atomictemp=True)
316 316 wlock = repo.wlock()
317 317 try:
318 318 for kind in state:
319 319 for node in state[kind]:
320 320 f.write("%s %s\n" % (kind, hex(node)))
321 321 f.rename()
322 322 finally:
323 323 del wlock
324 324
325 325 if not state['good'] or not state['bad']:
326 326 return
327 327
328 328 # actually bisect
329 329 node, changesets, good = hbisect.bisect(repo.changelog, state)
330 330 if changesets == 0:
331 331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
332 332 displayer = cmdutil.show_changeset(ui, repo, {})
333 333 displayer.show(changenode=node)
334 334 elif node is not None:
335 335 # compute the approximate number of remaining tests
336 336 tests, size = 0, 2
337 337 while size <= changesets:
338 338 tests, size = tests + 1, size * 2
339 339 rev = repo.changelog.rev(node)
340 340 ui.write(_("Testing changeset %s:%s "
341 341 "(%s changesets remaining, ~%s tests)\n")
342 342 % (rev, short(node), changesets, tests))
343 343 if not noupdate:
344 344 cmdutil.bail_if_changed(repo)
345 345 return hg.clean(repo, node)
346 346
347 347 def branch(ui, repo, label=None, **opts):
348 348 """set or show the current branch name
349 349
350 350 With no argument, show the current branch name. With one argument,
351 351 set the working directory branch name (the branch does not exist in
352 352 the repository until the next commit).
353 353
354 354 Unless --force is specified, branch will not let you set a
355 355 branch name that shadows an existing branch.
356 356
357 357 Use the command 'hg update' to switch to an existing branch.
358 358 """
359 359
360 360 if label:
361 361 if not opts.get('force') and label in repo.branchtags():
362 362 if label not in [p.branch() for p in repo.parents()]:
363 363 raise util.Abort(_('a branch of the same name already exists'
364 364 ' (use --force to override)'))
365 365 repo.dirstate.setbranch(util.fromlocal(label))
366 366 ui.status(_('marked working directory as branch %s\n') % label)
367 367 else:
368 368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
369 369
370 370 def branches(ui, repo, active=False):
371 371 """list repository named branches
372 372
373 373 List the repository's named branches, indicating which ones are
374 374 inactive. If active is specified, only show active branches.
375 375
376 376 A branch is considered active if it contains repository heads.
377 377
378 378 Use the command 'hg update' to switch to an existing branch.
379 379 """
380 380 hexfunc = ui.debugflag and hex or short
381 381 activebranches = [util.tolocal(repo[n].branch())
382 382 for n in repo.heads()]
383 branches = [(tag in activebranches, repo.changelog.rev(node), tag)
384 for tag, node in repo.branchtags().items()]
385 branches.sort()
383 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
384 for tag, node in repo.branchtags().items()])
386 385 branches.reverse()
387 386
388 387 for isactive, node, tag in branches:
389 388 if (not active) or isactive:
390 389 if ui.quiet:
391 390 ui.write("%s\n" % tag)
392 391 else:
393 392 rev = str(node).rjust(32 - util.locallen(tag))
394 393 isinactive = ((not isactive) and " (inactive)") or ''
395 394 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
396 395 ui.write("%s%s:%s%s\n" % data)
397 396
398 397 def bundle(ui, repo, fname, dest=None, **opts):
399 398 """create a changegroup file
400 399
401 400 Generate a compressed changegroup file collecting changesets not
402 401 found in the other repository.
403 402
404 403 If no destination repository is specified the destination is
405 404 assumed to have all the nodes specified by one or more --base
406 405 parameters. To create a bundle containing all changesets, use
407 406 --all (or --base null). To change the compression method applied,
408 407 use the -t option (by default, bundles are compressed using bz2).
409 408
410 409 The bundle file can then be transferred using conventional means and
411 410 applied to another repository with the unbundle or pull command.
412 411 This is useful when direct push and pull are not available or when
413 412 exporting an entire repository is undesirable.
414 413
415 414 Applying bundles preserves all changeset contents including
416 415 permissions, copy/rename information, and revision history.
417 416 """
418 417 revs = opts.get('rev') or None
419 418 if revs:
420 419 revs = [repo.lookup(rev) for rev in revs]
421 420 if opts.get('all'):
422 421 base = ['null']
423 422 else:
424 423 base = opts.get('base')
425 424 if base:
426 425 if dest:
427 426 raise util.Abort(_("--base is incompatible with specifiying "
428 427 "a destination"))
429 428 base = [repo.lookup(rev) for rev in base]
430 429 # create the right base
431 430 # XXX: nodesbetween / changegroup* should be "fixed" instead
432 431 o = []
433 432 has = {nullid: None}
434 433 for n in base:
435 434 has.update(repo.changelog.reachable(n))
436 435 if revs:
437 436 visit = list(revs)
438 437 else:
439 438 visit = repo.changelog.heads()
440 439 seen = {}
441 440 while visit:
442 441 n = visit.pop(0)
443 442 parents = [p for p in repo.changelog.parents(n) if p not in has]
444 443 if len(parents) == 0:
445 444 o.insert(0, n)
446 445 else:
447 446 for p in parents:
448 447 if p not in seen:
449 448 seen[p] = 1
450 449 visit.append(p)
451 450 else:
452 451 cmdutil.setremoteconfig(ui, opts)
453 452 dest, revs, checkout = hg.parseurl(
454 453 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
455 454 other = hg.repository(ui, dest)
456 455 o = repo.findoutgoing(other, force=opts['force'])
457 456
458 457 if revs:
459 458 cg = repo.changegroupsubset(o, revs, 'bundle')
460 459 else:
461 460 cg = repo.changegroup(o, 'bundle')
462 461
463 462 bundletype = opts.get('type', 'bzip2').lower()
464 463 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
465 464 bundletype = btypes.get(bundletype)
466 465 if bundletype not in changegroup.bundletypes:
467 466 raise util.Abort(_('unknown bundle type specified with --type'))
468 467
469 468 changegroup.writebundle(cg, fname, bundletype)
470 469
471 470 def cat(ui, repo, file1, *pats, **opts):
472 471 """output the current or given revision of files
473 472
474 473 Print the specified files as they were at the given revision.
475 474 If no revision is given, the parent of the working directory is used,
476 475 or tip if no revision is checked out.
477 476
478 477 Output may be to a file, in which case the name of the file is
479 478 given using a format string. The formatting rules are the same as
480 479 for the export command, with the following additions:
481 480
482 481 %s basename of file being printed
483 482 %d dirname of file being printed, or '.' if in repo root
484 483 %p root-relative path name of file being printed
485 484 """
486 485 ctx = repo[opts['rev']]
487 486 err = 1
488 487 m = cmdutil.match(repo, (file1,) + pats, opts)
489 488 for abs in repo.walk(m, ctx.node()):
490 489 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
491 490 data = ctx.filectx(abs).data()
492 491 if opts.get('decode'):
493 492 data = repo.wwritedata(abs, data)
494 493 fp.write(data)
495 494 err = 0
496 495 return err
497 496
498 497 def clone(ui, source, dest=None, **opts):
499 498 """make a copy of an existing repository
500 499
501 500 Create a copy of an existing repository in a new directory.
502 501
503 502 If no destination directory name is specified, it defaults to the
504 503 basename of the source.
505 504
506 505 The location of the source is added to the new repository's
507 506 .hg/hgrc file, as the default to be used for future pulls.
508 507
509 508 For efficiency, hardlinks are used for cloning whenever the source
510 509 and destination are on the same filesystem (note this applies only
511 510 to the repository data, not to the checked out files). Some
512 511 filesystems, such as AFS, implement hardlinking incorrectly, but
513 512 do not report errors. In these cases, use the --pull option to
514 513 avoid hardlinking.
515 514
516 515 In some cases, you can clone repositories and checked out files
517 516 using full hardlinks with
518 517
519 518 $ cp -al REPO REPOCLONE
520 519
521 520 This is the fastest way to clone, but it is not always safe. The
522 521 operation is not atomic (making sure REPO is not modified during
523 522 the operation is up to you) and you have to make sure your editor
524 523 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
525 524 this is not compatible with certain extensions that place their
526 525 metadata under the .hg directory, such as mq.
527 526
528 527 If you use the -r option to clone up to a specific revision, no
529 528 subsequent revisions will be present in the cloned repository.
530 529 This option implies --pull, even on local repositories.
531 530
532 531 If the -U option is used, the new clone will contain only a repository
533 532 (.hg) and no working copy (the working copy parent is the null revision).
534 533
535 534 See pull for valid source format details.
536 535
537 536 It is possible to specify an ssh:// URL as the destination, but no
538 537 .hg/hgrc and working directory will be created on the remote side.
539 538 Look at the help text for the pull command for important details
540 539 about ssh:// URLs.
541 540 """
542 541 cmdutil.setremoteconfig(ui, opts)
543 542 hg.clone(ui, source, dest,
544 543 pull=opts['pull'],
545 544 stream=opts['uncompressed'],
546 545 rev=opts['rev'],
547 546 update=not opts['noupdate'])
548 547
549 548 def commit(ui, repo, *pats, **opts):
550 549 """commit the specified files or all outstanding changes
551 550
552 551 Commit changes to the given files into the repository.
553 552
554 553 If a list of files is omitted, all changes reported by "hg status"
555 554 will be committed.
556 555
557 556 If you are committing the result of a merge, do not provide any
558 557 file names or -I/-X filters.
559 558
560 559 If no commit message is specified, the configured editor is started to
561 560 enter a message.
562 561
563 562 See 'hg help dates' for a list of formats valid for -d/--date.
564 563 """
565 564 def commitfunc(ui, repo, message, match, opts):
566 565 return repo.commit(match.files(), message, opts['user'], opts['date'],
567 566 match, force_editor=opts.get('force_editor'))
568 567
569 568 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
570 569 if not node:
571 570 return
572 571 cl = repo.changelog
573 572 rev = cl.rev(node)
574 573 parents = cl.parentrevs(rev)
575 574 if rev - 1 in parents:
576 575 # one of the parents was the old tip
577 576 return
578 577 if (parents == (nullrev, nullrev) or
579 578 len(cl.heads(cl.node(parents[0]))) > 1 and
580 579 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
581 580 ui.status(_('created new head\n'))
582 581
583 582 def copy(ui, repo, *pats, **opts):
584 583 """mark files as copied for the next commit
585 584
586 585 Mark dest as having copies of source files. If dest is a
587 586 directory, copies are put in that directory. If dest is a file,
588 587 there can only be one source.
589 588
590 589 By default, this command copies the contents of files as they
591 590 stand in the working directory. If invoked with --after, the
592 591 operation is recorded, but no copying is performed.
593 592
594 593 This command takes effect in the next commit. To undo a copy
595 594 before that, see hg revert.
596 595 """
597 596 wlock = repo.wlock(False)
598 597 try:
599 598 return cmdutil.copy(ui, repo, pats, opts)
600 599 finally:
601 600 del wlock
602 601
603 602 def debugancestor(ui, repo, *args):
604 603 """find the ancestor revision of two revisions in a given index"""
605 604 if len(args) == 3:
606 605 index, rev1, rev2 = args
607 606 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
608 607 lookup = r.lookup
609 608 elif len(args) == 2:
610 609 if not repo:
611 610 raise util.Abort(_("There is no Mercurial repository here "
612 611 "(.hg not found)"))
613 612 rev1, rev2 = args
614 613 r = repo.changelog
615 614 lookup = repo.lookup
616 615 else:
617 616 raise util.Abort(_('either two or three arguments required'))
618 617 a = r.ancestor(lookup(rev1), lookup(rev2))
619 618 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
620 619
621 620 def debugcomplete(ui, cmd='', **opts):
622 621 """returns the completion list associated with the given command"""
623 622
624 623 if opts['options']:
625 624 options = []
626 625 otables = [globalopts]
627 626 if cmd:
628 627 aliases, entry = cmdutil.findcmd(ui, cmd, table)
629 628 otables.append(entry[1])
630 629 for t in otables:
631 630 for o in t:
632 631 if o[0]:
633 632 options.append('-%s' % o[0])
634 633 options.append('--%s' % o[1])
635 634 ui.write("%s\n" % "\n".join(options))
636 635 return
637 636
638 clist = cmdutil.findpossible(ui, cmd, table).keys()
639 clist.sort()
640 ui.write("%s\n" % "\n".join(clist))
637 ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table))))
641 638
642 639 def debugfsinfo(ui, path = "."):
643 640 file('.debugfsinfo', 'w').write('')
644 641 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
645 642 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
646 643 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
647 644 and 'yes' or 'no'))
648 645 os.unlink('.debugfsinfo')
649 646
650 647 def debugrebuildstate(ui, repo, rev="tip"):
651 648 """rebuild the dirstate as it would look like for the given revision"""
652 649 ctx = repo[rev]
653 650 wlock = repo.wlock()
654 651 try:
655 652 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
656 653 finally:
657 654 del wlock
658 655
659 656 def debugcheckstate(ui, repo):
660 657 """validate the correctness of the current dirstate"""
661 658 parent1, parent2 = repo.dirstate.parents()
662 659 m1 = repo[parent1].manifest()
663 660 m2 = repo[parent2].manifest()
664 661 errors = 0
665 662 for f in repo.dirstate:
666 663 state = repo.dirstate[f]
667 664 if state in "nr" and f not in m1:
668 665 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
669 666 errors += 1
670 667 if state in "a" and f in m1:
671 668 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
672 669 errors += 1
673 670 if state in "m" and f not in m1 and f not in m2:
674 671 ui.warn(_("%s in state %s, but not in either manifest\n") %
675 672 (f, state))
676 673 errors += 1
677 674 for f in m1:
678 675 state = repo.dirstate[f]
679 676 if state not in "nrm":
680 677 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
681 678 errors += 1
682 679 if errors:
683 680 error = _(".hg/dirstate inconsistent with current parent's manifest")
684 681 raise util.Abort(error)
685 682
686 683 def showconfig(ui, repo, *values, **opts):
687 684 """show combined config settings from all hgrc files
688 685
689 686 With no args, print names and values of all config items.
690 687
691 688 With one arg of the form section.name, print just the value of
692 689 that config item.
693 690
694 691 With multiple args, print names and values of all config items
695 692 with matching section names."""
696 693
697 694 untrusted = bool(opts.get('untrusted'))
698 695 if values:
699 696 if len([v for v in values if '.' in v]) > 1:
700 697 raise util.Abort(_('only one config item permitted'))
701 698 for section, name, value in ui.walkconfig(untrusted=untrusted):
702 699 sectname = section + '.' + name
703 700 if values:
704 701 for v in values:
705 702 if v == section:
706 703 ui.write('%s=%s\n' % (sectname, value))
707 704 elif v == sectname:
708 705 ui.write(value, '\n')
709 706 else:
710 707 ui.write('%s=%s\n' % (sectname, value))
711 708
712 709 def debugsetparents(ui, repo, rev1, rev2=None):
713 710 """manually set the parents of the current working directory
714 711
715 712 This is useful for writing repository conversion tools, but should
716 713 be used with care.
717 714 """
718 715
719 716 if not rev2:
720 717 rev2 = hex(nullid)
721 718
722 719 wlock = repo.wlock()
723 720 try:
724 721 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
725 722 finally:
726 723 del wlock
727 724
728 725 def debugstate(ui, repo, nodates=None):
729 726 """show the contents of the current dirstate"""
730 k = repo.dirstate._map.items()
731 k.sort()
732 727 timestr = ""
733 728 showdate = not nodates
734 for file_, ent in k:
729 for file_, ent in util.sort(repo.dirstate._map.items()):
735 730 if showdate:
736 731 if ent[3] == -1:
737 732 # Pad or slice to locale representation
738 733 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
739 734 timestr = 'unset'
740 735 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
741 736 else:
742 737 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
743 738 if ent[1] & 020000:
744 739 mode = 'lnk'
745 740 else:
746 741 mode = '%3o' % (ent[1] & 0777)
747 742 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
748 743 for f in repo.dirstate.copies():
749 744 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
750 745
751 746 def debugdata(ui, file_, rev):
752 747 """dump the contents of a data file revision"""
753 748 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
754 749 try:
755 750 ui.write(r.revision(r.lookup(rev)))
756 751 except KeyError:
757 752 raise util.Abort(_('invalid revision identifier %s') % rev)
758 753
759 754 def debugdate(ui, date, range=None, **opts):
760 755 """parse and display a date"""
761 756 if opts["extended"]:
762 757 d = util.parsedate(date, util.extendeddateformats)
763 758 else:
764 759 d = util.parsedate(date)
765 760 ui.write("internal: %s %s\n" % d)
766 761 ui.write("standard: %s\n" % util.datestr(d))
767 762 if range:
768 763 m = util.matchdate(range)
769 764 ui.write("match: %s\n" % m(d[0]))
770 765
771 766 def debugindex(ui, file_):
772 767 """dump the contents of an index file"""
773 768 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
774 769 ui.write(" rev offset length base linkrev" +
775 770 " nodeid p1 p2\n")
776 771 for i in r:
777 772 node = r.node(i)
778 773 try:
779 774 pp = r.parents(node)
780 775 except:
781 776 pp = [nullid, nullid]
782 777 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
783 778 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
784 779 short(node), short(pp[0]), short(pp[1])))
785 780
786 781 def debugindexdot(ui, file_):
787 782 """dump an index DAG as a .dot file"""
788 783 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
789 784 ui.write("digraph G {\n")
790 785 for i in r:
791 786 node = r.node(i)
792 787 pp = r.parents(node)
793 788 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
794 789 if pp[1] != nullid:
795 790 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
796 791 ui.write("}\n")
797 792
798 793 def debuginstall(ui):
799 794 '''test Mercurial installation'''
800 795
801 796 def writetemp(contents):
802 797 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
803 798 f = os.fdopen(fd, "wb")
804 799 f.write(contents)
805 800 f.close()
806 801 return name
807 802
808 803 problems = 0
809 804
810 805 # encoding
811 806 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
812 807 try:
813 808 util.fromlocal("test")
814 809 except util.Abort, inst:
815 810 ui.write(" %s\n" % inst)
816 811 ui.write(_(" (check that your locale is properly set)\n"))
817 812 problems += 1
818 813
819 814 # compiled modules
820 815 ui.status(_("Checking extensions...\n"))
821 816 try:
822 817 import bdiff, mpatch, base85
823 818 except Exception, inst:
824 819 ui.write(" %s\n" % inst)
825 820 ui.write(_(" One or more extensions could not be found"))
826 821 ui.write(_(" (check that you compiled the extensions)\n"))
827 822 problems += 1
828 823
829 824 # templates
830 825 ui.status(_("Checking templates...\n"))
831 826 try:
832 827 import templater
833 828 t = templater.templater(templater.templatepath("map-cmdline.default"))
834 829 except Exception, inst:
835 830 ui.write(" %s\n" % inst)
836 831 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
837 832 problems += 1
838 833
839 834 # patch
840 835 ui.status(_("Checking patch...\n"))
841 836 patchproblems = 0
842 837 a = "1\n2\n3\n4\n"
843 838 b = "1\n2\n3\ninsert\n4\n"
844 839 fa = writetemp(a)
845 840 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
846 841 os.path.basename(fa))
847 842 fd = writetemp(d)
848 843
849 844 files = {}
850 845 try:
851 846 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
852 847 except util.Abort, e:
853 848 ui.write(_(" patch call failed:\n"))
854 849 ui.write(" " + str(e) + "\n")
855 850 patchproblems += 1
856 851 else:
857 852 if list(files) != [os.path.basename(fa)]:
858 853 ui.write(_(" unexpected patch output!\n"))
859 854 patchproblems += 1
860 855 a = file(fa).read()
861 856 if a != b:
862 857 ui.write(_(" patch test failed!\n"))
863 858 patchproblems += 1
864 859
865 860 if patchproblems:
866 861 if ui.config('ui', 'patch'):
867 862 ui.write(_(" (Current patch tool may be incompatible with patch,"
868 863 " or misconfigured. Please check your .hgrc file)\n"))
869 864 else:
870 865 ui.write(_(" Internal patcher failure, please report this error"
871 866 " to http://www.selenic.com/mercurial/bts\n"))
872 867 problems += patchproblems
873 868
874 869 os.unlink(fa)
875 870 os.unlink(fd)
876 871
877 872 # editor
878 873 ui.status(_("Checking commit editor...\n"))
879 874 editor = ui.geteditor()
880 875 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
881 876 if not cmdpath:
882 877 if editor == 'vi':
883 878 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
884 879 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
885 880 else:
886 881 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
887 882 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
888 883 problems += 1
889 884
890 885 # check username
891 886 ui.status(_("Checking username...\n"))
892 887 user = os.environ.get("HGUSER")
893 888 if user is None:
894 889 user = ui.config("ui", "username")
895 890 if user is None:
896 891 user = os.environ.get("EMAIL")
897 892 if not user:
898 893 ui.warn(" ")
899 894 ui.username()
900 895 ui.write(_(" (specify a username in your .hgrc file)\n"))
901 896
902 897 if not problems:
903 898 ui.status(_("No problems detected\n"))
904 899 else:
905 900 ui.write(_("%s problems detected,"
906 901 " please check your install!\n") % problems)
907 902
908 903 return problems
909 904
910 905 def debugrename(ui, repo, file1, *pats, **opts):
911 906 """dump rename information"""
912 907
913 908 ctx = repo[opts.get('rev')]
914 909 m = cmdutil.match(repo, (file1,) + pats, opts)
915 910 for abs in repo.walk(m, ctx.node()):
916 911 fctx = ctx.filectx(abs)
917 912 o = fctx.filelog().renamed(fctx.filenode())
918 913 rel = m.rel(abs)
919 914 if o:
920 915 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
921 916 else:
922 917 ui.write(_("%s not renamed\n") % rel)
923 918
924 919 def debugwalk(ui, repo, *pats, **opts):
925 920 """show how files match on given patterns"""
926 921 m = cmdutil.match(repo, pats, opts)
927 922 items = list(repo.walk(m))
928 923 if not items:
929 924 return
930 925 fmt = 'f %%-%ds %%-%ds %%s' % (
931 926 max([len(abs) for abs in items]),
932 927 max([len(m.rel(abs)) for abs in items]))
933 928 for abs in items:
934 929 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
935 930 ui.write("%s\n" % line.rstrip())
936 931
937 932 def diff(ui, repo, *pats, **opts):
938 933 """diff repository (or selected files)
939 934
940 935 Show differences between revisions for the specified files.
941 936
942 937 Differences between files are shown using the unified diff format.
943 938
944 939 NOTE: diff may generate unexpected results for merges, as it will
945 940 default to comparing against the working directory's first parent
946 941 changeset if no revisions are specified.
947 942
948 943 When two revision arguments are given, then changes are shown
949 944 between those revisions. If only one revision is specified then
950 945 that revision is compared to the working directory, and, when no
951 946 revisions are specified, the working directory files are compared
952 947 to its parent.
953 948
954 949 Without the -a option, diff will avoid generating diffs of files
955 950 it detects as binary. With -a, diff will generate a diff anyway,
956 951 probably with undesirable results.
957 952 """
958 953 node1, node2 = cmdutil.revpair(repo, opts['rev'])
959 954
960 955 m = cmdutil.match(repo, pats, opts)
961 956 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
962 957
963 958 def export(ui, repo, *changesets, **opts):
964 959 """dump the header and diffs for one or more changesets
965 960
966 961 Print the changeset header and diffs for one or more revisions.
967 962
968 963 The information shown in the changeset header is: author,
969 964 changeset hash, parent(s) and commit comment.
970 965
971 966 NOTE: export may generate unexpected diff output for merge changesets,
972 967 as it will compare the merge changeset against its first parent only.
973 968
974 969 Output may be to a file, in which case the name of the file is
975 970 given using a format string. The formatting rules are as follows:
976 971
977 972 %% literal "%" character
978 973 %H changeset hash (40 bytes of hexadecimal)
979 974 %N number of patches being generated
980 975 %R changeset revision number
981 976 %b basename of the exporting repository
982 977 %h short-form changeset hash (12 bytes of hexadecimal)
983 978 %n zero-padded sequence number, starting at 1
984 979 %r zero-padded changeset revision number
985 980
986 981 Without the -a option, export will avoid generating diffs of files
987 982 it detects as binary. With -a, export will generate a diff anyway,
988 983 probably with undesirable results.
989 984
990 985 With the --switch-parent option, the diff will be against the second
991 986 parent. It can be useful to review a merge.
992 987 """
993 988 if not changesets:
994 989 raise util.Abort(_("export requires at least one changeset"))
995 990 revs = cmdutil.revrange(repo, changesets)
996 991 if len(revs) > 1:
997 992 ui.note(_('exporting patches:\n'))
998 993 else:
999 994 ui.note(_('exporting patch:\n'))
1000 995 patch.export(repo, revs, template=opts['output'],
1001 996 switch_parent=opts['switch_parent'],
1002 997 opts=patch.diffopts(ui, opts))
1003 998
1004 999 def grep(ui, repo, pattern, *pats, **opts):
1005 1000 """search for a pattern in specified files and revisions
1006 1001
1007 1002 Search revisions of files for a regular expression.
1008 1003
1009 1004 This command behaves differently than Unix grep. It only accepts
1010 1005 Python/Perl regexps. It searches repository history, not the
1011 1006 working directory. It always prints the revision number in which
1012 1007 a match appears.
1013 1008
1014 1009 By default, grep only prints output for the first revision of a
1015 1010 file in which it finds a match. To get it to print every revision
1016 1011 that contains a change in match status ("-" for a match that
1017 1012 becomes a non-match, or "+" for a non-match that becomes a match),
1018 1013 use the --all flag.
1019 1014 """
1020 1015 reflags = 0
1021 1016 if opts['ignore_case']:
1022 1017 reflags |= re.I
1023 1018 try:
1024 1019 regexp = re.compile(pattern, reflags)
1025 1020 except Exception, inst:
1026 1021 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1027 1022 return None
1028 1023 sep, eol = ':', '\n'
1029 1024 if opts['print0']:
1030 1025 sep = eol = '\0'
1031 1026
1032 1027 fcache = {}
1033 1028 def getfile(fn):
1034 1029 if fn not in fcache:
1035 1030 fcache[fn] = repo.file(fn)
1036 1031 return fcache[fn]
1037 1032
1038 1033 def matchlines(body):
1039 1034 begin = 0
1040 1035 linenum = 0
1041 1036 while True:
1042 1037 match = regexp.search(body, begin)
1043 1038 if not match:
1044 1039 break
1045 1040 mstart, mend = match.span()
1046 1041 linenum += body.count('\n', begin, mstart) + 1
1047 1042 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1048 1043 lend = body.find('\n', mend)
1049 1044 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1050 1045 begin = lend + 1
1051 1046
1052 1047 class linestate(object):
1053 1048 def __init__(self, line, linenum, colstart, colend):
1054 1049 self.line = line
1055 1050 self.linenum = linenum
1056 1051 self.colstart = colstart
1057 1052 self.colend = colend
1058 1053
1059 1054 def __hash__(self):
1060 1055 return hash((self.linenum, self.line))
1061 1056
1062 1057 def __eq__(self, other):
1063 1058 return self.line == other.line
1064 1059
1065 1060 matches = {}
1066 1061 copies = {}
1067 1062 def grepbody(fn, rev, body):
1068 1063 matches[rev].setdefault(fn, [])
1069 1064 m = matches[rev][fn]
1070 1065 for lnum, cstart, cend, line in matchlines(body):
1071 1066 s = linestate(line, lnum, cstart, cend)
1072 1067 m.append(s)
1073 1068
1074 1069 def difflinestates(a, b):
1075 1070 sm = difflib.SequenceMatcher(None, a, b)
1076 1071 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1077 1072 if tag == 'insert':
1078 1073 for i in xrange(blo, bhi):
1079 1074 yield ('+', b[i])
1080 1075 elif tag == 'delete':
1081 1076 for i in xrange(alo, ahi):
1082 1077 yield ('-', a[i])
1083 1078 elif tag == 'replace':
1084 1079 for i in xrange(alo, ahi):
1085 1080 yield ('-', a[i])
1086 1081 for i in xrange(blo, bhi):
1087 1082 yield ('+', b[i])
1088 1083
1089 1084 prev = {}
1090 1085 def display(fn, rev, states, prevstates):
1091 1086 datefunc = ui.quiet and util.shortdate or util.datestr
1092 1087 found = False
1093 1088 filerevmatches = {}
1094 1089 r = prev.get(fn, -1)
1095 1090 if opts['all']:
1096 1091 iter = difflinestates(states, prevstates)
1097 1092 else:
1098 1093 iter = [('', l) for l in prevstates]
1099 1094 for change, l in iter:
1100 1095 cols = [fn, str(r)]
1101 1096 if opts['line_number']:
1102 1097 cols.append(str(l.linenum))
1103 1098 if opts['all']:
1104 1099 cols.append(change)
1105 1100 if opts['user']:
1106 1101 cols.append(ui.shortuser(get(r)[1]))
1107 1102 if opts.get('date'):
1108 1103 cols.append(datefunc(get(r)[2]))
1109 1104 if opts['files_with_matches']:
1110 1105 c = (fn, r)
1111 1106 if c in filerevmatches:
1112 1107 continue
1113 1108 filerevmatches[c] = 1
1114 1109 else:
1115 1110 cols.append(l.line)
1116 1111 ui.write(sep.join(cols), eol)
1117 1112 found = True
1118 1113 return found
1119 1114
1120 1115 fstate = {}
1121 1116 skip = {}
1122 1117 get = util.cachefunc(lambda r: repo[r].changeset())
1123 1118 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1124 1119 found = False
1125 1120 follow = opts.get('follow')
1126 1121 for st, rev, fns in changeiter:
1127 1122 if st == 'window':
1128 1123 matches.clear()
1129 1124 elif st == 'add':
1130 1125 ctx = repo[rev]
1131 1126 matches[rev] = {}
1132 1127 for fn in fns:
1133 1128 if fn in skip:
1134 1129 continue
1135 1130 try:
1136 1131 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1137 1132 fstate.setdefault(fn, [])
1138 1133 if follow:
1139 1134 copied = getfile(fn).renamed(ctx.filenode(fn))
1140 1135 if copied:
1141 1136 copies.setdefault(rev, {})[fn] = copied[0]
1142 1137 except revlog.LookupError:
1143 1138 pass
1144 1139 elif st == 'iter':
1145 states = matches[rev].items()
1146 states.sort()
1147 for fn, m in states:
1140 for fn, m in util.sort(matches[rev].items()):
1148 1141 copy = copies.get(rev, {}).get(fn)
1149 1142 if fn in skip:
1150 1143 if copy:
1151 1144 skip[copy] = True
1152 1145 continue
1153 1146 if fn in prev or fstate[fn]:
1154 1147 r = display(fn, rev, m, fstate[fn])
1155 1148 found = found or r
1156 1149 if r and not opts['all']:
1157 1150 skip[fn] = True
1158 1151 if copy:
1159 1152 skip[copy] = True
1160 1153 fstate[fn] = m
1161 1154 if copy:
1162 1155 fstate[copy] = m
1163 1156 prev[fn] = rev
1164 1157
1165 fstate = fstate.items()
1166 fstate.sort()
1167 for fn, state in fstate:
1158 for fn, state in util.sort(fstate.items()):
1168 1159 if fn in skip:
1169 1160 continue
1170 1161 if fn not in copies.get(prev[fn], {}):
1171 1162 found = display(fn, rev, {}, state) or found
1172 1163 return (not found and 1) or 0
1173 1164
1174 1165 def heads(ui, repo, *branchrevs, **opts):
1175 1166 """show current repository heads or show branch heads
1176 1167
1177 1168 With no arguments, show all repository head changesets.
1178 1169
1179 1170 If branch or revisions names are given this will show the heads of
1180 1171 the specified branches or the branches those revisions are tagged
1181 1172 with.
1182 1173
1183 1174 Repository "heads" are changesets that don't have child
1184 1175 changesets. They are where development generally takes place and
1185 1176 are the usual targets for update and merge operations.
1186 1177
1187 1178 Branch heads are changesets that have a given branch tag, but have
1188 1179 no child changesets with that tag. They are usually where
1189 1180 development on the given branch takes place.
1190 1181 """
1191 1182 if opts['rev']:
1192 1183 start = repo.lookup(opts['rev'])
1193 1184 else:
1194 1185 start = None
1195 1186 if not branchrevs:
1196 1187 # Assume we're looking repo-wide heads if no revs were specified.
1197 1188 heads = repo.heads(start)
1198 1189 else:
1199 1190 heads = []
1200 1191 visitedset = util.set()
1201 1192 for branchrev in branchrevs:
1202 1193 branch = repo[branchrev].branch()
1203 1194 if branch in visitedset:
1204 1195 continue
1205 1196 visitedset.add(branch)
1206 1197 bheads = repo.branchheads(branch, start)
1207 1198 if not bheads:
1208 1199 if branch != branchrev:
1209 1200 ui.warn(_("no changes on branch %s containing %s are "
1210 1201 "reachable from %s\n")
1211 1202 % (branch, branchrev, opts['rev']))
1212 1203 else:
1213 1204 ui.warn(_("no changes on branch %s are reachable from %s\n")
1214 1205 % (branch, opts['rev']))
1215 1206 heads.extend(bheads)
1216 1207 if not heads:
1217 1208 return 1
1218 1209 displayer = cmdutil.show_changeset(ui, repo, opts)
1219 1210 for n in heads:
1220 1211 displayer.show(changenode=n)
1221 1212
1222 1213 def help_(ui, name=None, with_version=False):
1223 1214 """show help for a command, extension, or list of commands
1224 1215
1225 1216 With no arguments, print a list of commands and short help.
1226 1217
1227 1218 Given a command name, print help for that command.
1228 1219
1229 1220 Given an extension name, print help for that extension, and the
1230 1221 commands it provides."""
1231 1222 option_lists = []
1232 1223
1233 1224 def addglobalopts(aliases):
1234 1225 if ui.verbose:
1235 1226 option_lists.append((_("global options:"), globalopts))
1236 1227 if name == 'shortlist':
1237 1228 option_lists.append((_('use "hg help" for the full list '
1238 1229 'of commands'), ()))
1239 1230 else:
1240 1231 if name == 'shortlist':
1241 1232 msg = _('use "hg help" for the full list of commands '
1242 1233 'or "hg -v" for details')
1243 1234 elif aliases:
1244 1235 msg = _('use "hg -v help%s" to show aliases and '
1245 1236 'global options') % (name and " " + name or "")
1246 1237 else:
1247 1238 msg = _('use "hg -v help %s" to show global options') % name
1248 1239 option_lists.append((msg, ()))
1249 1240
1250 1241 def helpcmd(name):
1251 1242 if with_version:
1252 1243 version_(ui)
1253 1244 ui.write('\n')
1254 1245
1255 1246 try:
1256 1247 aliases, i = cmdutil.findcmd(ui, name, table)
1257 1248 except cmdutil.AmbiguousCommand, inst:
1258 1249 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1259 1250 helplist(_('list of commands:\n\n'), select)
1260 1251 return
1261 1252
1262 1253 # synopsis
1263 1254 ui.write("%s\n" % i[2])
1264 1255
1265 1256 # aliases
1266 1257 if not ui.quiet and len(aliases) > 1:
1267 1258 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1268 1259
1269 1260 # description
1270 1261 doc = i[0].__doc__
1271 1262 if not doc:
1272 1263 doc = _("(No help text available)")
1273 1264 if ui.quiet:
1274 1265 doc = doc.splitlines(0)[0]
1275 1266 ui.write("\n%s\n" % doc.rstrip())
1276 1267
1277 1268 if not ui.quiet:
1278 1269 # options
1279 1270 if i[1]:
1280 1271 option_lists.append((_("options:\n"), i[1]))
1281 1272
1282 1273 addglobalopts(False)
1283 1274
1284 1275 def helplist(header, select=None):
1285 1276 h = {}
1286 1277 cmds = {}
1287 1278 for c, e in table.items():
1288 1279 f = c.split("|", 1)[0]
1289 1280 if select and not select(f):
1290 1281 continue
1291 1282 if name == "shortlist" and not f.startswith("^"):
1292 1283 continue
1293 1284 f = f.lstrip("^")
1294 1285 if not ui.debugflag and f.startswith("debug"):
1295 1286 continue
1296 1287 doc = e[0].__doc__
1297 1288 if not doc:
1298 1289 doc = _("(No help text available)")
1299 1290 h[f] = doc.splitlines(0)[0].rstrip()
1300 1291 cmds[f] = c.lstrip("^")
1301 1292
1302 1293 if not h:
1303 1294 ui.status(_('no commands defined\n'))
1304 1295 return
1305 1296
1306 1297 ui.status(header)
1307 fns = h.keys()
1308 fns.sort()
1298 fns = util.sort(h)
1309 1299 m = max(map(len, fns))
1310 1300 for f in fns:
1311 1301 if ui.verbose:
1312 1302 commands = cmds[f].replace("|",", ")
1313 1303 ui.write(" %s:\n %s\n"%(commands, h[f]))
1314 1304 else:
1315 1305 ui.write(' %-*s %s\n' % (m, f, h[f]))
1316 1306
1317 1307 if not ui.quiet:
1318 1308 addglobalopts(True)
1319 1309
1320 1310 def helptopic(name):
1321 1311 v = None
1322 1312 for i, d in help.helptable:
1323 1313 l = i.split('|')
1324 1314 if name in l:
1325 1315 v = i
1326 1316 header = l[-1]
1327 1317 doc = d
1328 1318 if not v:
1329 1319 raise cmdutil.UnknownCommand(name)
1330 1320
1331 1321 # description
1332 1322 if not doc:
1333 1323 doc = _("(No help text available)")
1334 1324 if callable(doc):
1335 1325 doc = doc()
1336 1326
1337 1327 ui.write("%s\n" % header)
1338 1328 ui.write("%s\n" % doc.rstrip())
1339 1329
1340 1330 def helpext(name):
1341 1331 try:
1342 1332 mod = extensions.find(name)
1343 1333 except KeyError:
1344 1334 raise cmdutil.UnknownCommand(name)
1345 1335
1346 1336 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1347 1337 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1348 1338 for d in doc[1:]:
1349 1339 ui.write(d, '\n')
1350 1340
1351 1341 ui.status('\n')
1352 1342
1353 1343 try:
1354 1344 ct = mod.cmdtable
1355 1345 except AttributeError:
1356 1346 ct = {}
1357 1347
1358 1348 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1359 1349 helplist(_('list of commands:\n\n'), modcmds.has_key)
1360 1350
1361 1351 if name and name != 'shortlist':
1362 1352 i = None
1363 1353 for f in (helpcmd, helptopic, helpext):
1364 1354 try:
1365 1355 f(name)
1366 1356 i = None
1367 1357 break
1368 1358 except cmdutil.UnknownCommand, inst:
1369 1359 i = inst
1370 1360 if i:
1371 1361 raise i
1372 1362
1373 1363 else:
1374 1364 # program name
1375 1365 if ui.verbose or with_version:
1376 1366 version_(ui)
1377 1367 else:
1378 1368 ui.status(_("Mercurial Distributed SCM\n"))
1379 1369 ui.status('\n')
1380 1370
1381 1371 # list of commands
1382 1372 if name == "shortlist":
1383 1373 header = _('basic commands:\n\n')
1384 1374 else:
1385 1375 header = _('list of commands:\n\n')
1386 1376
1387 1377 helplist(header)
1388 1378
1389 1379 # list all option lists
1390 1380 opt_output = []
1391 1381 for title, options in option_lists:
1392 1382 opt_output.append(("\n%s" % title, None))
1393 1383 for shortopt, longopt, default, desc in options:
1394 1384 if "DEPRECATED" in desc and not ui.verbose: continue
1395 1385 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1396 1386 longopt and " --%s" % longopt),
1397 1387 "%s%s" % (desc,
1398 1388 default
1399 1389 and _(" (default: %s)") % default
1400 1390 or "")))
1401 1391
1402 1392 if ui.verbose:
1403 1393 ui.write(_("\nspecial help topics:\n"))
1404 1394 topics = []
1405 1395 for i, d in help.helptable:
1406 1396 l = i.split('|')
1407 1397 topics.append((", ".join(l[:-1]), l[-1]))
1408 1398 topics_len = max([len(s[0]) for s in topics])
1409 1399 for t, desc in topics:
1410 1400 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1411 1401
1412 1402 if opt_output:
1413 1403 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1414 1404 for first, second in opt_output:
1415 1405 if second:
1416 1406 ui.write(" %-*s %s\n" % (opts_len, first, second))
1417 1407 else:
1418 1408 ui.write("%s\n" % first)
1419 1409
1420 1410 def identify(ui, repo, source=None,
1421 1411 rev=None, num=None, id=None, branch=None, tags=None):
1422 1412 """identify the working copy or specified revision
1423 1413
1424 1414 With no revision, print a summary of the current state of the repo.
1425 1415
1426 1416 With a path, do a lookup in another repository.
1427 1417
1428 1418 This summary identifies the repository state using one or two parent
1429 1419 hash identifiers, followed by a "+" if there are uncommitted changes
1430 1420 in the working directory, a list of tags for this revision and a branch
1431 1421 name for non-default branches.
1432 1422 """
1433 1423
1434 1424 if not repo and not source:
1435 1425 raise util.Abort(_("There is no Mercurial repository here "
1436 1426 "(.hg not found)"))
1437 1427
1438 1428 hexfunc = ui.debugflag and hex or short
1439 1429 default = not (num or id or branch or tags)
1440 1430 output = []
1441 1431
1442 1432 if source:
1443 1433 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1444 1434 srepo = hg.repository(ui, source)
1445 1435 if not rev and revs:
1446 1436 rev = revs[0]
1447 1437 if not rev:
1448 1438 rev = "tip"
1449 1439 if num or branch or tags:
1450 1440 raise util.Abort(
1451 1441 "can't query remote revision number, branch, or tags")
1452 1442 output = [hexfunc(srepo.lookup(rev))]
1453 1443 elif not rev:
1454 1444 ctx = repo[None]
1455 1445 parents = ctx.parents()
1456 1446 changed = False
1457 1447 if default or id or num:
1458 1448 changed = ctx.files() + ctx.deleted()
1459 1449 if default or id:
1460 1450 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1461 1451 (changed) and "+" or "")]
1462 1452 if num:
1463 1453 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1464 1454 (changed) and "+" or ""))
1465 1455 else:
1466 1456 ctx = repo[rev]
1467 1457 if default or id:
1468 1458 output = [hexfunc(ctx.node())]
1469 1459 if num:
1470 1460 output.append(str(ctx.rev()))
1471 1461
1472 1462 if not source and default and not ui.quiet:
1473 1463 b = util.tolocal(ctx.branch())
1474 1464 if b != 'default':
1475 1465 output.append("(%s)" % b)
1476 1466
1477 1467 # multiple tags for a single parent separated by '/'
1478 1468 t = "/".join(ctx.tags())
1479 1469 if t:
1480 1470 output.append(t)
1481 1471
1482 1472 if branch:
1483 1473 output.append(util.tolocal(ctx.branch()))
1484 1474
1485 1475 if tags:
1486 1476 output.extend(ctx.tags())
1487 1477
1488 1478 ui.write("%s\n" % ' '.join(output))
1489 1479
1490 1480 def import_(ui, repo, patch1, *patches, **opts):
1491 1481 """import an ordered set of patches
1492 1482
1493 1483 Import a list of patches and commit them individually.
1494 1484
1495 1485 If there are outstanding changes in the working directory, import
1496 1486 will abort unless given the -f flag.
1497 1487
1498 1488 You can import a patch straight from a mail message. Even patches
1499 1489 as attachments work (body part must be type text/plain or
1500 1490 text/x-patch to be used). From and Subject headers of email
1501 1491 message are used as default committer and commit message. All
1502 1492 text/plain body parts before first diff are added to commit
1503 1493 message.
1504 1494
1505 1495 If the imported patch was generated by hg export, user and description
1506 1496 from patch override values from message headers and body. Values
1507 1497 given on command line with -m and -u override these.
1508 1498
1509 1499 If --exact is specified, import will set the working directory
1510 1500 to the parent of each patch before applying it, and will abort
1511 1501 if the resulting changeset has a different ID than the one
1512 1502 recorded in the patch. This may happen due to character set
1513 1503 problems or other deficiencies in the text patch format.
1514 1504
1515 1505 To read a patch from standard input, use patch name "-".
1516 1506 See 'hg help dates' for a list of formats valid for -d/--date.
1517 1507 """
1518 1508 patches = (patch1,) + patches
1519 1509
1520 1510 date = opts.get('date')
1521 1511 if date:
1522 1512 opts['date'] = util.parsedate(date)
1523 1513
1524 1514 if opts.get('exact') or not opts['force']:
1525 1515 cmdutil.bail_if_changed(repo)
1526 1516
1527 1517 d = opts["base"]
1528 1518 strip = opts["strip"]
1529 1519 wlock = lock = None
1530 1520 try:
1531 1521 wlock = repo.wlock()
1532 1522 lock = repo.lock()
1533 1523 for p in patches:
1534 1524 pf = os.path.join(d, p)
1535 1525
1536 1526 if pf == '-':
1537 1527 ui.status(_("applying patch from stdin\n"))
1538 1528 data = patch.extract(ui, sys.stdin)
1539 1529 else:
1540 1530 ui.status(_("applying %s\n") % p)
1541 1531 if os.path.exists(pf):
1542 1532 data = patch.extract(ui, file(pf, 'rb'))
1543 1533 else:
1544 1534 data = patch.extract(ui, urllib.urlopen(pf))
1545 1535 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1546 1536
1547 1537 if tmpname is None:
1548 1538 raise util.Abort(_('no diffs found'))
1549 1539
1550 1540 try:
1551 1541 cmdline_message = cmdutil.logmessage(opts)
1552 1542 if cmdline_message:
1553 1543 # pickup the cmdline msg
1554 1544 message = cmdline_message
1555 1545 elif message:
1556 1546 # pickup the patch msg
1557 1547 message = message.strip()
1558 1548 else:
1559 1549 # launch the editor
1560 1550 message = None
1561 1551 ui.debug(_('message:\n%s\n') % message)
1562 1552
1563 1553 wp = repo.parents()
1564 1554 if opts.get('exact'):
1565 1555 if not nodeid or not p1:
1566 1556 raise util.Abort(_('not a mercurial patch'))
1567 1557 p1 = repo.lookup(p1)
1568 1558 p2 = repo.lookup(p2 or hex(nullid))
1569 1559
1570 1560 if p1 != wp[0].node():
1571 1561 hg.clean(repo, p1)
1572 1562 repo.dirstate.setparents(p1, p2)
1573 1563 elif p2:
1574 1564 try:
1575 1565 p1 = repo.lookup(p1)
1576 1566 p2 = repo.lookup(p2)
1577 1567 if p1 == wp[0].node():
1578 1568 repo.dirstate.setparents(p1, p2)
1579 1569 except RepoError:
1580 1570 pass
1581 1571 if opts.get('exact') or opts.get('import_branch'):
1582 1572 repo.dirstate.setbranch(branch or 'default')
1583 1573
1584 1574 files = {}
1585 1575 try:
1586 1576 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1587 1577 files=files)
1588 1578 finally:
1589 1579 files = patch.updatedir(ui, repo, files)
1590 1580 if not opts.get('no_commit'):
1591 1581 n = repo.commit(files, message, opts.get('user') or user,
1592 1582 opts.get('date') or date)
1593 1583 if opts.get('exact'):
1594 1584 if hex(n) != nodeid:
1595 1585 repo.rollback()
1596 1586 raise util.Abort(_('patch is damaged'
1597 1587 ' or loses information'))
1598 1588 # Force a dirstate write so that the next transaction
1599 1589 # backups an up-do-date file.
1600 1590 repo.dirstate.write()
1601 1591 finally:
1602 1592 os.unlink(tmpname)
1603 1593 finally:
1604 1594 del lock, wlock
1605 1595
1606 1596 def incoming(ui, repo, source="default", **opts):
1607 1597 """show new changesets found in source
1608 1598
1609 1599 Show new changesets found in the specified path/URL or the default
1610 1600 pull location. These are the changesets that would be pulled if a pull
1611 1601 was requested.
1612 1602
1613 1603 For remote repository, using --bundle avoids downloading the changesets
1614 1604 twice if the incoming is followed by a pull.
1615 1605
1616 1606 See pull for valid source format details.
1617 1607 """
1618 1608 limit = cmdutil.loglimit(opts)
1619 1609 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1620 1610 cmdutil.setremoteconfig(ui, opts)
1621 1611
1622 1612 other = hg.repository(ui, source)
1623 1613 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1624 1614 if revs:
1625 1615 revs = [other.lookup(rev) for rev in revs]
1626 1616 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1627 1617 if not incoming:
1628 1618 try:
1629 1619 os.unlink(opts["bundle"])
1630 1620 except:
1631 1621 pass
1632 1622 ui.status(_("no changes found\n"))
1633 1623 return 1
1634 1624
1635 1625 cleanup = None
1636 1626 try:
1637 1627 fname = opts["bundle"]
1638 1628 if fname or not other.local():
1639 1629 # create a bundle (uncompressed if other repo is not local)
1640 1630 if revs is None:
1641 1631 cg = other.changegroup(incoming, "incoming")
1642 1632 else:
1643 1633 cg = other.changegroupsubset(incoming, revs, 'incoming')
1644 1634 bundletype = other.local() and "HG10BZ" or "HG10UN"
1645 1635 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1646 1636 # keep written bundle?
1647 1637 if opts["bundle"]:
1648 1638 cleanup = None
1649 1639 if not other.local():
1650 1640 # use the created uncompressed bundlerepo
1651 1641 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1652 1642
1653 1643 o = other.changelog.nodesbetween(incoming, revs)[0]
1654 1644 if opts['newest_first']:
1655 1645 o.reverse()
1656 1646 displayer = cmdutil.show_changeset(ui, other, opts)
1657 1647 count = 0
1658 1648 for n in o:
1659 1649 if count >= limit:
1660 1650 break
1661 1651 parents = [p for p in other.changelog.parents(n) if p != nullid]
1662 1652 if opts['no_merges'] and len(parents) == 2:
1663 1653 continue
1664 1654 count += 1
1665 1655 displayer.show(changenode=n)
1666 1656 finally:
1667 1657 if hasattr(other, 'close'):
1668 1658 other.close()
1669 1659 if cleanup:
1670 1660 os.unlink(cleanup)
1671 1661
1672 1662 def init(ui, dest=".", **opts):
1673 1663 """create a new repository in the given directory
1674 1664
1675 1665 Initialize a new repository in the given directory. If the given
1676 1666 directory does not exist, it is created.
1677 1667
1678 1668 If no directory is given, the current directory is used.
1679 1669
1680 1670 It is possible to specify an ssh:// URL as the destination.
1681 1671 Look at the help text for the pull command for important details
1682 1672 about ssh:// URLs.
1683 1673 """
1684 1674 cmdutil.setremoteconfig(ui, opts)
1685 1675 hg.repository(ui, dest, create=1)
1686 1676
1687 1677 def locate(ui, repo, *pats, **opts):
1688 1678 """locate files matching specific patterns
1689 1679
1690 1680 Print all files under Mercurial control whose names match the
1691 1681 given patterns.
1692 1682
1693 1683 This command searches the entire repository by default. To search
1694 1684 just the current directory and its subdirectories, use
1695 1685 "--include .".
1696 1686
1697 1687 If no patterns are given to match, this command prints all file
1698 1688 names.
1699 1689
1700 1690 If you want to feed the output of this command into the "xargs"
1701 1691 command, use the "-0" option to both this command and "xargs".
1702 1692 This will avoid the problem of "xargs" treating single filenames
1703 1693 that contain white space as multiple filenames.
1704 1694 """
1705 1695 end = opts['print0'] and '\0' or '\n'
1706 1696 rev = opts['rev']
1707 1697 if rev:
1708 1698 node = repo.lookup(rev)
1709 1699 else:
1710 1700 node = None
1711 1701
1712 1702 ret = 1
1713 1703 m = cmdutil.match(repo, pats, opts, default='relglob')
1714 1704 m.bad = lambda x,y: False
1715 1705 for abs in repo.walk(m, node):
1716 1706 if not node and abs not in repo.dirstate:
1717 1707 continue
1718 1708 if opts['fullpath']:
1719 1709 ui.write(os.path.join(repo.root, abs), end)
1720 1710 else:
1721 1711 ui.write(((pats and m.rel(abs)) or abs), end)
1722 1712 ret = 0
1723 1713
1724 1714 return ret
1725 1715
1726 1716 def log(ui, repo, *pats, **opts):
1727 1717 """show revision history of entire repository or files
1728 1718
1729 1719 Print the revision history of the specified files or the entire
1730 1720 project.
1731 1721
1732 1722 File history is shown without following rename or copy history of
1733 1723 files. Use -f/--follow with a file name to follow history across
1734 1724 renames and copies. --follow without a file name will only show
1735 1725 ancestors or descendants of the starting revision. --follow-first
1736 1726 only follows the first parent of merge revisions.
1737 1727
1738 1728 If no revision range is specified, the default is tip:0 unless
1739 1729 --follow is set, in which case the working directory parent is
1740 1730 used as the starting revision.
1741 1731
1742 1732 See 'hg help dates' for a list of formats valid for -d/--date.
1743 1733
1744 1734 By default this command outputs: changeset id and hash, tags,
1745 1735 non-trivial parents, user, date and time, and a summary for each
1746 1736 commit. When the -v/--verbose switch is used, the list of changed
1747 1737 files and full commit message is shown.
1748 1738
1749 1739 NOTE: log -p may generate unexpected diff output for merge
1750 1740 changesets, as it will compare the merge changeset against its
1751 1741 first parent only. Also, the files: list will only reflect files
1752 1742 that are different from BOTH parents.
1753 1743
1754 1744 """
1755 1745
1756 1746 get = util.cachefunc(lambda r: repo[r].changeset())
1757 1747 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1758 1748
1759 1749 limit = cmdutil.loglimit(opts)
1760 1750 count = 0
1761 1751
1762 1752 if opts['copies'] and opts['rev']:
1763 1753 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1764 1754 else:
1765 1755 endrev = len(repo)
1766 1756 rcache = {}
1767 1757 ncache = {}
1768 1758 def getrenamed(fn, rev):
1769 1759 '''looks up all renames for a file (up to endrev) the first
1770 1760 time the file is given. It indexes on the changerev and only
1771 1761 parses the manifest if linkrev != changerev.
1772 1762 Returns rename info for fn at changerev rev.'''
1773 1763 if fn not in rcache:
1774 1764 rcache[fn] = {}
1775 1765 ncache[fn] = {}
1776 1766 fl = repo.file(fn)
1777 1767 for i in fl:
1778 1768 node = fl.node(i)
1779 1769 lr = fl.linkrev(node)
1780 1770 renamed = fl.renamed(node)
1781 1771 rcache[fn][lr] = renamed
1782 1772 if renamed:
1783 1773 ncache[fn][node] = renamed
1784 1774 if lr >= endrev:
1785 1775 break
1786 1776 if rev in rcache[fn]:
1787 1777 return rcache[fn][rev]
1788 1778
1789 1779 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1790 1780 # filectx logic.
1791 1781
1792 1782 try:
1793 1783 return repo[rev][fn].renamed()
1794 1784 except revlog.LookupError:
1795 1785 pass
1796 1786 return None
1797 1787
1798 1788 df = False
1799 1789 if opts["date"]:
1800 1790 df = util.matchdate(opts["date"])
1801 1791
1802 1792 only_branches = opts['only_branch']
1803 1793
1804 1794 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1805 1795 for st, rev, fns in changeiter:
1806 1796 if st == 'add':
1807 1797 changenode = repo.changelog.node(rev)
1808 1798 parents = [p for p in repo.changelog.parentrevs(rev)
1809 1799 if p != nullrev]
1810 1800 if opts['no_merges'] and len(parents) == 2:
1811 1801 continue
1812 1802 if opts['only_merges'] and len(parents) != 2:
1813 1803 continue
1814 1804
1815 1805 if only_branches:
1816 1806 revbranch = get(rev)[5]['branch']
1817 1807 if revbranch not in only_branches:
1818 1808 continue
1819 1809
1820 1810 if df:
1821 1811 changes = get(rev)
1822 1812 if not df(changes[2][0]):
1823 1813 continue
1824 1814
1825 1815 if opts['keyword']:
1826 1816 changes = get(rev)
1827 1817 miss = 0
1828 1818 for k in [kw.lower() for kw in opts['keyword']]:
1829 1819 if not (k in changes[1].lower() or
1830 1820 k in changes[4].lower() or
1831 1821 k in " ".join(changes[3]).lower()):
1832 1822 miss = 1
1833 1823 break
1834 1824 if miss:
1835 1825 continue
1836 1826
1837 1827 copies = []
1838 1828 if opts.get('copies') and rev:
1839 1829 for fn in get(rev)[3]:
1840 1830 rename = getrenamed(fn, rev)
1841 1831 if rename:
1842 1832 copies.append((fn, rename[0]))
1843 1833 displayer.show(rev, changenode, copies=copies)
1844 1834 elif st == 'iter':
1845 1835 if count == limit: break
1846 1836 if displayer.flush(rev):
1847 1837 count += 1
1848 1838
1849 1839 def manifest(ui, repo, node=None, rev=None):
1850 1840 """output the current or given revision of the project manifest
1851 1841
1852 1842 Print a list of version controlled files for the given revision.
1853 1843 If no revision is given, the parent of the working directory is used,
1854 1844 or tip if no revision is checked out.
1855 1845
1856 1846 The manifest is the list of files being version controlled. If no revision
1857 1847 is given then the first parent of the working directory is used.
1858 1848
1859 1849 With -v flag, print file permissions, symlink and executable bits. With
1860 1850 --debug flag, print file revision hashes.
1861 1851 """
1862 1852
1863 1853 if rev and node:
1864 1854 raise util.Abort(_("please specify just one revision"))
1865 1855
1866 1856 if not node:
1867 1857 node = rev
1868 1858
1869 1859 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1870 1860 ctx = repo[node]
1871 1861 for f in ctx:
1872 1862 if ui.debugflag:
1873 1863 ui.write("%40s " % hex(ctx.manifest()[f]))
1874 1864 if ui.verbose:
1875 1865 ui.write(decor[ctx.flags(f)])
1876 1866 ui.write("%s\n" % f)
1877 1867
1878 1868 def merge(ui, repo, node=None, force=None, rev=None):
1879 1869 """merge working directory with another revision
1880 1870
1881 1871 Merge the contents of the current working directory and the
1882 1872 requested revision. Files that changed between either parent are
1883 1873 marked as changed for the next commit and a commit must be
1884 1874 performed before any further updates are allowed.
1885 1875
1886 1876 If no revision is specified, the working directory's parent is a
1887 1877 head revision, and the current branch contains exactly one other head,
1888 1878 the other head is merged with by default. Otherwise, an explicit
1889 1879 revision to merge with must be provided.
1890 1880 """
1891 1881
1892 1882 if rev and node:
1893 1883 raise util.Abort(_("please specify just one revision"))
1894 1884 if not node:
1895 1885 node = rev
1896 1886
1897 1887 if not node:
1898 1888 branch = repo.changectx(None).branch()
1899 1889 bheads = repo.branchheads()
1900 1890 if len(bheads) > 2:
1901 1891 raise util.Abort(_("branch '%s' has %d heads - "
1902 1892 "please merge with an explicit rev") %
1903 1893 (branch, len(bheads)))
1904 1894
1905 1895 parent = repo.dirstate.parents()[0]
1906 1896 if len(bheads) == 1:
1907 1897 if len(repo.heads()) > 1:
1908 1898 raise util.Abort(_("branch '%s' has one head - "
1909 1899 "please merge with an explicit rev") %
1910 1900 branch)
1911 1901 msg = _('there is nothing to merge')
1912 1902 if parent != repo.lookup(repo[None].branch()):
1913 1903 msg = _('%s - use "hg update" instead') % msg
1914 1904 raise util.Abort(msg)
1915 1905
1916 1906 if parent not in bheads:
1917 1907 raise util.Abort(_('working dir not at a head rev - '
1918 1908 'use "hg update" or merge with an explicit rev'))
1919 1909 node = parent == bheads[0] and bheads[-1] or bheads[0]
1920 1910 return hg.merge(repo, node, force=force)
1921 1911
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # Resolve destination URL and any revisions the user restricted to.
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(r) for r in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    outnodes = repo.findoutgoing(other, force=opts['force'])
    if not outnodes:
        ui.status(_("no changes found\n"))
        return 1

    outnodes = repo.changelog.nodesbetween(outnodes, revs)[0]
    if opts['newest_first']:
        outnodes.reverse()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for node in outnodes:
        if shown >= limit:
            break
        # With --no-merges, skip changesets that have two real parents.
        realparents = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts['no_merges'] and len(realparents) == 2:
            continue
        shown += 1
        displayer.show(changenode=node)
1957 1947
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if not rev:
        ctx = repo[None]
    else:
        ctx = repo[rev]

    if file_:
        # A single explicit file: show the revision(s) that last
        # touched it as of each parent.
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        shownodes = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
    else:
        shownodes = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in shownodes:
        if node != nullid:
            displayer.show(changenode=node)
1997 1987
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # No name given: list every configured path.
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, util.hidepassword(path)))
        return
    # Look up one specific name; non-zero return when it is missing.
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % util.hidepassword(path))
            return
    ui.warn(_("not found!\n"))
    return 1
2017 2007
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Shared tail for pull/unbundle: update the working dir when that is
    # safe and requested, otherwise print a hint about what to run next.
    if not modheads:
        return
    if optupdate and (modheads <= 1 or checkout):
        return hg.update(repo, checkout)
    if optupdate:
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2030 2020
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        # Specific revisions require the remote to support lookup.
        try:
            revs = [remote.lookup(rev) for rev in revs]
        except NoCapability:
            msg = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(msg)

    modheads = repo.pull(remote, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2087 2077
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # Mark the status message for translation, matching pull()'s
    # _('pulling from %s\n'); the original string was left unwrapped.
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # Preserve the original exit-status convention: the command result
    # is the boolean (r == 0) exactly as before.
    return r == 0
2129 2119
2130 2120 def rawcommit(ui, repo, *pats, **opts):
2131 2121 """raw commit interface (DEPRECATED)
2132 2122
2133 2123 (DEPRECATED)
2134 2124 Lowlevel commit, for use in helper scripts.
2135 2125
2136 2126 This command is not intended to be used by normal users, as it is
2137 2127 primarily useful for importing from other SCMs.
2138 2128
2139 2129 This command is now deprecated and will be removed in a future
2140 2130 release, please use debugsetparents and commit instead.
2141 2131 """
2142 2132
2143 2133 ui.warn(_("(the rawcommit command is deprecated)\n"))
2144 2134
2145 2135 message = cmdutil.logmessage(opts)
2146 2136
2147 2137 files = cmdutil.match(repo, pats, opts).files()
2148 2138 if opts['files']:
2149 2139 files += open(opts['files']).read().splitlines()
2150 2140
2151 2141 parents = [repo.lookup(p) for p in opts['parent']]
2152 2142
2153 2143 try:
2154 2144 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2155 2145 except ValueError, inst:
2156 2146 raise util.Abort(str(inst))
2157 2147
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Only verify the repository when the journal replay succeeded.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2169 2159
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A C M !
    none   W RD W R
    -f     R RD RD R
    -A     W W W R
    -Af    R R R R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    st = repo.status(match=m, clean=True)
    modified, added, deleted, clean = st[0], st[1], st[3], st[6]

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # Pick which files are removed vs merely forgotten, per the table in
    # the docstring; warn about everything that is left alone.
    if force:
        removelist, forgetlist = modified + deleted + clean, added
    elif after:
        removelist, forgetlist = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        removelist, forgetlist = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in util.sort(removelist + forgetlist):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forgetlist)
    repo.remove(removelist, unlink=not after)
2226 2214
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # Hold the working-dir lock for the whole copy+remove so the
    # dirstate changes appear atomic to other hg processes.
    wlock = repo.wlock(False)
    try:
        # cmdutil.copy with rename=True does all the real work.
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # Dropping the last reference releases the lock.
        del wlock
2246 2234
def resolve(ui, repo, *pats, **opts):
    """resolve file merges from a branch merge or update

    This command will attempt to resolve unresolved merges from the
    last update or merge command. This will use the local file
    revision preserved at the last update or merge to cleanly retry
    the file merge attempt. With no file or options specified, this
    command will attempt to resolve all unresolved files.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    # --list/--mark/--unmark are mutually exclusive.
    chosen = [o for o in opts if opts[o]]
    if len(chosen) > 1:
        raise util.Abort(_("too many options specified"))

    ms = merge_.mergestate(repo)
    matcher = cmdutil.match(repo, pats, opts)

    for f in ms:
        if not matcher(f):
            continue
        if opts.get("list"):
            ui.write("%s %s\n" % (ms[f].upper(), f))
        elif opts.get("mark"):
            ms.mark(f, "r")
        elif opts.get("unmark"):
            ms.mark(f, "u")
        else:
            # Retry the merge against the second parent of the
            # working directory.
            wctx = repo[None]
            mctx = wctx.parents()[-1]
            ms.resolve(f, wctx, mctx)
2279 2267
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    # --date is translated to an equivalent --rev; the two are exclusive.
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts['rev']]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        # Reverting to the working dir's parent: its manifest is mf.
        pmf = mf
    else:
        # pmf is lazily loaded later only if actually needed.
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names maps abs path -> (relative path for display, matched exactly).
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        files = []

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # Suppress the warning when the path (or a file under it)
            # was already found in the dirstate walk above.
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo.walk(m, node=node):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        # Dicts used as sets for O(1) membership tests below.
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # Message depends on whether the file is a pending add.
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # Each action is (list of files to act on, status message).
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in util.sort(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # Queue abs for the chosen action, saving a .orig backup
                # and printing the status message when appropriate.
                xlist[0].append(abs)
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for/else: the file matched no dispatch-table state,
                # i.e. it is unchanged in the dirstate.
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # Write the target revision's version of f into the
                # working directory.
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2500 2485
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All the work happens in localrepository.rollback().
    repo.rollback()
2528 2513
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2535 2520
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # --stdio: serve the ssh wire protocol over stdin/stdout
        # instead of starting an HTTP server.
        if repo is None:
            raise RepoError(_("There is no Mercurial repository here"
                              " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    # Mirror the relevant command-line options into the [web] config
    # section so that hgweb picks them up.
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise RepoError(_("There is no Mercurial repository here"
                          " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            # The rest of init only prints the "listening at ..." banner.
            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                # Default HTTP port is omitted from the displayed URL.
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonizing and runs init then run.
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2599 2584
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
    = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    # --print0 separates entries with NUL instead of newline.
    end = opts['print0'] and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # show is the ordered list of states requested via flags/-A/defaults.
    show = [k for k in states if opts[k]]
    if opts['all']:
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    # Pair each state name with its one-letter status code and file list.
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts['all'] or opts['copies']) and not opts['no_status']:
        # Build the copy map so copy sources can be printed under
        # their added destinations.
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts['no_status']:
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2670 2655
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    # Reject duplicate names (dict.fromkeys deduplicates).
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # Removal: each tag must already exist and be of the matching
        # local/global kind; removal is recorded by tagging nullid.
        expectedtype = opts['local'] and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts['force']:
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    # nullid is falsy, so this check also fires in the removal case:
    # refuse to commit a tag change on top of an uncommitted merge.
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts['local'], opts['user'], date)
2734 2719
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    # Newest tags come first.
    taglist = repo.tagslist()
    taglist.reverse()
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    tagtype = ""

    for t, n in taglist:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            r = " ?:%s" % hn
        else:
            spaces = " " * (30 - util.locallen(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2767 2752
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # The tip is always the highest-numbered revision, len(repo) - 1.
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(len(repo) - 1)
2781 2766
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            # local files are read directly; anything else goes
            # through urllib (http://, ssh://, ...)
            if os.path.exists(fname):
                fh = open(fname, "rb")
            else:
                fh = urllib.urlopen(fname)
            bundle = changegroup.readbundle(fh, fname)
            modheads = repo.addchangegroup(bundle, 'unbundle',
                                           'bundle:' + fname)
    finally:
        # release the lock via refcounting (old-style lock protocol)
        del lock

    return postincoming(ui, repo, modheads, opts['update'], None)
2804 2789
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified.

    If the requested revision is a descendant of the working
    directory, any outstanding changes in the working directory will
    be merged into the result. If it is not directly descended but is
    on the same named branch, update aborts with a suggestion to use
    merge or update -C instead.

    If the requested revision is on a different named branch and the
    working directory is clean, update quietly switches branches.

    See 'hg help dates' for a list of formats valid for --date.
    """
    # node and rev are two spellings of the same thing; accept one only
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2837 2822
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all real work is delegated to the hg module; its return value is
    # passed through as this command's result
    return hg.verify(repo)
2849 2834
def version_(ui):
    """output version and copyright information"""
    # the version string comes from the generated mercurial version module
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # NOTE(review): ui.status output is presumably suppressed under
    # --quiet, so only the version line is guaranteed -- confirm
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2860 2845
# Command options and aliases are listed here, alphabetically

# Each option is a tuple: (short flag, long flag, default value, help text).

# options accepted by every command
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, concatenated into per-command option lists in
# the command table below
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]
2934 2919
# The command table maps a command name (aliases separated by "|", a
# leading "^" marks commands shown in short help) to a tuple of
# (implementation function, option list, synopsis string).
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [],
                      _('hg debugancestor [INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('hg debugstate [OPTS]')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
         ] + templateopts,
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))],
         # bug fix: synopsis was a bare parenthesized string, missing the
         # _() i18n wrapper every other entry uses
         _('hg resolve [OPTION] [FILES...]')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('p', 'patch', None, _('show patch')),
         ] + templateopts,
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3312 3297
# space-separated names of commands that run without any repository
norepo = ("clone init version help debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
# commands that use a repository if one is present but do not require it
optionalrepo = ("identify paths serve showconfig debugancestor")
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now