merged with crew
Martin Geisler
r8925:3ad0b5dd merge default

The requested changes are too big and content was truncated.

NO CONTENT: new file 100644
NO CONTENT: new file 100755
NO CONTENT: new file 100755
NO CONTENT: new file 100644
@@ -1,99 +1,107 @@
 # acl.py - changeset access control for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 #

-'''provide simple hooks for access control
+'''control access to a repository using simple hooks

-Authorization is against local user name on system where hook is run, not
-committer of original changeset (since that is easy to spoof).
+This hook makes it possible to allow or deny write access to portions
+of a repository when receiving incoming changesets.
+
+The authorization is matched based on the local user name on the
+system where the hook runs, and not the committer of the original
+changeset (since the latter is merely informative).

-The acl hook is best to use if you use hgsh to set up restricted shells for
-authenticated users to only push to / pull from. It's not safe if user has
-interactive shell access, because they can disable the hook. It's also not
-safe if remote users share one local account, because then there's no way to
-tell remote users apart.
+The acl hook is best used along with a restricted shell like hgsh,
+preventing authenticating users from doing anything other than
+pushing or pulling. The hook is not safe to use if users have
+interactive shell access, as they can then disable the hook.
+Nor is it safe if remote users share an account, because then there
+is no way to distinguish them.

-To use, configure the acl extension in hgrc like this:
+To use this hook, configure the acl extension in your hgrc like this:

 [extensions]
 hgext.acl =

 [hooks]
 pretxnchangegroup.acl = python:hgext.acl.hook

 [acl]
-sources = serve # check if source of incoming changes in this list
-# ("serve" == ssh or http, "push", "pull", "bundle")
+# Check whether the source of incoming changes is in this list
+# ("serve" == ssh or http, "push", "pull", "bundle")
+sources = serve

-Allow and deny lists have a subtree pattern (default syntax is glob) on the
-left and user names on right. The deny list is checked before the allow list.
+The allow and deny sections take a subtree pattern as key (with a
+glob syntax by default), and a comma separated list of users as
+the corresponding value. The deny list is checked before the allow
+list is.

 [acl.allow]
-# if acl.allow not present, all users allowed by default
-# empty acl.allow = no users allowed
+# If acl.allow is not present, all users are allowed by default.
+# An empty acl.allow section means no users allowed.
 docs/** = doc_writer
 .hgtags = release_engineer

 [acl.deny]
-# if acl.deny not present, no users denied by default
-# empty acl.deny = all users allowed
+# If acl.deny is not present, no users are refused by default.
+# An empty acl.deny section means all users allowed.
 glob pattern = user4, user5
 ** = user6
 '''

 from mercurial.i18n import _
 from mercurial import util, match
 import getpass, urllib

 def buildmatch(ui, repo, user, key):
     '''return tuple of (match function, list enabled).'''
     if not ui.has_section(key):
         ui.debug(_('acl: %s not enabled\n') % key)
         return None

     pats = [pat for pat, users in ui.configitems(key)
             if user in users.replace(',', ' ').split()]
     ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
              (key, len(pats), user))
     if pats:
         return match.match(repo.root, '', pats)
     return match.exact(repo.root, '', [])


 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     if hooktype != 'pretxnchangegroup':
         raise util.Abort(_('config error - hook type "%s" cannot stop '
                            'incoming changesets') % hooktype)
     if source not in ui.config('acl', 'sources', 'serve').split():
         ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
         return

     user = None
     if source == 'serve' and 'url' in kwargs:
         url = kwargs['url'].split(':')
         if url[0] == 'remote' and url[1].startswith('http'):
             user = urllib.unquote(url[2])

     if user is None:
         user = getpass.getuser()

     cfg = ui.config('acl', 'config')
     if cfg:
         ui.readconfig(cfg, sections = ['acl.allow', 'acl.deny'])
     allow = buildmatch(ui, repo, user, 'acl.allow')
     deny = buildmatch(ui, repo, user, 'acl.deny')

     for rev in xrange(repo[node], len(repo)):
         ctx = repo[rev]
         for f in ctx.files():
             if deny and deny(f):
                 ui.debug(_('acl: user %s denied on %s\n') % (user, f))
                 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
             if allow and not allow(f):
                 ui.debug(_('acl: user %s not allowed on %s\n') % (user, f))
                 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
         ui.debug(_('acl: allowing changeset %s\n') % ctx)
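
The check order described in the acl docstring (the deny list is consulted before the allow list, and a non-empty allow section refuses anything it does not match) can be illustrated with a small standalone sketch. This is not part of the extension: it uses Python's fnmatch in place of Mercurial's match module, and the sections and user names are invented examples.

from fnmatch import fnmatch

# Invented stand-ins for [acl.allow] and [acl.deny] entries:
# pattern -> users listed for that pattern.
allow = {'docs/**': ['doc_writer'], '.hgtags': ['release_engineer']}
deny = {'**': ['user6']}

def permitted(user, path):
    # Deny is consulted first, mirroring the hook's check order.
    for pat, users in deny.items():
        if user in users and fnmatch(path, pat):
            return False
    # A non-empty allow section refuses anything it does not match.
    for pat, users in allow.items():
        if user in users and fnmatch(path, pat):
            return True
    return False

print(permitted('doc_writer', 'docs/index.txt'))  # True
print(permitted('user6', 'docs/index.txt'))       # False, denied by '**'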
@@ -1,334 +1,336 @@
 # Mercurial extension to provide the 'hg bookmark' command
 #
 # Copyright 2008 David Soria Parra <dsp@php.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.

-'''Mercurial bookmarks
+'''track a line of development with movable markers

-Mercurial bookmarks are local moveable pointers to changesets. Every
-bookmark points to a changeset identified by its hash. If you commit a
-changeset that is based on a changeset that has a bookmark on it, the
-bookmark is forwarded to the new changeset.
+Bookmarks are local movable markers to changesets. Every bookmark
+points to a changeset identified by its hash. If you commit a
+changeset that is based on a changeset that has a bookmark on it,
+the bookmark shifts to the new changeset.

-It is possible to use bookmark names in every revision lookup (e.g. hg
-merge, hg update).
+It is possible to use bookmark names in every revision lookup
+(e.g. hg merge, hg update).

-The bookmark extension offers the possiblity to have a more git-like
-experience by adding the following configuration option to your .hgrc:
+By default, when several bookmarks point to the same changeset, they
+will all move forward together. It is possible to obtain a more
+git-like experience by adding the following configuration option to
+your .hgrc:

 [bookmarks]
 track.current = True

-This will cause bookmarks to track the bookmark that you are currently
-on, and just updates it. This is similar to git's approach to
+This will cause Mercurial to track the bookmark that you are currently
+using, and only update it. This is similar to git's approach to
 branching.
 '''

 from mercurial.i18n import _
 from mercurial.node import nullid, nullrev, hex, short
 from mercurial import util, commands, localrepo, repair, extensions
 import os

 def parse(repo):
     '''Parse .hg/bookmarks file and return a dictionary

     Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
     in the .hg/bookmarks file. They are read by the parse() method and
     returned as a dictionary with name => hash values.

     The parsed dictionary is cached until a write() operation is done.
     '''
     try:
         if repo._bookmarks:
             return repo._bookmarks
         repo._bookmarks = {}
         for line in repo.opener('bookmarks'):
             sha, refspec = line.strip().split(' ', 1)
             repo._bookmarks[refspec] = repo.lookup(sha)
     except:
         pass
     return repo._bookmarks

 def write(repo, refs):
     '''Write bookmarks

     Write the given bookmark => hash dictionary to the .hg/bookmarks file
     in a format equal to those of localtags.

     We also store a backup of the previous state in undo.bookmarks that
     can be copied back on rollback.
     '''
     if os.path.exists(repo.join('bookmarks')):
         util.copyfile(repo.join('bookmarks'), repo.join('undo.bookmarks'))
     if current(repo) not in refs:
         setcurrent(repo, None)
     wlock = repo.wlock()
     try:
         file = repo.opener('bookmarks', 'w', atomictemp=True)
         for refspec, node in refs.iteritems():
             file.write("%s %s\n" % (hex(node), refspec))
         file.rename()
     finally:
         wlock.release()

 def current(repo):
     '''Get the current bookmark

     If we use gittishsh branches we have a current bookmark that
     we are on. This function returns the name of the bookmark. It
     is stored in .hg/bookmarks.current
     '''
     if repo._bookmarkcurrent:
         return repo._bookmarkcurrent
     mark = None
     if os.path.exists(repo.join('bookmarks.current')):
         file = repo.opener('bookmarks.current')
         # No readline() in posixfile_nt, reading everything is cheap
         mark = (file.readlines() or [''])[0]
         if mark == '':
             mark = None
         file.close()
     repo._bookmarkcurrent = mark
     return mark

 def setcurrent(repo, mark):
     '''Set the name of the bookmark that we are currently on

     Set the name of the bookmark that we are on (hg update <bookmark>).
     The name is recorded in .hg/bookmarks.current
     '''
     if current(repo) == mark:
         return

     refs = parse(repo)

     # do not update if we do update to a rev equal to the current bookmark
     if (mark and mark not in refs and
         current(repo) and refs[current(repo)] == repo.changectx('.').node()):
         return
     if mark not in refs:
         mark = ''
     wlock = repo.wlock()
     try:
         file = repo.opener('bookmarks.current', 'w', atomictemp=True)
         file.write(mark)
         file.rename()
     finally:
         wlock.release()
     repo._bookmarkcurrent = mark

 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
-    '''Mercurial bookmarks
+    '''track a line of development with movable markers

     Bookmarks are pointers to certain commits that move when
-    commiting. Bookmarks are local. They can be renamed, copied and
+    committing. Bookmarks are local. They can be renamed, copied and
     deleted. It is possible to use bookmark names in 'hg merge' and
     'hg update' to merge and update respectively to a given bookmark.

     You can use 'hg bookmark NAME' to set a bookmark on the working
     directory's parent revision with the given name. If you specify
     a revision using -r REV (where REV may be an existing bookmark),
     the bookmark is assigned to that revision.
     '''
     hexfn = ui.debugflag and hex or short
     marks = parse(repo)
     cur = repo.changectx('.').node()

     if rename:
         if rename not in marks:
             raise util.Abort(_("a bookmark of this name does not exist"))
         if mark in marks and not force:
             raise util.Abort(_("a bookmark of the same name already exists"))
         if mark is None:
             raise util.Abort(_("new bookmark name required"))
         marks[mark] = marks[rename]
         del marks[rename]
         if current(repo) == rename:
             setcurrent(repo, mark)
         write(repo, marks)
         return

     if delete:
         if mark is None:
             raise util.Abort(_("bookmark name required"))
         if mark not in marks:
             raise util.Abort(_("a bookmark of this name does not exist"))
         if mark == current(repo):
             setcurrent(repo, None)
         del marks[mark]
         write(repo, marks)
         return

     if mark != None:
         if "\n" in mark:
             raise util.Abort(_("bookmark name cannot contain newlines"))
         mark = mark.strip()
         if mark in marks and not force:
             raise util.Abort(_("a bookmark of the same name already exists"))
         if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
             and not force):
             raise util.Abort(
                 _("a bookmark cannot have the name of an existing branch"))
         if rev:
             marks[mark] = repo.lookup(rev)
         else:
             marks[mark] = repo.changectx('.').node()
         setcurrent(repo, mark)
         write(repo, marks)
         return

     if mark is None:
         if rev:
             raise util.Abort(_("bookmark name required"))
         if len(marks) == 0:
             ui.status("no bookmarks set\n")
         else:
             for bmark, n in marks.iteritems():
                 if ui.configbool('bookmarks', 'track.current'):
                     prefix = (bmark == current(repo) and n == cur) and '*' or ' '
                 else:
                     prefix = (n == cur) and '*' or ' '

                 ui.write(" %s %-25s %d:%s\n" % (
                     prefix, bmark, repo.changelog.rev(n), hexfn(n)))
         return

 def _revstostrip(changelog, node):
     srev = changelog.rev(node)
     tostrip = [srev]
     saveheads = []
     for r in xrange(srev, len(changelog)):
         parents = changelog.parentrevs(r)
         if parents[0] in tostrip or parents[1] in tostrip:
             tostrip.append(r)
             if parents[1] != nullrev:
                 for p in parents:
                     if p not in tostrip and p > srev:
                         saveheads.append(p)
     return [r for r in tostrip if r not in saveheads]

 def strip(oldstrip, ui, repo, node, backup="all"):
     """Strip bookmarks if revisions are stripped using
     the mercurial.strip method. This usually happens during
     qpush and qpop"""
     revisions = _revstostrip(repo.changelog, node)
     marks = parse(repo)
     update = []
     for mark, n in marks.iteritems():
         if repo.changelog.rev(n) in revisions:
             update.append(mark)
     oldstrip(ui, repo, node, backup)
     if len(update) > 0:
         for m in update:
             marks[m] = repo.changectx('.').node()
         write(repo, marks)

 def reposetup(ui, repo):
     if not isinstance(repo, localrepo.localrepository):
         return

     # init a bookmark cache as otherwise we would get a infinite reading
     # in lookup()
     repo._bookmarks = None
     repo._bookmarkcurrent = None

     class bookmark_repo(repo.__class__):
         def rollback(self):
             if os.path.exists(self.join('undo.bookmarks')):
                 util.rename(self.join('undo.bookmarks'), self.join('bookmarks'))
             return super(bookmark_repo, self).rollback()

         def lookup(self, key):
             if self._bookmarks is None:
                 self._bookmarks = parse(self)
             if key in self._bookmarks:
                 key = self._bookmarks[key]
             return super(bookmark_repo, self).lookup(key)

         def commit(self, *k, **kw):
             """Add a revision to the repository and
             move the bookmark"""
             wlock = self.wlock() # do both commit and bookmark with lock held
             try:
                 node = super(bookmark_repo, self).commit(*k, **kw)
                 if node is None:
                     return None
                 parents = repo.changelog.parents(node)
                 if parents[1] == nullid:
                     parents = (parents[0],)
                 marks = parse(repo)
                 update = False
                 for mark, n in marks.items():
                     if ui.configbool('bookmarks', 'track.current'):
                         if mark == current(repo) and n in parents:
                             marks[mark] = node
                             update = True
                     else:
                         if n in parents:
                             marks[mark] = node
                             update = True
                 if update:
                     write(repo, marks)
                 return node
             finally:
                 wlock.release()

         def addchangegroup(self, source, srctype, url, emptyok=False):
             parents = repo.dirstate.parents()

             result = super(bookmark_repo, self).addchangegroup(
                 source, srctype, url, emptyok)
             if result > 1:
                 # We have more heads than before
                 return result
             node = repo.changelog.tip()
             marks = parse(repo)
             update = False
             for mark, n in marks.items():
                 if n in parents:
                     marks[mark] = node
                     update = True
             if update:
                 write(repo, marks)
             return result

         def tags(self):
             """Merge bookmarks with normal tags"""
             if self.tagscache:
                 return self.tagscache

             tagscache = super(bookmark_repo, self).tags()
             tagscache.update(parse(repo))
             return tagscache

     repo.__class__ = bookmark_repo

 def uisetup(ui):
     extensions.wrapfunction(repair, "strip", strip)
     if ui.configbool('bookmarks', 'track.current'):
         extensions.wrapcommand(commands.table, 'update', updatecurbookmark)

 def updatecurbookmark(orig, ui, repo, *args, **opts):
     '''Set the current bookmark

     If the user updates to a bookmark we update the .hg/bookmarks.current
     file.
     '''
     res = orig(ui, repo, *args, **opts)
     rev = opts['rev']
     if not rev and len(args) > 0:
         rev = args[0]
     setcurrent(repo, rev)
     return res

 cmdtable = {
     "bookmarks":
         (bookmark,
          [('f', 'force', False, _('force')),
           ('r', 'rev', '', _('revision')),
           ('d', 'delete', False, _('delete a given bookmark')),
           ('m', 'rename', '', _('rename a given bookmark'))],
          _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
 }
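
The parse() docstring above describes the on-disk format of .hg/bookmarks: one "{HASH} {NAME}" pair per line, as with localtags. Below is a minimal standalone sketch of reading that format; the hashes and bookmark names are invented, and the real extension resolves each hash through repo.lookup() rather than keeping the raw string.

sample = (
    "0123456789abcdef0123456789abcdef01234567 feature-x\n"
    "89abcdef0123456789abcdef0123456789abcdef release-1.0\n"
)

bookmarks = {}
for line in sample.splitlines():
    # Same split as parse(): hash first, bookmark name may contain spaces.
    sha, refspec = line.strip().split(' ', 1)
    bookmarks[refspec] = sha

print(bookmarks['feature-x'])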
@@ -1,416 +1,416 b''
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''Bugzilla integration
8 '''integrate Mercurial with a Bugzilla bug tracker
9
9
10 This hook extension adds comments on bugs in Bugzilla when changesets
10 This hook extension adds comments on bugs in Bugzilla when changesets
11 that refer to bugs by Bugzilla ID are seen. The hook does not change
11 that refer to bugs by Bugzilla ID are seen. The hook does not change
12 bug status.
12 bug status.
13
13
14 The hook updates the Bugzilla database directly. Only Bugzilla
14 The hook updates the Bugzilla database directly. Only Bugzilla
15 installations using MySQL are supported.
15 installations using MySQL are supported.
16
16
17 The hook relies on a Bugzilla script to send bug change notification
17 The hook relies on a Bugzilla script to send bug change notification
18 emails. That script changes between Bugzilla versions; the
18 emails. That script changes between Bugzilla versions; the
19 'processmail' script used prior to 2.18 is replaced in 2.18 and
19 'processmail' script used prior to 2.18 is replaced in 2.18 and
20 subsequent versions by 'config/sendbugmail.pl'. Note that these will
20 subsequent versions by 'config/sendbugmail.pl'. Note that these will
21 be run by Mercurial as the user pushing the change; you will need to
21 be run by Mercurial as the user pushing the change; you will need to
22 ensure the Bugzilla install file permissions are set appropriately.
22 ensure the Bugzilla install file permissions are set appropriately.
23
23
24 Configuring the extension:
24 Configuring the extension:
25
25
26 [bugzilla]
26 [bugzilla]
27
27
28 host Hostname of the MySQL server holding the Bugzilla
28 host Hostname of the MySQL server holding the Bugzilla
29 database.
29 database.
30 db Name of the Bugzilla database in MySQL. Default 'bugs'.
30 db Name of the Bugzilla database in MySQL. Default 'bugs'.
31 user Username to use to access MySQL server. Default 'bugs'.
31 user Username to use to access MySQL server. Default 'bugs'.
32 password Password to use to access MySQL server.
32 password Password to use to access MySQL server.
33 timeout Database connection timeout (seconds). Default 5.
33 timeout Database connection timeout (seconds). Default 5.
34 version Bugzilla version. Specify '3.0' for Bugzilla versions
34 version Bugzilla version. Specify '3.0' for Bugzilla versions
35 3.0 and later, '2.18' for Bugzilla versions from 2.18
35 3.0 and later, '2.18' for Bugzilla versions from 2.18
36 and '2.16' for versions prior to 2.18.
36 and '2.16' for versions prior to 2.18.
37 bzuser Fallback Bugzilla user name to record comments with, if
37 bzuser Fallback Bugzilla user name to record comments with, if
38 changeset committer cannot be found as a Bugzilla user.
38 changeset committer cannot be found as a Bugzilla user.
39 bzdir Bugzilla install directory. Used by default notify.
39 bzdir Bugzilla install directory. Used by default notify.
40 Default '/var/www/html/bugzilla'.
40 Default '/var/www/html/bugzilla'.
41 notify The command to run to get Bugzilla to send bug change
41 notify The command to run to get Bugzilla to send bug change
42 notification emails. Substitutes from a map with 3
42 notification emails. Substitutes from a map with 3
43 keys, 'bzdir', 'id' (bug id) and 'user' (committer
43 keys, 'bzdir', 'id' (bug id) and 'user' (committer
44 bugzilla email). Default depends on version; from 2.18
44 bugzilla email). Default depends on version; from 2.18
45 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl
45 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl
46 %(id)s %(user)s".
46 %(id)s %(user)s".
47 regexp Regular expression to match bug IDs in changeset commit
47 regexp Regular expression to match bug IDs in changeset commit
48 message. Must contain one "()" group. The default
48 message. Must contain one "()" group. The default
49 expression matches 'Bug 1234', 'Bug no. 1234', 'Bug
49 expression matches 'Bug 1234', 'Bug no. 1234', 'Bug
50 number 1234', 'Bugs 1234,5678', 'Bug 1234 and 5678' and
50 number 1234', 'Bugs 1234,5678', 'Bug 1234 and 5678' and
51 variations thereof. Matching is case insensitive.
51 variations thereof. Matching is case insensitive.
52 style The style file to use when formatting comments.
52 style The style file to use when formatting comments.
53 template Template to use when formatting comments. Overrides
53 template Template to use when formatting comments. Overrides
54 style if specified. In addition to the usual Mercurial
54 style if specified. In addition to the usual Mercurial
55 keywords, the extension specifies:
55 keywords, the extension specifies:
56 {bug} The Bugzilla bug ID.
56 {bug} The Bugzilla bug ID.
57 {root} The full pathname of the Mercurial
57 {root} The full pathname of the Mercurial
58 repository.
58 repository.
59 {webroot} Stripped pathname of the Mercurial
59 {webroot} Stripped pathname of the Mercurial
60 repository.
60 repository.
61 {hgweb} Base URL for browsing Mercurial
61 {hgweb} Base URL for browsing Mercurial
62 repositories.
62 repositories.
63 Default 'changeset {node|short} in repo {root} refers '
63 Default 'changeset {node|short} in repo {root} refers '
64 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}'
64 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}'
65 strip The number of slashes to strip from the front of {root}
65 strip The number of slashes to strip from the front of {root}
66 to produce {webroot}. Default 0.
66 to produce {webroot}. Default 0.
67 usermap Path of file containing Mercurial committer ID to
67 usermap Path of file containing Mercurial committer ID to
68 Bugzilla user ID mappings. If specified, the file
68 Bugzilla user ID mappings. If specified, the file
69 should contain one mapping per line,
69 should contain one mapping per line,
70 "committer"="Bugzilla user". See also the [usermap]
70 "committer"="Bugzilla user". See also the [usermap]
71 section.
71 section.
72
72
73 [usermap]
73 [usermap]
74 Any entries in this section specify mappings of Mercurial
74 Any entries in this section specify mappings of Mercurial
75 committer ID to Bugzilla user ID. See also [bugzilla].usermap.
75 committer ID to Bugzilla user ID. See also [bugzilla].usermap.
76 "committer"="Bugzilla user"
76 "committer"="Bugzilla user"
77
77
78 [web]
78 [web]
79 baseurl Base URL for browsing Mercurial repositories. Reference
79 baseurl Base URL for browsing Mercurial repositories. Reference
80 from templates as {hgweb}.
80 from templates as {hgweb}.
81
81
82 Activating the extension:
82 Activating the extension:
83
83
84 [extensions]
84 [extensions]
85 hgext.bugzilla =
85 hgext.bugzilla =
86
86
87 [hooks]
87 [hooks]
88 # run bugzilla hook on every change pulled or pushed in here
88 # run bugzilla hook on every change pulled or pushed in here
89 incoming.bugzilla = python:hgext.bugzilla.hook
89 incoming.bugzilla = python:hgext.bugzilla.hook
90
90
91 Example configuration:
91 Example configuration:
92
92
93 This example configuration is for a collection of Mercurial
93 This example configuration is for a collection of Mercurial
94 repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2
94 repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2
95 installation in /opt/bugzilla-3.2.
95 installation in /opt/bugzilla-3.2.
96
96
97 [bugzilla]
97 [bugzilla]
98 host=localhost
98 host=localhost
99 password=XYZZY
99 password=XYZZY
100 version=3.0
100 version=3.0
101 bzuser=unknown@domain.com
101 bzuser=unknown@domain.com
102 bzdir=/opt/bugzilla-3.2
102 bzdir=/opt/bugzilla-3.2
103 template=Changeset {node|short} in {root|basename}.\\n{hgweb}/{webroot}/rev/{node|short}\\n\\n{desc}\\n
103 template=Changeset {node|short} in {root|basename}.\\n{hgweb}/{webroot}/rev/{node|short}\\n\\n{desc}\\n
104 strip=5
104 strip=5
105
105
106 [web]
106 [web]
107 baseurl=http://dev.domain.com/hg
107 baseurl=http://dev.domain.com/hg
108
108
109 [usermap]
109 [usermap]
110 user@emaildomain.com=user.name@bugzilladomain.com
110 user@emaildomain.com=user.name@bugzilladomain.com
111
111
112 Commits add a comment to the Bugzilla bug record of the form:
112 Commits add a comment to the Bugzilla bug record of the form:
113
113
114 Changeset 3b16791d6642 in repository-name.
114 Changeset 3b16791d6642 in repository-name.
115 http://dev.domain.com/hg/repository-name/rev/3b16791d6642
115 http://dev.domain.com/hg/repository-name/rev/3b16791d6642
116
116
117 Changeset commit comment. Bug 1234.
117 Changeset commit comment. Bug 1234.
118 '''
118 '''
119
119
120 from mercurial.i18n import _
120 from mercurial.i18n import _
121 from mercurial.node import short
121 from mercurial.node import short
122 from mercurial import cmdutil, templater, util
122 from mercurial import cmdutil, templater, util
123 import re, time
123 import re, time
124
124
125 MySQLdb = None
125 MySQLdb = None
126
126
127 def buglist(ids):
127 def buglist(ids):
128 return '(' + ','.join(map(str, ids)) + ')'
128 return '(' + ','.join(map(str, ids)) + ')'
129
129
130 class bugzilla_2_16(object):
130 class bugzilla_2_16(object):
131 '''support for bugzilla version 2.16.'''
131 '''support for bugzilla version 2.16.'''
132
132
133 def __init__(self, ui):
133 def __init__(self, ui):
134 self.ui = ui
134 self.ui = ui
135 host = self.ui.config('bugzilla', 'host', 'localhost')
135 host = self.ui.config('bugzilla', 'host', 'localhost')
136 user = self.ui.config('bugzilla', 'user', 'bugs')
136 user = self.ui.config('bugzilla', 'user', 'bugs')
137 passwd = self.ui.config('bugzilla', 'password')
137 passwd = self.ui.config('bugzilla', 'password')
138 db = self.ui.config('bugzilla', 'db', 'bugs')
138 db = self.ui.config('bugzilla', 'db', 'bugs')
139 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
139 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
140 usermap = self.ui.config('bugzilla', 'usermap')
140 usermap = self.ui.config('bugzilla', 'usermap')
141 if usermap:
141 if usermap:
142 self.ui.readconfig(usermap, sections=['usermap'])
142 self.ui.readconfig(usermap, sections=['usermap'])
143 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
143 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
144 (host, db, user, '*' * len(passwd)))
144 (host, db, user, '*' * len(passwd)))
145 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
145 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
146 db=db, connect_timeout=timeout)
146 db=db, connect_timeout=timeout)
147 self.cursor = self.conn.cursor()
147 self.cursor = self.conn.cursor()
148 self.longdesc_id = self.get_longdesc_id()
148 self.longdesc_id = self.get_longdesc_id()
149 self.user_ids = {}
149 self.user_ids = {}
150 self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
150 self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
151
151
152 def run(self, *args, **kwargs):
152 def run(self, *args, **kwargs):
153 '''run a query.'''
153 '''run a query.'''
154 self.ui.note(_('query: %s %s\n') % (args, kwargs))
154 self.ui.note(_('query: %s %s\n') % (args, kwargs))
155 try:
155 try:
156 self.cursor.execute(*args, **kwargs)
156 self.cursor.execute(*args, **kwargs)
157 except MySQLdb.MySQLError:
157 except MySQLdb.MySQLError:
158 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
158 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
159 raise
159 raise
160
160
161 def get_longdesc_id(self):
161 def get_longdesc_id(self):
162 '''get identity of longdesc field'''
162 '''get identity of longdesc field'''
163 self.run('select fieldid from fielddefs where name = "longdesc"')
163 self.run('select fieldid from fielddefs where name = "longdesc"')
164 ids = self.cursor.fetchall()
164 ids = self.cursor.fetchall()
165 if len(ids) != 1:
165 if len(ids) != 1:
166 raise util.Abort(_('unknown database schema'))
166 raise util.Abort(_('unknown database schema'))
167 return ids[0][0]
167 return ids[0][0]
168
168
169 def filter_real_bug_ids(self, ids):
169 def filter_real_bug_ids(self, ids):
170 '''filter not-existing bug ids from list.'''
170 '''filter not-existing bug ids from list.'''
171 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
171 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
172 return sorted([c[0] for c in self.cursor.fetchall()])
172 return sorted([c[0] for c in self.cursor.fetchall()])
173
173
174 def filter_unknown_bug_ids(self, node, ids):
174 def filter_unknown_bug_ids(self, node, ids):
175 '''filter bug ids from list that already refer to this changeset.'''
175 '''filter bug ids from list that already refer to this changeset.'''
176
176
177 self.run('''select bug_id from longdescs where
177 self.run('''select bug_id from longdescs where
178 bug_id in %s and thetext like "%%%s%%"''' %
178 bug_id in %s and thetext like "%%%s%%"''' %
179 (buglist(ids), short(node)))
179 (buglist(ids), short(node)))
180 unknown = set(ids)
180 unknown = set(ids)
181 for (id,) in self.cursor.fetchall():
181 for (id,) in self.cursor.fetchall():
182 self.ui.status(_('bug %d already knows about changeset %s\n') %
182 self.ui.status(_('bug %d already knows about changeset %s\n') %
183 (id, short(node)))
183 (id, short(node)))
184 unknown.discard(id)
184 unknown.discard(id)
185 return sorted(unknown)
185 return sorted(unknown)
186
186
187 def notify(self, ids, committer):
187 def notify(self, ids, committer):
188 '''tell bugzilla to send mail.'''
188 '''tell bugzilla to send mail.'''
189
189
190 self.ui.status(_('telling bugzilla to send mail:\n'))
190 self.ui.status(_('telling bugzilla to send mail:\n'))
191 (user, userid) = self.get_bugzilla_user(committer)
191 (user, userid) = self.get_bugzilla_user(committer)
192 for id in ids:
192 for id in ids:
193 self.ui.status(_(' bug %s\n') % id)
193 self.ui.status(_(' bug %s\n') % id)
194 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
194 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
195 bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
195 bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
196 try:
196 try:
197 # Backwards-compatible with old notify string, which
197 # Backwards-compatible with old notify string, which
198 # took one string. This will throw with a new format
198 # took one string. This will throw with a new format
199 # string.
199 # string.
200 cmd = cmdfmt % id
200 cmd = cmdfmt % id
201 except TypeError:
201 except TypeError:
202 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
202 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
203 self.ui.note(_('running notify command %s\n') % cmd)
203 self.ui.note(_('running notify command %s\n') % cmd)
204 fp = util.popen('(%s) 2>&1' % cmd)
204 fp = util.popen('(%s) 2>&1' % cmd)
205 out = fp.read()
205 out = fp.read()
206 ret = fp.close()
206 ret = fp.close()
207 if ret:
207 if ret:
208 self.ui.warn(out)
208 self.ui.warn(out)
209 raise util.Abort(_('bugzilla notify command %s') %
209 raise util.Abort(_('bugzilla notify command %s') %
210 util.explain_exit(ret)[0])
210 util.explain_exit(ret)[0])
211 self.ui.status(_('done\n'))
211 self.ui.status(_('done\n'))
212
212
213 def get_user_id(self, user):
213 def get_user_id(self, user):
214 '''look up numeric bugzilla user id.'''
214 '''look up numeric bugzilla user id.'''
215 try:
215 try:
216 return self.user_ids[user]
216 return self.user_ids[user]
217 except KeyError:
217 except KeyError:
218 try:
218 try:
219 userid = int(user)
219 userid = int(user)
220 except ValueError:
220 except ValueError:
221 self.ui.note(_('looking up user %s\n') % user)
221 self.ui.note(_('looking up user %s\n') % user)
222 self.run('''select userid from profiles
222 self.run('''select userid from profiles
223 where login_name like %s''', user)
223 where login_name like %s''', user)
224 all = self.cursor.fetchall()
224 all = self.cursor.fetchall()
225 if len(all) != 1:
225 if len(all) != 1:
226 raise KeyError(user)
226 raise KeyError(user)
227 userid = int(all[0][0])
227 userid = int(all[0][0])
228 self.user_ids[user] = userid
228 self.user_ids[user] = userid
229 return userid
229 return userid
230
230
231 def map_committer(self, user):
231 def map_committer(self, user):
232 '''map name of committer to bugzilla user name.'''
232 '''map name of committer to bugzilla user name.'''
233 for committer, bzuser in self.ui.configitems('usermap'):
233 for committer, bzuser in self.ui.configitems('usermap'):
234 if committer.lower() == user.lower():
234 if committer.lower() == user.lower():
235 return bzuser
235 return bzuser
236 return user
236 return user
237
237
238 def get_bugzilla_user(self, committer):
238 def get_bugzilla_user(self, committer):
239 '''see if committer is a registered bugzilla user. Return
239 '''see if committer is a registered bugzilla user. Return
240 bugzilla username and userid if so. If not, return default
240 bugzilla username and userid if so. If not, return default
241 bugzilla username and userid.'''
241 bugzilla username and userid.'''
242 user = self.map_committer(committer)
242 user = self.map_committer(committer)
243 try:
243 try:
244 userid = self.get_user_id(user)
244 userid = self.get_user_id(user)
245 except KeyError:
245 except KeyError:
246 try:
246 try:
247 defaultuser = self.ui.config('bugzilla', 'bzuser')
247 defaultuser = self.ui.config('bugzilla', 'bzuser')
248 if not defaultuser:
248 if not defaultuser:
249 raise util.Abort(_('cannot find bugzilla user id for %s') %
249 raise util.Abort(_('cannot find bugzilla user id for %s') %
250 user)
250 user)
251 userid = self.get_user_id(defaultuser)
251 userid = self.get_user_id(defaultuser)
252 user = defaultuser
252 user = defaultuser
253 except KeyError:
253 except KeyError:
254 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
254 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
255 (user, defaultuser))
255 (user, defaultuser))
256 return (user, userid)
256 return (user, userid)
257
257
258 def add_comment(self, bugid, text, committer):
258 def add_comment(self, bugid, text, committer):
259 '''add comment to bug. try adding comment as committer of
259 '''add comment to bug. try adding comment as committer of
260 changeset, otherwise as default bugzilla user.'''
260 changeset, otherwise as default bugzilla user.'''
261 (user, userid) = self.get_bugzilla_user(committer)
261 (user, userid) = self.get_bugzilla_user(committer)
262 now = time.strftime('%Y-%m-%d %H:%M:%S')
262 now = time.strftime('%Y-%m-%d %H:%M:%S')
263 self.run('''insert into longdescs
263 self.run('''insert into longdescs
264 (bug_id, who, bug_when, thetext)
264 (bug_id, who, bug_when, thetext)
265 values (%s, %s, %s, %s)''',
265 values (%s, %s, %s, %s)''',
266 (bugid, userid, now, text))
266 (bugid, userid, now, text))
267 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
267 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
268 values (%s, %s, %s, %s)''',
268 values (%s, %s, %s, %s)''',
269 (bugid, userid, now, self.longdesc_id))
269 (bugid, userid, now, self.longdesc_id))
270 self.conn.commit()
270 self.conn.commit()
271
271
272 class bugzilla_2_18(bugzilla_2_16):
272 class bugzilla_2_18(bugzilla_2_16):
273 '''support for bugzilla 2.18 series.'''
273 '''support for bugzilla 2.18 series.'''
274
274
275 def __init__(self, ui):
275 def __init__(self, ui):
276 bugzilla_2_16.__init__(self, ui)
276 bugzilla_2_16.__init__(self, ui)
277 self.default_notify = "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
277 self.default_notify = "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
278
278
279 class bugzilla_3_0(bugzilla_2_18):
279 class bugzilla_3_0(bugzilla_2_18):
280 '''support for bugzilla 3.0 series.'''
280 '''support for bugzilla 3.0 series.'''
281
281
282 def __init__(self, ui):
282 def __init__(self, ui):
283 bugzilla_2_18.__init__(self, ui)
283 bugzilla_2_18.__init__(self, ui)
284
284
285 def get_longdesc_id(self):
285 def get_longdesc_id(self):
286 '''get identity of longdesc field'''
286 '''get identity of longdesc field'''
287 self.run('select id from fielddefs where name = "longdesc"')
287 self.run('select id from fielddefs where name = "longdesc"')
288 ids = self.cursor.fetchall()
288 ids = self.cursor.fetchall()
289 if len(ids) != 1:
289 if len(ids) != 1:
290 raise util.Abort(_('unknown database schema'))
290 raise util.Abort(_('unknown database schema'))
291 return ids[0][0]
291 return ids[0][0]
292
292
293 class bugzilla(object):
293 class bugzilla(object):
294 # supported versions of bugzilla. different versions have
294 # supported versions of bugzilla. different versions have
295 # different schemas.
295 # different schemas.
296 _versions = {
296 _versions = {
297 '2.16': bugzilla_2_16,
297 '2.16': bugzilla_2_16,
298 '2.18': bugzilla_2_18,
298 '2.18': bugzilla_2_18,
299 '3.0': bugzilla_3_0
299 '3.0': bugzilla_3_0
300 }
300 }
301
301
302 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
302 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
303 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
303 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
304
304
305 _bz = None
305 _bz = None
306
306
307 def __init__(self, ui, repo):
307 def __init__(self, ui, repo):
308 self.ui = ui
308 self.ui = ui
309 self.repo = repo
309 self.repo = repo
310
310
311 def bz(self):
311 def bz(self):
312 '''return object that knows how to talk to bugzilla version in
312 '''return object that knows how to talk to bugzilla version in
313 use.'''
313 use.'''
314
314
315 if bugzilla._bz is None:
315 if bugzilla._bz is None:
316 bzversion = self.ui.config('bugzilla', 'version')
316 bzversion = self.ui.config('bugzilla', 'version')
317 try:
317 try:
318 bzclass = bugzilla._versions[bzversion]
318 bzclass = bugzilla._versions[bzversion]
319 except KeyError:
319 except KeyError:
320 raise util.Abort(_('bugzilla version %s not supported') %
320 raise util.Abort(_('bugzilla version %s not supported') %
321 bzversion)
321 bzversion)
322 bugzilla._bz = bzclass(self.ui)
322 bugzilla._bz = bzclass(self.ui)
323 return bugzilla._bz
323 return bugzilla._bz
324
324
325 def __getattr__(self, key):
325 def __getattr__(self, key):
326 return getattr(self.bz(), key)
326 return getattr(self.bz(), key)
327
327
328 _bug_re = None
328 _bug_re = None
329 _split_re = None
329 _split_re = None
330
330
331 def find_bug_ids(self, ctx):
331 def find_bug_ids(self, ctx):
332 '''find valid bug ids that are referred to in changeset
332 '''find valid bug ids that are referred to in changeset
333 comments and that do not already have references to this
333 comments and that do not already have references to this
334 changeset.'''
334 changeset.'''
335
335
336 if bugzilla._bug_re is None:
336 if bugzilla._bug_re is None:
337 bugzilla._bug_re = re.compile(
337 bugzilla._bug_re = re.compile(
338 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
338 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
339 re.IGNORECASE)
339 re.IGNORECASE)
340 bugzilla._split_re = re.compile(r'\D+')
340 bugzilla._split_re = re.compile(r'\D+')
341 start = 0
341 start = 0
342 ids = set()
342 ids = set()
343 while True:
343 while True:
344 m = bugzilla._bug_re.search(ctx.description(), start)
344 m = bugzilla._bug_re.search(ctx.description(), start)
345 if not m:
345 if not m:
346 break
346 break
347 start = m.end()
347 start = m.end()
348 for id in bugzilla._split_re.split(m.group(1)):
348 for id in bugzilla._split_re.split(m.group(1)):
349 if not id: continue
349 if not id: continue
350 ids.add(int(id))
350 ids.add(int(id))
351 if ids:
351 if ids:
352 ids = self.filter_real_bug_ids(ids)
352 ids = self.filter_real_bug_ids(ids)
353 if ids:
353 if ids:
354 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
354 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
355 return ids
355 return ids
356
356
357 def update(self, bugid, ctx):
357 def update(self, bugid, ctx):
358 '''update bugzilla bug with reference to changeset.'''
358 '''update bugzilla bug with reference to changeset.'''
359
359
360 def webroot(root):
360 def webroot(root):
361 '''strip leading prefix of repo root and turn into
361 '''strip leading prefix of repo root and turn into
362 url-safe path.'''
362 url-safe path.'''
363 count = int(self.ui.config('bugzilla', 'strip', 0))
363 count = int(self.ui.config('bugzilla', 'strip', 0))
364 root = util.pconvert(root)
364 root = util.pconvert(root)
365 while count > 0:
365 while count > 0:
366 c = root.find('/')
366 c = root.find('/')
367 if c == -1:
367 if c == -1:
368 break
368 break
369 root = root[c+1:]
369 root = root[c+1:]
370 count -= 1
370 count -= 1
371 return root
371 return root
372
372
373 mapfile = self.ui.config('bugzilla', 'style')
373 mapfile = self.ui.config('bugzilla', 'style')
374 tmpl = self.ui.config('bugzilla', 'template')
374 tmpl = self.ui.config('bugzilla', 'template')
375 t = cmdutil.changeset_templater(self.ui, self.repo,
375 t = cmdutil.changeset_templater(self.ui, self.repo,
376 False, None, mapfile, False)
376 False, None, mapfile, False)
377 if not mapfile and not tmpl:
377 if not mapfile and not tmpl:
378 tmpl = _('changeset {node|short} in repo {root} refers '
378 tmpl = _('changeset {node|short} in repo {root} refers '
379 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
379 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
380 if tmpl:
380 if tmpl:
381 tmpl = templater.parsestring(tmpl, quoted=False)
381 tmpl = templater.parsestring(tmpl, quoted=False)
382 t.use_template(tmpl)
382 t.use_template(tmpl)
383 self.ui.pushbuffer()
383 self.ui.pushbuffer()
384 t.show(ctx, changes=ctx.changeset(),
384 t.show(ctx, changes=ctx.changeset(),
385 bug=str(bugid),
385 bug=str(bugid),
386 hgweb=self.ui.config('web', 'baseurl'),
386 hgweb=self.ui.config('web', 'baseurl'),
387 root=self.repo.root,
387 root=self.repo.root,
388 webroot=webroot(self.repo.root))
388 webroot=webroot(self.repo.root))
389 data = self.ui.popbuffer()
389 data = self.ui.popbuffer()
390 self.add_comment(bugid, data, util.email(ctx.user()))
390 self.add_comment(bugid, data, util.email(ctx.user()))
391
391
392 def hook(ui, repo, hooktype, node=None, **kwargs):
392 def hook(ui, repo, hooktype, node=None, **kwargs):
393 '''add comment to bugzilla for each changeset that refers to a
393 '''add comment to bugzilla for each changeset that refers to a
394 bugzilla bug id. only add a comment once per bug, so same change
394 bugzilla bug id. only add a comment once per bug, so same change
395 seen multiple times does not fill bug with duplicate data.'''
395 seen multiple times does not fill bug with duplicate data.'''
396 try:
396 try:
397 import MySQLdb as mysql
397 import MySQLdb as mysql
398 global MySQLdb
398 global MySQLdb
399 MySQLdb = mysql
399 MySQLdb = mysql
400 except ImportError, err:
400 except ImportError, err:
401 raise util.Abort(_('python mysql support not available: %s') % err)
401 raise util.Abort(_('python mysql support not available: %s') % err)
402
402
403 if node is None:
403 if node is None:
404 raise util.Abort(_('hook type %s does not pass a changeset id') %
404 raise util.Abort(_('hook type %s does not pass a changeset id') %
405 hooktype)
405 hooktype)
406 try:
406 try:
407 bz = bugzilla(ui, repo)
407 bz = bugzilla(ui, repo)
408 ctx = repo[node]
408 ctx = repo[node]
409 ids = bz.find_bug_ids(ctx)
409 ids = bz.find_bug_ids(ctx)
410 if ids:
410 if ids:
411 for id in ids:
411 for id in ids:
412 bz.update(id, ctx)
412 bz.update(id, ctx)
413 bz.notify(ids, util.email(ctx.user()))
413 bz.notify(ids, util.email(ctx.user()))
414 except MySQLdb.MySQLError, err:
414 except MySQLdb.MySQLError, err:
415 raise util.Abort(_('database error: %s') % err[1])
415 raise util.Abort(_('database error: %s') % err[1])
416
416
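For context, the hook defined above is normally wired up from hgrc. The sketch below is only illustrative: the [hooks] entry is the usual way to invoke hgext.bugzilla.hook, while the [bugzilla] values shown (host, user, password, version) are placeholders for whatever a real Bugzilla MySQL installation actually uses.

  [extensions]
  hgext.bugzilla =

  [hooks]
  # update Bugzilla for every changeset pulled or pushed into this repository
  incoming.bugzilla = python:hgext.bugzilla.hook

  [bugzilla]
  host = localhost
  user = bugs
  password = secret
  version = 3.0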
@@ -1,44 +1,44 b''
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 #
4 #
5 # Author(s):
5 # Author(s):
6 # Thomas Arendsen Hein <thomas@intevation.de>
6 # Thomas Arendsen Hein <thomas@intevation.de>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2, incorporated herein by reference.
9 # GNU General Public License version 2, incorporated herein by reference.
10
10
11 '''provides children command to show children changesets'''
11 '''display children changesets'''
12
12
13 from mercurial import cmdutil
13 from mercurial import cmdutil
14 from mercurial.commands import templateopts
14 from mercurial.commands import templateopts
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16
16
17
17
18 def children(ui, repo, file_=None, **opts):
18 def children(ui, repo, file_=None, **opts):
19 """show the children of the given or working directory revision
19 """show the children of the given or working directory revision
20
20
21 Print the children of the working directory's revisions. If a
21 Print the children of the working directory's revisions. If a
22 revision is given via -r/--rev, the children of that revision will
22 revision is given via -r/--rev, the children of that revision will
23 be printed. If a file argument is given, the revision in which the
23 be printed. If a file argument is given, the revision in which the
24 file was last changed (after the working directory revision or the
24 file was last changed (after the working directory revision or the
25 argument to --rev if given) is printed.
25 argument to --rev if given) is printed.
26 """
26 """
27 rev = opts.get('rev')
27 rev = opts.get('rev')
28 if file_:
28 if file_:
29 ctx = repo.filectx(file_, changeid=rev)
29 ctx = repo.filectx(file_, changeid=rev)
30 else:
30 else:
31 ctx = repo[rev]
31 ctx = repo[rev]
32
32
33 displayer = cmdutil.show_changeset(ui, repo, opts)
33 displayer = cmdutil.show_changeset(ui, repo, opts)
34 for cctx in ctx.children():
34 for cctx in ctx.children():
35 displayer.show(cctx)
35 displayer.show(cctx)
36
36
37
37
38 cmdtable = {
38 cmdtable = {
39 "children":
39 "children":
40 (children,
40 (children,
41 [('r', 'rev', '', _('show children of the specified revision')),
41 [('r', 'rev', '', _('show children of the specified revision')),
42 ] + templateopts,
42 ] + templateopts,
43 _('hg children [-r REV] [FILE]')),
43 _('hg children [-r REV] [FILE]')),
44 }
44 }
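A few hypothetical invocations of the command registered above (the revision name and file path are only examples):

  hg children                       # children of the working directory revision
  hg children -r 1.2                # children of the revision tagged 1.2
  hg children hgext/children.py     # start from the revision that last changed the file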
@@ -1,174 +1,174 b''
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 '''command to show certain statistics about revision history'''
9 '''display statistics about repository history'''
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import patch, cmdutil, util, templater
12 from mercurial import patch, cmdutil, util, templater
13 import sys, os
13 import sys, os
14 import time, datetime
14 import time, datetime
15
15
16 def maketemplater(ui, repo, tmpl):
16 def maketemplater(ui, repo, tmpl):
17 tmpl = templater.parsestring(tmpl, quoted=False)
17 tmpl = templater.parsestring(tmpl, quoted=False)
18 try:
18 try:
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
20 except SyntaxError, inst:
20 except SyntaxError, inst:
21 raise util.Abort(inst.args[0])
21 raise util.Abort(inst.args[0])
22 t.use_template(tmpl)
22 t.use_template(tmpl)
23 return t
23 return t
24
24
25 def changedlines(ui, repo, ctx1, ctx2, fns):
25 def changedlines(ui, repo, ctx1, ctx2, fns):
26 lines = 0
26 lines = 0
27 fmatch = cmdutil.match(repo, pats=fns)
27 fmatch = cmdutil.match(repo, pats=fns)
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
29 for l in diff.split('\n'):
29 for l in diff.split('\n'):
30 if (l.startswith("+") and not l.startswith("+++ ") or
30 if (l.startswith("+") and not l.startswith("+++ ") or
31 l.startswith("-") and not l.startswith("--- ")):
31 l.startswith("-") and not l.startswith("--- ")):
32 lines += 1
32 lines += 1
33 return lines
33 return lines
34
34
35 def countrate(ui, repo, amap, *pats, **opts):
35 def countrate(ui, repo, amap, *pats, **opts):
36 """Calculate stats"""
36 """Calculate stats"""
37 if opts.get('dateformat'):
37 if opts.get('dateformat'):
38 def getkey(ctx):
38 def getkey(ctx):
39 t, tz = ctx.date()
39 t, tz = ctx.date()
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
41 return date.strftime(opts['dateformat'])
41 return date.strftime(opts['dateformat'])
42 else:
42 else:
43 tmpl = opts.get('template', '{author|email}')
43 tmpl = opts.get('template', '{author|email}')
44 tmpl = maketemplater(ui, repo, tmpl)
44 tmpl = maketemplater(ui, repo, tmpl)
45 def getkey(ctx):
45 def getkey(ctx):
46 ui.pushbuffer()
46 ui.pushbuffer()
47 tmpl.show(ctx)
47 tmpl.show(ctx)
48 return ui.popbuffer()
48 return ui.popbuffer()
49
49
50 count = pct = 0
50 count = pct = 0
51 rate = {}
51 rate = {}
52 df = False
52 df = False
53 if opts.get('date'):
53 if opts.get('date'):
54 df = util.matchdate(opts['date'])
54 df = util.matchdate(opts['date'])
55
55
56 get = util.cachefunc(lambda r: repo[r].changeset())
56 get = util.cachefunc(lambda r: repo[r].changeset())
57 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
57 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
58 for st, rev, fns in changeiter:
58 for st, rev, fns in changeiter:
59 if not st == 'add':
59 if not st == 'add':
60 continue
60 continue
61 if df and not df(get(rev)[2][0]): # doesn't match date format
61 if df and not df(get(rev)[2][0]): # doesn't match date format
62 continue
62 continue
63
63
64 ctx = repo[rev]
64 ctx = repo[rev]
65 key = getkey(ctx)
65 key = getkey(ctx)
66 key = amap.get(key, key) # alias remap
66 key = amap.get(key, key) # alias remap
67 if opts.get('changesets'):
67 if opts.get('changesets'):
68 rate[key] = rate.get(key, 0) + 1
68 rate[key] = rate.get(key, 0) + 1
69 else:
69 else:
70 parents = ctx.parents()
70 parents = ctx.parents()
71 if len(parents) > 1:
71 if len(parents) > 1:
72 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
72 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
73 continue
73 continue
74
74
75 ctx1 = parents[0]
75 ctx1 = parents[0]
76 lines = changedlines(ui, repo, ctx1, ctx, fns)
76 lines = changedlines(ui, repo, ctx1, ctx, fns)
77 rate[key] = rate.get(key, 0) + lines
77 rate[key] = rate.get(key, 0) + lines
78
78
79 if opts.get('progress'):
79 if opts.get('progress'):
80 count += 1
80 count += 1
81 newpct = int(100.0 * count / max(len(repo), 1))
81 newpct = int(100.0 * count / max(len(repo), 1))
82 if pct < newpct:
82 if pct < newpct:
83 pct = newpct
83 pct = newpct
84 ui.write("\r" + _("generating stats: %d%%") % pct)
84 ui.write("\r" + _("generating stats: %d%%") % pct)
85 sys.stdout.flush()
85 sys.stdout.flush()
86
86
87 if opts.get('progress'):
87 if opts.get('progress'):
88 ui.write("\r")
88 ui.write("\r")
89 sys.stdout.flush()
89 sys.stdout.flush()
90
90
91 return rate
91 return rate
92
92
93
93
94 def churn(ui, repo, *pats, **opts):
94 def churn(ui, repo, *pats, **opts):
95 '''histogram of changes to the repository
95 '''histogram of changes to the repository
96
96
97 This command will display a histogram representing the number
97 This command will display a histogram representing the number
98 of changed lines or revisions, grouped according to the given
98 of changed lines or revisions, grouped according to the given
99 template. The default template will group changes by author.
99 template. The default template will group changes by author.
100 The --dateformat option may be used to group the results by
100 The --dateformat option may be used to group the results by
101 date instead.
101 date instead.
102
102
103 Statistics are based on the number of changed lines, or
103 Statistics are based on the number of changed lines, or
104 alternatively the number of matching revisions if the
104 alternatively the number of matching revisions if the
105 --changesets option is specified.
105 --changesets option is specified.
106
106
107 Examples:
107 Examples:
108
108
109 # display count of changed lines for every committer
109 # display count of changed lines for every committer
110 hg churn -t '{author|email}'
110 hg churn -t '{author|email}'
111
111
112 # display daily activity graph
112 # display daily activity graph
113 hg churn -f '%H' -s -c
113 hg churn -f '%H' -s -c
114
114
115 # display activity of developers by month
115 # display activity of developers by month
116 hg churn -f '%Y-%m' -s -c
116 hg churn -f '%Y-%m' -s -c
117
117
118 # display count of lines changed in every year
118 # display count of lines changed in every year
119 hg churn -f '%Y' -s
119 hg churn -f '%Y' -s
120
120
121 It is possible to map alternate email addresses to a main address
121 It is possible to map alternate email addresses to a main address
122 by providing a file using the following format:
122 by providing a file using the following format:
123
123
124 <alias email> <actual email>
124 <alias email> <actual email>
125
125
126 Such a file may be specified with the --aliases option; otherwise a
126 Such a file may be specified with the --aliases option; otherwise a
127 .hgchurn file will be looked for in the working directory root.
127 .hgchurn file will be looked for in the working directory root.
128 '''
128 '''
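To illustrate the alias file format described in the docstring above, a .hgchurn file with two made-up mappings would look like this, one whitespace-separated pair per line, alias address first and canonical address second:

  john@home.example.org john@work.example.com
  jdoe@old.example.net john@work.example.com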
129 def pad(s, l):
129 def pad(s, l):
130 return (s + " " * l)[:l]
130 return (s + " " * l)[:l]
131
131
132 amap = {}
132 amap = {}
133 aliases = opts.get('aliases')
133 aliases = opts.get('aliases')
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
135 aliases = repo.wjoin('.hgchurn')
135 aliases = repo.wjoin('.hgchurn')
136 if aliases:
136 if aliases:
137 for l in open(aliases, "r"):
137 for l in open(aliases, "r"):
138 l = l.strip()
138 l = l.strip()
139 alias, actual = l.split()
139 alias, actual = l.split()
140 amap[alias] = actual
140 amap[alias] = actual
141
141
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
143 if not rate:
143 if not rate:
144 return
144 return
145
145
146 sortfn = ((not opts.get('sort')) and (lambda a, b: cmp(b[1], a[1])) or None)
146 sortfn = ((not opts.get('sort')) and (lambda a, b: cmp(b[1], a[1])) or None)
147 rate.sort(sortfn)
147 rate.sort(sortfn)
148
148
149 maxcount = float(max([v for k, v in rate]))
149 maxcount = float(max([v for k, v in rate]))
150 maxname = max([len(k) for k, v in rate])
150 maxname = max([len(k) for k, v in rate])
151
151
152 ttywidth = util.termwidth()
152 ttywidth = util.termwidth()
153 ui.debug(_("assuming %i character terminal\n") % ttywidth)
153 ui.debug(_("assuming %i character terminal\n") % ttywidth)
154 width = ttywidth - maxname - 2 - 6 - 2 - 2
154 width = ttywidth - maxname - 2 - 6 - 2 - 2
155
155
156 for date, count in rate:
156 for date, count in rate:
157 print "%s %6d %s" % (pad(date, maxname), count,
157 print "%s %6d %s" % (pad(date, maxname), count,
158 "*" * int(count * width / maxcount))
158 "*" * int(count * width / maxcount))
159
159
160
160
161 cmdtable = {
161 cmdtable = {
162 "churn":
162 "churn":
163 (churn,
163 (churn,
164 [('r', 'rev', [], _('count rate for the specified revision or range')),
164 [('r', 'rev', [], _('count rate for the specified revision or range')),
165 ('d', 'date', '', _('count rate for revisions matching date spec')),
165 ('d', 'date', '', _('count rate for revisions matching date spec')),
166 ('t', 'template', '{author|email}', _('template to group changesets')),
166 ('t', 'template', '{author|email}', _('template to group changesets')),
167 ('f', 'dateformat', '',
167 ('f', 'dateformat', '',
168 _('strftime-compatible format for grouping by date')),
168 _('strftime-compatible format for grouping by date')),
169 ('c', 'changesets', False, _('count rate by number of changesets')),
169 ('c', 'changesets', False, _('count rate by number of changesets')),
170 ('s', 'sort', False, _('sort by key (default: sort by count)')),
170 ('s', 'sort', False, _('sort by key (default: sort by count)')),
171 ('', 'aliases', '', _('file with email aliases')),
171 ('', 'aliases', '', _('file with email aliases')),
172 ('', 'progress', None, _('show progress'))],
172 ('', 'progress', None, _('show progress'))],
173 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
173 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
174 }
174 }
@@ -1,266 +1,266 b''
1 # color.py color output for the status and qseries commands
1 # color.py color output for the status and qseries commands
2 #
2 #
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 #
4 #
5 # This program is free software; you can redistribute it and/or modify it
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the
6 # under the terms of the GNU General Public License as published by the
7 # Free Software Foundation; either version 2 of the License, or (at your
7 # Free Software Foundation; either version 2 of the License, or (at your
8 # option) any later version.
8 # option) any later version.
9 #
9 #
10 # This program is distributed in the hope that it will be useful, but
10 # This program is distributed in the hope that it will be useful, but
11 # WITHOUT ANY WARRANTY; without even the implied warranty of
11 # WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13 # Public License for more details.
13 # Public License for more details.
14 #
14 #
15 # You should have received a copy of the GNU General Public License along
15 # You should have received a copy of the GNU General Public License along
16 # with this program; if not, write to the Free Software Foundation, Inc.,
16 # with this program; if not, write to the Free Software Foundation, Inc.,
17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
18
19 '''add color output to status, qseries, and diff-related commands
19 '''colorize output from some commands
20
20
21 This extension modifies the status command to add color to its output
21 This extension modifies the status command to add color to its output
22 to reflect file status, the qseries command to add color to reflect
22 to reflect file status, the qseries command to add color to reflect
23 patch status (applied, unapplied, missing), and to diff-related
23 patch status (applied, unapplied, missing), and to diff-related
24 commands to highlight additions, removals, diff headers, and trailing
24 commands to highlight additions, removals, diff headers, and trailing
25 whitespace.
25 whitespace.
26
26
27 Other effects in addition to color, like bold and underlined text, are
27 Other effects in addition to color, like bold and underlined text, are
28 also available. Effects are rendered with the ECMA-48 SGR control
28 also available. Effects are rendered with the ECMA-48 SGR control
29 function (aka ANSI escape codes). This module also provides the
29 function (aka ANSI escape codes). This module also provides the
30 render_effects function, which can be used to add effects to any text.
30 render_effects function, which can be used to add effects to any text.
31
31
32 Default effects may be overridden from the .hgrc file:
32 Default effects may be overridden from the .hgrc file:
33
33
34 [color]
34 [color]
35 status.modified = blue bold underline red_background
35 status.modified = blue bold underline red_background
36 status.added = green bold
36 status.added = green bold
37 status.removed = red bold blue_background
37 status.removed = red bold blue_background
38 status.deleted = cyan bold underline
38 status.deleted = cyan bold underline
39 status.unknown = magenta bold underline
39 status.unknown = magenta bold underline
40 status.ignored = black bold
40 status.ignored = black bold
41
41
42 # 'none' turns off all effects
42 # 'none' turns off all effects
43 status.clean = none
43 status.clean = none
44 status.copied = none
44 status.copied = none
45
45
46 qseries.applied = blue bold underline
46 qseries.applied = blue bold underline
47 qseries.unapplied = black bold
47 qseries.unapplied = black bold
48 qseries.missing = red bold
48 qseries.missing = red bold
49
49
50 diff.diffline = bold
50 diff.diffline = bold
51 diff.extended = cyan bold
51 diff.extended = cyan bold
52 diff.file_a = red bold
52 diff.file_a = red bold
53 diff.file_b = green bold
53 diff.file_b = green bold
54 diff.hunk = magenta
54 diff.hunk = magenta
55 diff.deleted = red
55 diff.deleted = red
56 diff.inserted = green
56 diff.inserted = green
57 diff.changed = white
57 diff.changed = white
58 diff.trailingwhitespace = bold red_background
58 diff.trailingwhitespace = bold red_background
59 '''
59 '''
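To make the effect rendering concrete, here is a sketch of what the render_effects helper defined below produces for one of the default status styles (the input string is arbitrary):

  from hgext.color import render_effects
  colored = render_effects('M modified.txt', ['blue', 'bold'])
  # colored == '\x1b[0;34;1mM modified.txt\x1b[0m'
  # i.e. reset (0), blue foreground (34), bold (1), the text, then a final reset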
60
60
61 import os, sys
61 import os, sys
62
62
63 from mercurial import cmdutil, commands, extensions
63 from mercurial import cmdutil, commands, extensions
64 from mercurial.i18n import _
64 from mercurial.i18n import _
65
65
66 # start and stop parameters for effects
66 # start and stop parameters for effects
67 _effect_params = {'none': 0,
67 _effect_params = {'none': 0,
68 'black': 30,
68 'black': 30,
69 'red': 31,
69 'red': 31,
70 'green': 32,
70 'green': 32,
71 'yellow': 33,
71 'yellow': 33,
72 'blue': 34,
72 'blue': 34,
73 'magenta': 35,
73 'magenta': 35,
74 'cyan': 36,
74 'cyan': 36,
75 'white': 37,
75 'white': 37,
76 'bold': 1,
76 'bold': 1,
77 'italic': 3,
77 'italic': 3,
78 'underline': 4,
78 'underline': 4,
79 'inverse': 7,
79 'inverse': 7,
80 'black_background': 40,
80 'black_background': 40,
81 'red_background': 41,
81 'red_background': 41,
82 'green_background': 42,
82 'green_background': 42,
83 'yellow_background': 43,
83 'yellow_background': 43,
84 'blue_background': 44,
84 'blue_background': 44,
85 'purple_background': 45,
85 'purple_background': 45,
86 'cyan_background': 46,
86 'cyan_background': 46,
87 'white_background': 47}
87 'white_background': 47}
88
88
89 def render_effects(text, effects):
89 def render_effects(text, effects):
90 'Wrap text in commands to turn on each effect.'
90 'Wrap text in commands to turn on each effect.'
91 start = [str(_effect_params[e]) for e in ['none'] + effects]
91 start = [str(_effect_params[e]) for e in ['none'] + effects]
92 start = '\033[' + ';'.join(start) + 'm'
92 start = '\033[' + ';'.join(start) + 'm'
93 stop = '\033[' + str(_effect_params['none']) + 'm'
93 stop = '\033[' + str(_effect_params['none']) + 'm'
94 return ''.join([start, text, stop])
94 return ''.join([start, text, stop])
95
95
96 def colorstatus(orig, ui, repo, *pats, **opts):
96 def colorstatus(orig, ui, repo, *pats, **opts):
97 '''run the status command with colored output'''
97 '''run the status command with colored output'''
98
98
99 delimiter = opts['print0'] and '\0' or '\n'
99 delimiter = opts['print0'] and '\0' or '\n'
100
100
101 nostatus = opts.get('no_status')
101 nostatus = opts.get('no_status')
102 opts['no_status'] = False
102 opts['no_status'] = False
103 # run status and capture its output
103 # run status and capture its output
104 ui.pushbuffer()
104 ui.pushbuffer()
105 retval = orig(ui, repo, *pats, **opts)
105 retval = orig(ui, repo, *pats, **opts)
106 # filter out empty strings
106 # filter out empty strings
107 lines_with_status = [ line for line in ui.popbuffer().split(delimiter) if line ]
107 lines_with_status = [ line for line in ui.popbuffer().split(delimiter) if line ]
108
108
109 if nostatus:
109 if nostatus:
110 lines = [l[2:] for l in lines_with_status]
110 lines = [l[2:] for l in lines_with_status]
111 else:
111 else:
112 lines = lines_with_status
112 lines = lines_with_status
113
113
114 # apply color to output and display it
114 # apply color to output and display it
115 for i in xrange(len(lines)):
115 for i in xrange(len(lines)):
116 status = _status_abbreviations[lines_with_status[i][0]]
116 status = _status_abbreviations[lines_with_status[i][0]]
117 effects = _status_effects[status]
117 effects = _status_effects[status]
118 if effects:
118 if effects:
119 lines[i] = render_effects(lines[i], effects)
119 lines[i] = render_effects(lines[i], effects)
120 ui.write(lines[i] + delimiter)
120 ui.write(lines[i] + delimiter)
121 return retval
121 return retval
122
122
123 _status_abbreviations = { 'M': 'modified',
123 _status_abbreviations = { 'M': 'modified',
124 'A': 'added',
124 'A': 'added',
125 'R': 'removed',
125 'R': 'removed',
126 '!': 'deleted',
126 '!': 'deleted',
127 '?': 'unknown',
127 '?': 'unknown',
128 'I': 'ignored',
128 'I': 'ignored',
129 'C': 'clean',
129 'C': 'clean',
130 ' ': 'copied', }
130 ' ': 'copied', }
131
131
132 _status_effects = { 'modified': ['blue', 'bold'],
132 _status_effects = { 'modified': ['blue', 'bold'],
133 'added': ['green', 'bold'],
133 'added': ['green', 'bold'],
134 'removed': ['red', 'bold'],
134 'removed': ['red', 'bold'],
135 'deleted': ['cyan', 'bold', 'underline'],
135 'deleted': ['cyan', 'bold', 'underline'],
136 'unknown': ['magenta', 'bold', 'underline'],
136 'unknown': ['magenta', 'bold', 'underline'],
137 'ignored': ['black', 'bold'],
137 'ignored': ['black', 'bold'],
138 'clean': ['none'],
138 'clean': ['none'],
139 'copied': ['none'], }
139 'copied': ['none'], }
140
140
141 def colorqseries(orig, ui, repo, *dummy, **opts):
141 def colorqseries(orig, ui, repo, *dummy, **opts):
142 '''run the qseries command with colored output'''
142 '''run the qseries command with colored output'''
143 ui.pushbuffer()
143 ui.pushbuffer()
144 retval = orig(ui, repo, **opts)
144 retval = orig(ui, repo, **opts)
145 patches = ui.popbuffer().splitlines()
145 patches = ui.popbuffer().splitlines()
146 for patch in patches:
146 for patch in patches:
147 patchname = patch
147 patchname = patch
148 if opts['summary']:
148 if opts['summary']:
149 patchname = patchname.split(': ')[0]
149 patchname = patchname.split(': ')[0]
150 if ui.verbose:
150 if ui.verbose:
151 patchname = patchname.split(' ', 2)[-1]
151 patchname = patchname.split(' ', 2)[-1]
152
152
153 if opts['missing']:
153 if opts['missing']:
154 effects = _patch_effects['missing']
154 effects = _patch_effects['missing']
155 # Determine if patch is applied.
155 # Determine if patch is applied.
156 elif [ applied for applied in repo.mq.applied
156 elif [ applied for applied in repo.mq.applied
157 if patchname == applied.name ]:
157 if patchname == applied.name ]:
158 effects = _patch_effects['applied']
158 effects = _patch_effects['applied']
159 else:
159 else:
160 effects = _patch_effects['unapplied']
160 effects = _patch_effects['unapplied']
161 ui.write(render_effects(patch, effects) + '\n')
161 ui.write(render_effects(patch, effects) + '\n')
162 return retval
162 return retval
163
163
164 _patch_effects = { 'applied': ['blue', 'bold', 'underline'],
164 _patch_effects = { 'applied': ['blue', 'bold', 'underline'],
165 'missing': ['red', 'bold'],
165 'missing': ['red', 'bold'],
166 'unapplied': ['black', 'bold'], }
166 'unapplied': ['black', 'bold'], }
167
167
168 def colorwrap(orig, s):
168 def colorwrap(orig, s):
169 '''wrap ui.write for colored diff output'''
169 '''wrap ui.write for colored diff output'''
170 lines = s.split('\n')
170 lines = s.split('\n')
171 for i, line in enumerate(lines):
171 for i, line in enumerate(lines):
172 stripline = line
172 stripline = line
173 if line and line[0] in '+-':
173 if line and line[0] in '+-':
174 # highlight trailing whitespace, but only in changed lines
174 # highlight trailing whitespace, but only in changed lines
175 stripline = line.rstrip()
175 stripline = line.rstrip()
176 for prefix, style in _diff_prefixes:
176 for prefix, style in _diff_prefixes:
177 if stripline.startswith(prefix):
177 if stripline.startswith(prefix):
178 lines[i] = render_effects(stripline, _diff_effects[style])
178 lines[i] = render_effects(stripline, _diff_effects[style])
179 break
179 break
180 if line != stripline:
180 if line != stripline:
181 lines[i] += render_effects(
181 lines[i] += render_effects(
182 line[len(stripline):], _diff_effects['trailingwhitespace'])
182 line[len(stripline):], _diff_effects['trailingwhitespace'])
183 orig('\n'.join(lines))
183 orig('\n'.join(lines))
184
184
185 def colorshowpatch(orig, self, node):
185 def colorshowpatch(orig, self, node):
186 '''wrap cmdutil.changeset_printer.showpatch with colored output'''
186 '''wrap cmdutil.changeset_printer.showpatch with colored output'''
187 oldwrite = extensions.wrapfunction(self.ui, 'write', colorwrap)
187 oldwrite = extensions.wrapfunction(self.ui, 'write', colorwrap)
188 try:
188 try:
189 orig(self, node)
189 orig(self, node)
190 finally:
190 finally:
191 self.ui.write = oldwrite
191 self.ui.write = oldwrite
192
192
193 def colordiff(orig, ui, repo, *pats, **opts):
193 def colordiff(orig, ui, repo, *pats, **opts):
194 '''run the diff command with colored output'''
194 '''run the diff command with colored output'''
195 oldwrite = extensions.wrapfunction(ui, 'write', colorwrap)
195 oldwrite = extensions.wrapfunction(ui, 'write', colorwrap)
196 try:
196 try:
197 orig(ui, repo, *pats, **opts)
197 orig(ui, repo, *pats, **opts)
198 finally:
198 finally:
199 ui.write = oldwrite
199 ui.write = oldwrite
200
200
201 _diff_prefixes = [('diff', 'diffline'),
201 _diff_prefixes = [('diff', 'diffline'),
202 ('copy', 'extended'),
202 ('copy', 'extended'),
203 ('rename', 'extended'),
203 ('rename', 'extended'),
204 ('old', 'extended'),
204 ('old', 'extended'),
205 ('new', 'extended'),
205 ('new', 'extended'),
206 ('deleted', 'extended'),
206 ('deleted', 'extended'),
207 ('---', 'file_a'),
207 ('---', 'file_a'),
208 ('+++', 'file_b'),
208 ('+++', 'file_b'),
209 ('@', 'hunk'),
209 ('@', 'hunk'),
210 ('-', 'deleted'),
210 ('-', 'deleted'),
211 ('+', 'inserted')]
211 ('+', 'inserted')]
212
212
213 _diff_effects = {'diffline': ['bold'],
213 _diff_effects = {'diffline': ['bold'],
214 'extended': ['cyan', 'bold'],
214 'extended': ['cyan', 'bold'],
215 'file_a': ['red', 'bold'],
215 'file_a': ['red', 'bold'],
216 'file_b': ['green', 'bold'],
216 'file_b': ['green', 'bold'],
217 'hunk': ['magenta'],
217 'hunk': ['magenta'],
218 'deleted': ['red'],
218 'deleted': ['red'],
219 'inserted': ['green'],
219 'inserted': ['green'],
220 'changed': ['white'],
220 'changed': ['white'],
221 'trailingwhitespace': ['bold', 'red_background']}
221 'trailingwhitespace': ['bold', 'red_background']}
222
222
223 def uisetup(ui):
223 def uisetup(ui):
224 '''Initialize the extension.'''
224 '''Initialize the extension.'''
225 _setupcmd(ui, 'diff', commands.table, colordiff, _diff_effects)
225 _setupcmd(ui, 'diff', commands.table, colordiff, _diff_effects)
226 _setupcmd(ui, 'incoming', commands.table, None, _diff_effects)
226 _setupcmd(ui, 'incoming', commands.table, None, _diff_effects)
227 _setupcmd(ui, 'log', commands.table, None, _diff_effects)
227 _setupcmd(ui, 'log', commands.table, None, _diff_effects)
228 _setupcmd(ui, 'outgoing', commands.table, None, _diff_effects)
228 _setupcmd(ui, 'outgoing', commands.table, None, _diff_effects)
229 _setupcmd(ui, 'tip', commands.table, None, _diff_effects)
229 _setupcmd(ui, 'tip', commands.table, None, _diff_effects)
230 _setupcmd(ui, 'status', commands.table, colorstatus, _status_effects)
230 _setupcmd(ui, 'status', commands.table, colorstatus, _status_effects)
231 try:
231 try:
232 mq = extensions.find('mq')
232 mq = extensions.find('mq')
233 _setupcmd(ui, 'qdiff', mq.cmdtable, colordiff, _diff_effects)
233 _setupcmd(ui, 'qdiff', mq.cmdtable, colordiff, _diff_effects)
234 _setupcmd(ui, 'qseries', mq.cmdtable, colorqseries, _patch_effects)
234 _setupcmd(ui, 'qseries', mq.cmdtable, colorqseries, _patch_effects)
235 except KeyError:
235 except KeyError:
236 # The mq extension is not enabled
236 # The mq extension is not enabled
237 pass
237 pass
238
238
239 def _setupcmd(ui, cmd, table, func, effectsmap):
239 def _setupcmd(ui, cmd, table, func, effectsmap):
240 '''patch in command to command table and load effect map'''
240 '''patch in command to command table and load effect map'''
241 def nocolor(orig, *args, **opts):
241 def nocolor(orig, *args, **opts):
242
242
243 if (opts['no_color'] or opts['color'] == 'never' or
243 if (opts['no_color'] or opts['color'] == 'never' or
244 (opts['color'] == 'auto' and (os.environ.get('TERM') == 'dumb'
244 (opts['color'] == 'auto' and (os.environ.get('TERM') == 'dumb'
245 or not sys.__stdout__.isatty()))):
245 or not sys.__stdout__.isatty()))):
246 return orig(*args, **opts)
246 return orig(*args, **opts)
247
247
248 oldshowpatch = extensions.wrapfunction(cmdutil.changeset_printer,
248 oldshowpatch = extensions.wrapfunction(cmdutil.changeset_printer,
249 'showpatch', colorshowpatch)
249 'showpatch', colorshowpatch)
250 try:
250 try:
251 if func is not None:
251 if func is not None:
252 return func(orig, *args, **opts)
252 return func(orig, *args, **opts)
253 return orig(*args, **opts)
253 return orig(*args, **opts)
254 finally:
254 finally:
255 cmdutil.changeset_printer.showpatch = oldshowpatch
255 cmdutil.changeset_printer.showpatch = oldshowpatch
256
256
257 entry = extensions.wrapcommand(table, cmd, nocolor)
257 entry = extensions.wrapcommand(table, cmd, nocolor)
258 entry[1].extend([
258 entry[1].extend([
259 ('', 'color', 'auto', _("when to colorize (always, auto, or never)")),
259 ('', 'color', 'auto', _("when to colorize (always, auto, or never)")),
260 ('', 'no-color', None, _("don't colorize output")),
260 ('', 'no-color', None, _("don't colorize output")),
261 ])
261 ])
262
262
263 for status in effectsmap:
263 for status in effectsmap:
264 effects = ui.configlist('color', cmd + '.' + status)
264 effects = ui.configlist('color', cmd + '.' + status)
265 if effects:
265 if effects:
266 effectsmap[status] = effects
266 effectsmap[status] = effects
@@ -1,288 +1,288 b''
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''converting foreign VCS repositories to Mercurial'''
8 '''import from foreign VCS repositories into Mercurial'''
9
9
10 import convcmd
10 import convcmd
11 import cvsps
11 import cvsps
12 import subversion
12 import subversion
13 from mercurial import commands
13 from mercurial import commands
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Commands definition was moved elsewhere to ease demandload job.
16 # Commands definition was moved elsewhere to ease demandload job.
17
17
18 def convert(ui, src, dest=None, revmapfile=None, **opts):
18 def convert(ui, src, dest=None, revmapfile=None, **opts):
19 """convert a foreign SCM repository to a Mercurial one.
19 """convert a foreign SCM repository to a Mercurial one.
20
20
21 Accepted source formats [identifiers]:
21 Accepted source formats [identifiers]:
22 - Mercurial [hg]
22 - Mercurial [hg]
23 - CVS [cvs]
23 - CVS [cvs]
24 - Darcs [darcs]
24 - Darcs [darcs]
25 - git [git]
25 - git [git]
26 - Subversion [svn]
26 - Subversion [svn]
27 - Monotone [mtn]
27 - Monotone [mtn]
28 - GNU Arch [gnuarch]
28 - GNU Arch [gnuarch]
29 - Bazaar [bzr]
29 - Bazaar [bzr]
30 - Perforce [p4]
30 - Perforce [p4]
31
31
32 Accepted destination formats [identifiers]:
32 Accepted destination formats [identifiers]:
33 - Mercurial [hg]
33 - Mercurial [hg]
34 - Subversion [svn] (history on branches is not preserved)
34 - Subversion [svn] (history on branches is not preserved)
35
35
36 If no revision is given, all revisions will be converted.
36 If no revision is given, all revisions will be converted.
37 Otherwise, convert will only import up to the named revision
37 Otherwise, convert will only import up to the named revision
38 (given in a format understood by the source).
38 (given in a format understood by the source).
39
39
40 If no destination directory name is specified, it defaults to the
40 If no destination directory name is specified, it defaults to the
41 basename of the source with '-hg' appended. If the destination
41 basename of the source with '-hg' appended. If the destination
42 repository doesn't exist, it will be created.
42 repository doesn't exist, it will be created.
43
43
44 By default, all sources except Mercurial will use
44 By default, all sources except Mercurial will use
45 --branchsort. Mercurial uses --sourcesort to preserve original
45 --branchsort. Mercurial uses --sourcesort to preserve original
46 revision numbers order. Sort modes have the following effects:
46 revision numbers order. Sort modes have the following effects:
47 --branchsort: convert from parent to child revision when
47 --branchsort: convert from parent to child revision when
48 possible, which means branches are usually converted one after
48 possible, which means branches are usually converted one after
49 the other. It generates more compact repositories.
49 the other. It generates more compact repositories.
50 --datesort: sort revisions by date. Converted repositories have
50 --datesort: sort revisions by date. Converted repositories have
51 good-looking changelogs but are often an order of magnitude
51 good-looking changelogs but are often an order of magnitude
52 larger than the same ones generated by --branchsort.
52 larger than the same ones generated by --branchsort.
53 --sourcesort: try to preserve source revisions order, only
53 --sourcesort: try to preserve source revisions order, only
54 supported by Mercurial sources.
54 supported by Mercurial sources.
55
55
56 If <REVMAP> isn't given, it will be put in a default location
56 If <REVMAP> isn't given, it will be put in a default location
57 (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file
57 (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file
58 that maps each source commit ID to the destination ID for that
58 that maps each source commit ID to the destination ID for that
59 revision, like so:
59 revision, like so:
60 <source ID> <destination ID>
60 <source ID> <destination ID>
61
61
62 If the file doesn't exist, it's automatically created. It's
62 If the file doesn't exist, it's automatically created. It's
63 updated on each commit copied, so convert-repo can be interrupted
63 updated on each commit copied, so convert-repo can be interrupted
64 and can be run repeatedly to copy new commits.
64 and can be run repeatedly to copy new commits.
65
65
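A single entry in such a map is therefore one line with two IDs separated by a space; both IDs below are made up:

  0123456789abcdef0123456789abcdef01234567 89abcdef0123456789abcdef0123456789abcdef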
66 The [username mapping] file is a simple text file that maps each
66 The [username mapping] file is a simple text file that maps each
67 source commit author to a destination commit author. It is handy
67 source commit author to a destination commit author. It is handy
68 for source SCMs that use unix logins to identify authors (e.g.
68 for source SCMs that use unix logins to identify authors (e.g.
69 CVS). Use one line per author mapping, in this format:
69 CVS). Use one line per author mapping, in this format:
70 srcauthor=whatever string you want
70 srcauthor=whatever string you want
71
71
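For example, a username map translating two hypothetical CVS logins could read:

  jdoe=John Doe <john.doe@example.com>
  rroe=Richard Roe <richard@example.org>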
72 The filemap is a file that allows filtering and remapping of files
72 The filemap is a file that allows filtering and remapping of files
73 and directories. Comment lines start with '#'. Each line can
73 and directories. Comment lines start with '#'. Each line can
74 contain one of the following directives:
74 contain one of the following directives:
75
75
76 include path/to/file
76 include path/to/file
77
77
78 exclude path/to/file
78 exclude path/to/file
79
79
80 rename from/file to/file
80 rename from/file to/file
81
81
82 The 'include' directive causes a file, or all files under a
82 The 'include' directive causes a file, or all files under a
83 directory, to be included in the destination repository, and the
83 directory, to be included in the destination repository, and the
84 exclusion of all other files and directories not explicitly included.
84 exclusion of all other files and directories not explicitly included.
85 The 'exclude' directive causes files or directories to be omitted.
85 The 'exclude' directive causes files or directories to be omitted.
86 The 'rename' directive renames a file or directory. To rename from
86 The 'rename' directive renames a file or directory. To rename from
87 a subdirectory into the root of the repository, use '.' as the
87 a subdirectory into the root of the repository, use '.' as the
88 path to rename to.
88 path to rename to.
89
89
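Putting the three directives together, a small hypothetical filemap that keeps only src/, drops its tests and hoists the remaining files to the repository root could read:

  # keep src/ only, minus its tests, and move its contents to the root
  include src
  exclude src/tests
  rename src .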
90 The splicemap is a file that allows insertion of synthetic
90 The splicemap is a file that allows insertion of synthetic
91 history, letting you specify the parents of a revision. This is
91 history, letting you specify the parents of a revision. This is
92 useful if you want to e.g. give a Subversion merge two parents, or
92 useful if you want to e.g. give a Subversion merge two parents, or
93 graft two disconnected series of history together. Each entry
93 graft two disconnected series of history together. Each entry
94 contains a key, followed by a space, followed by one or two
94 contains a key, followed by a space, followed by one or two
95 comma-separated values. The key is the revision ID in the source
95 comma-separated values. The key is the revision ID in the source
96 revision control system whose parents should be modified (same
96 revision control system whose parents should be modified (same
97 format as a key in .hg/shamap). The values are the revision IDs
97 format as a key in .hg/shamap). The values are the revision IDs
98 (in either the source or destination revision control system) that
98 (in either the source or destination revision control system) that
99 should be used as the new parents for that node.
99 should be used as the new parents for that node.
100
100
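For instance, a splicemap entry that gives one source revision two parents is a single line of the following shape (all three IDs here are made up):

  7b63ff8fc536a0e2d4c098d0c58b5a9d3c6a1f42 9d2c5b1a8e7f4c3b2a1908d7e6f5c4b3a2918070,4f3e2d1c0b9a8978675645342312f0e1d2c3b4a5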
101 The branchmap is a file that allows you to rename a branch when it is
101 The branchmap is a file that allows you to rename a branch when it is
102 being brought in from whatever external repository. When used in
102 being brought in from whatever external repository. When used in
103 conjunction with a splicemap, it allows for a powerful combination
103 conjunction with a splicemap, it allows for a powerful combination
104 to help fix even the most badly mismanaged repositories and turn them
104 to help fix even the most badly mismanaged repositories and turn them
105 into nicely structured Mercurial repositories. The branchmap contains
105 into nicely structured Mercurial repositories. The branchmap contains
106 lines of the form "original_branch_name new_branch_name".
106 lines of the form "original_branch_name new_branch_name".
107 "original_branch_name" is the name of the branch in the source
107 "original_branch_name" is the name of the branch in the source
108 repository, and "new_branch_name" is the name of the branch is the
108 repository, and "new_branch_name" is the name of the branch is the
109 destination repository. This can be used to (for instance) move code
109 destination repository. This can be used to (for instance) move code
110 in one repository from "default" to a named branch.
110 in one repository from "default" to a named branch.
111
111
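So, for example, a branchmap containing only the hypothetical line below would move everything committed on the source's "default" branch onto a named "stable-1.0" branch in the destination:

  default stable-1.0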
112 Mercurial Source
112 Mercurial Source
113 -----------------
113 -----------------
114
114
115 --config convert.hg.ignoreerrors=False (boolean)
115 --config convert.hg.ignoreerrors=False (boolean)
116 ignore integrity errors when reading. Use it to fix Mercurial
116 ignore integrity errors when reading. Use it to fix Mercurial
117 repositories with missing revlogs, by converting from and to
117 repositories with missing revlogs, by converting from and to
118 Mercurial.
118 Mercurial.
119 --config convert.hg.saverev=False (boolean)
119 --config convert.hg.saverev=False (boolean)
120 store original revision ID in changeset (forces target IDs to
120 store original revision ID in changeset (forces target IDs to
121 change)
121 change)
122 --config convert.hg.startrev=0 (hg revision identifier)
122 --config convert.hg.startrev=0 (hg revision identifier)
123 convert start revision and its descendants
123 convert start revision and its descendants
124
124
125 CVS Source
125 CVS Source
126 ----------
126 ----------
127
127
128 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
128 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
129 to indicate the starting point of what will be converted. Direct
129 to indicate the starting point of what will be converted. Direct
130 access to the repository files is not needed, unless of course the
130 access to the repository files is not needed, unless of course the
131 repository is :local:. The conversion uses the top level directory
131 repository is :local:. The conversion uses the top level directory
132 in the sandbox to find the CVS repository, and then uses CVS rlog
132 in the sandbox to find the CVS repository, and then uses CVS rlog
133 commands to find files to convert. This means that unless a
133 commands to find files to convert. This means that unless a
134 filemap is given, all files under the starting directory will be
134 filemap is given, all files under the starting directory will be
135 converted, and that any directory reorganization in the CVS
135 converted, and that any directory reorganization in the CVS
136 sandbox is ignored.
136 sandbox is ignored.
137
137
138 Because CVS does not have changesets, it is necessary to collect
138 Because CVS does not have changesets, it is necessary to collect
139 individual commits to CVS and merge them into changesets. CVS
139 individual commits to CVS and merge them into changesets. CVS
140 source uses its internal changeset merging code by default but can
140 source uses its internal changeset merging code by default but can
141 be configured to call the external 'cvsps' program by setting:
141 be configured to call the external 'cvsps' program by setting:
142 --config convert.cvsps='cvsps -A -u --cvs-direct -q'
142 --config convert.cvsps='cvsps -A -u --cvs-direct -q'
143 This option is deprecated and will be removed in Mercurial 1.4.
143 This option is deprecated and will be removed in Mercurial 1.4.
144
144
145 The options shown are the defaults.
145 The options shown are the defaults.
146
146
147 Internal cvsps is selected by setting
147 Internal cvsps is selected by setting
148 --config convert.cvsps=builtin
148 --config convert.cvsps=builtin
149 and has a few more configurable options:
149 and has a few more configurable options:
150 --config convert.cvsps.cache=True (boolean)
150 --config convert.cvsps.cache=True (boolean)
151 Set to False to disable remote log caching, for testing and
151 Set to False to disable remote log caching, for testing and
152 debugging purposes.
152 debugging purposes.
153 --config convert.cvsps.fuzz=60 (integer)
153 --config convert.cvsps.fuzz=60 (integer)
154 Specify the maximum time (in seconds) that is allowed
154 Specify the maximum time (in seconds) that is allowed
155 between commits with identical user and log message in a
155 between commits with identical user and log message in a
156 single changeset. When very large files were checked in as
156 single changeset. When very large files were checked in as
157 part of a changeset then the default may not be long
157 part of a changeset then the default may not be long
158 enough.
158 enough.
159 --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'
159 --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'
160 Specify a regular expression to which commit log messages
160 Specify a regular expression to which commit log messages
161 are matched. If a match occurs, then the conversion
161 are matched. If a match occurs, then the conversion
162 process will insert a dummy revision merging the branch on
162 process will insert a dummy revision merging the branch on
163 which this log message occurs to the branch indicated in
163 which this log message occurs to the branch indicated in
164 the regex.
164 the regex.
165 --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'
165 --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'
166 Specify a regular expression to which commit log messages
166 Specify a regular expression to which commit log messages
167 are matched. If a match occurs, then the conversion
167 are matched. If a match occurs, then the conversion
168 process will add the most recent revision on the branch
168 process will add the most recent revision on the branch
169 indicated in the regex as the second parent of the
169 indicated in the regex as the second parent of the
170 changeset.
170 changeset.
171
171
172 The hgext/convert/cvsps wrapper script allows the builtin
172 The hgext/convert/cvsps wrapper script allows the builtin
173 changeset merging code to be run without doing a conversion. Its
173 changeset merging code to be run without doing a conversion. Its
174 parameters and output are similar to that of cvsps 2.1.
174 parameters and output are similar to that of cvsps 2.1.
175
175
176 Subversion Source
176 Subversion Source
177 -----------------
177 -----------------
178
178
179 Subversion source detects classical trunk/branches/tags layouts.
179 Subversion source detects classical trunk/branches/tags layouts.
180 By default, the supplied "svn://repo/path/" source URL is
180 By default, the supplied "svn://repo/path/" source URL is
181 converted as a single branch. If "svn://repo/path/trunk" exists it
181 converted as a single branch. If "svn://repo/path/trunk" exists it
182 replaces the default branch. If "svn://repo/path/branches" exists,
182 replaces the default branch. If "svn://repo/path/branches" exists,
183 its subdirectories are listed as possible branches. If
183 its subdirectories are listed as possible branches. If
184 "svn://repo/path/tags" exists, it is looked for tags referencing
184 "svn://repo/path/tags" exists, it is looked for tags referencing
185 converted branches. Default "trunk", "branches" and "tags" values
185 converted branches. Default "trunk", "branches" and "tags" values
186 can be overridden with the following options. Set them to paths
186 can be overridden with the following options. Set them to paths
187 relative to the source URL, or leave them blank to disable auto
187 relative to the source URL, or leave them blank to disable auto
188 detection.
188 detection.
189
189
190 --config convert.svn.branches=branches (directory name)
190 --config convert.svn.branches=branches (directory name)
191 specify the directory containing branches
191 specify the directory containing branches
192 --config convert.svn.tags=tags (directory name)
192 --config convert.svn.tags=tags (directory name)
193 specify the directory containing tags
193 specify the directory containing tags
194 --config convert.svn.trunk=trunk (directory name)
194 --config convert.svn.trunk=trunk (directory name)
195 specify the name of the trunk branch
195 specify the name of the trunk branch
196
196
197 Source history can be retrieved starting at a specific revision,
197 Source history can be retrieved starting at a specific revision,
198 instead of being integrally converted. Only single branch
198 instead of being integrally converted. Only single branch
199 conversions are supported.
199 conversions are supported.
200
200
201 --config convert.svn.startrev=0 (svn revision number)
201 --config convert.svn.startrev=0 (svn revision number)
202 specify start Subversion revision.
202 specify start Subversion revision.
203
203
204 Perforce Source
204 Perforce Source
205 ---------------
205 ---------------
206
206
207 The Perforce (P4) importer can be given a p4 depot path or a
207 The Perforce (P4) importer can be given a p4 depot path or a
208 client specification as source. It will convert all files in the
208 client specification as source. It will convert all files in the
209 source to a flat Mercurial repository, ignoring labels, branches
209 source to a flat Mercurial repository, ignoring labels, branches
210 and integrations. Note that when a depot path is given, you
210 and integrations. Note that when a depot path is given, you
211 usually should specify a target directory, because otherwise the
211 usually should specify a target directory, because otherwise the
212 target may be named ...-hg.
212 target may be named ...-hg.
213
213
214 It is possible to limit the amount of source history to be
214 It is possible to limit the amount of source history to be
215 converted by specifying an initial Perforce revision.
215 converted by specifying an initial Perforce revision.
216
216
217 --config convert.p4.startrev=0 (perforce changelist number)
217 --config convert.p4.startrev=0 (perforce changelist number)
218 specify initial Perforce revision.
218 specify initial Perforce revision.
219
219
220
220
221 Mercurial Destination
221 Mercurial Destination
222 ---------------------
222 ---------------------
223
223
224 --config convert.hg.clonebranches=False (boolean)
224 --config convert.hg.clonebranches=False (boolean)
225 dispatch source branches in separate clones.
225 dispatch source branches in separate clones.
226 --config convert.hg.tagsbranch=default (branch name)
226 --config convert.hg.tagsbranch=default (branch name)
227 tag revisions branch name
227 tag revisions branch name
228 --config convert.hg.usebranchnames=True (boolean)
228 --config convert.hg.usebranchnames=True (boolean)
229 preserve branch names
229 preserve branch names
230
230
231 """
231 """
232 return convcmd.convert(ui, src, dest, revmapfile, **opts)
232 return convcmd.convert(ui, src, dest, revmapfile, **opts)
233
233
234 def debugsvnlog(ui, **opts):
234 def debugsvnlog(ui, **opts):
235 return subversion.debugsvnlog(ui, **opts)
235 return subversion.debugsvnlog(ui, **opts)
236
236
237 def debugcvsps(ui, *args, **opts):
237 def debugcvsps(ui, *args, **opts):
238 '''create changeset information from CVS
238 '''create changeset information from CVS
239
239
240 This command is intended as a debugging tool for the CVS to
240 This command is intended as a debugging tool for the CVS to
241 Mercurial converter, and can be used as a direct replacement for
241 Mercurial converter, and can be used as a direct replacement for
242 cvsps.
242 cvsps.
243
243
244 Hg debugcvsps reads the CVS rlog for current directory (or any
244 Hg debugcvsps reads the CVS rlog for current directory (or any
245 named directory) in the CVS repository, and converts the log to a
245 named directory) in the CVS repository, and converts the log to a
246 series of changesets based on matching commit log entries and
246 series of changesets based on matching commit log entries and
247 dates.'''
247 dates.'''
248 return cvsps.debugcvsps(ui, *args, **opts)
248 return cvsps.debugcvsps(ui, *args, **opts)
249
249
250 commands.norepo += " convert debugsvnlog debugcvsps"
250 commands.norepo += " convert debugsvnlog debugcvsps"
251
251
252 cmdtable = {
252 cmdtable = {
253 "convert":
253 "convert":
254 (convert,
254 (convert,
255 [('A', 'authors', '', _('username mapping filename')),
255 [('A', 'authors', '', _('username mapping filename')),
256 ('d', 'dest-type', '', _('destination repository type')),
256 ('d', 'dest-type', '', _('destination repository type')),
257 ('', 'filemap', '', _('remap file names using contents of file')),
257 ('', 'filemap', '', _('remap file names using contents of file')),
258 ('r', 'rev', '', _('import up to target revision REV')),
258 ('r', 'rev', '', _('import up to target revision REV')),
259 ('s', 'source-type', '', _('source repository type')),
259 ('s', 'source-type', '', _('source repository type')),
260 ('', 'splicemap', '', _('splice synthesized history into place')),
260 ('', 'splicemap', '', _('splice synthesized history into place')),
261 ('', 'branchmap', '', _('change branch names while converting')),
261 ('', 'branchmap', '', _('change branch names while converting')),
262 ('', 'branchsort', None, _('try to sort changesets by branches')),
262 ('', 'branchsort', None, _('try to sort changesets by branches')),
263 ('', 'datesort', None, _('try to sort changesets by date')),
263 ('', 'datesort', None, _('try to sort changesets by date')),
264 ('', 'sourcesort', None, _('preserve source changesets order'))],
264 ('', 'sourcesort', None, _('preserve source changesets order'))],
265 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')),
265 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')),
266 "debugsvnlog":
266 "debugsvnlog":
267 (debugsvnlog,
267 (debugsvnlog,
268 [],
268 [],
269 'hg debugsvnlog'),
269 'hg debugsvnlog'),
270 "debugcvsps":
270 "debugcvsps":
271 (debugcvsps,
271 (debugcvsps,
272 [
272 [
273 # Main options shared with cvsps-2.1
273 # Main options shared with cvsps-2.1
274 ('b', 'branches', [], _('only return changes on specified branches')),
274 ('b', 'branches', [], _('only return changes on specified branches')),
275 ('p', 'prefix', '', _('prefix to remove from file names')),
275 ('p', 'prefix', '', _('prefix to remove from file names')),
276 ('r', 'revisions', [], _('only return changes after or between specified tags')),
276 ('r', 'revisions', [], _('only return changes after or between specified tags')),
277 ('u', 'update-cache', None, _("update cvs log cache")),
277 ('u', 'update-cache', None, _("update cvs log cache")),
278 ('x', 'new-cache', None, _("create new cvs log cache")),
278 ('x', 'new-cache', None, _("create new cvs log cache")),
279 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
279 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
280 ('', 'root', '', _('specify cvsroot')),
280 ('', 'root', '', _('specify cvsroot')),
281 # Options specific to builtin cvsps
281 # Options specific to builtin cvsps
282 ('', 'parents', '', _('show parent changesets')),
282 ('', 'parents', '', _('show parent changesets')),
283 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
283 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
284 # Options that are ignored for compatibility with cvsps-2.1
284 # Options that are ignored for compatibility with cvsps-2.1
285 ('A', 'cvs-direct', None, _('ignored for compatibility')),
285 ('A', 'cvs-direct', None, _('ignored for compatibility')),
286 ],
286 ],
287 _('hg debugcvsps [OPTION]... [PATH]...')),
287 _('hg debugcvsps [OPTION]... [PATH]...')),
288 }
288 }
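Tying the options above together, a hypothetical conversion of a Subversion trunk that uses an author map and a filemap might be invoked as follows (the URL and file names are placeholders):

  hg convert --source-type svn --authors authors.txt --filemap filemap.txt \
      svn://svn.example.org/project/trunk project-hg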
@@ -1,384 +1,389 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import base64, errno
8 import base64, errno
9 import os
9 import os
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 def encodeargs(args):
14 def encodeargs(args):
15 def encodearg(s):
15 def encodearg(s):
16 lines = base64.encodestring(s)
16 lines = base64.encodestring(s)
17 lines = [l.splitlines()[0] for l in lines]
17 lines = [l.splitlines()[0] for l in lines]
18 return ''.join(lines)
18 return ''.join(lines)
19
19
20 s = pickle.dumps(args)
20 s = pickle.dumps(args)
21 return encodearg(s)
21 return encodearg(s)
22
22
23 def decodeargs(s):
23 def decodeargs(s):
24 s = base64.decodestring(s)
24 s = base64.decodestring(s)
25 return pickle.loads(s)
25 return pickle.loads(s)
26
26
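
As a quick round-trip illustration of the two helpers above (Python 2, like the rest of the module, and not part of the module itself): the encoded form is a single base64 line with newlines stripped, so it can be passed safely as one command-line argument.

    args = (['tag-a', 'tag-b'], {'rev': '1.2'})
    blob = encodeargs(args)          # one base64 line, no embedded newlines
    assert decodeargs(blob) == args  # pickling preserves the structure exactly
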
27 class MissingTool(Exception): pass
27 class MissingTool(Exception): pass
28
28
29 def checktool(exe, name=None, abort=True):
29 def checktool(exe, name=None, abort=True):
30 name = name or exe
30 name = name or exe
31 if not util.find_exe(exe):
31 if not util.find_exe(exe):
32 exc = abort and util.Abort or MissingTool
32 exc = abort and util.Abort or MissingTool
33 raise exc(_('cannot find required "%s" tool') % name)
33 raise exc(_('cannot find required "%s" tool') % name)
34
34
35 class NoRepo(Exception): pass
35 class NoRepo(Exception): pass
36
36
37 SKIPREV = 'SKIP'
37 SKIPREV = 'SKIP'
38
38
39 class commit(object):
39 class commit(object):
40 def __init__(self, author, date, desc, parents, branch=None, rev=None,
40 def __init__(self, author, date, desc, parents, branch=None, rev=None,
41 extra={}, sortkey=None):
41 extra={}, sortkey=None):
42 self.author = author or 'unknown'
42 self.author = author or 'unknown'
43 self.date = date or '0 0'
43 self.date = date or '0 0'
44 self.desc = desc
44 self.desc = desc
45 self.parents = parents
45 self.parents = parents
46 self.branch = branch
46 self.branch = branch
47 self.rev = rev
47 self.rev = rev
48 self.extra = extra
48 self.extra = extra
49 self.sortkey = sortkey
49 self.sortkey = sortkey
50
50
51 class converter_source(object):
51 class converter_source(object):
52 """Conversion source interface"""
52 """Conversion source interface"""
53
53
54 def __init__(self, ui, path=None, rev=None):
54 def __init__(self, ui, path=None, rev=None):
55 """Initialize conversion source (or raise NoRepo("message")
55 """Initialize conversion source (or raise NoRepo("message")
56 exception if path is not a valid repository)"""
56 exception if path is not a valid repository)"""
57 self.ui = ui
57 self.ui = ui
58 self.path = path
58 self.path = path
59 self.rev = rev
59 self.rev = rev
60
60
61 self.encoding = 'utf-8'
61 self.encoding = 'utf-8'
62
62
63 def before(self):
63 def before(self):
64 pass
64 pass
65
65
66 def after(self):
66 def after(self):
67 pass
67 pass
68
68
69 def setrevmap(self, revmap):
69 def setrevmap(self, revmap):
70 """set the map of already-converted revisions"""
70 """set the map of already-converted revisions"""
71 pass
71 pass
72
72
73 def getheads(self):
73 def getheads(self):
74 """Return a list of this repository's heads"""
74 """Return a list of this repository's heads"""
75 raise NotImplementedError()
75 raise NotImplementedError()
76
76
77 def getfile(self, name, rev):
77 def getfile(self, name, rev):
78 """Return file contents as a string. rev is the identifier returned
78 """Return file contents as a string. rev is the identifier returned
79 by a previous call to getchanges(). Raise IOError to indicate that
79 by a previous call to getchanges(). Raise IOError to indicate that
80 name was deleted in rev.
80 name was deleted in rev.
81 """
81 """
82 raise NotImplementedError()
82 raise NotImplementedError()
83
83
84 def getmode(self, name, rev):
84 def getmode(self, name, rev):
85 """Return file mode, eg. '', 'x', or 'l'. rev is the identifier
85 """Return file mode, eg. '', 'x', or 'l'. rev is the identifier
86 returned by a previous call to getchanges().
86 returned by a previous call to getchanges().
87 """
87 """
88 raise NotImplementedError()
88 raise NotImplementedError()
89
89
90 def getchanges(self, version):
90 def getchanges(self, version):
91 """Returns a tuple of (files, copies).
91 """Returns a tuple of (files, copies).
92
92
93 files is a sorted list of (filename, id) tuples for all files
93 files is a sorted list of (filename, id) tuples for all files
94 changed between version and its first parent returned by
94 changed between version and its first parent returned by
95 getcommit(). id is the source revision id of the file.
95 getcommit(). id is the source revision id of the file.
96
96
97 copies is a dictionary of dest: source
97 copies is a dictionary of dest: source
98 """
98 """
99 raise NotImplementedError()
99 raise NotImplementedError()
100
100
101 def getcommit(self, version):
101 def getcommit(self, version):
102 """Return the commit object for version"""
102 """Return the commit object for version"""
103 raise NotImplementedError()
103 raise NotImplementedError()
104
104
105 def gettags(self):
105 def gettags(self):
106 """Return the tags as a dictionary of name: revision"""
106 """Return the tags as a dictionary of name: revision
107
108 Tag names must be UTF-8 strings.
109 """
107 raise NotImplementedError()
110 raise NotImplementedError()
108
111
109 def recode(self, s, encoding=None):
112 def recode(self, s, encoding=None):
110 if not encoding:
113 if not encoding:
111 encoding = self.encoding or 'utf-8'
114 encoding = self.encoding or 'utf-8'
112
115
113 if isinstance(s, unicode):
116 if isinstance(s, unicode):
114 return s.encode("utf-8")
117 return s.encode("utf-8")
115 try:
118 try:
116 return s.decode(encoding).encode("utf-8")
119 return s.decode(encoding).encode("utf-8")
117 except:
120 except:
118 try:
121 try:
119 return s.decode("latin-1").encode("utf-8")
122 return s.decode("latin-1").encode("utf-8")
120 except:
123 except:
121 return s.decode(encoding, "replace").encode("utf-8")
124 return s.decode(encoding, "replace").encode("utf-8")
122
125
123 def getchangedfiles(self, rev, i):
126 def getchangedfiles(self, rev, i):
124 """Return the files changed by rev compared to parent[i].
127 """Return the files changed by rev compared to parent[i].
125
128
126 i is an index selecting one of the parents of rev. The return
129 i is an index selecting one of the parents of rev. The return
127 value should be the list of files that are different in rev and
130 value should be the list of files that are different in rev and
128 this parent.
131 this parent.
129
132
130 If rev has no parents, i is None.
133 If rev has no parents, i is None.
131
134
132 This function is only needed to support --filemap
135 This function is only needed to support --filemap
133 """
136 """
134 raise NotImplementedError()
137 raise NotImplementedError()
135
138
136 def converted(self, rev, sinkrev):
139 def converted(self, rev, sinkrev):
137 '''Notify the source that a revision has been converted.'''
140 '''Notify the source that a revision has been converted.'''
138 pass
141 pass
139
142
140 def hasnativeorder(self):
143 def hasnativeorder(self):
141 """Return true if this source has a meaningful, native revision
144 """Return true if this source has a meaningful, native revision
142 order. For instance, Mercurial revisions are stored sequentially
145 order. For instance, Mercurial revisions are stored sequentially
143 while there is no such global ordering with Darcs.
146 while there is no such global ordering with Darcs.
144 """
147 """
145 return False
148 return False
146
149
147 def lookuprev(self, rev):
150 def lookuprev(self, rev):
148 """If rev is a meaningful revision reference in source, return
151 """If rev is a meaningful revision reference in source, return
149 the referenced identifier in the same format used by getcommit().
152 the referenced identifier in the same format used by getcommit().
150 return None otherwise.
153 return None otherwise.
151 """
154 """
152 return None
155 return None
153
156
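
To make the source contract above concrete, here is a deliberately tiny, hypothetical backend that serves a single revision containing one file; it is illustration only, while the real backends (CVS, Subversion, and so on) implement the same methods against their own tooling:

    class dummy_source(converter_source):
        '''toy backend: one head, one revision, one file (illustration only)'''
        def getheads(self):
            return ['r1']
        def getcommit(self, version):
            return commit(author='alice', date='0 0', desc='initial import',
                          parents=[], rev=version)
        def getchanges(self, version):
            # (files, copies): one changed file, no copies
            return [('README', version)], {}
        def getfile(self, name, rev):
            return 'hello\n'
        def getmode(self, name, rev):
            return ''   # regular, non-executable file
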
154 class converter_sink(object):
157 class converter_sink(object):
155 """Conversion sink (target) interface"""
158 """Conversion sink (target) interface"""
156
159
157 def __init__(self, ui, path):
160 def __init__(self, ui, path):
158 """Initialize conversion sink (or raise NoRepo("message")
161 """Initialize conversion sink (or raise NoRepo("message")
159 exception if path is not a valid repository)
162 exception if path is not a valid repository)
160
163
161 created is a list of paths to remove if a fatal error occurs
164 created is a list of paths to remove if a fatal error occurs
162 later"""
165 later"""
163 self.ui = ui
166 self.ui = ui
164 self.path = path
167 self.path = path
165 self.created = []
168 self.created = []
166
169
167 def getheads(self):
170 def getheads(self):
168 """Return a list of this repository's heads"""
171 """Return a list of this repository's heads"""
169 raise NotImplementedError()
172 raise NotImplementedError()
170
173
171 def revmapfile(self):
174 def revmapfile(self):
172 """Path to a file that will contain lines
175 """Path to a file that will contain lines
173 source_rev_id sink_rev_id
176 source_rev_id sink_rev_id
174 mapping equivalent revision identifiers for each system."""
177 mapping equivalent revision identifiers for each system."""
175 raise NotImplementedError()
178 raise NotImplementedError()
176
179
177 def authorfile(self):
180 def authorfile(self):
178 """Path to a file that will contain lines
181 """Path to a file that will contain lines
179 srcauthor=dstauthor
182 srcauthor=dstauthor
180 mapping equivalent author identifiers for each system."""
183 mapping equivalent author identifiers for each system."""
181 return None
184 return None
182
185
183 def putcommit(self, files, copies, parents, commit, source, revmap):
186 def putcommit(self, files, copies, parents, commit, source, revmap):
184 """Create a revision with all changed files listed in 'files'
187 """Create a revision with all changed files listed in 'files'
185 and having listed parents. 'commit' is a commit object
188 and having listed parents. 'commit' is a commit object
186 containing at a minimum the author, date, and message for this
189 containing at a minimum the author, date, and message for this
187 changeset. 'files' is a list of (path, version) tuples,
190 changeset. 'files' is a list of (path, version) tuples,
188 'copies' is a dictionary mapping destinations to sources,
191 'copies' is a dictionary mapping destinations to sources,
189 'source' is the source repository, and 'revmap' is a mapfile
192 'source' is the source repository, and 'revmap' is a mapfile
190 of source revisions to converted revisions. Only getfile(),
193 of source revisions to converted revisions. Only getfile(),
191 getmode(), and lookuprev() should be called on 'source'.
194 getmode(), and lookuprev() should be called on 'source'.
192
195
193 Note that the sink repository is not told to update itself to
196 Note that the sink repository is not told to update itself to
194 a particular revision (or even what that revision would be)
197 a particular revision (or even what that revision would be)
195 before it receives the file data.
198 before it receives the file data.
196 """
199 """
197 raise NotImplementedError()
200 raise NotImplementedError()
198
201
199 def puttags(self, tags):
202 def puttags(self, tags):
200 """Put tags into sink.
203 """Put tags into sink.
201 tags: {tagname: sink_rev_id, ...}"""
204
205 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
206 """
202 raise NotImplementedError()
207 raise NotImplementedError()
203
208
204 def setbranch(self, branch, pbranches):
209 def setbranch(self, branch, pbranches):
205 """Set the current branch name. Called before the first putcommit
210 """Set the current branch name. Called before the first putcommit
206 on the branch.
211 on the branch.
207 branch: branch name for subsequent commits
212 branch: branch name for subsequent commits
208 pbranches: (converted parent revision, parent branch) tuples"""
213 pbranches: (converted parent revision, parent branch) tuples"""
209 pass
214 pass
210
215
211 def setfilemapmode(self, active):
216 def setfilemapmode(self, active):
212 """Tell the destination that we're using a filemap
217 """Tell the destination that we're using a filemap
213
218
214 Some converter_sources (svn in particular) can claim that a file
219 Some converter_sources (svn in particular) can claim that a file
215 was changed in a revision, even if there was no change. This method
220 was changed in a revision, even if there was no change. This method
216 tells the destination that we're using a filemap and that it should
221 tells the destination that we're using a filemap and that it should
217 filter empty revisions.
222 filter empty revisions.
218 """
223 """
219 pass
224 pass
220
225
221 def before(self):
226 def before(self):
222 pass
227 pass
223
228
224 def after(self):
229 def after(self):
225 pass
230 pass
226
231
227
232
228 class commandline(object):
233 class commandline(object):
229 def __init__(self, ui, command):
234 def __init__(self, ui, command):
230 self.ui = ui
235 self.ui = ui
231 self.command = command
236 self.command = command
232
237
233 def prerun(self):
238 def prerun(self):
234 pass
239 pass
235
240
236 def postrun(self):
241 def postrun(self):
237 pass
242 pass
238
243
239 def _cmdline(self, cmd, *args, **kwargs):
244 def _cmdline(self, cmd, *args, **kwargs):
240 cmdline = [self.command, cmd] + list(args)
245 cmdline = [self.command, cmd] + list(args)
241 for k, v in kwargs.iteritems():
246 for k, v in kwargs.iteritems():
242 if len(k) == 1:
247 if len(k) == 1:
243 cmdline.append('-' + k)
248 cmdline.append('-' + k)
244 else:
249 else:
245 cmdline.append('--' + k.replace('_', '-'))
250 cmdline.append('--' + k.replace('_', '-'))
246 try:
251 try:
247 if len(k) == 1:
252 if len(k) == 1:
248 cmdline.append('' + v)
253 cmdline.append('' + v)
249 else:
254 else:
250 cmdline[-1] += '=' + v
255 cmdline[-1] += '=' + v
251 except TypeError:
256 except TypeError:
252 pass
257 pass
253 cmdline = [util.shellquote(arg) for arg in cmdline]
258 cmdline = [util.shellquote(arg) for arg in cmdline]
254 if not self.ui.debugflag:
259 if not self.ui.debugflag:
255 cmdline += ['2>', util.nulldev]
260 cmdline += ['2>', util.nulldev]
256 cmdline += ['<', util.nulldev]
261 cmdline += ['<', util.nulldev]
257 cmdline = ' '.join(cmdline)
262 cmdline = ' '.join(cmdline)
258 return cmdline
263 return cmdline
259
264
260 def _run(self, cmd, *args, **kwargs):
265 def _run(self, cmd, *args, **kwargs):
261 cmdline = self._cmdline(cmd, *args, **kwargs)
266 cmdline = self._cmdline(cmd, *args, **kwargs)
262 self.ui.debug(_('running: %s\n') % (cmdline,))
267 self.ui.debug(_('running: %s\n') % (cmdline,))
263 self.prerun()
268 self.prerun()
264 try:
269 try:
265 return util.popen(cmdline)
270 return util.popen(cmdline)
266 finally:
271 finally:
267 self.postrun()
272 self.postrun()
268
273
269 def run(self, cmd, *args, **kwargs):
274 def run(self, cmd, *args, **kwargs):
270 fp = self._run(cmd, *args, **kwargs)
275 fp = self._run(cmd, *args, **kwargs)
271 output = fp.read()
276 output = fp.read()
272 self.ui.debug(output)
277 self.ui.debug(output)
273 return output, fp.close()
278 return output, fp.close()
274
279
275 def runlines(self, cmd, *args, **kwargs):
280 def runlines(self, cmd, *args, **kwargs):
276 fp = self._run(cmd, *args, **kwargs)
281 fp = self._run(cmd, *args, **kwargs)
277 output = fp.readlines()
282 output = fp.readlines()
278 self.ui.debug(''.join(output))
283 self.ui.debug(''.join(output))
279 return output, fp.close()
284 return output, fp.close()
280
285
281 def checkexit(self, status, output=''):
286 def checkexit(self, status, output=''):
282 if status:
287 if status:
283 if output:
288 if output:
284 self.ui.warn(_('%s error:\n') % self.command)
289 self.ui.warn(_('%s error:\n') % self.command)
285 self.ui.warn(output)
290 self.ui.warn(output)
286 msg = util.explain_exit(status)[0]
291 msg = util.explain_exit(status)[0]
287 raise util.Abort(_('%s %s') % (self.command, msg))
292 raise util.Abort(_('%s %s') % (self.command, msg))
288
293
289 def run0(self, cmd, *args, **kwargs):
294 def run0(self, cmd, *args, **kwargs):
290 output, status = self.run(cmd, *args, **kwargs)
295 output, status = self.run(cmd, *args, **kwargs)
291 self.checkexit(status, output)
296 self.checkexit(status, output)
292 return output
297 return output
293
298
294 def runlines0(self, cmd, *args, **kwargs):
299 def runlines0(self, cmd, *args, **kwargs):
295 output, status = self.runlines(cmd, *args, **kwargs)
300 output, status = self.runlines(cmd, *args, **kwargs)
296 self.checkexit(status, ''.join(output))
301 self.checkexit(status, ''.join(output))
297 return output
302 return output
298
303
299 def getargmax(self):
304 def getargmax(self):
300 if '_argmax' in self.__dict__:
305 if '_argmax' in self.__dict__:
301 return self._argmax
306 return self._argmax
302
307
303 # POSIX requires at least 4096 bytes for ARG_MAX
308 # POSIX requires at least 4096 bytes for ARG_MAX
304 self._argmax = 4096
309 self._argmax = 4096
305 try:
310 try:
306 self._argmax = os.sysconf("SC_ARG_MAX")
311 self._argmax = os.sysconf("SC_ARG_MAX")
307 except:
312 except:
308 pass
313 pass
309
314
310 # Windows shells impose their own limits on command line length,
315 # Windows shells impose their own limits on command line length,
311 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
316 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
312 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
317 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
313 # details about cmd.exe limitations.
318 # details about cmd.exe limitations.
314
319
315 # Since ARG_MAX is for command line _and_ environment, lower our limit
320 # Since ARG_MAX is for command line _and_ environment, lower our limit
316 # (and keep Windows shells happy while doing this).
321 # (and keep Windows shells happy while doing this).
317
322
318 self._argmax = self._argmax/2 - 1
323 self._argmax = self._argmax/2 - 1
319 return self._argmax
324 return self._argmax
320
325
321 def limit_arglist(self, arglist, cmd, *args, **kwargs):
326 def limit_arglist(self, arglist, cmd, *args, **kwargs):
322 limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
327 limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
323 bytes = 0
328 bytes = 0
324 fl = []
329 fl = []
325 for fn in arglist:
330 for fn in arglist:
326 b = len(fn) + 3
331 b = len(fn) + 3
327 if bytes + b < limit or len(fl) == 0:
332 if bytes + b < limit or len(fl) == 0:
328 fl.append(fn)
333 fl.append(fn)
329 bytes += b
334 bytes += b
330 else:
335 else:
331 yield fl
336 yield fl
332 fl = [fn]
337 fl = [fn]
333 bytes = b
338 bytes = b
334 if fl:
339 if fl:
335 yield fl
340 yield fl
336
341
337 def xargs(self, arglist, cmd, *args, **kwargs):
342 def xargs(self, arglist, cmd, *args, **kwargs):
338 for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
343 for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
339 self.run0(cmd, *(list(args) + l), **kwargs)
344 self.run0(cmd, *(list(args) + l), **kwargs)
340
345
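
A usage sketch of the wrapper above (the 'cvs' binary, the ui object, and the filenames list are assumed to exist): keyword arguments become short or long options, a None value produces a bare flag, and xargs() transparently splits long file lists so each invocation stays under the ARG_MAX estimate from getargmax().

    cvs = commandline(ui, 'cvs')
    out = cvs.run0('status', l=None)       # runs: cvs status -l
    cvs.xargs(filenames, 'admin', '-kb')   # cvs admin -kb f1 f2 ... in chunks
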
341 class mapfile(dict):
346 class mapfile(dict):
342 def __init__(self, ui, path):
347 def __init__(self, ui, path):
343 super(mapfile, self).__init__()
348 super(mapfile, self).__init__()
344 self.ui = ui
349 self.ui = ui
345 self.path = path
350 self.path = path
346 self.fp = None
351 self.fp = None
347 self.order = []
352 self.order = []
348 self._read()
353 self._read()
349
354
350 def _read(self):
355 def _read(self):
351 if not self.path:
356 if not self.path:
352 return
357 return
353 try:
358 try:
354 fp = open(self.path, 'r')
359 fp = open(self.path, 'r')
355 except IOError, err:
360 except IOError, err:
356 if err.errno != errno.ENOENT:
361 if err.errno != errno.ENOENT:
357 raise
362 raise
358 return
363 return
359 for i, line in enumerate(fp):
364 for i, line in enumerate(fp):
360 try:
365 try:
361 key, value = line[:-1].rsplit(' ', 1)
366 key, value = line[:-1].rsplit(' ', 1)
362 except ValueError:
367 except ValueError:
363 raise util.Abort(_('syntax error in %s(%d): key/value pair expected')
368 raise util.Abort(_('syntax error in %s(%d): key/value pair expected')
364 % (self.path, i+1))
369 % (self.path, i+1))
365 if key not in self:
370 if key not in self:
366 self.order.append(key)
371 self.order.append(key)
367 super(mapfile, self).__setitem__(key, value)
372 super(mapfile, self).__setitem__(key, value)
368 fp.close()
373 fp.close()
369
374
370 def __setitem__(self, key, value):
375 def __setitem__(self, key, value):
371 if self.fp is None:
376 if self.fp is None:
372 try:
377 try:
373 self.fp = open(self.path, 'a')
378 self.fp = open(self.path, 'a')
374 except IOError, err:
379 except IOError, err:
375 raise util.Abort(_('could not open map file %r: %s') %
380 raise util.Abort(_('could not open map file %r: %s') %
376 (self.path, err.strerror))
381 (self.path, err.strerror))
377 self.fp.write('%s %s\n' % (key, value))
382 self.fp.write('%s %s\n' % (key, value))
378 self.fp.flush()
383 self.fp.flush()
379 super(mapfile, self).__setitem__(key, value)
384 super(mapfile, self).__setitem__(key, value)
380
385
381 def close(self):
386 def close(self):
382 if self.fp:
387 if self.fp:
383 self.fp.close()
388 self.fp.close()
384 self.fp = None
389 self.fp = None
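
mapfile is essentially a dict backed by an append-only text file ("key value" per line), which is what lets an interrupted conversion resume from its revision map. A small usage sketch (the path is illustrative, and ui is assumed to be a Mercurial ui object):

    m = mapfile(ui, '/tmp/example.revmap')
    m['source-rev-1'] = 'sink-rev-1'   # appended to the file and flushed at once
    m['source-rev-2'] = 'sink-rev-2'
    m.close()
    # re-reading the same path restores both entries in insertion order
    m2 = mapfile(ui, '/tmp/example.revmap')
    assert m2.order == ['source-rev-1', 'source-rev-2']
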
@@ -1,836 +1,838 b''
1 #
1 #
2 # Mercurial built-in replacement for cvsps.
2 # Mercurial built-in replacement for cvsps.
3 #
3 #
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import os
9 import os
10 import re
10 import re
11 import cPickle as pickle
11 import cPickle as pickle
12 from mercurial import util
12 from mercurial import util
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
15 def listsort(list, key):
15 def listsort(list, key):
16 "helper to sort by key in Python 2.3"
16 "helper to sort by key in Python 2.3"
17 try:
17 try:
18 list.sort(key=key)
18 list.sort(key=key)
19 except TypeError:
19 except TypeError:
20 list.sort(lambda l, r: cmp(key(l), key(r)))
20 list.sort(lambda l, r: cmp(key(l), key(r)))
21
21
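
The helper exists only because list.sort() gained its key= argument in Python 2.4; on 2.3 it falls back to an equivalent comparison function. Either path gives the same ordering:

    entries = [('b', 2), ('c', 3), ('a', 1)]
    listsort(entries, key=lambda item: item[0])
    # entries is now [('a', 1), ('b', 2), ('c', 3)]
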
22 class logentry(object):
22 class logentry(object):
23 '''Class logentry has the following attributes:
23 '''Class logentry has the following attributes:
24 .author - author name as CVS knows it
24 .author - author name as CVS knows it
25 .branch - name of branch this revision is on
25 .branch - name of branch this revision is on
26 .branches - revision tuple of branches starting at this revision
26 .branches - revision tuple of branches starting at this revision
27 .comment - commit message
27 .comment - commit message
28 .date - the commit date as a (time, tz) tuple
28 .date - the commit date as a (time, tz) tuple
29 .dead - true if file revision is dead
29 .dead - true if file revision is dead
30 .file - Name of file
30 .file - Name of file
31 .lines - a tuple (+lines, -lines) or None
31 .lines - a tuple (+lines, -lines) or None
32 .parent - Previous revision of this entry
32 .parent - Previous revision of this entry
33 .rcs - name of file as returned from CVS
33 .rcs - name of file as returned from CVS
34 .revision - revision number as tuple
34 .revision - revision number as tuple
35 .tags - list of tags on the file
35 .tags - list of tags on the file
36 .synthetic - is this a synthetic "file ... added on ..." revision?
36 .synthetic - is this a synthetic "file ... added on ..." revision?
37 .mergepoint - the branch that has been merged from
37 .mergepoint - the branch that has been merged from
38 (if present in rlog output)
38 (if present in rlog output)
39 .branchpoints - the branches that start at the current entry
39 .branchpoints - the branches that start at the current entry
40 '''
40 '''
41 def __init__(self, **entries):
41 def __init__(self, **entries):
42 self.__dict__.update(entries)
42 self.__dict__.update(entries)
43
43
44 def __repr__(self):
44 def __repr__(self):
45 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
45 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
46 id(self),
46 id(self),
47 self.file,
47 self.file,
48 ".".join(map(str, self.revision)))
48 ".".join(map(str, self.revision)))
49
49
50 class logerror(Exception):
50 class logerror(Exception):
51 pass
51 pass
52
52
53 def getrepopath(cvspath):
53 def getrepopath(cvspath):
54 """Return the repository path from a CVS path.
54 """Return the repository path from a CVS path.
55
55
56 >>> getrepopath('/foo/bar')
56 >>> getrepopath('/foo/bar')
57 '/foo/bar'
57 '/foo/bar'
58 >>> getrepopath('c:/foo/bar')
58 >>> getrepopath('c:/foo/bar')
59 'c:/foo/bar'
59 'c:/foo/bar'
60 >>> getrepopath(':pserver:10/foo/bar')
60 >>> getrepopath(':pserver:10/foo/bar')
61 '/foo/bar'
61 '/foo/bar'
62 >>> getrepopath(':pserver:10c:/foo/bar')
62 >>> getrepopath(':pserver:10c:/foo/bar')
63 '/foo/bar'
63 '/foo/bar'
64 >>> getrepopath(':pserver:/foo/bar')
64 >>> getrepopath(':pserver:/foo/bar')
65 '/foo/bar'
65 '/foo/bar'
66 >>> getrepopath(':pserver:c:/foo/bar')
66 >>> getrepopath(':pserver:c:/foo/bar')
67 'c:/foo/bar'
67 'c:/foo/bar'
68 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
68 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
69 '/foo/bar'
69 '/foo/bar'
70 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
70 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
71 'c:/foo/bar'
71 'c:/foo/bar'
72 """
72 """
73 # According to the CVS manual, CVS paths are expressed like:
73 # According to the CVS manual, CVS paths are expressed like:
74 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
74 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
75 #
75 #
76 # Unfortunately, Windows absolute paths start with a drive letter
76 # Unfortunately, Windows absolute paths start with a drive letter
77 # like 'c:' making it harder to parse. Here we assume that drive
77 # like 'c:' making it harder to parse. Here we assume that drive
78 # letters are only one character long and any CVS component before
78 # letters are only one character long and any CVS component before
79 # the repository path is at least 2 characters long, and use this
79 # the repository path is at least 2 characters long, and use this
80 # to disambiguate.
80 # to disambiguate.
81 parts = cvspath.split(':')
81 parts = cvspath.split(':')
82 if len(parts) == 1:
82 if len(parts) == 1:
83 return parts[0]
83 return parts[0]
84 # Here there is an ambiguous case if we have a port number
84 # Here there is an ambiguous case if we have a port number
85 # immediately followed by a Windows drive letter. We assume this
85 # immediately followed by a Windows drive letter. We assume this
86 # never happens and decide it must be a CVS path component,
86 # never happens and decide it must be a CVS path component,
87 # therefore ignoring it.
87 # therefore ignoring it.
88 if len(parts[-2]) > 1:
88 if len(parts[-2]) > 1:
89 return parts[-1].lstrip('0123456789')
89 return parts[-1].lstrip('0123456789')
90 return parts[-2] + ':' + parts[-1]
90 return parts[-2] + ':' + parts[-1]
91
91
92 def createlog(ui, directory=None, root="", rlog=True, cache=None):
92 def createlog(ui, directory=None, root="", rlog=True, cache=None):
93 '''Collect the CVS rlog'''
93 '''Collect the CVS rlog'''
94
94
95 # Because we store many duplicate commit log messages, reusing strings
95 # Because we store many duplicate commit log messages, reusing strings
96 # saves a lot of memory and pickle storage space.
96 # saves a lot of memory and pickle storage space.
97 _scache = {}
97 _scache = {}
98 def scache(s):
98 def scache(s):
99 "return a shared version of a string"
99 "return a shared version of a string"
100 return _scache.setdefault(s, s)
100 return _scache.setdefault(s, s)
101
101
102 ui.status(_('collecting CVS rlog\n'))
102 ui.status(_('collecting CVS rlog\n'))
103
103
104 log = [] # list of logentry objects containing the CVS state
104 log = [] # list of logentry objects containing the CVS state
105
105
106 # patterns to match in CVS (r)log output, by state of use
106 # patterns to match in CVS (r)log output, by state of use
107 re_00 = re.compile('RCS file: (.+)$')
107 re_00 = re.compile('RCS file: (.+)$')
108 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
108 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
109 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
109 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
110 re_03 = re.compile("(Cannot access.+CVSROOT)|"
110 re_03 = re.compile("(Cannot access.+CVSROOT)|"
111 "(can't create temporary directory.+)$")
111 "(can't create temporary directory.+)$")
112 re_10 = re.compile('Working file: (.+)$')
112 re_10 = re.compile('Working file: (.+)$')
113 re_20 = re.compile('symbolic names:')
113 re_20 = re.compile('symbolic names:')
114 re_30 = re.compile('\t(.+): ([\\d.]+)$')
114 re_30 = re.compile('\t(.+): ([\\d.]+)$')
115 re_31 = re.compile('----------------------------$')
115 re_31 = re.compile('----------------------------$')
116 re_32 = re.compile('======================================='
116 re_32 = re.compile('======================================='
117 '======================================$')
117 '======================================$')
118 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
118 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
119 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
119 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
120 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
120 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
121 r'(.*mergepoint:\s+([^;]+);)?')
121 r'(.*mergepoint:\s+([^;]+);)?')
122 re_70 = re.compile('branches: (.+);$')
122 re_70 = re.compile('branches: (.+);$')
123
123
124 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
124 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
125
125
126 prefix = '' # leading path to strip off what we get from CVS
126 prefix = '' # leading path to strip off what we get from CVS
127
127
128 if directory is None:
128 if directory is None:
129 # Current working directory
129 # Current working directory
130
130
131 # Get the real directory in the repository
131 # Get the real directory in the repository
132 try:
132 try:
133 prefix = file(os.path.join('CVS','Repository')).read().strip()
133 prefix = file(os.path.join('CVS','Repository')).read().strip()
134 if prefix == ".":
134 if prefix == ".":
135 prefix = ""
135 prefix = ""
136 directory = prefix
136 directory = prefix
137 except IOError:
137 except IOError:
138 raise logerror('Not a CVS sandbox')
138 raise logerror('Not a CVS sandbox')
139
139
140 if prefix and not prefix.endswith(os.sep):
140 if prefix and not prefix.endswith(os.sep):
141 prefix += os.sep
141 prefix += os.sep
142
142
143 # Use the Root file in the sandbox, if it exists
143 # Use the Root file in the sandbox, if it exists
144 try:
144 try:
145 root = file(os.path.join('CVS','Root')).read().strip()
145 root = file(os.path.join('CVS','Root')).read().strip()
146 except IOError:
146 except IOError:
147 pass
147 pass
148
148
149 if not root:
149 if not root:
150 root = os.environ.get('CVSROOT', '')
150 root = os.environ.get('CVSROOT', '')
151
151
152 # read log cache if one exists
152 # read log cache if one exists
153 oldlog = []
153 oldlog = []
154 date = None
154 date = None
155
155
156 if cache:
156 if cache:
157 cachedir = os.path.expanduser('~/.hg.cvsps')
157 cachedir = os.path.expanduser('~/.hg.cvsps')
158 if not os.path.exists(cachedir):
158 if not os.path.exists(cachedir):
159 os.mkdir(cachedir)
159 os.mkdir(cachedir)
160
160
161 # The cvsps cache pickle needs a uniquified name, based on the
161 # The cvsps cache pickle needs a uniquified name, based on the
162 # repository location. The address may have all sorts of nasties
162 # repository location. The address may have all sorts of nasties
163 # in it, slashes, colons and such. So here we take just the
163 # in it, slashes, colons and such. So here we take just the
164 # alphanumerics, concatenated in a way that does not mix up the
164 # alphanumerics, concatenated in a way that does not mix up the
165 # various components, so that
165 # various components, so that
166 # :pserver:user@server:/path
166 # :pserver:user@server:/path
167 # and
167 # and
168 # /pserver/user/server/path
168 # /pserver/user/server/path
169 # are mapped to different cache file names.
169 # are mapped to different cache file names.
170 cachefile = root.split(":") + [directory, "cache"]
170 cachefile = root.split(":") + [directory, "cache"]
171 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
171 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
172 cachefile = os.path.join(cachedir,
172 cachefile = os.path.join(cachedir,
173 '.'.join([s for s in cachefile if s]))
173 '.'.join([s for s in cachefile if s]))
174
174
175 if cache == 'update':
175 if cache == 'update':
176 try:
176 try:
177 ui.note(_('reading cvs log cache %s\n') % cachefile)
177 ui.note(_('reading cvs log cache %s\n') % cachefile)
178 oldlog = pickle.load(file(cachefile))
178 oldlog = pickle.load(file(cachefile))
179 ui.note(_('cache has %d log entries\n') % len(oldlog))
179 ui.note(_('cache has %d log entries\n') % len(oldlog))
180 except Exception, e:
180 except Exception, e:
181 ui.note(_('error reading cache: %r\n') % e)
181 ui.note(_('error reading cache: %r\n') % e)
182
182
183 if oldlog:
183 if oldlog:
184 date = oldlog[-1].date # last commit date as a (time,tz) tuple
184 date = oldlog[-1].date # last commit date as a (time,tz) tuple
185 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
185 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
186
186
187 # build the CVS commandline
187 # build the CVS commandline
188 cmd = ['cvs', '-q']
188 cmd = ['cvs', '-q']
189 if root:
189 if root:
190 cmd.append('-d%s' % root)
190 cmd.append('-d%s' % root)
191 p = util.normpath(getrepopath(root))
191 p = util.normpath(getrepopath(root))
192 if not p.endswith('/'):
192 if not p.endswith('/'):
193 p += '/'
193 p += '/'
194 prefix = p + util.normpath(prefix)
194 prefix = p + util.normpath(prefix)
195 cmd.append(['log', 'rlog'][rlog])
195 cmd.append(['log', 'rlog'][rlog])
196 if date:
196 if date:
197 # no space between option and date string
197 # no space between option and date string
198 cmd.append('-d>%s' % date)
198 cmd.append('-d>%s' % date)
199 cmd.append(directory)
199 cmd.append(directory)
200
200
201 # state machine begins here
201 # state machine begins here
202 tags = {} # dictionary of revisions on current file with their tags
202 tags = {} # dictionary of revisions on current file with their tags
203 branchmap = {} # mapping between branch names and revision numbers
203 branchmap = {} # mapping between branch names and revision numbers
204 state = 0
204 state = 0
205 store = False # set when a new record can be appended
205 store = False # set when a new record can be appended
206
206
207 cmd = [util.shellquote(arg) for arg in cmd]
207 cmd = [util.shellquote(arg) for arg in cmd]
208 ui.note(_("running %s\n") % (' '.join(cmd)))
208 ui.note(_("running %s\n") % (' '.join(cmd)))
209 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
209 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
210
210
211 pfp = util.popen(' '.join(cmd))
211 pfp = util.popen(' '.join(cmd))
212 peek = pfp.readline()
212 peek = pfp.readline()
213 while True:
213 while True:
214 line = peek
214 line = peek
215 if line == '':
215 if line == '':
216 break
216 break
217 peek = pfp.readline()
217 peek = pfp.readline()
218 if line.endswith('\n'):
218 if line.endswith('\n'):
219 line = line[:-1]
219 line = line[:-1]
220 #ui.debug('state=%d line=%r\n' % (state, line))
220 #ui.debug('state=%d line=%r\n' % (state, line))
221
221
222 if state == 0:
222 if state == 0:
223 # initial state, consume input until we see 'RCS file'
223 # initial state, consume input until we see 'RCS file'
224 match = re_00.match(line)
224 match = re_00.match(line)
225 if match:
225 if match:
226 rcs = match.group(1)
226 rcs = match.group(1)
227 tags = {}
227 tags = {}
228 if rlog:
228 if rlog:
229 filename = util.normpath(rcs[:-2])
229 filename = util.normpath(rcs[:-2])
230 if filename.startswith(prefix):
230 if filename.startswith(prefix):
231 filename = filename[len(prefix):]
231 filename = filename[len(prefix):]
232 if filename.startswith('/'):
232 if filename.startswith('/'):
233 filename = filename[1:]
233 filename = filename[1:]
234 if filename.startswith('Attic/'):
234 if filename.startswith('Attic/'):
235 filename = filename[6:]
235 filename = filename[6:]
236 else:
236 else:
237 filename = filename.replace('/Attic/', '/')
237 filename = filename.replace('/Attic/', '/')
238 state = 2
238 state = 2
239 continue
239 continue
240 state = 1
240 state = 1
241 continue
241 continue
242 match = re_01.match(line)
242 match = re_01.match(line)
243 if match:
243 if match:
244 raise Exception(match.group(1))
244 raise Exception(match.group(1))
245 match = re_02.match(line)
245 match = re_02.match(line)
246 if match:
246 if match:
247 raise Exception(match.group(2))
247 raise Exception(match.group(2))
248 if re_03.match(line):
248 if re_03.match(line):
249 raise Exception(line)
249 raise Exception(line)
250
250
251 elif state == 1:
251 elif state == 1:
252 # expect 'Working file' (only when using log instead of rlog)
252 # expect 'Working file' (only when using log instead of rlog)
253 match = re_10.match(line)
253 match = re_10.match(line)
254 assert match, _('RCS file must be followed by working file')
254 assert match, _('RCS file must be followed by working file')
255 filename = util.normpath(match.group(1))
255 filename = util.normpath(match.group(1))
256 state = 2
256 state = 2
257
257
258 elif state == 2:
258 elif state == 2:
259 # expect 'symbolic names'
259 # expect 'symbolic names'
260 if re_20.match(line):
260 if re_20.match(line):
261 branchmap = {}
261 branchmap = {}
262 state = 3
262 state = 3
263
263
264 elif state == 3:
264 elif state == 3:
265 # read the symbolic names and store as tags
265 # read the symbolic names and store as tags
266 match = re_30.match(line)
266 match = re_30.match(line)
267 if match:
267 if match:
268 rev = [int(x) for x in match.group(2).split('.')]
268 rev = [int(x) for x in match.group(2).split('.')]
269
269
270 # Convert magic branch number to an odd-numbered one
270 # Convert magic branch number to an odd-numbered one
271 revn = len(rev)
271 revn = len(rev)
272 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
272 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
273 rev = rev[:-2] + rev[-1:]
273 rev = rev[:-2] + rev[-1:]
274 rev = tuple(rev)
274 rev = tuple(rev)
275
275
276 if rev not in tags:
276 if rev not in tags:
277 tags[rev] = []
277 tags[rev] = []
278 tags[rev].append(match.group(1))
278 tags[rev].append(match.group(1))
279 branchmap[match.group(1)] = match.group(2)
279 branchmap[match.group(1)] = match.group(2)
280
280
281 elif re_31.match(line):
281 elif re_31.match(line):
282 state = 5
282 state = 5
283 elif re_32.match(line):
283 elif re_32.match(line):
284 state = 0
284 state = 0
285
285
286 elif state == 4:
286 elif state == 4:
287 # expecting '------' separator before first revision
287 # expecting '------' separator before first revision
288 if re_31.match(line):
288 if re_31.match(line):
289 state = 5
289 state = 5
290 else:
290 else:
291 assert not re_32.match(line), _('must have at least '
291 assert not re_32.match(line), _('must have at least '
292 'some revisions')
292 'some revisions')
293
293
294 elif state == 5:
294 elif state == 5:
295 # expecting revision number and possibly (ignored) lock indication
295 # expecting revision number and possibly (ignored) lock indication
296 # we create the logentry here from values stored in states 0 to 4,
296 # we create the logentry here from values stored in states 0 to 4,
297 # as this state is re-entered for subsequent revisions of a file.
297 # as this state is re-entered for subsequent revisions of a file.
298 match = re_50.match(line)
298 match = re_50.match(line)
299 assert match, _('expected revision number')
299 assert match, _('expected revision number')
300 e = logentry(rcs=scache(rcs), file=scache(filename),
300 e = logentry(rcs=scache(rcs), file=scache(filename),
301 revision=tuple([int(x) for x in match.group(1).split('.')]),
301 revision=tuple([int(x) for x in match.group(1).split('.')]),
302 branches=[], parent=None,
302 branches=[], parent=None,
303 synthetic=False)
303 synthetic=False)
304 state = 6
304 state = 6
305
305
306 elif state == 6:
306 elif state == 6:
307 # expecting date, author, state, lines changed
307 # expecting date, author, state, lines changed
308 match = re_60.match(line)
308 match = re_60.match(line)
309 assert match, _('revision must be followed by date line')
309 assert match, _('revision must be followed by date line')
310 d = match.group(1)
310 d = match.group(1)
311 if d[2] == '/':
311 if d[2] == '/':
312 # Y2K
312 # Y2K
313 d = '19' + d
313 d = '19' + d
314
314
315 if len(d.split()) != 3:
315 if len(d.split()) != 3:
316 # cvs log dates always in GMT
316 # cvs log dates always in GMT
317 d = d + ' UTC'
317 d = d + ' UTC'
318 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
318 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
319 '%Y/%m/%d %H:%M:%S',
319 '%Y/%m/%d %H:%M:%S',
320 '%Y-%m-%d %H:%M:%S'])
320 '%Y-%m-%d %H:%M:%S'])
321 e.author = scache(match.group(2))
321 e.author = scache(match.group(2))
322 e.dead = match.group(3).lower() == 'dead'
322 e.dead = match.group(3).lower() == 'dead'
323
323
324 if match.group(5):
324 if match.group(5):
325 if match.group(6):
325 if match.group(6):
326 e.lines = (int(match.group(5)), int(match.group(6)))
326 e.lines = (int(match.group(5)), int(match.group(6)))
327 else:
327 else:
328 e.lines = (int(match.group(5)), 0)
328 e.lines = (int(match.group(5)), 0)
329 elif match.group(6):
329 elif match.group(6):
330 e.lines = (0, int(match.group(6)))
330 e.lines = (0, int(match.group(6)))
331 else:
331 else:
332 e.lines = None
332 e.lines = None
333
333
334 if match.group(7): # cvsnt mergepoint
334 if match.group(7): # cvsnt mergepoint
335 myrev = match.group(8).split('.')
335 myrev = match.group(8).split('.')
336 if len(myrev) == 2: # head
336 if len(myrev) == 2: # head
337 e.mergepoint = 'HEAD'
337 e.mergepoint = 'HEAD'
338 else:
338 else:
339 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
339 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
340 branches = [b for b in branchmap if branchmap[b] == myrev]
340 branches = [b for b in branchmap if branchmap[b] == myrev]
341 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
341 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
342 e.mergepoint = branches[0]
342 e.mergepoint = branches[0]
343 else:
343 else:
344 e.mergepoint = None
344 e.mergepoint = None
345 e.comment = []
345 e.comment = []
346 state = 7
346 state = 7
347
347
348 elif state == 7:
348 elif state == 7:
349 # read the revision numbers of branches that start at this revision
349 # read the revision numbers of branches that start at this revision
350 # or store the commit log message otherwise
350 # or store the commit log message otherwise
351 m = re_70.match(line)
351 m = re_70.match(line)
352 if m:
352 if m:
353 e.branches = [tuple([int(y) for y in x.strip().split('.')])
353 e.branches = [tuple([int(y) for y in x.strip().split('.')])
354 for x in m.group(1).split(';')]
354 for x in m.group(1).split(';')]
355 state = 8
355 state = 8
356 elif re_31.match(line) and re_50.match(peek):
356 elif re_31.match(line) and re_50.match(peek):
357 state = 5
357 state = 5
358 store = True
358 store = True
359 elif re_32.match(line):
359 elif re_32.match(line):
360 state = 0
360 state = 0
361 store = True
361 store = True
362 else:
362 else:
363 e.comment.append(line)
363 e.comment.append(line)
364
364
365 elif state == 8:
365 elif state == 8:
366 # store commit log message
366 # store commit log message
367 if re_31.match(line):
367 if re_31.match(line):
368 state = 5
368 state = 5
369 store = True
369 store = True
370 elif re_32.match(line):
370 elif re_32.match(line):
371 state = 0
371 state = 0
372 store = True
372 store = True
373 else:
373 else:
374 e.comment.append(line)
374 e.comment.append(line)
375
375
376 # When a file is added on a branch B1, CVS creates a synthetic
376 # When a file is added on a branch B1, CVS creates a synthetic
377 # dead trunk revision 1.1 so that the branch has a root.
377 # dead trunk revision 1.1 so that the branch has a root.
378 # Likewise, if you merge such a file to a later branch B2 (one
378 # Likewise, if you merge such a file to a later branch B2 (one
379 # that already existed when the file was added on B1), CVS
379 # that already existed when the file was added on B1), CVS
380 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
380 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
381 # these revisions now, but mark them synthetic so
381 # these revisions now, but mark them synthetic so
382 # createchangeset() can take care of them.
382 # createchangeset() can take care of them.
383 if (store and
383 if (store and
384 e.dead and
384 e.dead and
385 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
385 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
386 len(e.comment) == 1 and
386 len(e.comment) == 1 and
387 file_added_re.match(e.comment[0])):
387 file_added_re.match(e.comment[0])):
388 ui.debug(_('found synthetic revision in %s: %r\n')
388 ui.debug(_('found synthetic revision in %s: %r\n')
389 % (e.rcs, e.comment[0]))
389 % (e.rcs, e.comment[0]))
390 e.synthetic = True
390 e.synthetic = True
391
391
392 if store:
392 if store:
393 # clean up the results and save in the log.
393 # clean up the results and save in the log.
394 store = False
394 store = False
395 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
395 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
396 e.comment = scache('\n'.join(e.comment))
396 e.comment = scache('\n'.join(e.comment))
397
397
398 revn = len(e.revision)
398 revn = len(e.revision)
399 if revn > 3 and (revn % 2) == 0:
399 if revn > 3 and (revn % 2) == 0:
400 e.branch = tags.get(e.revision[:-1], [None])[0]
400 e.branch = tags.get(e.revision[:-1], [None])[0]
401 else:
401 else:
402 e.branch = None
402 e.branch = None
403
403
404 # find the branches starting from this revision
404 # find the branches starting from this revision
405 branchpoints = set()
405 branchpoints = set()
406 for branch, revision in branchmap.iteritems():
406 for branch, revision in branchmap.iteritems():
407 revparts = tuple([int(i) for i in revision.split('.')])
407 revparts = tuple([int(i) for i in revision.split('.')])
408 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
408 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
409 # normal branch
409 # normal branch
410 if revparts[:-2] == e.revision:
410 if revparts[:-2] == e.revision:
411 branchpoints.add(branch)
411 branchpoints.add(branch)
412 elif revparts == (1,1,1): # vendor branch
412 elif revparts == (1,1,1): # vendor branch
413 if revparts in e.branches:
413 if revparts in e.branches:
414 branchpoints.add(branch)
414 branchpoints.add(branch)
415 e.branchpoints = branchpoints
415 e.branchpoints = branchpoints
416
416
417 log.append(e)
417 log.append(e)
418
418
419 if len(log) % 100 == 0:
419 if len(log) % 100 == 0:
420 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
420 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
421
421
422 listsort(log, key=lambda x:(x.rcs, x.revision))
422 listsort(log, key=lambda x:(x.rcs, x.revision))
423
423
424 # find parent revisions of individual files
424 # find parent revisions of individual files
425 versions = {}
425 versions = {}
426 for e in log:
426 for e in log:
427 branch = e.revision[:-1]
427 branch = e.revision[:-1]
428 p = versions.get((e.rcs, branch), None)
428 p = versions.get((e.rcs, branch), None)
429 if p is None:
429 if p is None:
430 p = e.revision[:-2]
430 p = e.revision[:-2]
431 e.parent = p
431 e.parent = p
432 versions[(e.rcs, branch)] = e.revision
432 versions[(e.rcs, branch)] = e.revision
433
433
434 # update the log cache
434 # update the log cache
435 if cache:
435 if cache:
436 if log:
436 if log:
437 # join up the old and new logs
437 # join up the old and new logs
438 listsort(log, key=lambda x:x.date)
438 listsort(log, key=lambda x:x.date)
439
439
440 if oldlog and oldlog[-1].date >= log[0].date:
440 if oldlog and oldlog[-1].date >= log[0].date:
441 raise logerror('Log cache overlaps with new log entries,'
441 raise logerror('Log cache overlaps with new log entries,'
442 ' re-run without cache.')
442 ' re-run without cache.')
443
443
444 log = oldlog + log
444 log = oldlog + log
445
445
446 # write the new cachefile
446 # write the new cachefile
447 ui.note(_('writing cvs log cache %s\n') % cachefile)
447 ui.note(_('writing cvs log cache %s\n') % cachefile)
448 pickle.dump(log, file(cachefile, 'w'))
448 pickle.dump(log, file(cachefile, 'w'))
449 else:
449 else:
450 log = oldlog
450 log = oldlog
451
451
452 ui.status(_('%d log entries\n') % len(log))
452 ui.status(_('%d log entries\n') % len(log))
453
453
454 return log
454 return log
455
455
456
456
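
Putting the two halves together: createlog() above gathers per-file CVS log entries (optionally via the on-disk cache), and createchangeset() below groups them into changesets. A hedged end-to-end sketch, with an illustrative repository path and module name and ui assumed to be a Mercurial ui object:

    log = createlog(ui, directory='mymodule',
                    root=':pserver:anonymous@cvs.example.org:/cvsroot',
                    cache='update')
    changesets = createchangeset(ui, log, fuzz=60)
    for cs in changesets:
        ui.write('%d entries on branch %r\n' % (len(cs.entries), cs.branch))
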
457 class changeset(object):
457 class changeset(object):
458 '''Class changeset has the following attributes:
458 '''Class changeset has the following attributes:
459 .id - integer identifying this changeset (list index)
459 .id - integer identifying this changeset (list index)
460 .author - author name as CVS knows it
460 .author - author name as CVS knows it
461 .branch - name of branch this changeset is on, or None
461 .branch - name of branch this changeset is on, or None
462 .comment - commit message
462 .comment - commit message
463 .date - the commit date as a (time,tz) tuple
463 .date - the commit date as a (time,tz) tuple
464 .entries - list of logentry objects in this changeset
464 .entries - list of logentry objects in this changeset
465 .parents - list of one or two parent changesets
465 .parents - list of one or two parent changesets
466 .tags - list of tags on this changeset
466 .tags - list of tags on this changeset
467 .synthetic - from synthetic revision "file ... added on branch ..."
467 .synthetic - from synthetic revision "file ... added on branch ..."
468 .mergepoint - the branch that has been merged from
468 .mergepoint - the branch that has been merged from
469 (if present in rlog output)
469 (if present in rlog output)
470 .branchpoints - the branches that start at the current entry
470 .branchpoints - the branches that start at the current entry
471 '''
471 '''
472 def __init__(self, **entries):
472 def __init__(self, **entries):
473 self.__dict__.update(entries)
473 self.__dict__.update(entries)
474
474
475 def __repr__(self):
475 def __repr__(self):
476 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
476 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
477 id(self),
477 id(self),
478 getattr(self, 'id', "(no id)"))
478 getattr(self, 'id', "(no id)"))
479
479
480 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
480 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
481 '''Convert log into changesets.'''
481 '''Convert log into changesets.'''
482
482
483 ui.status(_('creating changesets\n'))
483 ui.status(_('creating changesets\n'))
484
484
485 # Merge changesets
485 # Merge changesets
486
486
487 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
487 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
488
488
489 changesets = []
489 changesets = []
490 files = set()
490 files = set()
491 c = None
491 c = None
492 for i, e in enumerate(log):
492 for i, e in enumerate(log):
493
493
494 # Check if log entry belongs to the current changeset or not.
494 # Check if log entry belongs to the current changeset or not.
495
495
496 # Since CVS is file centric, two different file revisions with
496 # Since CVS is file centric, two different file revisions with
497 # different branchpoints should be treated as belonging to two
497 # different branchpoints should be treated as belonging to two
498 # different changesets (and the ordering is important and not
498 # different changesets (and the ordering is important and not
499 # honoured by cvsps at this point).
499 # honoured by cvsps at this point).
500 #
500 #
501 # Consider the following case:
501 # Consider the following case:
502 # foo 1.1 branchpoints: [MYBRANCH]
502 # foo 1.1 branchpoints: [MYBRANCH]
503 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
503 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
504 #
504 #
505 # Here foo is only part of MYBRANCH, but not of MYBRANCH2, e.g. a
505 # Here foo is only part of MYBRANCH, but not of MYBRANCH2, e.g. a
506 # later version of foo may be in MYBRANCH2, so foo should be the
506 # later version of foo may be in MYBRANCH2, so foo should be the
507 # first changeset and bar the next and MYBRANCH and MYBRANCH2
507 # first changeset and bar the next and MYBRANCH and MYBRANCH2
508 # should both start from the bar changeset. No provisions are
508 # should both start from the bar changeset. No provisions are
509 # made to ensure that this is, in fact, what happens.
509 # made to ensure that this is, in fact, what happens.
510 if not (c and
510 if not (c and
511 e.comment == c.comment and
511 e.comment == c.comment and
512 e.author == c.author and
512 e.author == c.author and
513 e.branch == c.branch and
513 e.branch == c.branch and
514 e.branchpoints == c.branchpoints and
514 (not hasattr(e, 'branchpoints') or
515 not hasattr (c, 'branchpoints') or
516 e.branchpoints == c.branchpoints) and
515 ((c.date[0] + c.date[1]) <=
517 ((c.date[0] + c.date[1]) <=
516 (e.date[0] + e.date[1]) <=
518 (e.date[0] + e.date[1]) <=
517 (c.date[0] + c.date[1]) + fuzz) and
519 (c.date[0] + c.date[1]) + fuzz) and
518 e.file not in files):
520 e.file not in files):
519 c = changeset(comment=e.comment, author=e.author,
521 c = changeset(comment=e.comment, author=e.author,
520 branch=e.branch, date=e.date, entries=[],
522 branch=e.branch, date=e.date, entries=[],
521 mergepoint=getattr(e, 'mergepoint', None),
523 mergepoint=getattr(e, 'mergepoint', None),
522 branchpoints=getattr(e, 'branchpoints', set()))
524 branchpoints=getattr(e, 'branchpoints', set()))
523 changesets.append(c)
525 changesets.append(c)
524 files = set()
526 files = set()
525 if len(changesets) % 100 == 0:
527 if len(changesets) % 100 == 0:
526 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
528 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
527 ui.status(util.ellipsis(t, 80) + '\n')
529 ui.status(util.ellipsis(t, 80) + '\n')
528
530
529 c.entries.append(e)
531 c.entries.append(e)
530 files.add(e.file)
532 files.add(e.file)
531 c.date = e.date # changeset date is date of latest commit in it
533 c.date = e.date # changeset date is date of latest commit in it
532
534
533 # Mark synthetic changesets
535 # Mark synthetic changesets
534
536
535 for c in changesets:
537 for c in changesets:
536 # Synthetic revisions always get their own changeset, because
538 # Synthetic revisions always get their own changeset, because
537 # the log message includes the filename. E.g. if you add file3
539 # the log message includes the filename. E.g. if you add file3
538 # and file4 on a branch, you get four log entries and three
540 # and file4 on a branch, you get four log entries and three
539 # changesets:
541 # changesets:
540 # "File file3 was added on branch ..." (synthetic, 1 entry)
542 # "File file3 was added on branch ..." (synthetic, 1 entry)
541 # "File file4 was added on branch ..." (synthetic, 1 entry)
543 # "File file4 was added on branch ..." (synthetic, 1 entry)
542 # "Add file3 and file4 to fix ..." (real, 2 entries)
544 # "Add file3 and file4 to fix ..." (real, 2 entries)
543 # Hence the check for 1 entry here.
545 # Hence the check for 1 entry here.
544 synth = getattr(c.entries[0], 'synthetic', None)
546 synth = getattr(c.entries[0], 'synthetic', None)
545 c.synthetic = (len(c.entries) == 1 and synth)
547 c.synthetic = (len(c.entries) == 1 and synth)
546
548
547 # Sort files in each changeset
549 # Sort files in each changeset
548
550
549 for c in changesets:
551 for c in changesets:
550 def pathcompare(l, r):
552 def pathcompare(l, r):
551 'Mimic cvsps sorting order'
553 'Mimic cvsps sorting order'
552 l = l.split('/')
554 l = l.split('/')
553 r = r.split('/')
555 r = r.split('/')
554 nl = len(l)
556 nl = len(l)
555 nr = len(r)
557 nr = len(r)
556 n = min(nl, nr)
558 n = min(nl, nr)
557 for i in range(n):
559 for i in range(n):
558 if i + 1 == nl and nl < nr:
560 if i + 1 == nl and nl < nr:
559 return -1
561 return -1
560 elif i + 1 == nr and nl > nr:
562 elif i + 1 == nr and nl > nr:
561 return +1
563 return +1
562 elif l[i] < r[i]:
564 elif l[i] < r[i]:
563 return -1
565 return -1
564 elif l[i] > r[i]:
566 elif l[i] > r[i]:
565 return +1
567 return +1
566 return 0
568 return 0
567 def entitycompare(l, r):
569 def entitycompare(l, r):
568 return pathcompare(l.file, r.file)
570 return pathcompare(l.file, r.file)
569
571
570 c.entries.sort(entitycompare)
572 c.entries.sort(entitycompare)
571
573
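The comparator above reproduces cvsps's ordering: within a changeset, files that sit directly in a directory come before files in its subdirectories, with ties broken component by component. A cmp-style function like this can be exercised via functools.cmp_to_key; a small illustration with made-up paths:

    from functools import cmp_to_key

    def pathcompare(l, r):
        # same rules as the comparator above: the shorter path wins when one
        # side is about to run out of components, otherwise compare textually
        l, r = l.split('/'), r.split('/')
        nl, nr = len(l), len(r)
        for i in range(min(nl, nr)):
            if i + 1 == nl and nl < nr:
                return -1
            elif i + 1 == nr and nl > nr:
                return +1
            elif l[i] < r[i]:
                return -1
            elif l[i] > r[i]:
                return +1
        return 0

    files = ['dir/sub/file.c', 'dir/zz.c', 'README', 'dir/file.c']
    assert (sorted(files, key=cmp_to_key(pathcompare)) ==
            ['README', 'dir/file.c', 'dir/zz.c', 'dir/sub/file.c'])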
572 # Sort changesets by date
574 # Sort changesets by date
573
575
574 def cscmp(l, r):
576 def cscmp(l, r):
575 d = sum(l.date) - sum(r.date)
577 d = sum(l.date) - sum(r.date)
576 if d:
578 if d:
577 return d
579 return d
578
580
579 # detect vendor branches and initial commits on a branch
581 # detect vendor branches and initial commits on a branch
580 le = {}
582 le = {}
581 for e in l.entries:
583 for e in l.entries:
582 le[e.rcs] = e.revision
584 le[e.rcs] = e.revision
583 re = {}
585 re = {}
584 for e in r.entries:
586 for e in r.entries:
585 re[e.rcs] = e.revision
587 re[e.rcs] = e.revision
586
588
587 d = 0
589 d = 0
588 for e in l.entries:
590 for e in l.entries:
589 if re.get(e.rcs, None) == e.parent:
591 if re.get(e.rcs, None) == e.parent:
590 assert not d
592 assert not d
591 d = 1
593 d = 1
592 break
594 break
593
595
594 for e in r.entries:
596 for e in r.entries:
595 if le.get(e.rcs, None) == e.parent:
597 if le.get(e.rcs, None) == e.parent:
596 assert not d
598 assert not d
597 d = -1
599 d = -1
598 break
600 break
599
601
600 return d
602 return d
601
603
602 changesets.sort(cscmp)
604 changesets.sort(cscmp)
603
605
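cscmp orders changesets primarily by the sum of the date tuple (commit time plus timezone offset); when two changesets tie on that, the one whose entries build on revisions produced by the other is pushed later. A toy version of the tie-break using plain dicts in place of the real entry objects (names and data are made up; the real code also asserts that only one direction applies):

    def cscmp(l, r):
        # date first, then the parent-revision relationship
        d = sum(l['date']) - sum(r['date'])
        if d:
            return d
        rrevs = dict((e['rcs'], e['revision']) for e in r['entries'])
        lrevs = dict((e['rcs'], e['revision']) for e in l['entries'])
        for e in l['entries']:
            if rrevs.get(e['rcs']) == e['parent']:
                return 1      # r produced the revision l builds on, so l is later
        for e in r['entries']:
            if lrevs.get(e['rcs']) == e['parent']:
                return -1
        return 0

    a = {'date': (100, 0),
         'entries': [{'rcs': 'f,v', 'revision': (1, 1), 'parent': None}]}
    b = {'date': (100, 0),
         'entries': [{'rcs': 'f,v', 'revision': (1, 2), 'parent': (1, 1)}]}
    assert cscmp(b, a) > 0 and cscmp(a, b) < 0   # b follows a despite equal dates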
604 # Collect tags
606 # Collect tags
605
607
606 globaltags = {}
608 globaltags = {}
607 for c in changesets:
609 for c in changesets:
608 for e in c.entries:
610 for e in c.entries:
609 for tag in e.tags:
611 for tag in e.tags:
610 # remember which is the latest changeset to have this tag
612 # remember which is the latest changeset to have this tag
611 globaltags[tag] = c
613 globaltags[tag] = c
612
614
613 for c in changesets:
615 for c in changesets:
614 tags = set()
616 tags = set()
615 for e in c.entries:
617 for e in c.entries:
616 tags.update(e.tags)
618 tags.update(e.tags)
617 # remember tags only if this is the latest changeset to have it
619 # remember tags only if this is the latest changeset to have it
618 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
620 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
619
621
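The two passes above implement a "latest changeset wins" rule for tags: the first pass records, for every tag, the last changeset (in sorted order) carrying it; the second keeps a tag on a changeset only if that changeset is the recorded winner. The same idea in isolation, with made-up data:

    changesets = [
        {'id': 1, 'tags': {'RELEASE_1'}},
        {'id': 2, 'tags': {'RELEASE_1', 'RELEASE_2'}},   # re-tags RELEASE_1
        {'id': 3, 'tags': set()},
    ]

    latest = {}                       # pass 1: latest changeset seen per tag
    for c in changesets:
        for tag in c['tags']:
            latest[tag] = c['id']

    for c in changesets:              # pass 2: keep a tag only on its winner
        c['tags'] = sorted(t for t in c['tags'] if latest[t] == c['id'])

    assert [c['tags'] for c in changesets] == [[], ['RELEASE_1', 'RELEASE_2'], []]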
620 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
622 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
621 # by inserting dummy changesets with two parents, and handle
623 # by inserting dummy changesets with two parents, and handle
622 # {{mergefrombranch BRANCHNAME}} by setting two parents.
624 # {{mergefrombranch BRANCHNAME}} by setting two parents.
623
625
624 if mergeto is None:
626 if mergeto is None:
625 mergeto = r'{{mergetobranch ([-\w]+)}}'
627 mergeto = r'{{mergetobranch ([-\w]+)}}'
626 if mergeto:
628 if mergeto:
627 mergeto = re.compile(mergeto)
629 mergeto = re.compile(mergeto)
628
630
629 if mergefrom is None:
631 if mergefrom is None:
630 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
632 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
631 if mergefrom:
633 if mergefrom:
632 mergefrom = re.compile(mergefrom)
634 mergefrom = re.compile(mergefrom)
633
635
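The default patterns treat a literal {{mergetobranch NAME}} or {{mergefrombranch NAME}} token in a CVS commit message as a merge hint; the captured group is the branch name, and HEAD maps to no named branch. A quick standalone check of the default mergefrom pattern (the commit message is made up):

    import re

    mergefrom = re.compile(r'{{mergefrombranch ([-\w]+)}}')

    comment = 'Backport fix {{mergefrombranch STABLE-1-0}}'
    m = mergefrom.search(comment)
    branch = m.group(1) if m else None
    if branch == 'HEAD':
        branch = None          # HEAD means the trunk, i.e. no named branch
    assert branch == 'STABLE-1-0'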
634 versions = {} # changeset index where we saw any particular file version
636 versions = {} # changeset index where we saw any particular file version
635 branches = {} # changeset index where we saw a branch
637 branches = {} # changeset index where we saw a branch
636 n = len(changesets)
638 n = len(changesets)
637 i = 0
639 i = 0
638 while i<n:
640 while i<n:
639 c = changesets[i]
641 c = changesets[i]
640
642
641 for f in c.entries:
643 for f in c.entries:
642 versions[(f.rcs, f.revision)] = i
644 versions[(f.rcs, f.revision)] = i
643
645
644 p = None
646 p = None
645 if c.branch in branches:
647 if c.branch in branches:
646 p = branches[c.branch]
648 p = branches[c.branch]
647 else:
649 else:
648 # first changeset on a new branch
650 # first changeset on a new branch
649 # the parent is a changeset with the branch in its
651 # the parent is a changeset with the branch in its
650 # branchpoints such that it is the latest possible
652 # branchpoints such that it is the latest possible
651 # commit without any intervening, unrelated commits.
653 # commit without any intervening, unrelated commits.
652
654
653 for candidate in xrange(i):
655 for candidate in xrange(i):
654 if c.branch not in changesets[candidate].branchpoints:
656 if c.branch not in changesets[candidate].branchpoints:
655 if p is not None:
657 if p is not None:
656 break
658 break
657 continue
659 continue
658 p = candidate
660 p = candidate
659
661
660 c.parents = []
662 c.parents = []
661 if p is not None:
663 if p is not None:
662 p = changesets[p]
664 p = changesets[p]
663
665
664 # Ensure no changeset has a synthetic changeset as a parent.
666 # Ensure no changeset has a synthetic changeset as a parent.
665 while p.synthetic:
667 while p.synthetic:
666 assert len(p.parents) <= 1, \
668 assert len(p.parents) <= 1, \
667 _('synthetic changeset cannot have multiple parents')
669 _('synthetic changeset cannot have multiple parents')
668 if p.parents:
670 if p.parents:
669 p = p.parents[0]
671 p = p.parents[0]
670 else:
672 else:
671 p = None
673 p = None
672 break
674 break
673
675
674 if p is not None:
676 if p is not None:
675 c.parents.append(p)
677 c.parents.append(p)
676
678
677 if c.mergepoint:
679 if c.mergepoint:
678 if c.mergepoint == 'HEAD':
680 if c.mergepoint == 'HEAD':
679 c.mergepoint = None
681 c.mergepoint = None
680 c.parents.append(changesets[branches[c.mergepoint]])
682 c.parents.append(changesets[branches[c.mergepoint]])
681
683
682 if mergefrom:
684 if mergefrom:
683 m = mergefrom.search(c.comment)
685 m = mergefrom.search(c.comment)
684 if m:
686 if m:
685 m = m.group(1)
687 m = m.group(1)
686 if m == 'HEAD':
688 if m == 'HEAD':
687 m = None
689 m = None
688 try:
690 try:
689 candidate = changesets[branches[m]]
691 candidate = changesets[branches[m]]
690 except KeyError:
692 except KeyError:
691 ui.warn(_("warning: CVS commit message references "
693 ui.warn(_("warning: CVS commit message references "
692 "non-existent branch %r:\n%s\n")
694 "non-existent branch %r:\n%s\n")
693 % (m, c.comment))
695 % (m, c.comment))
694 if m in branches and c.branch != m and not candidate.synthetic:
696 if m in branches and c.branch != m and not candidate.synthetic:
695 c.parents.append(candidate)
697 c.parents.append(candidate)
696
698
697 if mergeto:
699 if mergeto:
698 m = mergeto.search(c.comment)
700 m = mergeto.search(c.comment)
699 if m:
701 if m:
700 try:
702 try:
701 m = m.group(1)
703 m = m.group(1)
702 if m == 'HEAD':
704 if m == 'HEAD':
703 m = None
705 m = None
704 except:
706 except:
705 m = None # if no group found then merge to HEAD
707 m = None # if no group found then merge to HEAD
706 if m in branches and c.branch != m:
708 if m in branches and c.branch != m:
707 # insert empty changeset for merge
709 # insert empty changeset for merge
708 cc = changeset(author=c.author, branch=m, date=c.date,
710 cc = changeset(author=c.author, branch=m, date=c.date,
709 comment='convert-repo: CVS merge from branch %s' % c.branch,
711 comment='convert-repo: CVS merge from branch %s' % c.branch,
710 entries=[], tags=[], parents=[changesets[branches[m]], c])
712 entries=[], tags=[], parents=[changesets[branches[m]], c])
711 changesets.insert(i + 1, cc)
713 changesets.insert(i + 1, cc)
712 branches[m] = i + 1
714 branches[m] = i + 1
713
715
714 # adjust our loop counters now we have inserted a new entry
716 # adjust our loop counters now we have inserted a new entry
715 n += 1
717 n += 1
716 i += 2
718 i += 2
717 continue
719 continue
718
720
719 branches[c.branch] = i
721 branches[c.branch] = i
720 i += 1
722 i += 1
721
723
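The parent search above never lets a synthetic changeset become a parent: if the chosen candidate is synthetic, the code walks up through its single parent (if any) until it reaches a real changeset or runs out. That walk in isolation, over made-up objects:

    class CS(object):
        def __init__(self, name, synthetic=False, parents=None):
            self.name = name
            self.synthetic = synthetic
            self.parents = parents or []

    real = CS('real commit')
    synth = CS('File file3 was added on branch ...', synthetic=True, parents=[real])

    p = synth
    while p is not None and p.synthetic:
        assert len(p.parents) <= 1      # synthetic changesets are linear
        p = p.parents[0] if p.parents else None

    assert p is real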
722 # Drop synthetic changesets (safe now that we have ensured no other
724 # Drop synthetic changesets (safe now that we have ensured no other
723 # changesets can have them as parents).
725 # changesets can have them as parents).
724 i = 0
726 i = 0
725 while i < len(changesets):
727 while i < len(changesets):
726 if changesets[i].synthetic:
728 if changesets[i].synthetic:
727 del changesets[i]
729 del changesets[i]
728 else:
730 else:
729 i += 1
731 i += 1
730
732
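Deleting by index while walking the list, as above, is quadratic in the worst case; once no synthetic changeset can be anyone's parent, an in-place filter gives the same result. A sketch of the equivalent one-liner (illustrative only, not a change to the code above):

    changesets = [{'id': 'a', 'synthetic': False},
                  {'id': 'b', 'synthetic': True},
                  {'id': 'c', 'synthetic': False}]

    # same effect as the del-by-index loop
    changesets[:] = [c for c in changesets if not c['synthetic']]

    assert [c['id'] for c in changesets] == ['a', 'c']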
731 # Number changesets
733 # Number changesets
732
734
733 for i, c in enumerate(changesets):
735 for i, c in enumerate(changesets):
734 c.id = i + 1
736 c.id = i + 1
735
737
736 ui.status(_('%d changeset entries\n') % len(changesets))
738 ui.status(_('%d changeset entries\n') % len(changesets))
737
739
738 return changesets
740 return changesets
739
741
740
742
741 def debugcvsps(ui, *args, **opts):
743 def debugcvsps(ui, *args, **opts):
742 '''Read CVS rlog for current directory or named path in
744 '''Read CVS rlog for current directory or named path in
743 repository, and convert the log to changesets based on matching
745 repository, and convert the log to changesets based on matching
744 commit log entries and dates.
746 commit log entries and dates.
745 '''
747 '''
746 if opts["new_cache"]:
748 if opts["new_cache"]:
747 cache = "write"
749 cache = "write"
748 elif opts["update_cache"]:
750 elif opts["update_cache"]:
749 cache = "update"
751 cache = "update"
750 else:
752 else:
751 cache = None
753 cache = None
752
754
753 revisions = opts["revisions"]
755 revisions = opts["revisions"]
754
756
755 try:
757 try:
756 if args:
758 if args:
757 log = []
759 log = []
758 for d in args:
760 for d in args:
759 log += createlog(ui, d, root=opts["root"], cache=cache)
761 log += createlog(ui, d, root=opts["root"], cache=cache)
760 else:
762 else:
761 log = createlog(ui, root=opts["root"], cache=cache)
763 log = createlog(ui, root=opts["root"], cache=cache)
762 except logerror, e:
764 except logerror, e:
763 ui.write("%r\n"%e)
765 ui.write("%r\n"%e)
764 return
766 return
765
767
766 changesets = createchangeset(ui, log, opts["fuzz"])
768 changesets = createchangeset(ui, log, opts["fuzz"])
767 del log
769 del log
768
770
769 # Print changesets (optionally filtered)
771 # Print changesets (optionally filtered)
770
772
771 off = len(revisions)
773 off = len(revisions)
772 branches = {} # latest version number in each branch
774 branches = {} # latest version number in each branch
773 ancestors = {} # parent branch
775 ancestors = {} # parent branch
774 for cs in changesets:
776 for cs in changesets:
775
777
776 if opts["ancestors"]:
778 if opts["ancestors"]:
777 if cs.branch not in branches and cs.parents and cs.parents[0].id:
779 if cs.branch not in branches and cs.parents and cs.parents[0].id:
778 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
780 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
779 cs.parents[0].id)
781 cs.parents[0].id)
780 branches[cs.branch] = cs.id
782 branches[cs.branch] = cs.id
781
783
782 # limit by branches
784 # limit by branches
783 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
785 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
784 continue
786 continue
785
787
786 if not off:
788 if not off:
787 # Note: trailing spaces on several lines here are needed to have
789 # Note: trailing spaces on several lines here are needed to have
788 # bug-for-bug compatibility with cvsps.
790 # bug-for-bug compatibility with cvsps.
789 ui.write('---------------------\n')
791 ui.write('---------------------\n')
790 ui.write('PatchSet %d \n' % cs.id)
792 ui.write('PatchSet %d \n' % cs.id)
791 ui.write('Date: %s\n' % util.datestr(cs.date,
793 ui.write('Date: %s\n' % util.datestr(cs.date,
792 '%Y/%m/%d %H:%M:%S %1%2'))
794 '%Y/%m/%d %H:%M:%S %1%2'))
793 ui.write('Author: %s\n' % cs.author)
795 ui.write('Author: %s\n' % cs.author)
794 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
796 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
795 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
797 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
796 ','.join(cs.tags) or '(none)'))
798 ','.join(cs.tags) or '(none)'))
797 branchpoints = getattr(cs, 'branchpoints', None)
799 branchpoints = getattr(cs, 'branchpoints', None)
798 if branchpoints:
800 if branchpoints:
799 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
801 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
800 if opts["parents"] and cs.parents:
802 if opts["parents"] and cs.parents:
801 if len(cs.parents)>1:
803 if len(cs.parents)>1:
802 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
804 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
803 else:
805 else:
804 ui.write('Parent: %d\n' % cs.parents[0].id)
806 ui.write('Parent: %d\n' % cs.parents[0].id)
805
807
806 if opts["ancestors"]:
808 if opts["ancestors"]:
807 b = cs.branch
809 b = cs.branch
808 r = []
810 r = []
809 while b:
811 while b:
810 b, c = ancestors[b]
812 b, c = ancestors[b]
811 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
813 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
812 if r:
814 if r:
813 ui.write('Ancestors: %s\n' % (','.join(r)))
815 ui.write('Ancestors: %s\n' % (','.join(r)))
814
816
815 ui.write('Log:\n')
817 ui.write('Log:\n')
816 ui.write('%s\n\n' % cs.comment)
818 ui.write('%s\n\n' % cs.comment)
817 ui.write('Members: \n')
819 ui.write('Members: \n')
818 for f in cs.entries:
820 for f in cs.entries:
819 fn = f.file
821 fn = f.file
820 if fn.startswith(opts["prefix"]):
822 if fn.startswith(opts["prefix"]):
821 fn = fn[len(opts["prefix"]):]
823 fn = fn[len(opts["prefix"]):]
822 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
824 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
823 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
825 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
824 ui.write('\n')
826 ui.write('\n')
825
827
826 # have we seen the start tag?
828 # have we seen the start tag?
827 if revisions and off:
829 if revisions and off:
828 if revisions[0] == str(cs.id) or \
830 if revisions[0] == str(cs.id) or \
829 revisions[0] in cs.tags:
831 revisions[0] in cs.tags:
830 off = False
832 off = False
831
833
832 # see if we reached the end tag
834 # see if we reached the end tag
833 if len(revisions)>1 and not off:
835 if len(revisions)>1 and not off:
834 if revisions[1] == str(cs.id) or \
836 if revisions[1] == str(cs.id) or \
835 revisions[1] in cs.tags:
837 revisions[1] in cs.tags:
836 break
838 break
(diff truncated here by the changeset viewer)
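The revisions option above is applied as a simple on/off window while iterating: output stays off until the first requested id or tag has been seen, and the loop breaks once the second one is reached. The same windowing over a plain list of ids (tag matching omitted, ids made up):

    ids = ['1', '2', '3', '4', '5']
    revisions = ['2', '4']            # start and end markers

    printed = []
    off = len(revisions)              # truthy: suppress output until the start marker
    for csid in ids:
        if not off:
            printed.append(csid)
        if revisions and off and revisions[0] == csid:
            off = False               # printing starts with the next changeset
        if len(revisions) > 1 and not off and revisions[1] == csid:
            break                     # end marker handled, stop

    assert printed == ['3', '4']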
@@ -1,1242 +1,1136 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
5 # Configuration options:
6 #
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
13 #
14 # Set these in a hgrc, or on the command line as follows:
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
4
18 import locale
5 import locale
19 import os
6 import os
20 import re
7 import re
21 import sys
8 import sys
22 import cPickle as pickle
9 import cPickle as pickle
23 import tempfile
10 import tempfile
24 import urllib
11 import urllib
25
12
26 from mercurial import strutil, util
13 from mercurial import strutil, util, encoding
27 from mercurial.i18n import _
14 from mercurial.i18n import _
28
15
29 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
30 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
31 # these bindings.
18 # these bindings.
32
19
33 from cStringIO import StringIO
20 from cStringIO import StringIO
34
21
35 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
36 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
37
24
38 try:
25 try:
39 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
40 import svn
27 import svn
41 import svn.client
28 import svn.client
42 import svn.core
29 import svn.core
43 import svn.ra
30 import svn.ra
44 import svn.delta
31 import svn.delta
45 import transport
32 import transport
46 import warnings
33 import warnings
47 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
48 module='svn.core',
35 module='svn.core',
49 category=DeprecationWarning)
36 category=DeprecationWarning)
50
37
51 except ImportError:
38 except ImportError:
52 pass
39 pass
53
40
54 class SvnPathNotFound(Exception):
41 class SvnPathNotFound(Exception):
55 pass
42 pass
56
43
57 def geturl(path):
44 def geturl(path):
58 try:
45 try:
59 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
46 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
60 except SubversionException:
47 except SubversionException:
61 pass
48 pass
62 if os.path.isdir(path):
49 if os.path.isdir(path):
63 path = os.path.normpath(os.path.abspath(path))
50 path = os.path.normpath(os.path.abspath(path))
64 if os.name == 'nt':
51 if os.name == 'nt':
65 path = '/' + util.normpath(path)
52 path = '/' + util.normpath(path)
53 # Module URL is later compared with the repository URL returned
54 # by svn API, which is UTF-8.
55 path = encoding.tolocal(path)
66 return 'file://%s' % urllib.quote(path)
56 return 'file://%s' % urllib.quote(path)
67 return path
57 return path
68
58
69 def optrev(number):
59 def optrev(number):
70 optrev = svn.core.svn_opt_revision_t()
60 optrev = svn.core.svn_opt_revision_t()
71 optrev.kind = svn.core.svn_opt_revision_number
61 optrev.kind = svn.core.svn_opt_revision_number
72 optrev.value.number = number
62 optrev.value.number = number
73 return optrev
63 return optrev
74
64
75 class changedpath(object):
65 class changedpath(object):
76 def __init__(self, p):
66 def __init__(self, p):
77 self.copyfrom_path = p.copyfrom_path
67 self.copyfrom_path = p.copyfrom_path
78 self.copyfrom_rev = p.copyfrom_rev
68 self.copyfrom_rev = p.copyfrom_rev
79 self.action = p.action
69 self.action = p.action
80
70
81 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
71 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
82 strict_node_history=False):
72 strict_node_history=False):
83 protocol = -1
73 protocol = -1
84 def receiver(orig_paths, revnum, author, date, message, pool):
74 def receiver(orig_paths, revnum, author, date, message, pool):
85 if orig_paths is not None:
75 if orig_paths is not None:
86 for k, v in orig_paths.iteritems():
76 for k, v in orig_paths.iteritems():
87 orig_paths[k] = changedpath(v)
77 orig_paths[k] = changedpath(v)
88 pickle.dump((orig_paths, revnum, author, date, message),
78 pickle.dump((orig_paths, revnum, author, date, message),
89 fp, protocol)
79 fp, protocol)
90
80
91 try:
81 try:
92 # Use an ra of our own so that our parent can consume
82 # Use an ra of our own so that our parent can consume
93 # our results without confusing the server.
83 # our results without confusing the server.
94 t = transport.SvnRaTransport(url=url)
84 t = transport.SvnRaTransport(url=url)
95 svn.ra.get_log(t.ra, paths, start, end, limit,
85 svn.ra.get_log(t.ra, paths, start, end, limit,
96 discover_changed_paths,
86 discover_changed_paths,
97 strict_node_history,
87 strict_node_history,
98 receiver)
88 receiver)
99 except SubversionException, (inst, num):
89 except SubversionException, (inst, num):
100 pickle.dump(num, fp, protocol)
90 pickle.dump(num, fp, protocol)
101 except IOError:
91 except IOError:
102 # Caller may interrupt the iteration
92 # Caller may interrupt the iteration
103 pickle.dump(None, fp, protocol)
93 pickle.dump(None, fp, protocol)
104 else:
94 else:
105 pickle.dump(None, fp, protocol)
95 pickle.dump(None, fp, protocol)
106 fp.close()
96 fp.close()
107 # With a large history, the cleanup process goes crazy and suddenly
97 # With a large history, the cleanup process goes crazy and suddenly
108 # consumes a *huge* amount of memory. The output file being closed,
98 # consumes a *huge* amount of memory. The output file being closed,
109 # there is no need for clean termination.
99 # there is no need for clean termination.
110 os._exit(0)
100 os._exit(0)
111
101
112 def debugsvnlog(ui, **opts):
102 def debugsvnlog(ui, **opts):
113 """Fetch the SVN log in a subprocess and channel the results back to
103 """Fetch the SVN log in a subprocess and channel the results back to
114 the parent to avoid memory collection issues.
104 the parent to avoid memory collection issues.
115 """
105 """
116 util.set_binary(sys.stdin)
106 util.set_binary(sys.stdin)
117 util.set_binary(sys.stdout)
107 util.set_binary(sys.stdout)
118 args = decodeargs(sys.stdin.read())
108 args = decodeargs(sys.stdin.read())
119 get_log_child(sys.stdout, *args)
109 get_log_child(sys.stdout, *args)
120
110
121 class logstream(object):
111 class logstream(object):
122 """Interruptible revision log iterator."""
112 """Interruptible revision log iterator."""
123 def __init__(self, stdout):
113 def __init__(self, stdout):
124 self._stdout = stdout
114 self._stdout = stdout
125
115
126 def __iter__(self):
116 def __iter__(self):
127 while True:
117 while True:
128 entry = pickle.load(self._stdout)
118 entry = pickle.load(self._stdout)
129 try:
119 try:
130 orig_paths, revnum, author, date, message = entry
120 orig_paths, revnum, author, date, message = entry
131 except:
121 except:
132 if entry is None:
122 if entry is None:
133 break
123 break
134 raise SubversionException("child raised exception", entry)
124 raise SubversionException("child raised exception", entry)
135 yield entry
125 yield entry
136
126
137 def close(self):
127 def close(self):
138 if self._stdout:
128 if self._stdout:
139 self._stdout.close()
129 self._stdout.close()
140 self._stdout = None
130 self._stdout = None
141
131
142
132
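get_log_child and logstream talk over a stream of pickled records: the child dumps one tuple per log entry and finally a sentinel (None, or an error code on failure), and the parent loads records until it sees the sentinel. The same framing exercised on an in-memory buffer (a minimal sketch, not the actual pipe setup):

    import pickle
    from io import BytesIO

    # child side: one pickled record per entry, then a None sentinel
    pipe = BytesIO()
    for record in [({'trunk/a.c': 'M'}, 10), ({'trunk/b.c': 'A'}, 11)]:
        pickle.dump(record, pipe)
    pickle.dump(None, pipe)

    # parent side: read until the sentinel shows up
    pipe.seek(0)
    entries = []
    while True:
        entry = pickle.load(pipe)
        if entry is None:
            break
        entries.append(entry)

    assert len(entries) == 2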
143 # Check to see if the given path is a local Subversion repo. Verify this by
133 # Check to see if the given path is a local Subversion repo. Verify this by
144 # looking for several svn-specific files and directories in the given
134 # looking for several svn-specific files and directories in the given
145 # directory.
135 # directory.
146 def filecheck(path, proto):
136 def filecheck(path, proto):
147 for x in ('locks', 'hooks', 'format', 'db', ):
137 for x in ('locks', 'hooks', 'format', 'db', ):
148 if not os.path.exists(os.path.join(path, x)):
138 if not os.path.exists(os.path.join(path, x)):
149 return False
139 return False
150 return True
140 return True
151
141
152 # Check to see if a given path is the root of an svn repo over http. We verify
142 # Check to see if a given path is the root of an svn repo over http. We verify
153 # this by requesting a version-controlled URL we know can't exist and looking
143 # this by requesting a version-controlled URL we know can't exist and looking
154 # for the svn-specific "not found" XML.
144 # for the svn-specific "not found" XML.
155 def httpcheck(path, proto):
145 def httpcheck(path, proto):
156 return ('<m:human-readable errcode="160013">' in
146 return ('<m:human-readable errcode="160013">' in
157 urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read())
147 urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read())
158
148
159 protomap = {'http': httpcheck,
149 protomap = {'http': httpcheck,
160 'https': httpcheck,
150 'https': httpcheck,
161 'file': filecheck,
151 'file': filecheck,
162 }
152 }
163 def issvnurl(url):
153 def issvnurl(url):
164 try:
154 try:
165 proto, path = url.split('://', 1)
155 proto, path = url.split('://', 1)
166 path = urllib.url2pathname(path)
156 path = urllib.url2pathname(path)
167 except ValueError:
157 except ValueError:
168 proto = 'file'
158 proto = 'file'
169 path = os.path.abspath(url)
159 path = os.path.abspath(url)
170 path = path.replace(os.sep, '/')
160 path = path.replace(os.sep, '/')
171 check = protomap.get(proto, lambda p, p2: False)
161 check = protomap.get(proto, lambda p, p2: False)
172 while '/' in path:
162 while '/' in path:
173 if check(path, proto):
163 if check(path, proto):
174 return True
164 return True
175 path = path.rsplit('/', 1)[0]
165 path = path.rsplit('/', 1)[0]
176 return False
166 return False
177
167
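issvnurl walks up the path one component at a time, asking the protocol-specific checker whether the current prefix is a repository root; the first hit wins and the loop stops when no '/' is left. A variant of that walk with a stubbed checker, returning the matched root instead of a boolean (paths are made up):

    def find_repo_root(path, is_root):
        '''Strip trailing path components until is_root() says yes.'''
        while '/' in path:
            if is_root(path):
                return path
            path = path.rsplit('/', 1)[0]
        return None

    root = '/srv/svn/repo'     # stub standing in for filecheck/httpcheck
    assert find_repo_root('/srv/svn/repo/trunk/src', lambda p: p == root) == root
    assert find_repo_root('/tmp/not-a-repo/x', lambda p: p == root) is None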
178 # SVN conversion code stolen from bzr-svn and tailor
168 # SVN conversion code stolen from bzr-svn and tailor
179 #
169 #
180 # Subversion looks like a versioned filesystem; branch structures
170 # Subversion looks like a versioned filesystem; branch structures
181 # are defined by convention and not enforced by the tool. First,
171 # are defined by convention and not enforced by the tool. First,
182 # we define the potential branches (modules) as the "trunk" and "branches"
172 # we define the potential branches (modules) as the "trunk" and "branches"
183 # child directories. Revisions are then identified by their
173 # child directories. Revisions are then identified by their
184 # module and revision number (and a repository identifier).
174 # module and revision number (and a repository identifier).
185 #
175 #
186 # The revision graph is really a tree (or a forest). By default, a
176 # The revision graph is really a tree (or a forest). By default, a
187 # revision parent is the previous revision in the same module. If the
177 # revision parent is the previous revision in the same module. If the
188 # module directory is copied/moved from another module then the
178 # module directory is copied/moved from another module then the
189 # revision is the module root and its parent the source revision in
179 # revision is the module root and its parent the source revision in
190 # the parent module. A revision has at most one parent.
180 # the parent module. A revision has at most one parent.
191 #
181 #
192 class svn_source(converter_source):
182 class svn_source(converter_source):
193 def __init__(self, ui, url, rev=None):
183 def __init__(self, ui, url, rev=None):
194 super(svn_source, self).__init__(ui, url, rev=rev)
184 super(svn_source, self).__init__(ui, url, rev=rev)
195
185
196 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
186 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
197 (os.path.exists(url) and
187 (os.path.exists(url) and
198 os.path.exists(os.path.join(url, '.svn'))) or
188 os.path.exists(os.path.join(url, '.svn'))) or
199 issvnurl(url)):
189 issvnurl(url)):
200 raise NoRepo("%s does not look like a Subversion repo" % url)
190 raise NoRepo("%s does not look like a Subversion repo" % url)
201
191
202 try:
192 try:
203 SubversionException
193 SubversionException
204 except NameError:
194 except NameError:
205 raise MissingTool(_('Subversion python bindings could not be loaded'))
195 raise MissingTool(_('Subversion python bindings could not be loaded'))
206
196
207 try:
197 try:
208 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
198 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
209 if version < (1, 4):
199 if version < (1, 4):
210 raise MissingTool(_('Subversion python bindings %d.%d found, '
200 raise MissingTool(_('Subversion python bindings %d.%d found, '
211 '1.4 or later required') % version)
201 '1.4 or later required') % version)
212 except AttributeError:
202 except AttributeError:
213 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
203 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
214 'or later required'))
204 'or later required'))
215
205
216 self.encoding = locale.getpreferredencoding()
217 self.lastrevs = {}
206 self.lastrevs = {}
218
207
219 latest = None
208 latest = None
220 try:
209 try:
221 # Support file://path@rev syntax. Useful e.g. to convert
210 # Support file://path@rev syntax. Useful e.g. to convert
222 # deleted branches.
211 # deleted branches.
223 at = url.rfind('@')
212 at = url.rfind('@')
224 if at >= 0:
213 if at >= 0:
225 latest = int(url[at+1:])
214 latest = int(url[at+1:])
226 url = url[:at]
215 url = url[:at]
227 except ValueError:
216 except ValueError:
228 pass
217 pass
229 self.url = geturl(url)
218 self.url = geturl(url)
230 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
219 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
231 try:
220 try:
232 self.transport = transport.SvnRaTransport(url=self.url)
221 self.transport = transport.SvnRaTransport(url=self.url)
233 self.ra = self.transport.ra
222 self.ra = self.transport.ra
234 self.ctx = self.transport.client
223 self.ctx = self.transport.client
235 self.baseurl = svn.ra.get_repos_root(self.ra)
224 self.baseurl = svn.ra.get_repos_root(self.ra)
236 # Module is either empty or a repository path starting with
225 # Module is either empty or a repository path starting with
237 # a slash and not ending with a slash.
226 # a slash and not ending with a slash.
238 self.module = urllib.unquote(self.url[len(self.baseurl):])
227 self.module = urllib.unquote(self.url[len(self.baseurl):])
239 self.prevmodule = None
228 self.prevmodule = None
240 self.rootmodule = self.module
229 self.rootmodule = self.module
241 self.commits = {}
230 self.commits = {}
242 self.paths = {}
231 self.paths = {}
243 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
232 self.uuid = svn.ra.get_uuid(self.ra)
244 except SubversionException:
233 except SubversionException:
245 ui.traceback()
234 ui.traceback()
246 raise NoRepo("%s does not look like a Subversion repo" % self.url)
235 raise NoRepo("%s does not look like a Subversion repo" % self.url)
247
236
248 if rev:
237 if rev:
249 try:
238 try:
250 latest = int(rev)
239 latest = int(rev)
251 except ValueError:
240 except ValueError:
252 raise util.Abort(_('svn: revision %s is not an integer') % rev)
241 raise util.Abort(_('svn: revision %s is not an integer') % rev)
253
242
254 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
243 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
255 try:
244 try:
256 self.startrev = int(self.startrev)
245 self.startrev = int(self.startrev)
257 if self.startrev < 0:
246 if self.startrev < 0:
258 self.startrev = 0
247 self.startrev = 0
259 except ValueError:
248 except ValueError:
260 raise util.Abort(_('svn: start revision %s is not an integer')
249 raise util.Abort(_('svn: start revision %s is not an integer')
261 % self.startrev)
250 % self.startrev)
262
251
263 try:
264 self.get_blacklist()
265 except IOError:
266 pass
267
268 self.head = self.latest(self.module, latest)
252 self.head = self.latest(self.module, latest)
269 if not self.head:
253 if not self.head:
270 raise util.Abort(_('no revision found in module %s') %
254 raise util.Abort(_('no revision found in module %s')
271 self.module.encode(self.encoding))
255 % self.module)
272 self.last_changed = self.revnum(self.head)
256 self.last_changed = self.revnum(self.head)
273
257
274 self._changescache = None
258 self._changescache = None
275
259
276 if os.path.exists(os.path.join(url, '.svn/entries')):
260 if os.path.exists(os.path.join(url, '.svn/entries')):
277 self.wc = url
261 self.wc = url
278 else:
262 else:
279 self.wc = None
263 self.wc = None
280 self.convertfp = None
264 self.convertfp = None
281
265
282 def setrevmap(self, revmap):
266 def setrevmap(self, revmap):
283 lastrevs = {}
267 lastrevs = {}
284 for revid in revmap.iterkeys():
268 for revid in revmap.iterkeys():
285 uuid, module, revnum = self.revsplit(revid)
269 uuid, module, revnum = self.revsplit(revid)
286 lastrevnum = lastrevs.setdefault(module, revnum)
270 lastrevnum = lastrevs.setdefault(module, revnum)
287 if revnum > lastrevnum:
271 if revnum > lastrevnum:
288 lastrevs[module] = revnum
272 lastrevs[module] = revnum
289 self.lastrevs = lastrevs
273 self.lastrevs = lastrevs
290
274
291 def exists(self, path, optrev):
275 def exists(self, path, optrev):
292 try:
276 try:
293 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
277 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
294 optrev, False, self.ctx)
278 optrev, False, self.ctx)
295 return True
279 return True
296 except SubversionException:
280 except SubversionException:
297 return False
281 return False
298
282
299 def getheads(self):
283 def getheads(self):
300
284
301 def isdir(path, revnum):
285 def isdir(path, revnum):
302 kind = self._checkpath(path, revnum)
286 kind = self._checkpath(path, revnum)
303 return kind == svn.core.svn_node_dir
287 return kind == svn.core.svn_node_dir
304
288
305 def getcfgpath(name, rev):
289 def getcfgpath(name, rev):
306 cfgpath = self.ui.config('convert', 'svn.' + name)
290 cfgpath = self.ui.config('convert', 'svn.' + name)
307 if cfgpath is not None and cfgpath.strip() == '':
291 if cfgpath is not None and cfgpath.strip() == '':
308 return None
292 return None
309 path = (cfgpath or name).strip('/')
293 path = (cfgpath or name).strip('/')
310 if not self.exists(path, rev):
294 if not self.exists(path, rev):
311 if cfgpath:
295 if cfgpath:
312 raise util.Abort(_('expected %s to be at %r, but not found')
296 raise util.Abort(_('expected %s to be at %r, but not found')
313 % (name, path))
297 % (name, path))
314 return None
298 return None
315 self.ui.note(_('found %s at %r\n') % (name, path))
299 self.ui.note(_('found %s at %r\n') % (name, path))
316 return path
300 return path
317
301
318 rev = optrev(self.last_changed)
302 rev = optrev(self.last_changed)
319 oldmodule = ''
303 oldmodule = ''
320 trunk = getcfgpath('trunk', rev)
304 trunk = getcfgpath('trunk', rev)
321 self.tags = getcfgpath('tags', rev)
305 self.tags = getcfgpath('tags', rev)
322 branches = getcfgpath('branches', rev)
306 branches = getcfgpath('branches', rev)
323
307
324 # If the project has a trunk or branches, we will extract heads
308 # If the project has a trunk or branches, we will extract heads
325 # from them. We keep the project root otherwise.
309 # from them. We keep the project root otherwise.
326 if trunk:
310 if trunk:
327 oldmodule = self.module or ''
311 oldmodule = self.module or ''
328 self.module += '/' + trunk
312 self.module += '/' + trunk
329 self.head = self.latest(self.module, self.last_changed)
313 self.head = self.latest(self.module, self.last_changed)
330 if not self.head:
314 if not self.head:
331 raise util.Abort(_('no revision found in module %s') %
315 raise util.Abort(_('no revision found in module %s')
332 self.module.encode(self.encoding))
316 % self.module)
333
317
334 # First head in the list is the module's head
318 # First head in the list is the module's head
335 self.heads = [self.head]
319 self.heads = [self.head]
336 if self.tags is not None:
320 if self.tags is not None:
337 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
321 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
338
322
339 # Check if branches bring a few more heads to the list
323 # Check if branches bring a few more heads to the list
340 if branches:
324 if branches:
341 rpath = self.url.strip('/')
325 rpath = self.url.strip('/')
342 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
326 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
343 rev, False, self.ctx)
327 rev, False, self.ctx)
344 for branch in branchnames.keys():
328 for branch in branchnames.keys():
345 module = '%s/%s/%s' % (oldmodule, branches, branch)
329 module = '%s/%s/%s' % (oldmodule, branches, branch)
346 if not isdir(module, self.last_changed):
330 if not isdir(module, self.last_changed):
347 continue
331 continue
348 brevid = self.latest(module, self.last_changed)
332 brevid = self.latest(module, self.last_changed)
349 if not brevid:
333 if not brevid:
350 self.ui.note(_('ignoring empty branch %s\n') %
334 self.ui.note(_('ignoring empty branch %s\n') % branch)
351 branch.encode(self.encoding))
352 continue
335 continue
353 self.ui.note(_('found branch %s at %d\n') %
336 self.ui.note(_('found branch %s at %d\n') %
354 (branch, self.revnum(brevid)))
337 (branch, self.revnum(brevid)))
355 self.heads.append(brevid)
338 self.heads.append(brevid)
356
339
357 if self.startrev and self.heads:
340 if self.startrev and self.heads:
358 if len(self.heads) > 1:
341 if len(self.heads) > 1:
359 raise util.Abort(_('svn: start revision is not supported '
342 raise util.Abort(_('svn: start revision is not supported '
360 'with more than one branch'))
343 'with more than one branch'))
361 revnum = self.revnum(self.heads[0])
344 revnum = self.revnum(self.heads[0])
362 if revnum < self.startrev:
345 if revnum < self.startrev:
363 raise util.Abort(_('svn: no revision found after start revision %d')
346 raise util.Abort(_('svn: no revision found after start revision %d')
364 % self.startrev)
347 % self.startrev)
365
348
366 return self.heads
349 return self.heads
367
350
368 def getfile(self, file, rev):
351 def getfile(self, file, rev):
369 data, mode = self._getfile(file, rev)
352 data, mode = self._getfile(file, rev)
370 self.modecache[(file, rev)] = mode
353 self.modecache[(file, rev)] = mode
371 return data
354 return data
372
355
373 def getmode(self, file, rev):
356 def getmode(self, file, rev):
374 return self.modecache[(file, rev)]
357 return self.modecache[(file, rev)]
375
358
376 def getchanges(self, rev):
359 def getchanges(self, rev):
377 if self._changescache and self._changescache[0] == rev:
360 if self._changescache and self._changescache[0] == rev:
378 return self._changescache[1]
361 return self._changescache[1]
379 self._changescache = None
362 self._changescache = None
380 self.modecache = {}
363 self.modecache = {}
381 (paths, parents) = self.paths[rev]
364 (paths, parents) = self.paths[rev]
382 if parents:
365 if parents:
383 files, copies = self.expandpaths(rev, paths, parents)
366 files, copies = self.expandpaths(rev, paths, parents)
384 else:
367 else:
385 # Perform a full checkout on roots
368 # Perform a full checkout on roots
386 uuid, module, revnum = self.revsplit(rev)
369 uuid, module, revnum = self.revsplit(rev)
387 entries = svn.client.ls(self.baseurl + urllib.quote(module),
370 entries = svn.client.ls(self.baseurl + urllib.quote(module),
388 optrev(revnum), True, self.ctx)
371 optrev(revnum), True, self.ctx)
389 files = [n for n,e in entries.iteritems()
372 files = [n for n,e in entries.iteritems()
390 if e.kind == svn.core.svn_node_file]
373 if e.kind == svn.core.svn_node_file]
391 copies = {}
374 copies = {}
392
375
393 files.sort()
376 files.sort()
394 files = zip(files, [rev] * len(files))
377 files = zip(files, [rev] * len(files))
395
378
396 # caller caches the result, so free it here to release memory
379 # caller caches the result, so free it here to release memory
397 del self.paths[rev]
380 del self.paths[rev]
398 return (files, copies)
381 return (files, copies)
399
382
400 def getchangedfiles(self, rev, i):
383 def getchangedfiles(self, rev, i):
401 changes = self.getchanges(rev)
384 changes = self.getchanges(rev)
402 self._changescache = (rev, changes)
385 self._changescache = (rev, changes)
403 return [f[0] for f in changes[0]]
386 return [f[0] for f in changes[0]]
404
387
405 def getcommit(self, rev):
388 def getcommit(self, rev):
406 if rev not in self.commits:
389 if rev not in self.commits:
407 uuid, module, revnum = self.revsplit(rev)
390 uuid, module, revnum = self.revsplit(rev)
408 self.module = module
391 self.module = module
409 self.reparent(module)
392 self.reparent(module)
410 # We assume that:
393 # We assume that:
411 # - requests for revisions after "stop" come from the
394 # - requests for revisions after "stop" come from the
412 # revision graph backward traversal. Cache all of them
395 # revision graph backward traversal. Cache all of them
413 # down to stop, they will be used eventually.
396 # down to stop, they will be used eventually.
414 # - requests for revisions before "stop" come to get
397 # - requests for revisions before "stop" come to get
415 # isolated branches parents. Just fetch what is needed.
398 # isolated branches parents. Just fetch what is needed.
416 stop = self.lastrevs.get(module, 0)
399 stop = self.lastrevs.get(module, 0)
417 if revnum < stop:
400 if revnum < stop:
418 stop = revnum + 1
401 stop = revnum + 1
419 self._fetch_revisions(revnum, stop)
402 self._fetch_revisions(revnum, stop)
420 commit = self.commits[rev]
403 commit = self.commits[rev]
421 # caller caches the result, so free it here to release memory
404 # caller caches the result, so free it here to release memory
422 del self.commits[rev]
405 del self.commits[rev]
423 return commit
406 return commit
424
407
425 def gettags(self):
408 def gettags(self):
426 tags = {}
409 tags = {}
427 if self.tags is None:
410 if self.tags is None:
428 return tags
411 return tags
429
412
430 # svn tags are just a convention, project branches left in a
413 # svn tags are just a convention, project branches left in a
431 # 'tags' directory. There is no other relationship than
414 # 'tags' directory. There is no other relationship than
432 # ancestry, which is expensive to discover and makes them hard
415 # ancestry, which is expensive to discover and makes them hard
433 # to update incrementally. Worse, past revisions may be
416 # to update incrementally. Worse, past revisions may be
434 # referenced by tags far away in the future, requiring a deep
417 # referenced by tags far away in the future, requiring a deep
435 # history traversal on every calculation. Current code
418 # history traversal on every calculation. Current code
436 # performs a single backward traversal, tracking moves within
419 # performs a single backward traversal, tracking moves within
437 # the tags directory (tag renaming) and recording a new tag
420 # the tags directory (tag renaming) and recording a new tag
438 # every time a project is copied from outside the tags
421 # every time a project is copied from outside the tags
439 # directory. It also lists deleted tags; this behaviour may
422 # directory. It also lists deleted tags; this behaviour may
440 # change in the future.
423 # change in the future.
441 pendings = []
424 pendings = []
442 tagspath = self.tags
425 tagspath = self.tags
443 start = svn.ra.get_latest_revnum(self.ra)
426 start = svn.ra.get_latest_revnum(self.ra)
444 try:
427 try:
445 for entry in self._getlog([self.tags], start, self.startrev):
428 for entry in self._getlog([self.tags], start, self.startrev):
446 origpaths, revnum, author, date, message = entry
429 origpaths, revnum, author, date, message = entry
447 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
430 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
448 in origpaths.iteritems() if e.copyfrom_path]
431 in origpaths.iteritems() if e.copyfrom_path]
449 # Apply moves/copies from more specific to general
432 # Apply moves/copies from more specific to general
450 copies.sort(reverse=True)
433 copies.sort(reverse=True)
451
434
452 srctagspath = tagspath
435 srctagspath = tagspath
453 if copies and copies[-1][2] == tagspath:
436 if copies and copies[-1][2] == tagspath:
454 # Track tags directory moves
437 # Track tags directory moves
455 srctagspath = copies.pop()[0]
438 srctagspath = copies.pop()[0]
456
439
457 for source, sourcerev, dest in copies:
440 for source, sourcerev, dest in copies:
458 if not dest.startswith(tagspath + '/'):
441 if not dest.startswith(tagspath + '/'):
459 continue
442 continue
460 for tag in pendings:
443 for tag in pendings:
461 if tag[0].startswith(dest):
444 if tag[0].startswith(dest):
462 tagpath = source + tag[0][len(dest):]
445 tagpath = source + tag[0][len(dest):]
463 tag[:2] = [tagpath, sourcerev]
446 tag[:2] = [tagpath, sourcerev]
464 break
447 break
465 else:
448 else:
466 pendings.append([source, sourcerev, dest])
449 pendings.append([source, sourcerev, dest])
467
450
468 # Filter out tags with children coming from different
451 # Filter out tags with children coming from different
469 # parts of the repository like:
452 # parts of the repository like:
470 # /tags/tag.1 (from /trunk:10)
453 # /tags/tag.1 (from /trunk:10)
471 # /tags/tag.1/foo (from /branches/foo:12)
454 # /tags/tag.1/foo (from /branches/foo:12)
472 # Here /tags/tag.1 is discarded, as well as its children.
455 # Here /tags/tag.1 is discarded, as well as its children.
473 # It happens with tools like cvs2svn. Such tags cannot
456 # It happens with tools like cvs2svn. Such tags cannot
474 # be represented in mercurial.
457 # be represented in mercurial.
475 addeds = dict((p, e.copyfrom_path) for p, e
458 addeds = dict((p, e.copyfrom_path) for p, e
476 in origpaths.iteritems() if e.action == 'A')
459 in origpaths.iteritems() if e.action == 'A')
477 badroots = set()
460 badroots = set()
478 for destroot in addeds:
461 for destroot in addeds:
479 for source, sourcerev, dest in pendings:
462 for source, sourcerev, dest in pendings:
480 if (not dest.startswith(destroot + '/')
463 if (not dest.startswith(destroot + '/')
481 or source.startswith(addeds[destroot] + '/')):
464 or source.startswith(addeds[destroot] + '/')):
482 continue
465 continue
483 badroots.add(destroot)
466 badroots.add(destroot)
484 break
467 break
485
468
486 for badroot in badroots:
469 for badroot in badroots:
487 pendings = [p for p in pendings if p[2] != badroot
470 pendings = [p for p in pendings if p[2] != badroot
488 and not p[2].startswith(badroot + '/')]
471 and not p[2].startswith(badroot + '/')]
489
472
490 # Tell tag renamings from tag creations
473 # Tell tag renamings from tag creations
491 remainings = []
474 remainings = []
492 for source, sourcerev, dest in pendings:
475 for source, sourcerev, dest in pendings:
493 tagname = dest.split('/')[-1]
476 tagname = dest.split('/')[-1]
494 if source.startswith(srctagspath):
477 if source.startswith(srctagspath):
495 remainings.append([source, sourcerev, tagname])
478 remainings.append([source, sourcerev, tagname])
496 continue
479 continue
497 if tagname in tags:
480 if tagname in tags:
498 # Keep the latest tag value
481 # Keep the latest tag value
499 continue
482 continue
500 # From revision may be fake, get one with changes
483 # From revision may be fake, get one with changes
501 try:
484 try:
502 tagid = self.latest(source, sourcerev)
485 tagid = self.latest(source, sourcerev)
503 if tagid and tagname not in tags:
486 if tagid and tagname not in tags:
504 tags[tagname] = tagid
487 tags[tagname] = tagid
505 except SvnPathNotFound:
488 except SvnPathNotFound:
506 # It happens when we are following directories
489 # It happens when we are following directories
507 # we assumed were copied with their parents
490 # we assumed were copied with their parents
508 # but were really created in the tag
491 # but were really created in the tag
509 # directory.
492 # directory.
510 pass
493 pass
511 pendings = remainings
494 pendings = remainings
512 tagspath = srctagspath
495 tagspath = srctagspath
513
496
514 except SubversionException:
497 except SubversionException:
515 self.ui.note(_('no tags found at revision %d\n') % start)
498 self.ui.note(_('no tags found at revision %d\n') % start)
516 return tags
499 return tags
517
500
518 def converted(self, rev, destrev):
501 def converted(self, rev, destrev):
519 if not self.wc:
502 if not self.wc:
520 return
503 return
521 if self.convertfp is None:
504 if self.convertfp is None:
522 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
505 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
523 'a')
506 'a')
524 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
507 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
525 self.convertfp.flush()
508 self.convertfp.flush()
526
509
527 # -- helper functions --
528
529 def revid(self, revnum, module=None):
510 def revid(self, revnum, module=None):
530 if not module:
511 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
531 module = self.module
532 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
533 revnum)
534
512
535 def revnum(self, rev):
513 def revnum(self, rev):
536 return int(rev.split('@')[-1])
514 return int(rev.split('@')[-1])
537
515
538 def revsplit(self, rev):
516 def revsplit(self, rev):
539 url, revnum = rev.encode(self.encoding).rsplit('@', 1)
517 url, revnum = rev.rsplit('@', 1)
540 revnum = int(revnum)
518 revnum = int(revnum)
541 parts = url.split('/', 1)
519 parts = url.split('/', 1)
542 uuid = parts.pop(0)[4:]
520 uuid = parts.pop(0)[4:]
543 mod = ''
521 mod = ''
544 if parts:
522 if parts:
545 mod = '/' + parts[0]
523 mod = '/' + parts[0]
546 return uuid, mod, revnum
524 return uuid, mod, revnum
547
525
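revid and revsplit define the textual form of a converted revision identifier: 'svn:' + repository UUID + module path + '@' + revision number. Taking one apart again, with an obviously made-up UUID:

    def revsplit(rev):
        '''Split 'svn:UUID/module@N' into (uuid, module, revnum).'''
        url, revnum = rev.rsplit('@', 1)
        parts = url.split('/', 1)
        uuid = parts.pop(0)[4:]               # drop the leading 'svn:'
        mod = '/' + parts[0] if parts else ''
        return uuid, mod, int(revnum)

    rev = 'svn:00000000-1111-2222-3333-444444444444/trunk/src@42'
    assert revsplit(rev) == ('00000000-1111-2222-3333-444444444444',
                             '/trunk/src', 42)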
548 def latest(self, path, stop=0):
526 def latest(self, path, stop=0):
549 """Find the latest revid affecting path, up to stop. It may return
527 """Find the latest revid affecting path, up to stop. It may return
550 a revision in a different module, since a branch may be moved without
528 a revision in a different module, since a branch may be moved without
551 a change being reported. Return None if computed module does not
529 a change being reported. Return None if computed module does not
552 belong to rootmodule subtree.
530 belong to rootmodule subtree.
553 """
531 """
554 if not path.startswith(self.rootmodule):
532 if not path.startswith(self.rootmodule):
555 # Requests on foreign branches may be forbidden at server level
533 # Requests on foreign branches may be forbidden at server level
556 self.ui.debug(_('ignoring foreign branch %r\n') % path)
534 self.ui.debug(_('ignoring foreign branch %r\n') % path)
557 return None
535 return None
558
536
559 if not stop:
537 if not stop:
560 stop = svn.ra.get_latest_revnum(self.ra)
538 stop = svn.ra.get_latest_revnum(self.ra)
561 try:
539 try:
562 prevmodule = self.reparent('')
540 prevmodule = self.reparent('')
563 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
541 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
564 self.reparent(prevmodule)
542 self.reparent(prevmodule)
565 except SubversionException:
543 except SubversionException:
566 dirent = None
544 dirent = None
567 if not dirent:
545 if not dirent:
568 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
546 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
569
547
570 # stat() gives us the previous revision on this line of
548 # stat() gives us the previous revision on this line of
571 # development, but it might be in *another module*. Fetch the
549 # development, but it might be in *another module*. Fetch the
572 # log and detect renames down to the latest revision.
550 # log and detect renames down to the latest revision.
573 stream = self._getlog([path], stop, dirent.created_rev)
551 stream = self._getlog([path], stop, dirent.created_rev)
574 try:
552 try:
575 for entry in stream:
553 for entry in stream:
576 paths, revnum, author, date, message = entry
554 paths, revnum, author, date, message = entry
577 if revnum <= dirent.created_rev:
555 if revnum <= dirent.created_rev:
578 break
556 break
579
557
580 for p in paths:
558 for p in paths:
581 if not path.startswith(p) or not paths[p].copyfrom_path:
559 if not path.startswith(p) or not paths[p].copyfrom_path:
582 continue
560 continue
583 newpath = paths[p].copyfrom_path + path[len(p):]
561 newpath = paths[p].copyfrom_path + path[len(p):]
584 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
562 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
585 (path, newpath, revnum))
563 (path, newpath, revnum))
586 path = newpath
564 path = newpath
587 break
565 break
588 finally:
566 finally:
589 stream.close()
567 stream.close()
590
568
591 if not path.startswith(self.rootmodule):
569 if not path.startswith(self.rootmodule):
592 self.ui.debug(_('ignoring foreign branch %r\n') % path)
570 self.ui.debug(_('ignoring foreign branch %r\n') % path)
593 return None
571 return None
594 return self.revid(dirent.created_rev, path)
572 return self.revid(dirent.created_rev, path)
595
573
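The rename detection in latest() boils down to a prefix rewrite: if a changed path is a prefix of the path being tracked and carries copyfrom information, the tracked path is re-rooted onto the copy source. The rewrite step on its own, with made-up paths (the real loop also walks log entries down to the creation revision):

    def follow_copy(path, changed):
        '''Re-root path onto its copy source if a prefix of it was copied.'''
        for p, copyfrom_path in changed.items():
            if copyfrom_path and path.startswith(p):
                return copyfrom_path + path[len(p):]
        return path

    changed = {'/branches/stable': '/trunk'}   # stable was copied from trunk
    assert follow_copy('/branches/stable/src/main.c', changed) == '/trunk/src/main.c'
    assert follow_copy('/branches/other/x.c', changed) == '/branches/other/x.c'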
596 def get_blacklist(self):
597 """Avoid certain revision numbers.
598 It is not uncommon for two nearby revisions to cancel each other
599 out, e.g. 'I copied trunk into a subdirectory of itself instead
600 of making a branch'. The converted repository is significantly
601 smaller if we ignore such revisions."""
602 self.blacklist = set()
603 blacklist = self.blacklist
604 for line in file("blacklist.txt", "r"):
605 if not line.startswith("#"):
606 try:
607 svn_rev = int(line.strip())
608 blacklist.add(svn_rev)
609 except ValueError:
610 pass # not an integer or a comment
611
612 def is_blacklisted(self, svn_rev):
613 return svn_rev in self.blacklist
614
615 def reparent(self, module):
574 def reparent(self, module):
616 """Reparent the svn transport and return the previous parent."""
575 """Reparent the svn transport and return the previous parent."""
617 if self.prevmodule == module:
576 if self.prevmodule == module:
618 return module
577 return module
619 svnurl = self.baseurl + urllib.quote(module)
578 svnurl = self.baseurl + urllib.quote(module)
620 prevmodule = self.prevmodule
579 prevmodule = self.prevmodule
621 if prevmodule is None:
580 if prevmodule is None:
622 prevmodule = ''
581 prevmodule = ''
623 self.ui.debug(_("reparent to %s\n") % svnurl)
582 self.ui.debug(_("reparent to %s\n") % svnurl)
624 svn.ra.reparent(self.ra, svnurl)
583 svn.ra.reparent(self.ra, svnurl)
625 self.prevmodule = module
584 self.prevmodule = module
626 return prevmodule
585 return prevmodule
627
586
628 def expandpaths(self, rev, paths, parents):
587 def expandpaths(self, rev, paths, parents):
629 entries = []
588 entries = []
630 # Map of entrypath, revision for finding source of deleted
589 # Map of entrypath, revision for finding source of deleted
631 # revisions.
590 # revisions.
632 copyfrom = {}
591 copyfrom = {}
633 copies = {}
592 copies = {}
634
593
635 new_module, revnum = self.revsplit(rev)[1:]
594 new_module, revnum = self.revsplit(rev)[1:]
636 if new_module != self.module:
595 if new_module != self.module:
637 self.module = new_module
596 self.module = new_module
638 self.reparent(self.module)
597 self.reparent(self.module)
639
598
640 for path, ent in paths:
599 for path, ent in paths:
641 entrypath = self.getrelpath(path)
600 entrypath = self.getrelpath(path)
642 entry = entrypath.decode(self.encoding)
643
601
644 kind = self._checkpath(entrypath, revnum)
602 kind = self._checkpath(entrypath, revnum)
645 if kind == svn.core.svn_node_file:
603 if kind == svn.core.svn_node_file:
646 entries.append(self.recode(entry))
604 entries.append(self.recode(entrypath))
647 if not ent.copyfrom_path or not parents:
605 if not ent.copyfrom_path or not parents:
648 continue
606 continue
649 # Copy sources not in parent revisions cannot be
607 # Copy sources not in parent revisions cannot be
650 # represented, ignore their origin for now
608 # represented, ignore their origin for now
651 pmodule, prevnum = self.revsplit(parents[0])[1:]
609 pmodule, prevnum = self.revsplit(parents[0])[1:]
652 if ent.copyfrom_rev < prevnum:
610 if ent.copyfrom_rev < prevnum:
653 continue
611 continue
654 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
612 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
655 if not copyfrom_path:
613 if not copyfrom_path:
656 continue
614 continue
657 self.ui.debug(_("copied to %s from %s@%s\n") %
615 self.ui.debug(_("copied to %s from %s@%s\n") %
658 (entrypath, copyfrom_path, ent.copyfrom_rev))
616 (entrypath, copyfrom_path, ent.copyfrom_rev))
659 copies[self.recode(entry)] = self.recode(copyfrom_path)
617 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
660 elif kind == 0: # gone, but had better be a deleted *file*
618 elif kind == 0: # gone, but had better be a deleted *file*
661 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
619 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
662
620 pmodule, prevnum = self.revsplit(parents[0])[1:]
663 # if a branch is created but entries are removed in
621 parentpath = pmodule + "/" + entrypath
664 # the same changeset, get the right fromrev
622 self.ui.debug(_("entry %s\n") % parentpath)
665 # parents cannot be empty here, you cannot remove
666 # things from a root revision.
667 uuid, old_module, fromrev = self.revsplit(parents[0])
668
669 basepath = old_module + "/" + self.getrelpath(path)
670 entrypath = basepath
671
672 def lookup_parts(p):
673 rc = None
674 parts = p.split("/")
675 for i in range(len(parts)):
676 part = "/".join(parts[:i])
677 info = part, copyfrom.get(part, None)
678 if info[1] is not None:
679 self.ui.debug(_("found parent directory %s\n") % info[1])
680 rc = info
681 return rc
682
683 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
684
685 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
686
687 # need to remove fragment from lookup_parts and
688 # replace with copyfrom_path
689 if frompath is not None:
690 self.ui.debug(_("munge-o-matic\n"))
691 self.ui.debug(entrypath + '\n')
692 self.ui.debug(entrypath[len(frompath):] + '\n')
693 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
694 fromrev = froment.copyfrom_rev
695 self.ui.debug(_("info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
696
623
697 # We can avoid the reparent calls if the module has
624 # We can avoid the reparent calls if the module has
698 # not changed, but it is probably not worth the pain.
625 # not changed, but it is probably not worth the pain.
699 prevmodule = self.reparent('')
626 prevmodule = self.reparent('')
700 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
627 fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), prevnum)
701 self.reparent(prevmodule)
628 self.reparent(prevmodule)
702
629
703 if fromkind == svn.core.svn_node_file: # a deleted file
630 if fromkind == svn.core.svn_node_file:
704 entries.append(self.recode(entry))
631 entries.append(self.recode(entrypath))
705 elif fromkind == svn.core.svn_node_dir:
632 elif fromkind == svn.core.svn_node_dir:
706 # print "Deleted/moved non-file:", revnum, path, ent
707 # children = self._find_children(path, revnum - 1)
708 # print ("find children %s@%d from %d action %s" %
709 # (path, revnum, ent.copyfrom_rev, ent.action))
710 # Sometimes this is tricky. For example: in
711 # The Subversion Repository revision 6940 a dir
712 # was copied and one of its files was deleted
713 # from the new location in the same commit. This
714 # code can't deal with that yet.
715 if ent.action == 'C':
633 if ent.action == 'C':
716 children = self._find_children(path, fromrev)
634 children = self._find_children(path, prevnum)
717 else:
635 else:
718 oroot = entrypath.strip('/')
636 oroot = parentpath.strip('/')
719 nroot = path.strip('/')
637 nroot = path.strip('/')
720 children = self._find_children(oroot, fromrev)
638 children = self._find_children(oroot, prevnum)
721 children = [s.replace(oroot,nroot) for s in children]
639 children = [s.replace(oroot,nroot) for s in children]
722 # Mark all [files, not directories] as deleted.
640
723 for child in children:
641 for child in children:
724 # Can we move a child directory and its
642 childpath = self.getrelpath("/" + child, pmodule)
725 # parent in the same commit? (probably can). Could
643 if not childpath:
726 # cause problems if instead of revnum -1,
644 continue
727 # we have to look in (copyfrom_path, revnum - 1)
645 if childpath in copies:
728 entrypath = self.getrelpath("/" + child, module=old_module)
646 del copies[childpath]
729 if entrypath:
647 entries.append(childpath)
730 entry = self.recode(entrypath.decode(self.encoding))
731 if entry in copies:
732 # deleted file within a copy
733 del copies[entry]
734 else:
735 entries.append(entry)
736 else:
648 else:
737 self.ui.debug(_('unknown path in revision %d: %s\n') % \
649 self.ui.debug(_('unknown path in revision %d: %s\n') % \
738 (revnum, path))
650 (revnum, path))
739 elif kind == svn.core.svn_node_dir:
651 elif kind == svn.core.svn_node_dir:
740 # Should probably synthesize normal file entries
741 # and handle as above to clean up copy/rename handling.
742
743 # If the directory just had a prop change,
652 # If the directory just had a prop change,
744 # then we shouldn't need to look for its children.
653 # then we shouldn't need to look for its children.
745 if ent.action == 'M':
654 if ent.action == 'M':
746 continue
655 continue
747
656
748 # Also this could create duplicate entries. Not sure
749 # whether this will matter. Maybe should make entries a set.
750 # print "Changed directory", revnum, path, ent.action, \
751 # ent.copyfrom_path, ent.copyfrom_rev
752 # This will fail if a directory was copied
753 # from another branch and then some of its files
754 # were deleted in the same transaction.
755 children = sorted(self._find_children(path, revnum))
657 children = sorted(self._find_children(path, revnum))
756 for child in children:
658 for child in children:
757 # Can we move a child directory and its
659 # Can we move a child directory and its
758 # parent in the same commit? (probably can). Could
660 # parent in the same commit? (probably can). Could
759 # cause problems if instead of revnum -1,
661 # cause problems if instead of revnum -1,
760 # we have to look in (copyfrom_path, revnum - 1)
662 # we have to look in (copyfrom_path, revnum - 1)
761 entrypath = self.getrelpath("/" + child)
663 entrypath = self.getrelpath("/" + child)
762 # print child, self.module, entrypath
763 if entrypath:
664 if entrypath:
764 # Need to filter out directories here...
665 # Need to filter out directories here...
765 kind = self._checkpath(entrypath, revnum)
666 kind = self._checkpath(entrypath, revnum)
766 if kind != svn.core.svn_node_dir:
667 if kind != svn.core.svn_node_dir:
767 entries.append(self.recode(entrypath))
668 entries.append(self.recode(entrypath))
768
669
769 # Copies here (must copy all from source) Probably not
670 # Handle directory copies
770 # a real problem for us if source does not exist
771 if not ent.copyfrom_path or not parents:
671 if not ent.copyfrom_path or not parents:
772 continue
672 continue
773 # Copy sources not in parent revisions cannot be
673 # Copy sources not in parent revisions cannot be
774 # represented, ignore their origin for now
674 # represented, ignore their origin for now
775 pmodule, prevnum = self.revsplit(parents[0])[1:]
675 pmodule, prevnum = self.revsplit(parents[0])[1:]
776 if ent.copyfrom_rev < prevnum:
676 if ent.copyfrom_rev < prevnum:
777 continue
677 continue
778 copyfrompath = ent.copyfrom_path.decode(self.encoding)
678 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
779 copyfrompath = self.getrelpath(copyfrompath, pmodule)
780 if not copyfrompath:
679 if not copyfrompath:
781 continue
680 continue
782 copyfrom[path] = ent
681 copyfrom[path] = ent
783 self.ui.debug(_("mark %s came from %s:%d\n")
682 self.ui.debug(_("mark %s came from %s:%d\n")
784 % (path, copyfrompath, ent.copyfrom_rev))
683 % (path, copyfrompath, ent.copyfrom_rev))
785 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
684 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
786 children.sort()
685 children.sort()
787 for child in children:
686 for child in children:
788 entrypath = self.getrelpath("/" + child, pmodule)
687 entrypath = self.getrelpath("/" + child, pmodule)
789 if not entrypath:
688 if not entrypath:
790 continue
689 continue
791 entry = entrypath.decode(self.encoding)
690 copytopath = path + entrypath[len(copyfrompath):]
792 copytopath = path + entry[len(copyfrompath):]
793 copytopath = self.getrelpath(copytopath)
691 copytopath = self.getrelpath(copytopath)
794 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
692 copies[self.recode(copytopath)] = self.recode(entrypath)
795
693
796 return (list(set(entries)), copies)
694 return (list(set(entries)), copies)
797
695
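To make the return shape concrete: entries is a de-duplicated list of changed paths relative to self.module, and copies maps destination path to source path, both recoded. A hypothetical result (paths invented) might be:

    (['tests/PloneTestCase.py', 'README.txt'],
     {'docs/INSTALL.txt': 'INSTALL.txt'})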
798 def _fetch_revisions(self, from_revnum, to_revnum):
696 def _fetch_revisions(self, from_revnum, to_revnum):
799 if from_revnum < to_revnum:
697 if from_revnum < to_revnum:
800 from_revnum, to_revnum = to_revnum, from_revnum
698 from_revnum, to_revnum = to_revnum, from_revnum
801
699
802 self.child_cset = None
700 self.child_cset = None
803
701
804 def parselogentry(orig_paths, revnum, author, date, message):
702 def parselogentry(orig_paths, revnum, author, date, message):
805 """Return the parsed commit object or None, and True if
703 """Return the parsed commit object or None, and True if
806 the revision is a branch root.
704 the revision is a branch root.
807 """
705 """
808 self.ui.debug(_("parsing revision %d (%d changes)\n") %
706 self.ui.debug(_("parsing revision %d (%d changes)\n") %
809 (revnum, len(orig_paths)))
707 (revnum, len(orig_paths)))
810
708
811 branched = False
709 branched = False
812 rev = self.revid(revnum)
710 rev = self.revid(revnum)
813 # branch log might return entries for a parent we already have
711 # branch log might return entries for a parent we already have
814
712
815 if rev in self.commits or revnum < to_revnum:
713 if rev in self.commits or revnum < to_revnum:
816 return None, branched
714 return None, branched
817
715
818 parents = []
716 parents = []
819 # check whether this revision is the start of a branch or part
717 # check whether this revision is the start of a branch or part
820 # of a branch renaming
718 # of a branch renaming
821 orig_paths = sorted(orig_paths.iteritems())
719 orig_paths = sorted(orig_paths.iteritems())
822 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
720 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
823 if root_paths:
721 if root_paths:
824 path, ent = root_paths[-1]
722 path, ent = root_paths[-1]
825 if ent.copyfrom_path:
723 if ent.copyfrom_path:
826 branched = True
724 branched = True
827 newpath = ent.copyfrom_path + self.module[len(path):]
725 newpath = ent.copyfrom_path + self.module[len(path):]
828 # ent.copyfrom_rev may not be the actual last revision
726 # ent.copyfrom_rev may not be the actual last revision
829 previd = self.latest(newpath, ent.copyfrom_rev)
727 previd = self.latest(newpath, ent.copyfrom_rev)
830 if previd is not None:
728 if previd is not None:
831 prevmodule, prevnum = self.revsplit(previd)[1:]
729 prevmodule, prevnum = self.revsplit(previd)[1:]
832 if prevnum >= self.startrev:
730 if prevnum >= self.startrev:
833 parents = [previd]
731 parents = [previd]
834 self.ui.note(_('found parent of branch %s at %d: %s\n') %
732 self.ui.note(_('found parent of branch %s at %d: %s\n') %
835 (self.module, prevnum, prevmodule))
733 (self.module, prevnum, prevmodule))
836 else:
734 else:
837 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
735 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
838
736
839 paths = []
737 paths = []
840 # filter out unrelated paths
738 # filter out unrelated paths
841 for path, ent in orig_paths:
739 for path, ent in orig_paths:
842 if self.getrelpath(path) is None:
740 if self.getrelpath(path) is None:
843 continue
741 continue
844 paths.append((path, ent))
742 paths.append((path, ent))
845
743
846 # Example SVN datetime. Includes microseconds.
744 # Example SVN datetime. Includes microseconds.
847 # ISO-8601 conformant
745 # ISO-8601 conformant
848 # '2007-01-04T17:35:00.902377Z'
746 # '2007-01-04T17:35:00.902377Z'
849 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
747 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
850
748
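A quick sketch of the truncation, using the sample value from the comment above:

    d = '2007-01-04T17:35:00.902377Z'
    d[:19] + " UTC"    # -> '2007-01-04T17:35:00 UTC', matching the "%Y-%m-%dT%H:%M:%S" format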
851 log = message and self.recode(message) or ''
749 log = message and self.recode(message) or ''
852 author = author and self.recode(author) or ''
750 author = author and self.recode(author) or ''
853 try:
751 try:
854 branch = self.module.split("/")[-1]
752 branch = self.module.split("/")[-1]
855 if branch == 'trunk':
753 if branch == 'trunk':
856 branch = ''
754 branch = ''
857 except IndexError:
755 except IndexError:
858 branch = None
756 branch = None
859
757
860 cset = commit(author=author,
758 cset = commit(author=author,
861 date=util.datestr(date),
759 date=util.datestr(date),
862 desc=log,
760 desc=log,
863 parents=parents,
761 parents=parents,
864 branch=branch,
762 branch=branch,
865 rev=rev.encode('utf-8'))
763 rev=rev)
866
764
867 self.commits[rev] = cset
765 self.commits[rev] = cset
868 # The parents list is *shared* among self.paths and the
766 # The parents list is *shared* among self.paths and the
869 # commit object. Both will be updated below.
767 # commit object. Both will be updated below.
870 self.paths[rev] = (paths, cset.parents)
768 self.paths[rev] = (paths, cset.parents)
871 if self.child_cset and not self.child_cset.parents:
769 if self.child_cset and not self.child_cset.parents:
872 self.child_cset.parents[:] = [rev]
770 self.child_cset.parents[:] = [rev]
873 self.child_cset = cset
771 self.child_cset = cset
874 return cset, branched
772 return cset, branched
875
773
876 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
774 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
877 (self.module, from_revnum, to_revnum))
775 (self.module, from_revnum, to_revnum))
878
776
879 try:
777 try:
880 firstcset = None
778 firstcset = None
881 lastonbranch = False
779 lastonbranch = False
882 stream = self._getlog([self.module], from_revnum, to_revnum)
780 stream = self._getlog([self.module], from_revnum, to_revnum)
883 try:
781 try:
884 for entry in stream:
782 for entry in stream:
885 paths, revnum, author, date, message = entry
783 paths, revnum, author, date, message = entry
886 if revnum < self.startrev:
784 if revnum < self.startrev:
887 lastonbranch = True
785 lastonbranch = True
888 break
786 break
889 if self.is_blacklisted(revnum):
890 self.ui.note(_('skipping blacklisted revision %d\n')
891 % revnum)
892 continue
893 if not paths:
787 if not paths:
894 self.ui.debug(_('revision %d has no entries\n') % revnum)
788 self.ui.debug(_('revision %d has no entries\n') % revnum)
895 continue
789 continue
896 cset, lastonbranch = parselogentry(paths, revnum, author,
790 cset, lastonbranch = parselogentry(paths, revnum, author,
897 date, message)
791 date, message)
898 if cset:
792 if cset:
899 firstcset = cset
793 firstcset = cset
900 if lastonbranch:
794 if lastonbranch:
901 break
795 break
902 finally:
796 finally:
903 stream.close()
797 stream.close()
904
798
905 if not lastonbranch and firstcset and not firstcset.parents:
799 if not lastonbranch and firstcset and not firstcset.parents:
906 # The first revision of the sequence (the last fetched one)
800 # The first revision of the sequence (the last fetched one)
907 # has invalid parents if not a branch root. Find the parent
801 # has invalid parents if not a branch root. Find the parent
908 # revision now, if any.
802 # revision now, if any.
909 try:
803 try:
910 firstrevnum = self.revnum(firstcset.rev)
804 firstrevnum = self.revnum(firstcset.rev)
911 if firstrevnum > 1:
805 if firstrevnum > 1:
912 latest = self.latest(self.module, firstrevnum - 1)
806 latest = self.latest(self.module, firstrevnum - 1)
913 if latest:
807 if latest:
914 firstcset.parents.append(latest)
808 firstcset.parents.append(latest)
915 except SvnPathNotFound:
809 except SvnPathNotFound:
916 pass
810 pass
917 except SubversionException, (inst, num):
811 except SubversionException, (inst, num):
918 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
812 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
919 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
813 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
920 raise
814 raise
921
815
922 def _getfile(self, file, rev):
816 def _getfile(self, file, rev):
923 # TODO: ra.get_file transmits the whole file instead of diffs.
817 # TODO: ra.get_file transmits the whole file instead of diffs.
924 mode = ''
818 mode = ''
925 try:
819 try:
926 new_module, revnum = self.revsplit(rev)[1:]
820 new_module, revnum = self.revsplit(rev)[1:]
927 if self.module != new_module:
821 if self.module != new_module:
928 self.module = new_module
822 self.module = new_module
929 self.reparent(self.module)
823 self.reparent(self.module)
930 io = StringIO()
824 io = StringIO()
931 info = svn.ra.get_file(self.ra, file, revnum, io)
825 info = svn.ra.get_file(self.ra, file, revnum, io)
932 data = io.getvalue()
826 data = io.getvalue()
933 # ra.get_files() seems to keep a reference on the input buffer
827 # ra.get_files() seems to keep a reference on the input buffer
934 # preventing collection. Release it explicitely.
828 # preventing collection. Release it explicitely.
935 io.close()
829 io.close()
936 if isinstance(info, list):
830 if isinstance(info, list):
937 info = info[-1]
831 info = info[-1]
938 mode = ("svn:executable" in info) and 'x' or ''
832 mode = ("svn:executable" in info) and 'x' or ''
939 mode = ("svn:special" in info) and 'l' or mode
833 mode = ("svn:special" in info) and 'l' or mode
940 except SubversionException, e:
834 except SubversionException, e:
941 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
835 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
942 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
836 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
943 if e.apr_err in notfound: # File not found
837 if e.apr_err in notfound: # File not found
944 raise IOError()
838 raise IOError()
945 raise
839 raise
946 if mode == 'l':
840 if mode == 'l':
947 link_prefix = "link "
841 link_prefix = "link "
948 if data.startswith(link_prefix):
842 if data.startswith(link_prefix):
949 data = data[len(link_prefix):]
843 data = data[len(link_prefix):]
950 return data, mode
844 return data, mode
951
845
952 def _find_children(self, path, revnum):
846 def _find_children(self, path, revnum):
953 path = path.strip('/')
847 path = path.strip('/')
954 pool = Pool()
848 pool = Pool()
955 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
849 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
956 return ['%s/%s' % (path, x) for x in
850 return ['%s/%s' % (path, x) for x in
957 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
851 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
958
852
959 def getrelpath(self, path, module=None):
853 def getrelpath(self, path, module=None):
960 if module is None:
854 if module is None:
961 module = self.module
855 module = self.module
962 # Given the repository url of this wc, say
856 # Given the repository url of this wc, say
963 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
857 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
964 # extract the "entry" portion (a relative path) from what
858 # extract the "entry" portion (a relative path) from what
965 # svn log --xml says, ie
859 # svn log --xml says, ie
966 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
860 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
967 # that is to say "tests/PloneTestCase.py"
861 # that is to say "tests/PloneTestCase.py"
968 if path.startswith(module):
862 if path.startswith(module):
969 relative = path.rstrip('/')[len(module):]
863 relative = path.rstrip('/')[len(module):]
970 if relative.startswith('/'):
864 if relative.startswith('/'):
971 return relative[1:]
865 return relative[1:]
972 elif relative == '':
866 elif relative == '':
973 return relative
867 return relative
974
868
975 # The path is outside our tracked tree...
869 # The path is outside our tracked tree...
976 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
870 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
977 return None
871 return None
978
872
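An illustration of the mapping, assuming self.module is '/CMFPlone/branches/Plone-2_0-branch' as in the docstring example:

    # getrelpath('/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py')
    #     -> 'tests/PloneTestCase.py'
    # getrelpath('/CMFPlone/branches/Plone-2_0-branch')  -> ''      (the module itself)
    # getrelpath('/somewhere/else.py')                   -> None    (outside the tracked tree)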
979 def _checkpath(self, path, revnum):
873 def _checkpath(self, path, revnum):
980 # ra.check_path does not like leading slashes very much; it leads
874 # ra.check_path does not like leading slashes very much; it leads
981 # to PROPFIND subversion errors
875 # to PROPFIND subversion errors
982 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
876 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
983
877
984 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
878 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
985 strict_node_history=False):
879 strict_node_history=False):
986 # Normalize path names; svn >= 1.5 only wants paths relative to
880 # Normalize path names; svn >= 1.5 only wants paths relative to
987 # the supplied URL
881 # the supplied URL
988 relpaths = []
882 relpaths = []
989 for p in paths:
883 for p in paths:
990 if not p.startswith('/'):
884 if not p.startswith('/'):
991 p = self.module + '/' + p
885 p = self.module + '/' + p
992 relpaths.append(p.strip('/'))
886 relpaths.append(p.strip('/'))
993 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
887 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
994 strict_node_history]
888 strict_node_history]
995 arg = encodeargs(args)
889 arg = encodeargs(args)
996 hgexe = util.hgexecutable()
890 hgexe = util.hgexecutable()
997 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
891 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
998 stdin, stdout = util.popen2(cmd)
892 stdin, stdout = util.popen2(cmd)
999 stdin.write(arg)
893 stdin.write(arg)
1000 stdin.close()
894 stdin.close()
1001 return logstream(stdout)
895 return logstream(stdout)
1002
896
1003 pre_revprop_change = '''#!/bin/sh
897 pre_revprop_change = '''#!/bin/sh
1004
898
1005 REPOS="$1"
899 REPOS="$1"
1006 REV="$2"
900 REV="$2"
1007 USER="$3"
901 USER="$3"
1008 PROPNAME="$4"
902 PROPNAME="$4"
1009 ACTION="$5"
903 ACTION="$5"
1010
904
1011 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
905 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1012 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
906 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1013 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
907 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1014
908
1015 echo "Changing prohibited revision property" >&2
909 echo "Changing prohibited revision property" >&2
1016 exit 1
910 exit 1
1017 '''
911 '''
1018
912
1019 class svn_sink(converter_sink, commandline):
913 class svn_sink(converter_sink, commandline):
1020 commit_re = re.compile(r'Committed revision (\d+).', re.M)
914 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1021
915
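The pattern picks the new revision number out of the status line that `svn commit` prints; for example (revision number invented):

    # "Committed revision 123."  ->  commit_re captures '123'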
1022 def prerun(self):
916 def prerun(self):
1023 if self.wc:
917 if self.wc:
1024 os.chdir(self.wc)
918 os.chdir(self.wc)
1025
919
1026 def postrun(self):
920 def postrun(self):
1027 if self.wc:
921 if self.wc:
1028 os.chdir(self.cwd)
922 os.chdir(self.cwd)
1029
923
1030 def join(self, name):
924 def join(self, name):
1031 return os.path.join(self.wc, '.svn', name)
925 return os.path.join(self.wc, '.svn', name)
1032
926
1033 def revmapfile(self):
927 def revmapfile(self):
1034 return self.join('hg-shamap')
928 return self.join('hg-shamap')
1035
929
1036 def authorfile(self):
930 def authorfile(self):
1037 return self.join('hg-authormap')
931 return self.join('hg-authormap')
1038
932
1039 def __init__(self, ui, path):
933 def __init__(self, ui, path):
1040 converter_sink.__init__(self, ui, path)
934 converter_sink.__init__(self, ui, path)
1041 commandline.__init__(self, ui, 'svn')
935 commandline.__init__(self, ui, 'svn')
1042 self.delete = []
936 self.delete = []
1043 self.setexec = []
937 self.setexec = []
1044 self.delexec = []
938 self.delexec = []
1045 self.copies = []
939 self.copies = []
1046 self.wc = None
940 self.wc = None
1047 self.cwd = os.getcwd()
941 self.cwd = os.getcwd()
1048
942
1049 path = os.path.realpath(path)
943 path = os.path.realpath(path)
1050
944
1051 created = False
945 created = False
1052 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
946 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1053 self.wc = path
947 self.wc = path
1054 self.run0('update')
948 self.run0('update')
1055 else:
949 else:
1056 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
950 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1057
951
1058 if os.path.isdir(os.path.dirname(path)):
952 if os.path.isdir(os.path.dirname(path)):
1059 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
953 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1060 ui.status(_('initializing svn repo %r\n') %
954 ui.status(_('initializing svn repo %r\n') %
1061 os.path.basename(path))
955 os.path.basename(path))
1062 commandline(ui, 'svnadmin').run0('create', path)
956 commandline(ui, 'svnadmin').run0('create', path)
1063 created = path
957 created = path
1064 path = util.normpath(path)
958 path = util.normpath(path)
1065 if not path.startswith('/'):
959 if not path.startswith('/'):
1066 path = '/' + path
960 path = '/' + path
1067 path = 'file://' + path
961 path = 'file://' + path
1068
962
1069 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
963 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
1070 self.run0('checkout', path, wcpath)
964 self.run0('checkout', path, wcpath)
1071
965
1072 self.wc = wcpath
966 self.wc = wcpath
1073 self.opener = util.opener(self.wc)
967 self.opener = util.opener(self.wc)
1074 self.wopener = util.opener(self.wc)
968 self.wopener = util.opener(self.wc)
1075 self.childmap = mapfile(ui, self.join('hg-childmap'))
969 self.childmap = mapfile(ui, self.join('hg-childmap'))
1076 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
970 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1077
971
1078 if created:
972 if created:
1079 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
973 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1080 fp = open(hook, 'w')
974 fp = open(hook, 'w')
1081 fp.write(pre_revprop_change)
975 fp.write(pre_revprop_change)
1082 fp.close()
976 fp.close()
1083 util.set_flags(hook, False, True)
977 util.set_flags(hook, False, True)
1084
978
1085 xport = transport.SvnRaTransport(url=geturl(path))
979 xport = transport.SvnRaTransport(url=geturl(path))
1086 self.uuid = svn.ra.get_uuid(xport.ra)
980 self.uuid = svn.ra.get_uuid(xport.ra)
1087
981
1088 def wjoin(self, *names):
982 def wjoin(self, *names):
1089 return os.path.join(self.wc, *names)
983 return os.path.join(self.wc, *names)
1090
984
1091 def putfile(self, filename, flags, data):
985 def putfile(self, filename, flags, data):
1092 if 'l' in flags:
986 if 'l' in flags:
1093 self.wopener.symlink(data, filename)
987 self.wopener.symlink(data, filename)
1094 else:
988 else:
1095 try:
989 try:
1096 if os.path.islink(self.wjoin(filename)):
990 if os.path.islink(self.wjoin(filename)):
1097 os.unlink(filename)
991 os.unlink(filename)
1098 except OSError:
992 except OSError:
1099 pass
993 pass
1100 self.wopener(filename, 'w').write(data)
994 self.wopener(filename, 'w').write(data)
1101
995
1102 if self.is_exec:
996 if self.is_exec:
1103 was_exec = self.is_exec(self.wjoin(filename))
997 was_exec = self.is_exec(self.wjoin(filename))
1104 else:
998 else:
1105 # On filesystems not supporting execute-bit, there is no way
999 # On filesystems not supporting execute-bit, there is no way
1106 # to know if it is set other than by asking Subversion. Setting it
1000 # to know if it is set other than by asking Subversion. Setting it
1107 # systematically is just as expensive and much simpler.
1001 # systematically is just as expensive and much simpler.
1108 was_exec = 'x' not in flags
1002 was_exec = 'x' not in flags
1109
1003
1110 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1004 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1111 if was_exec:
1005 if was_exec:
1112 if 'x' not in flags:
1006 if 'x' not in flags:
1113 self.delexec.append(filename)
1007 self.delexec.append(filename)
1114 else:
1008 else:
1115 if 'x' in flags:
1009 if 'x' in flags:
1116 self.setexec.append(filename)
1010 self.setexec.append(filename)
1117
1011
1118 def _copyfile(self, source, dest):
1012 def _copyfile(self, source, dest):
1119 # SVN's copy command pukes if the destination file exists, but
1013 # SVN's copy command pukes if the destination file exists, but
1120 # our copyfile method expects to record a copy that has
1014 # our copyfile method expects to record a copy that has
1121 # already occurred. Cross the semantic gap.
1015 # already occurred. Cross the semantic gap.
1122 wdest = self.wjoin(dest)
1016 wdest = self.wjoin(dest)
1123 exists = os.path.exists(wdest)
1017 exists = os.path.exists(wdest)
1124 if exists:
1018 if exists:
1125 fd, tempname = tempfile.mkstemp(
1019 fd, tempname = tempfile.mkstemp(
1126 prefix='hg-copy-', dir=os.path.dirname(wdest))
1020 prefix='hg-copy-', dir=os.path.dirname(wdest))
1127 os.close(fd)
1021 os.close(fd)
1128 os.unlink(tempname)
1022 os.unlink(tempname)
1129 os.rename(wdest, tempname)
1023 os.rename(wdest, tempname)
1130 try:
1024 try:
1131 self.run0('copy', source, dest)
1025 self.run0('copy', source, dest)
1132 finally:
1026 finally:
1133 if exists:
1027 if exists:
1134 try:
1028 try:
1135 os.unlink(wdest)
1029 os.unlink(wdest)
1136 except OSError:
1030 except OSError:
1137 pass
1031 pass
1138 os.rename(tempname, wdest)
1032 os.rename(tempname, wdest)
1139
1033
1140 def dirs_of(self, files):
1034 def dirs_of(self, files):
1141 dirs = set()
1035 dirs = set()
1142 for f in files:
1036 for f in files:
1143 if os.path.isdir(self.wjoin(f)):
1037 if os.path.isdir(self.wjoin(f)):
1144 dirs.add(f)
1038 dirs.add(f)
1145 for i in strutil.rfindall(f, '/'):
1039 for i in strutil.rfindall(f, '/'):
1146 dirs.add(f[:i])
1040 dirs.add(f[:i])
1147 return dirs
1041 return dirs
1148
1042
1149 def add_dirs(self, files):
1043 def add_dirs(self, files):
1150 add_dirs = [d for d in sorted(self.dirs_of(files))
1044 add_dirs = [d for d in sorted(self.dirs_of(files))
1151 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1045 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1152 if add_dirs:
1046 if add_dirs:
1153 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1047 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1154 return add_dirs
1048 return add_dirs
1155
1049
1156 def add_files(self, files):
1050 def add_files(self, files):
1157 if files:
1051 if files:
1158 self.xargs(files, 'add', quiet=True)
1052 self.xargs(files, 'add', quiet=True)
1159 return files
1053 return files
1160
1054
1161 def tidy_dirs(self, names):
1055 def tidy_dirs(self, names):
1162 deleted = []
1056 deleted = []
1163 for d in sorted(self.dirs_of(names), reverse=True):
1057 for d in sorted(self.dirs_of(names), reverse=True):
1164 wd = self.wjoin(d)
1058 wd = self.wjoin(d)
1165 if os.listdir(wd) == ['.svn']:
1059 if os.listdir(wd) == ['.svn']:
1166 self.run0('delete', d)
1060 self.run0('delete', d)
1167 deleted.append(d)
1061 deleted.append(d)
1168 return deleted
1062 return deleted
1169
1063
1170 def addchild(self, parent, child):
1064 def addchild(self, parent, child):
1171 self.childmap[parent] = child
1065 self.childmap[parent] = child
1172
1066
1173 def revid(self, rev):
1067 def revid(self, rev):
1174 return u"svn:%s@%s" % (self.uuid, rev)
1068 return u"svn:%s@%s" % (self.uuid, rev)
1175
1069
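For example (the UUID below is only a placeholder):

    # revid(42) -> u'svn:<repository uuid>@42'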
1176 def putcommit(self, files, copies, parents, commit, source, revmap):
1070 def putcommit(self, files, copies, parents, commit, source, revmap):
1177 # Apply changes to working copy
1071 # Apply changes to working copy
1178 for f, v in files:
1072 for f, v in files:
1179 try:
1073 try:
1180 data = source.getfile(f, v)
1074 data = source.getfile(f, v)
1181 except IOError:
1075 except IOError:
1182 self.delete.append(f)
1076 self.delete.append(f)
1183 else:
1077 else:
1184 e = source.getmode(f, v)
1078 e = source.getmode(f, v)
1185 self.putfile(f, e, data)
1079 self.putfile(f, e, data)
1186 if f in copies:
1080 if f in copies:
1187 self.copies.append([copies[f], f])
1081 self.copies.append([copies[f], f])
1188 files = [f[0] for f in files]
1082 files = [f[0] for f in files]
1189
1083
1190 for parent in parents:
1084 for parent in parents:
1191 try:
1085 try:
1192 return self.revid(self.childmap[parent])
1086 return self.revid(self.childmap[parent])
1193 except KeyError:
1087 except KeyError:
1194 pass
1088 pass
1195 entries = set(self.delete)
1089 entries = set(self.delete)
1196 files = frozenset(files)
1090 files = frozenset(files)
1197 entries.update(self.add_dirs(files.difference(entries)))
1091 entries.update(self.add_dirs(files.difference(entries)))
1198 if self.copies:
1092 if self.copies:
1199 for s, d in self.copies:
1093 for s, d in self.copies:
1200 self._copyfile(s, d)
1094 self._copyfile(s, d)
1201 self.copies = []
1095 self.copies = []
1202 if self.delete:
1096 if self.delete:
1203 self.xargs(self.delete, 'delete')
1097 self.xargs(self.delete, 'delete')
1204 self.delete = []
1098 self.delete = []
1205 entries.update(self.add_files(files.difference(entries)))
1099 entries.update(self.add_files(files.difference(entries)))
1206 entries.update(self.tidy_dirs(entries))
1100 entries.update(self.tidy_dirs(entries))
1207 if self.delexec:
1101 if self.delexec:
1208 self.xargs(self.delexec, 'propdel', 'svn:executable')
1102 self.xargs(self.delexec, 'propdel', 'svn:executable')
1209 self.delexec = []
1103 self.delexec = []
1210 if self.setexec:
1104 if self.setexec:
1211 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1105 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1212 self.setexec = []
1106 self.setexec = []
1213
1107
1214 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1108 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1215 fp = os.fdopen(fd, 'w')
1109 fp = os.fdopen(fd, 'w')
1216 fp.write(commit.desc)
1110 fp.write(commit.desc)
1217 fp.close()
1111 fp.close()
1218 try:
1112 try:
1219 output = self.run0('commit',
1113 output = self.run0('commit',
1220 username=util.shortuser(commit.author),
1114 username=util.shortuser(commit.author),
1221 file=messagefile,
1115 file=messagefile,
1222 encoding='utf-8')
1116 encoding='utf-8')
1223 try:
1117 try:
1224 rev = self.commit_re.search(output).group(1)
1118 rev = self.commit_re.search(output).group(1)
1225 except AttributeError:
1119 except AttributeError:
1226 self.ui.warn(_('unexpected svn output:\n'))
1120 self.ui.warn(_('unexpected svn output:\n'))
1227 self.ui.warn(output)
1121 self.ui.warn(output)
1228 raise util.Abort(_('unable to cope with svn output'))
1122 raise util.Abort(_('unable to cope with svn output'))
1229 if commit.rev:
1123 if commit.rev:
1230 self.run('propset', 'hg:convert-rev', commit.rev,
1124 self.run('propset', 'hg:convert-rev', commit.rev,
1231 revprop=True, revision=rev)
1125 revprop=True, revision=rev)
1232 if commit.branch and commit.branch != 'default':
1126 if commit.branch and commit.branch != 'default':
1233 self.run('propset', 'hg:convert-branch', commit.branch,
1127 self.run('propset', 'hg:convert-branch', commit.branch,
1234 revprop=True, revision=rev)
1128 revprop=True, revision=rev)
1235 for parent in parents:
1129 for parent in parents:
1236 self.addchild(parent, rev)
1130 self.addchild(parent, rev)
1237 return self.revid(rev)
1131 return self.revid(rev)
1238 finally:
1132 finally:
1239 os.unlink(messagefile)
1133 os.unlink(messagefile)
1240
1134
1241 def puttags(self, tags):
1135 def puttags(self, tags):
1242 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
1136 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,148 +1,148 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''pulling, updating and merging in one command'''
8 '''pull, update and merge in one command'''
9
9
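Like other bundled extensions, fetch has to be enabled before the command is available; a minimal hgrc sketch:

    [extensions]
    hgext.fetch =

'hg fetch [SOURCE]' then behaves as documented in the docstring below.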
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial.node import nullid, short
11 from mercurial.node import nullid, short
12 from mercurial import commands, cmdutil, hg, util, url, error
12 from mercurial import commands, cmdutil, hg, util, url, error
13 from mercurial.lock import release
13 from mercurial.lock import release
14
14
15 def fetch(ui, repo, source='default', **opts):
15 def fetch(ui, repo, source='default', **opts):
16 '''pull changes from a remote repository, merge new changes if needed.
16 '''pull changes from a remote repository, merge new changes if needed.
17
17
18 This finds all changes from the repository at the specified path
18 This finds all changes from the repository at the specified path
19 or URL and adds them to the local repository.
19 or URL and adds them to the local repository.
20
20
21 If the pulled changes add a new branch head, the head is
21 If the pulled changes add a new branch head, the head is
22 automatically merged, and the result of the merge is committed.
22 automatically merged, and the result of the merge is committed.
23 Otherwise, the working directory is updated to include the new
23 Otherwise, the working directory is updated to include the new
24 changes.
24 changes.
25
25
26 When a merge occurs, the newly pulled changes are assumed to be
26 When a merge occurs, the newly pulled changes are assumed to be
27 "authoritative". The head of the new changes is used as the first
27 "authoritative". The head of the new changes is used as the first
28 parent, with local changes as the second. To switch the merge
28 parent, with local changes as the second. To switch the merge
29 order, use --switch-parent.
29 order, use --switch-parent.
30
30
31 See 'hg help dates' for a list of formats valid for -d/--date.
31 See 'hg help dates' for a list of formats valid for -d/--date.
32 '''
32 '''
33
33
34 date = opts.get('date')
34 date = opts.get('date')
35 if date:
35 if date:
36 opts['date'] = util.parsedate(date)
36 opts['date'] = util.parsedate(date)
37
37
38 parent, p2 = repo.dirstate.parents()
38 parent, p2 = repo.dirstate.parents()
39 branch = repo.dirstate.branch()
39 branch = repo.dirstate.branch()
40 branchnode = repo.branchtags().get(branch)
40 branchnode = repo.branchtags().get(branch)
41 if parent != branchnode:
41 if parent != branchnode:
42 raise util.Abort(_('working dir not at branch tip '
42 raise util.Abort(_('working dir not at branch tip '
43 '(use "hg update" to check out branch tip)'))
43 '(use "hg update" to check out branch tip)'))
44
44
45 if p2 != nullid:
45 if p2 != nullid:
46 raise util.Abort(_('outstanding uncommitted merge'))
46 raise util.Abort(_('outstanding uncommitted merge'))
47
47
48 wlock = lock = None
48 wlock = lock = None
49 try:
49 try:
50 wlock = repo.wlock()
50 wlock = repo.wlock()
51 lock = repo.lock()
51 lock = repo.lock()
52 mod, add, rem, del_ = repo.status()[:4]
52 mod, add, rem, del_ = repo.status()[:4]
53
53
54 if mod or add or rem:
54 if mod or add or rem:
55 raise util.Abort(_('outstanding uncommitted changes'))
55 raise util.Abort(_('outstanding uncommitted changes'))
56 if del_:
56 if del_:
57 raise util.Abort(_('working directory is missing some files'))
57 raise util.Abort(_('working directory is missing some files'))
58 bheads = repo.branchheads(branch)
58 bheads = repo.branchheads(branch)
59 bheads = [head for head in bheads if len(repo[head].children()) == 0]
59 bheads = [head for head in bheads if len(repo[head].children()) == 0]
60 if len(bheads) > 1:
60 if len(bheads) > 1:
61 raise util.Abort(_('multiple heads in this branch '
61 raise util.Abort(_('multiple heads in this branch '
62 '(use "hg heads ." and "hg merge" to merge)'))
62 '(use "hg heads ." and "hg merge" to merge)'))
63
63
64 other = hg.repository(cmdutil.remoteui(repo, opts),
64 other = hg.repository(cmdutil.remoteui(repo, opts),
65 ui.expandpath(source))
65 ui.expandpath(source))
66 ui.status(_('pulling from %s\n') %
66 ui.status(_('pulling from %s\n') %
67 url.hidepassword(ui.expandpath(source)))
67 url.hidepassword(ui.expandpath(source)))
68 revs = None
68 revs = None
69 if opts['rev']:
69 if opts['rev']:
70 try:
70 try:
71 revs = [other.lookup(rev) for rev in opts['rev']]
71 revs = [other.lookup(rev) for rev in opts['rev']]
72 except error.CapabilityError:
72 except error.CapabilityError:
73 err = _("Other repository doesn't support revision lookup, "
73 err = _("Other repository doesn't support revision lookup, "
74 "so a rev cannot be specified.")
74 "so a rev cannot be specified.")
75 raise util.Abort(err)
75 raise util.Abort(err)
76
76
77 # Are there any changes at all?
77 # Are there any changes at all?
78 modheads = repo.pull(other, heads=revs)
78 modheads = repo.pull(other, heads=revs)
79 if modheads == 0:
79 if modheads == 0:
80 return 0
80 return 0
81
81
82 # Is this a simple fast-forward along the current branch?
82 # Is this a simple fast-forward along the current branch?
83 newheads = repo.branchheads(branch)
83 newheads = repo.branchheads(branch)
84 newheads = [head for head in newheads if len(repo[head].children()) == 0]
84 newheads = [head for head in newheads if len(repo[head].children()) == 0]
85 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
85 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
86 if len(newheads) == 1:
86 if len(newheads) == 1:
87 if newchildren[0] != parent:
87 if newchildren[0] != parent:
88 return hg.clean(repo, newchildren[0])
88 return hg.clean(repo, newchildren[0])
89 else:
89 else:
90 return
90 return
91
91
92 # Is there more than one additional branch head?
92 # Is there more than one additional branch head?
93 newchildren = [n for n in newchildren if n != parent]
93 newchildren = [n for n in newchildren if n != parent]
94 newparent = parent
94 newparent = parent
95 if newchildren:
95 if newchildren:
96 newparent = newchildren[0]
96 newparent = newchildren[0]
97 hg.clean(repo, newparent)
97 hg.clean(repo, newparent)
98 newheads = [n for n in newheads if n != newparent]
98 newheads = [n for n in newheads if n != newparent]
99 if len(newheads) > 1:
99 if len(newheads) > 1:
100 ui.status(_('not merging with %d other new branch heads '
100 ui.status(_('not merging with %d other new branch heads '
101 '(use "hg heads ." and "hg merge" to merge them)\n') %
101 '(use "hg heads ." and "hg merge" to merge them)\n') %
102 (len(newheads) - 1))
102 (len(newheads) - 1))
103 return
103 return
104
104
105 # Otherwise, let's merge.
105 # Otherwise, let's merge.
106 err = False
106 err = False
107 if newheads:
107 if newheads:
108 # By default, we consider the repository we're pulling
108 # By default, we consider the repository we're pulling
109 # *from* as authoritative, so we merge our changes into
109 # *from* as authoritative, so we merge our changes into
110 # theirs.
110 # theirs.
111 if opts['switch_parent']:
111 if opts['switch_parent']:
112 firstparent, secondparent = newparent, newheads[0]
112 firstparent, secondparent = newparent, newheads[0]
113 else:
113 else:
114 firstparent, secondparent = newheads[0], newparent
114 firstparent, secondparent = newheads[0], newparent
115 ui.status(_('updating to %d:%s\n') %
115 ui.status(_('updating to %d:%s\n') %
116 (repo.changelog.rev(firstparent),
116 (repo.changelog.rev(firstparent),
117 short(firstparent)))
117 short(firstparent)))
118 hg.clean(repo, firstparent)
118 hg.clean(repo, firstparent)
119 ui.status(_('merging with %d:%s\n') %
119 ui.status(_('merging with %d:%s\n') %
120 (repo.changelog.rev(secondparent), short(secondparent)))
120 (repo.changelog.rev(secondparent), short(secondparent)))
121 err = hg.merge(repo, secondparent, remind=False)
121 err = hg.merge(repo, secondparent, remind=False)
122
122
123 if not err:
123 if not err:
124 message = (cmdutil.logmessage(opts) or
124 message = (cmdutil.logmessage(opts) or
125 (_('Automated merge with %s') %
125 (_('Automated merge with %s') %
126 url.removeauth(other.url())))
126 url.removeauth(other.url())))
127 editor = cmdutil.commiteditor
127 editor = cmdutil.commiteditor
128 if opts.get('force_editor') or opts.get('edit'):
128 if opts.get('force_editor') or opts.get('edit'):
129 editor = cmdutil.commitforceeditor
129 editor = cmdutil.commitforceeditor
130 n = repo.commit(message, opts['user'], opts['date'],
130 n = repo.commit(message, opts['user'], opts['date'],
131 force=True, editor=editor)
131 force=True, editor=editor)
132 ui.status(_('new changeset %d:%s merges remote changes '
132 ui.status(_('new changeset %d:%s merges remote changes '
133 'with local\n') % (repo.changelog.rev(n),
133 'with local\n') % (repo.changelog.rev(n),
134 short(n)))
134 short(n)))
135
135
136 finally:
136 finally:
137 release(lock, wlock)
137 release(lock, wlock)
138
138
139 cmdtable = {
139 cmdtable = {
140 'fetch':
140 'fetch':
141 (fetch,
141 (fetch,
142 [('r', 'rev', [], _('a specific revision you would like to pull')),
142 [('r', 'rev', [], _('a specific revision you would like to pull')),
143 ('e', 'edit', None, _('edit commit message')),
143 ('e', 'edit', None, _('edit commit message')),
144 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
144 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
145 ('', 'switch-parent', None, _('switch parents when merging')),
145 ('', 'switch-parent', None, _('switch parents when merging')),
146 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
146 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
147 _('hg fetch [SOURCE]')),
147 _('hg fetch [SOURCE]')),
148 }
148 }
@@ -1,283 +1,283 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2, incorporated herein by reference.
4 # GNU General Public License version 2, incorporated herein by reference.
5
5
6 '''GnuPG signing extension for Mercurial'''
6 '''sign and verify changesets'''
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match
9 from mercurial import util, commands, match
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 class gpg(object):
13 class gpg(object):
14 def __init__(self, path, key=None):
14 def __init__(self, path, key=None):
15 self.path = path
15 self.path = path
16 self.key = (key and " --local-user \"%s\"" % key) or ""
16 self.key = (key and " --local-user \"%s\"" % key) or ""
17
17
18 def sign(self, data):
18 def sign(self, data):
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 return util.filter(data, gpgcmd)
20 return util.filter(data, gpgcmd)
21
21
22 def verify(self, data, sig):
22 def verify(self, data, sig):
23 """ returns of the good and bad signatures"""
23 """ returns of the good and bad signatures"""
24 sigfile = datafile = None
24 sigfile = datafile = None
25 try:
25 try:
26 # create temporary files
26 # create temporary files
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 fp = os.fdopen(fd, 'wb')
28 fp = os.fdopen(fd, 'wb')
29 fp.write(sig)
29 fp.write(sig)
30 fp.close()
30 fp.close()
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(data)
33 fp.write(data)
34 fp.close()
34 fp.close()
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 ret = util.filter("", gpgcmd)
37 ret = util.filter("", gpgcmd)
38 finally:
38 finally:
39 for f in (sigfile, datafile):
39 for f in (sigfile, datafile):
40 try:
40 try:
41 if f: os.unlink(f)
41 if f: os.unlink(f)
42 except: pass
42 except: pass
43 keys = []
43 keys = []
44 key, fingerprint = None, None
44 key, fingerprint = None, None
45 err = ""
45 err = ""
46 for l in ret.splitlines():
46 for l in ret.splitlines():
47 # see DETAILS in the gnupg documentation
47 # see DETAILS in the gnupg documentation
48 # filter the logger output
48 # filter the logger output
49 if not l.startswith("[GNUPG:]"):
49 if not l.startswith("[GNUPG:]"):
50 continue
50 continue
51 l = l[9:]
51 l = l[9:]
52 if l.startswith("ERRSIG"):
52 if l.startswith("ERRSIG"):
53 err = _("error while verifying signature")
53 err = _("error while verifying signature")
54 break
54 break
55 elif l.startswith("VALIDSIG"):
55 elif l.startswith("VALIDSIG"):
56 # fingerprint of the primary key
56 # fingerprint of the primary key
57 fingerprint = l.split()[10]
57 fingerprint = l.split()[10]
58 elif (l.startswith("GOODSIG") or
58 elif (l.startswith("GOODSIG") or
59 l.startswith("EXPSIG") or
59 l.startswith("EXPSIG") or
60 l.startswith("EXPKEYSIG") or
60 l.startswith("EXPKEYSIG") or
61 l.startswith("BADSIG")):
61 l.startswith("BADSIG")):
62 if key is not None:
62 if key is not None:
63 keys.append(key + [fingerprint])
63 keys.append(key + [fingerprint])
64 key = l.split(" ", 2)
64 key = l.split(" ", 2)
65 fingerprint = None
65 fingerprint = None
66 if err:
66 if err:
67 return err, []
67 return err, []
68 if key is not None:
68 if key is not None:
69 keys.append(key + [fingerprint])
69 keys.append(key + [fingerprint])
70 return err, keys
70 return err, keys
71
71
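For illustration, this is roughly how one successful status line is digested (the line itself is a made-up example following the format described in GnuPG's DETAILS document; the key id and user id are invented):

    l = "[GNUPG:] GOODSIG 0123456789ABCDEF John Doe <john@example.com>"
    l = l[9:]              # strip the "[GNUPG:] " prefix
    key = l.split(" ", 2)  # ['GOODSIG', '0123456789ABCDEF', 'John Doe <john@example.com>']
    # a later VALIDSIG line carries the primary key fingerprint that gets appended to the entry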
72 def newgpg(ui, **opts):
72 def newgpg(ui, **opts):
73 """create a new gpg instance"""
73 """create a new gpg instance"""
74 gpgpath = ui.config("gpg", "cmd", "gpg")
74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 gpgkey = opts.get('key')
75 gpgkey = opts.get('key')
76 if not gpgkey:
76 if not gpgkey:
77 gpgkey = ui.config("gpg", "key", None)
77 gpgkey = ui.config("gpg", "key", None)
78 return gpg(gpgpath, gpgkey)
78 return gpg(gpgpath, gpgkey)
79
79
80 def sigwalk(repo):
80 def sigwalk(repo):
81 """
81 """
82 walk over all signatures, yielding pairs
82 walk over all signatures, yielding pairs
83 ((node, version, sig), (filename, linenumber))
83 ((node, version, sig), (filename, linenumber))
84 """
84 """
85 def parsefile(fileiter, context):
85 def parsefile(fileiter, context):
86 ln = 1
86 ln = 1
87 for l in fileiter:
87 for l in fileiter:
88 if not l:
88 if not l:
89 continue
89 continue
90 yield (l.split(" ", 2), (context, ln))
90 yield (l.split(" ", 2), (context, ln))
91 ln +=1
91 ln +=1
92
92
93 # read the heads
93 # read the heads
94 fl = repo.file(".hgsigs")
94 fl = repo.file(".hgsigs")
95 for r in reversed(fl.heads()):
95 for r in reversed(fl.heads()):
96 fn = ".hgsigs|%s" % hgnode.short(r)
96 fn = ".hgsigs|%s" % hgnode.short(r)
97 for item in parsefile(fl.read(r).splitlines(), fn):
97 for item in parsefile(fl.read(r).splitlines(), fn):
98 yield item
98 yield item
99 try:
99 try:
100 # read local signatures
100 # read local signatures
101 fn = "localsigs"
101 fn = "localsigs"
102 for item in parsefile(repo.opener(fn), fn):
102 for item in parsefile(repo.opener(fn), fn):
103 yield item
103 yield item
104 except IOError:
104 except IOError:
105 pass
105 pass
106
106
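Every record yielded here is one line in the format written by sign() below: the full hex changeset node, the signature scheme version, and the base64-encoded detached signature. A hypothetical line (node and signature shortened and invented):

    # 4e2757f1c0fe0acc97d4f33b5fcf42f3ca7361ed 0 iQEcBAABAgAGBQJ...==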
107 def getkeys(ui, repo, mygpg, sigdata, context):
107 def getkeys(ui, repo, mygpg, sigdata, context):
108 """get the keys who signed a data"""
108 """get the keys who signed a data"""
109 fn, ln = context
109 fn, ln = context
110 node, version, sig = sigdata
110 node, version, sig = sigdata
111 prefix = "%s:%d" % (fn, ln)
111 prefix = "%s:%d" % (fn, ln)
112 node = hgnode.bin(node)
112 node = hgnode.bin(node)
113
113
114 data = node2txt(repo, node, version)
114 data = node2txt(repo, node, version)
115 sig = binascii.a2b_base64(sig)
115 sig = binascii.a2b_base64(sig)
116 err, keys = mygpg.verify(data, sig)
116 err, keys = mygpg.verify(data, sig)
117 if err:
117 if err:
118 ui.warn("%s:%d %s\n" % (fn, ln , err))
118 ui.warn("%s:%d %s\n" % (fn, ln , err))
119 return None
119 return None
120
120
121 validkeys = []
121 validkeys = []
122 # warn for expired key and/or sigs
122 # warn for expired key and/or sigs
123 for key in keys:
123 for key in keys:
124 if key[0] == "BADSIG":
124 if key[0] == "BADSIG":
125 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
125 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
126 continue
126 continue
127 if key[0] == "EXPSIG":
127 if key[0] == "EXPSIG":
128 ui.write(_("%s Note: Signature has expired"
128 ui.write(_("%s Note: Signature has expired"
129 " (signed by: \"%s\")\n") % (prefix, key[2]))
129 " (signed by: \"%s\")\n") % (prefix, key[2]))
130 elif key[0] == "EXPKEYSIG":
130 elif key[0] == "EXPKEYSIG":
131 ui.write(_("%s Note: This key has expired"
131 ui.write(_("%s Note: This key has expired"
132 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 " (signed by: \"%s\")\n") % (prefix, key[2]))
133 validkeys.append((key[1], key[2], key[3]))
133 validkeys.append((key[1], key[2], key[3]))
134 return validkeys
134 return validkeys
135
135
136 def sigs(ui, repo):
136 def sigs(ui, repo):
137 """list signed changesets"""
137 """list signed changesets"""
138 mygpg = newgpg(ui)
138 mygpg = newgpg(ui)
139 revs = {}
139 revs = {}
140
140
141 for data, context in sigwalk(repo):
141 for data, context in sigwalk(repo):
142 node, version, sig = data
142 node, version, sig = data
143 fn, ln = context
143 fn, ln = context
144 try:
144 try:
145 n = repo.lookup(node)
145 n = repo.lookup(node)
146 except KeyError:
146 except KeyError:
147 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
147 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
148 continue
148 continue
149 r = repo.changelog.rev(n)
149 r = repo.changelog.rev(n)
150 keys = getkeys(ui, repo, mygpg, data, context)
150 keys = getkeys(ui, repo, mygpg, data, context)
151 if not keys:
151 if not keys:
152 continue
152 continue
153 revs.setdefault(r, [])
153 revs.setdefault(r, [])
154 revs[r].extend(keys)
154 revs[r].extend(keys)
155 for rev in sorted(revs, reverse=True):
155 for rev in sorted(revs, reverse=True):
156 for k in revs[rev]:
156 for k in revs[rev]:
157 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
157 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
158 ui.write("%-30s %s\n" % (keystr(ui, k), r))
158 ui.write("%-30s %s\n" % (keystr(ui, k), r))
159
159
160 def check(ui, repo, rev):
160 def check(ui, repo, rev):
161 """verify all the signatures there may be for a particular revision"""
161 """verify all the signatures there may be for a particular revision"""
162 mygpg = newgpg(ui)
162 mygpg = newgpg(ui)
163 rev = repo.lookup(rev)
163 rev = repo.lookup(rev)
164 hexrev = hgnode.hex(rev)
164 hexrev = hgnode.hex(rev)
165 keys = []
165 keys = []
166
166
167 for data, context in sigwalk(repo):
167 for data, context in sigwalk(repo):
168 node, version, sig = data
168 node, version, sig = data
169 if node == hexrev:
169 if node == hexrev:
170 k = getkeys(ui, repo, mygpg, data, context)
170 k = getkeys(ui, repo, mygpg, data, context)
171 if k:
171 if k:
172 keys.extend(k)
172 keys.extend(k)
173
173
174 if not keys:
174 if not keys:
175 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
175 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
176 return
176 return
177
177
178 # print summary
178 # print summary
179 ui.write("%s is signed by:\n" % hgnode.short(rev))
179 ui.write("%s is signed by:\n" % hgnode.short(rev))
180 for key in keys:
180 for key in keys:
181 ui.write(" %s\n" % keystr(ui, key))
181 ui.write(" %s\n" % keystr(ui, key))
182
182
183 def keystr(ui, key):
183 def keystr(ui, key):
184 """associate a string to a key (username, comment)"""
184 """associate a string to a key (username, comment)"""
185 keyid, user, fingerprint = key
185 keyid, user, fingerprint = key
186 comment = ui.config("gpg", fingerprint, None)
186 comment = ui.config("gpg", fingerprint, None)
187 if comment:
187 if comment:
188 return "%s (%s)" % (user, comment)
188 return "%s (%s)" % (user, comment)
189 else:
189 else:
190 return user
190 return user
191
191
192 def sign(ui, repo, *revs, **opts):
192 def sign(ui, repo, *revs, **opts):
193 """add a signature for the current or given revision
193 """add a signature for the current or given revision
194
194
195 If no revision is given, the parent of the working directory is used,
195 If no revision is given, the parent of the working directory is used,
196 or tip if no revision is checked out.
196 or tip if no revision is checked out.
197
197
198 See 'hg help dates' for a list of formats valid for -d/--date.
198 See 'hg help dates' for a list of formats valid for -d/--date.
199 """
199 """
200
200
201 mygpg = newgpg(ui, **opts)
201 mygpg = newgpg(ui, **opts)
202 sigver = "0"
202 sigver = "0"
203 sigmessage = ""
203 sigmessage = ""
204
204
205 date = opts.get('date')
205 date = opts.get('date')
206 if date:
206 if date:
207 opts['date'] = util.parsedate(date)
207 opts['date'] = util.parsedate(date)
208
208
209 if revs:
209 if revs:
210 nodes = [repo.lookup(n) for n in revs]
210 nodes = [repo.lookup(n) for n in revs]
211 else:
211 else:
212 nodes = [node for node in repo.dirstate.parents()
212 nodes = [node for node in repo.dirstate.parents()
213 if node != hgnode.nullid]
213 if node != hgnode.nullid]
214 if len(nodes) > 1:
214 if len(nodes) > 1:
215 raise util.Abort(_('uncommitted merge - please provide a '
215 raise util.Abort(_('uncommitted merge - please provide a '
216 'specific revision'))
216 'specific revision'))
217 if not nodes:
217 if not nodes:
218 nodes = [repo.changelog.tip()]
218 nodes = [repo.changelog.tip()]
219
219
220 for n in nodes:
220 for n in nodes:
221 hexnode = hgnode.hex(n)
221 hexnode = hgnode.hex(n)
222 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
222 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
223 hgnode.short(n)))
223 hgnode.short(n)))
224 # build data
224 # build data
225 data = node2txt(repo, n, sigver)
225 data = node2txt(repo, n, sigver)
226 sig = mygpg.sign(data)
226 sig = mygpg.sign(data)
227 if not sig:
227 if not sig:
228 raise util.Abort(_("Error while signing"))
228 raise util.Abort(_("Error while signing"))
229 sig = binascii.b2a_base64(sig)
229 sig = binascii.b2a_base64(sig)
230 sig = sig.replace("\n", "")
230 sig = sig.replace("\n", "")
231 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
231 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
232
232
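# Illustrative note (not part of the original source): each appended
# sigmessage line has the form
#   <40-hex changeset id> <sig version, currently "0"> <base64 signature>
# and is written either to .hg/localsigs (with --local) or appended to
# .hgsigs in the working directory below.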
233 # write it
233 # write it
234 if opts['local']:
234 if opts['local']:
235 repo.opener("localsigs", "ab").write(sigmessage)
235 repo.opener("localsigs", "ab").write(sigmessage)
236 return
236 return
237
237
238 for x in repo.status(unknown=True)[:5]:
238 for x in repo.status(unknown=True)[:5]:
239 if ".hgsigs" in x and not opts["force"]:
239 if ".hgsigs" in x and not opts["force"]:
240 raise util.Abort(_("working copy of .hgsigs is changed "
240 raise util.Abort(_("working copy of .hgsigs is changed "
241 "(please commit .hgsigs manually "
241 "(please commit .hgsigs manually "
242 "or use --force)"))
242 "or use --force)"))
243
243
244 repo.wfile(".hgsigs", "ab").write(sigmessage)
244 repo.wfile(".hgsigs", "ab").write(sigmessage)
245
245
246 if '.hgsigs' not in repo.dirstate:
246 if '.hgsigs' not in repo.dirstate:
247 repo.add([".hgsigs"])
247 repo.add([".hgsigs"])
248
248
249 if opts["no_commit"]:
249 if opts["no_commit"]:
250 return
250 return
251
251
252 message = opts['message']
252 message = opts['message']
253 if not message:
253 if not message:
254 message = "\n".join([_("Added signature for changeset %s")
254 message = "\n".join([_("Added signature for changeset %s")
255 % hgnode.short(n)
255 % hgnode.short(n)
256 for n in nodes])
256 for n in nodes])
257 try:
257 try:
258 m = match.exact(repo.root, '', ['.hgsigs'])
258 m = match.exact(repo.root, '', ['.hgsigs'])
259 repo.commit(message, opts['user'], opts['date'], match=m)
259 repo.commit(message, opts['user'], opts['date'], match=m)
260 except ValueError, inst:
260 except ValueError, inst:
261 raise util.Abort(str(inst))
261 raise util.Abort(str(inst))
262
262
263 def node2txt(repo, node, ver):
263 def node2txt(repo, node, ver):
264 """map a manifest into some text"""
264 """map a manifest into some text"""
265 if ver == "0":
265 if ver == "0":
266 return "%s\n" % hgnode.hex(node)
266 return "%s\n" % hgnode.hex(node)
267 else:
267 else:
268 raise util.Abort(_("unknown signature version"))
268 raise util.Abort(_("unknown signature version"))
269
269
270 cmdtable = {
270 cmdtable = {
271 "sign":
271 "sign":
272 (sign,
272 (sign,
273 [('l', 'local', None, _('make the signature local')),
273 [('l', 'local', None, _('make the signature local')),
274 ('f', 'force', None, _('sign even if the sigfile is modified')),
274 ('f', 'force', None, _('sign even if the sigfile is modified')),
275 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
275 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
276 ('k', 'key', '', _('the key id to sign with')),
276 ('k', 'key', '', _('the key id to sign with')),
277 ('m', 'message', '', _('commit message')),
277 ('m', 'message', '', _('commit message')),
278 ] + commands.commitopts2,
278 ] + commands.commitopts2,
279 _('hg sign [OPTION]... [REVISION]...')),
279 _('hg sign [OPTION]... [REVISION]...')),
280 "sigcheck": (check, [], _('hg sigcheck REVISION')),
280 "sigcheck": (check, [], _('hg sigcheck REVISION')),
281 "sigs": (sigs, [], _('hg sigs')),
281 "sigs": (sigs, [], _('hg sigs')),
282 }
282 }
283
283
@@ -1,376 +1,376 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''show revision graphs in terminal windows
8 '''show revision graphs in terminals
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this option is given, an ASCII representation of the
11 commands. When this option is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 import os, sys
15 import os, sys
16 from mercurial.cmdutil import revrange, show_changeset
16 from mercurial.cmdutil import revrange, show_changeset
17 from mercurial.commands import templateopts
17 from mercurial.commands import templateopts
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.node import nullrev
19 from mercurial.node import nullrev
20 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
20 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
21 from mercurial import hg, url, util, graphmod
21 from mercurial import hg, url, util, graphmod
22
22
23 ASCIIDATA = 'ASC'
23 ASCIIDATA = 'ASC'
24
24
25 def asciiformat(ui, repo, revdag, opts):
25 def asciiformat(ui, repo, revdag, opts):
26 """formats a changelog DAG walk for ASCII output"""
26 """formats a changelog DAG walk for ASCII output"""
27 showparents = [ctx.node() for ctx in repo[None].parents()]
27 showparents = [ctx.node() for ctx in repo[None].parents()]
28 displayer = show_changeset(ui, repo, opts, buffered=True)
28 displayer = show_changeset(ui, repo, opts, buffered=True)
29 for (id, type, ctx, parentids) in revdag:
29 for (id, type, ctx, parentids) in revdag:
30 if type != graphmod.CHANGESET:
30 if type != graphmod.CHANGESET:
31 continue
31 continue
32 displayer.show(ctx)
32 displayer.show(ctx)
33 lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
33 lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
34 char = ctx.node() in showparents and '@' or 'o'
34 char = ctx.node() in showparents and '@' or 'o'
35 yield (id, ASCIIDATA, (char, lines), parentids)
35 yield (id, ASCIIDATA, (char, lines), parentids)
36
36
37 def asciiedges(nodes):
37 def asciiedges(nodes):
38 """adds edge info to changelog DAG walk suitable for ascii()"""
38 """adds edge info to changelog DAG walk suitable for ascii()"""
39 seen = []
39 seen = []
40 for node, type, data, parents in nodes:
40 for node, type, data, parents in nodes:
41 if node not in seen:
41 if node not in seen:
42 seen.append(node)
42 seen.append(node)
43 nodeidx = seen.index(node)
43 nodeidx = seen.index(node)
44
44
45 knownparents = []
45 knownparents = []
46 newparents = []
46 newparents = []
47 for parent in parents:
47 for parent in parents:
48 if parent in seen:
48 if parent in seen:
49 knownparents.append(parent)
49 knownparents.append(parent)
50 else:
50 else:
51 newparents.append(parent)
51 newparents.append(parent)
52
52
53 ncols = len(seen)
53 ncols = len(seen)
54 nextseen = seen[:]
54 nextseen = seen[:]
55 nextseen[nodeidx:nodeidx + 1] = newparents
55 nextseen[nodeidx:nodeidx + 1] = newparents
56 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
56 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
57
57
58 if len(newparents) > 0:
58 if len(newparents) > 0:
59 edges.append((nodeidx, nodeidx))
59 edges.append((nodeidx, nodeidx))
60 if len(newparents) > 1:
60 if len(newparents) > 1:
61 edges.append((nodeidx, nodeidx + 1))
61 edges.append((nodeidx, nodeidx + 1))
62 nmorecols = len(nextseen) - ncols
62 nmorecols = len(nextseen) - ncols
63 seen = nextseen
63 seen = nextseen
64 yield (nodeidx, type, data, edges, ncols, nmorecols)
64 yield (nodeidx, type, data, edges, ncols, nmorecols)
65
65
66 def fix_long_right_edges(edges):
66 def fix_long_right_edges(edges):
67 for (i, (start, end)) in enumerate(edges):
67 for (i, (start, end)) in enumerate(edges):
68 if end > start:
68 if end > start:
69 edges[i] = (start, end + 1)
69 edges[i] = (start, end + 1)
70
70
71 def get_nodeline_edges_tail(
71 def get_nodeline_edges_tail(
72 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
72 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
73 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
73 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
74 # Still going in the same non-vertical direction.
74 # Still going in the same non-vertical direction.
75 if n_columns_diff == -1:
75 if n_columns_diff == -1:
76 start = max(node_index + 1, p_node_index)
76 start = max(node_index + 1, p_node_index)
77 tail = ["|", " "] * (start - node_index - 1)
77 tail = ["|", " "] * (start - node_index - 1)
78 tail.extend(["/", " "] * (n_columns - start))
78 tail.extend(["/", " "] * (n_columns - start))
79 return tail
79 return tail
80 else:
80 else:
81 return ["\\", " "] * (n_columns - node_index - 1)
81 return ["\\", " "] * (n_columns - node_index - 1)
82 else:
82 else:
83 return ["|", " "] * (n_columns - node_index - 1)
83 return ["|", " "] * (n_columns - node_index - 1)
84
84
85 def draw_edges(edges, nodeline, interline):
85 def draw_edges(edges, nodeline, interline):
86 for (start, end) in edges:
86 for (start, end) in edges:
87 if start == end + 1:
87 if start == end + 1:
88 interline[2 * end + 1] = "/"
88 interline[2 * end + 1] = "/"
89 elif start == end - 1:
89 elif start == end - 1:
90 interline[2 * start + 1] = "\\"
90 interline[2 * start + 1] = "\\"
91 elif start == end:
91 elif start == end:
92 interline[2 * start] = "|"
92 interline[2 * start] = "|"
93 else:
93 else:
94 nodeline[2 * end] = "+"
94 nodeline[2 * end] = "+"
95 if start > end:
95 if start > end:
96 (start, end) = (end, start)
96 (start, end) = (end, start)
97 for i in range(2 * start + 1, 2 * end):
97 for i in range(2 * start + 1, 2 * end):
98 if nodeline[i] != "+":
98 if nodeline[i] != "+":
99 nodeline[i] = "-"
99 nodeline[i] = "-"
100
100
101 def get_padding_line(ni, n_columns, edges):
101 def get_padding_line(ni, n_columns, edges):
102 line = []
102 line = []
103 line.extend(["|", " "] * ni)
103 line.extend(["|", " "] * ni)
104 if (ni, ni - 1) in edges or (ni, ni) in edges:
104 if (ni, ni - 1) in edges or (ni, ni) in edges:
105 # (ni, ni - 1) (ni, ni)
105 # (ni, ni - 1) (ni, ni)
106 # | | | | | | | |
106 # | | | | | | | |
107 # +---o | | o---+
107 # +---o | | o---+
108 # | | c | | c | |
108 # | | c | | c | |
109 # | |/ / | |/ /
109 # | |/ / | |/ /
110 # | | | | | |
110 # | | | | | |
111 c = "|"
111 c = "|"
112 else:
112 else:
113 c = " "
113 c = " "
114 line.extend([c, " "])
114 line.extend([c, " "])
115 line.extend(["|", " "] * (n_columns - ni - 1))
115 line.extend(["|", " "] * (n_columns - ni - 1))
116 return line
116 return line
117
117
118 def ascii(ui, dag):
118 def ascii(ui, dag):
119 """prints an ASCII graph of the DAG
119 """prints an ASCII graph of the DAG
120
120
121 dag is a generator that emits tuples with the following elements:
121 dag is a generator that emits tuples with the following elements:
122
122
123 - Column of the current node in the set of ongoing edges.
123 - Column of the current node in the set of ongoing edges.
124 - Type indicator of node data == ASCIIDATA.
124 - Type indicator of node data == ASCIIDATA.
125 - Payload: (char, lines):
125 - Payload: (char, lines):
126 - Character to use as node's symbol.
126 - Character to use as node's symbol.
127 - List of lines to display as the node's text.
127 - List of lines to display as the node's text.
128 - Edges; a list of (col, next_col) indicating the edges between
128 - Edges; a list of (col, next_col) indicating the edges between
129 the current node and its parents.
129 the current node and its parents.
130 - Number of columns (ongoing edges) in the current revision.
130 - Number of columns (ongoing edges) in the current revision.
131 - The difference between the number of columns (ongoing edges)
131 - The difference between the number of columns (ongoing edges)
132 in the next revision and the number of columns (ongoing edges)
132 in the next revision and the number of columns (ongoing edges)
133 in the current revision. That is: -1 means one column removed;
133 in the current revision. That is: -1 means one column removed;
134 0 means no columns added or removed; 1 means one column added.
134 0 means no columns added or removed; 1 means one column added.
135 """
135 """
136 prev_n_columns_diff = 0
136 prev_n_columns_diff = 0
137 prev_node_index = 0
137 prev_node_index = 0
138 for (node_index, type, (node_ch, node_lines), edges, n_columns, n_columns_diff) in dag:
138 for (node_index, type, (node_ch, node_lines), edges, n_columns, n_columns_diff) in dag:
139
139
140 assert -2 < n_columns_diff < 2
140 assert -2 < n_columns_diff < 2
141 if n_columns_diff == -1:
141 if n_columns_diff == -1:
142 # Transform
142 # Transform
143 #
143 #
144 # | | | | | |
144 # | | | | | |
145 # o | | into o---+
145 # o | | into o---+
146 # |X / |/ /
146 # |X / |/ /
147 # | | | |
147 # | | | |
148 fix_long_right_edges(edges)
148 fix_long_right_edges(edges)
149
149
150 # add_padding_line says whether to rewrite
150 # add_padding_line says whether to rewrite
151 #
151 #
152 # | | | | | | | |
152 # | | | | | | | |
153 # | o---+ into | o---+
153 # | o---+ into | o---+
154 # | / / | | | # <--- padding line
154 # | / / | | | # <--- padding line
155 # o | | | / /
155 # o | | | / /
156 # o | |
156 # o | |
157 add_padding_line = (len(node_lines) > 2 and
157 add_padding_line = (len(node_lines) > 2 and
158 n_columns_diff == -1 and
158 n_columns_diff == -1 and
159 [x for (x, y) in edges if x + 1 < y])
159 [x for (x, y) in edges if x + 1 < y])
160
160
161 # fix_nodeline_tail says whether to rewrite
161 # fix_nodeline_tail says whether to rewrite
162 #
162 #
163 # | | o | | | | o | |
163 # | | o | | | | o | |
164 # | | |/ / | | |/ /
164 # | | |/ / | | |/ /
165 # | o | | into | o / / # <--- fixed nodeline tail
165 # | o | | into | o / / # <--- fixed nodeline tail
166 # | |/ / | |/ /
166 # | |/ / | |/ /
167 # o | | o | |
167 # o | | o | |
168 fix_nodeline_tail = len(node_lines) <= 2 and not add_padding_line
168 fix_nodeline_tail = len(node_lines) <= 2 and not add_padding_line
169
169
170 # nodeline is the line containing the node character (typically o)
170 # nodeline is the line containing the node character (typically o)
171 nodeline = ["|", " "] * node_index
171 nodeline = ["|", " "] * node_index
172 nodeline.extend([node_ch, " "])
172 nodeline.extend([node_ch, " "])
173
173
174 nodeline.extend(
174 nodeline.extend(
175 get_nodeline_edges_tail(
175 get_nodeline_edges_tail(
176 node_index, prev_node_index, n_columns, n_columns_diff,
176 node_index, prev_node_index, n_columns, n_columns_diff,
177 prev_n_columns_diff, fix_nodeline_tail))
177 prev_n_columns_diff, fix_nodeline_tail))
178
178
179 # shift_interline is the line containing the non-vertical
179 # shift_interline is the line containing the non-vertical
180 # edges between this entry and the next
180 # edges between this entry and the next
181 shift_interline = ["|", " "] * node_index
181 shift_interline = ["|", " "] * node_index
182 if n_columns_diff == -1:
182 if n_columns_diff == -1:
183 n_spaces = 1
183 n_spaces = 1
184 edge_ch = "/"
184 edge_ch = "/"
185 elif n_columns_diff == 0:
185 elif n_columns_diff == 0:
186 n_spaces = 2
186 n_spaces = 2
187 edge_ch = "|"
187 edge_ch = "|"
188 else:
188 else:
189 n_spaces = 3
189 n_spaces = 3
190 edge_ch = "\\"
190 edge_ch = "\\"
191 shift_interline.extend(n_spaces * [" "])
191 shift_interline.extend(n_spaces * [" "])
192 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
192 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
193
193
194 # draw edges from the current node to its parents
194 # draw edges from the current node to its parents
195 draw_edges(edges, nodeline, shift_interline)
195 draw_edges(edges, nodeline, shift_interline)
196
196
197 # lines is the list of all graph lines to print
197 # lines is the list of all graph lines to print
198 lines = [nodeline]
198 lines = [nodeline]
199 if add_padding_line:
199 if add_padding_line:
200 lines.append(get_padding_line(node_index, n_columns, edges))
200 lines.append(get_padding_line(node_index, n_columns, edges))
201 lines.append(shift_interline)
201 lines.append(shift_interline)
202
202
203 # make sure that there are as many graph lines as there are
203 # make sure that there are as many graph lines as there are
204 # log strings
204 # log strings
205 while len(node_lines) < len(lines):
205 while len(node_lines) < len(lines):
206 node_lines.append("")
206 node_lines.append("")
207 if len(lines) < len(node_lines):
207 if len(lines) < len(node_lines):
208 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
208 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
209 while len(lines) < len(node_lines):
209 while len(lines) < len(node_lines):
210 lines.append(extra_interline)
210 lines.append(extra_interline)
211
211
212 # print lines
212 # print lines
213 indentation_level = max(n_columns, n_columns + n_columns_diff)
213 indentation_level = max(n_columns, n_columns + n_columns_diff)
214 for (line, logstr) in zip(lines, node_lines):
214 for (line, logstr) in zip(lines, node_lines):
215 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
215 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
216 ui.write(ln.rstrip() + '\n')
216 ui.write(ln.rstrip() + '\n')
217
217
218 # ... and start over
218 # ... and start over
219 prev_node_index = node_index
219 prev_node_index = node_index
220 prev_n_columns_diff = n_columns_diff
220 prev_n_columns_diff = n_columns_diff
221
221
222 def get_revs(repo, rev_opt):
222 def get_revs(repo, rev_opt):
223 if rev_opt:
223 if rev_opt:
224 revs = revrange(repo, rev_opt)
224 revs = revrange(repo, rev_opt)
225 return (max(revs), min(revs))
225 return (max(revs), min(revs))
226 else:
226 else:
227 return (len(repo) - 1, 0)
227 return (len(repo) - 1, 0)
228
228
229 def check_unsupported_flags(opts):
229 def check_unsupported_flags(opts):
230 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
230 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
231 "only_merges", "user", "only_branch", "prune", "newest_first",
231 "only_merges", "user", "only_branch", "prune", "newest_first",
232 "no_merges", "include", "exclude"]:
232 "no_merges", "include", "exclude"]:
233 if op in opts and opts[op]:
233 if op in opts and opts[op]:
234 raise util.Abort(_("--graph option is incompatible with --%s") % op)
234 raise util.Abort(_("--graph option is incompatible with --%s") % op)
235
235
236 def graphlog(ui, repo, path=None, **opts):
236 def graphlog(ui, repo, path=None, **opts):
237 """show revision history alongside an ASCII revision graph
237 """show revision history alongside an ASCII revision graph
238
238
239 Print a revision history alongside a revision graph drawn with
239 Print a revision history alongside a revision graph drawn with
240 ASCII characters.
240 ASCII characters.
241
241
242 Nodes printed as an @ character are parents of the working
242 Nodes printed as an @ character are parents of the working
243 directory.
243 directory.
244 """
244 """
245
245
246 check_unsupported_flags(opts)
246 check_unsupported_flags(opts)
247 limit = cmdutil.loglimit(opts)
247 limit = cmdutil.loglimit(opts)
248 start, stop = get_revs(repo, opts["rev"])
248 start, stop = get_revs(repo, opts["rev"])
249 stop = max(stop, start - limit + 1)
249 stop = max(stop, start - limit + 1)
250 if start == nullrev:
250 if start == nullrev:
251 return
251 return
252
252
253 if path:
253 if path:
254 path = util.canonpath(repo.root, os.getcwd(), path)
254 path = util.canonpath(repo.root, os.getcwd(), path)
255 if path: # could be reset in canonpath
255 if path: # could be reset in canonpath
256 revdag = graphmod.filerevs(repo, path, start, stop)
256 revdag = graphmod.filerevs(repo, path, start, stop)
257 else:
257 else:
258 revdag = graphmod.revisions(repo, start, stop)
258 revdag = graphmod.revisions(repo, start, stop)
259
259
260 fmtdag = asciiformat(ui, repo, revdag, opts)
260 fmtdag = asciiformat(ui, repo, revdag, opts)
261 ascii(ui, asciiedges(fmtdag))
261 ascii(ui, asciiedges(fmtdag))
262
262
263 def graphrevs(repo, nodes, opts):
263 def graphrevs(repo, nodes, opts):
264 limit = cmdutil.loglimit(opts)
264 limit = cmdutil.loglimit(opts)
265 nodes.reverse()
265 nodes.reverse()
266 if limit < sys.maxint:
266 if limit < sys.maxint:
267 nodes = nodes[:limit]
267 nodes = nodes[:limit]
268 return graphmod.nodes(repo, nodes)
268 return graphmod.nodes(repo, nodes)
269
269
270 def goutgoing(ui, repo, dest=None, **opts):
270 def goutgoing(ui, repo, dest=None, **opts):
271 """show the outgoing changesets alongside an ASCII revision graph
271 """show the outgoing changesets alongside an ASCII revision graph
272
272
273 Print the outgoing changesets alongside a revision graph drawn with
273 Print the outgoing changesets alongside a revision graph drawn with
274 ASCII characters.
274 ASCII characters.
275
275
276 Nodes printed as an @ character are parents of the working
276 Nodes printed as an @ character are parents of the working
277 directory.
277 directory.
278 """
278 """
279
279
280 check_unsupported_flags(opts)
280 check_unsupported_flags(opts)
281 dest, revs, checkout = hg.parseurl(
281 dest, revs, checkout = hg.parseurl(
282 ui.expandpath(dest or 'default-push', dest or 'default'),
282 ui.expandpath(dest or 'default-push', dest or 'default'),
283 opts.get('rev'))
283 opts.get('rev'))
284 if revs:
284 if revs:
285 revs = [repo.lookup(rev) for rev in revs]
285 revs = [repo.lookup(rev) for rev in revs]
286 other = hg.repository(cmdutil.remoteui(ui, opts), dest)
286 other = hg.repository(cmdutil.remoteui(ui, opts), dest)
287 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
287 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
288 o = repo.findoutgoing(other, force=opts.get('force'))
288 o = repo.findoutgoing(other, force=opts.get('force'))
289 if not o:
289 if not o:
290 ui.status(_("no changes found\n"))
290 ui.status(_("no changes found\n"))
291 return
291 return
292
292
293 o = repo.changelog.nodesbetween(o, revs)[0]
293 o = repo.changelog.nodesbetween(o, revs)[0]
294 revdag = graphrevs(repo, o, opts)
294 revdag = graphrevs(repo, o, opts)
295 fmtdag = asciiformat(ui, repo, revdag, opts)
295 fmtdag = asciiformat(ui, repo, revdag, opts)
296 ascii(ui, asciiedges(fmtdag))
296 ascii(ui, asciiedges(fmtdag))
297
297
298 def gincoming(ui, repo, source="default", **opts):
298 def gincoming(ui, repo, source="default", **opts):
299 """show the incoming changesets alongside an ASCII revision graph
299 """show the incoming changesets alongside an ASCII revision graph
300
300
301 Print the incoming changesets alongside a revision graph drawn with
301 Print the incoming changesets alongside a revision graph drawn with
302 ASCII characters.
302 ASCII characters.
303
303
304 Nodes printed as an @ character are parents of the working
304 Nodes printed as an @ character are parents of the working
305 directory.
305 directory.
306 """
306 """
307
307
308 check_unsupported_flags(opts)
308 check_unsupported_flags(opts)
309 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
309 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
310 other = hg.repository(cmdutil.remoteui(repo, opts), source)
310 other = hg.repository(cmdutil.remoteui(repo, opts), source)
311 ui.status(_('comparing with %s\n') % url.hidepassword(source))
311 ui.status(_('comparing with %s\n') % url.hidepassword(source))
312 if revs:
312 if revs:
313 revs = [other.lookup(rev) for rev in revs]
313 revs = [other.lookup(rev) for rev in revs]
314 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
314 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
315 if not incoming:
315 if not incoming:
316 try:
316 try:
317 os.unlink(opts["bundle"])
317 os.unlink(opts["bundle"])
318 except:
318 except:
319 pass
319 pass
320 ui.status(_("no changes found\n"))
320 ui.status(_("no changes found\n"))
321 return
321 return
322
322
323 cleanup = None
323 cleanup = None
324 try:
324 try:
325
325
326 fname = opts["bundle"]
326 fname = opts["bundle"]
327 if fname or not other.local():
327 if fname or not other.local():
328 # create a bundle (uncompressed if other repo is not local)
328 # create a bundle (uncompressed if other repo is not local)
329 if revs is None:
329 if revs is None:
330 cg = other.changegroup(incoming, "incoming")
330 cg = other.changegroup(incoming, "incoming")
331 else:
331 else:
332 cg = other.changegroupsubset(incoming, revs, 'incoming')
332 cg = other.changegroupsubset(incoming, revs, 'incoming')
333 bundletype = other.local() and "HG10BZ" or "HG10UN"
333 bundletype = other.local() and "HG10BZ" or "HG10UN"
334 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
334 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
335 # keep written bundle?
335 # keep written bundle?
336 if opts["bundle"]:
336 if opts["bundle"]:
337 cleanup = None
337 cleanup = None
338 if not other.local():
338 if not other.local():
339 # use the created uncompressed bundlerepo
339 # use the created uncompressed bundlerepo
340 other = bundlerepo.bundlerepository(ui, repo.root, fname)
340 other = bundlerepo.bundlerepository(ui, repo.root, fname)
341
341
342 chlist = other.changelog.nodesbetween(incoming, revs)[0]
342 chlist = other.changelog.nodesbetween(incoming, revs)[0]
343 revdag = graphrevs(other, chlist, opts)
343 revdag = graphrevs(other, chlist, opts)
344 fmtdag = asciiformat(ui, repo, revdag, opts)
344 fmtdag = asciiformat(ui, repo, revdag, opts)
345 ascii(ui, asciiedges(fmtdag))
345 ascii(ui, asciiedges(fmtdag))
346
346
347 finally:
347 finally:
348 if hasattr(other, 'close'):
348 if hasattr(other, 'close'):
349 other.close()
349 other.close()
350 if cleanup:
350 if cleanup:
351 os.unlink(cleanup)
351 os.unlink(cleanup)
352
352
353 def uisetup(ui):
353 def uisetup(ui):
354 '''Initialize the extension.'''
354 '''Initialize the extension.'''
355 _wrapcmd(ui, 'log', commands.table, graphlog)
355 _wrapcmd(ui, 'log', commands.table, graphlog)
356 _wrapcmd(ui, 'incoming', commands.table, gincoming)
356 _wrapcmd(ui, 'incoming', commands.table, gincoming)
357 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
357 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
358
358
359 def _wrapcmd(ui, cmd, table, wrapfn):
359 def _wrapcmd(ui, cmd, table, wrapfn):
360 '''wrap the command'''
360 '''wrap the command'''
361 def graph(orig, *args, **kwargs):
361 def graph(orig, *args, **kwargs):
362 if kwargs['graph']:
362 if kwargs['graph']:
363 return wrapfn(*args, **kwargs)
363 return wrapfn(*args, **kwargs)
364 return orig(*args, **kwargs)
364 return orig(*args, **kwargs)
365 entry = extensions.wrapcommand(table, cmd, graph)
365 entry = extensions.wrapcommand(table, cmd, graph)
366 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
366 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
367
367
368 cmdtable = {
368 cmdtable = {
369 "glog":
369 "glog":
370 (graphlog,
370 (graphlog,
371 [('l', 'limit', '', _('limit number of changes displayed')),
371 [('l', 'limit', '', _('limit number of changes displayed')),
372 ('p', 'patch', False, _('show patch')),
372 ('p', 'patch', False, _('show patch')),
373 ('r', 'rev', [], _('show the specified revision or range')),
373 ('r', 'rev', [], _('show the specified revision or range')),
374 ] + templateopts,
374 ] + templateopts,
375 _('hg glog [OPTION]... [FILE]')),
375 _('hg glog [OPTION]... [FILE]')),
376 }
376 }
@@ -1,246 +1,246 b''
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
2 # Published under the GNU GPL
2 # Published under the GNU GPL
3
3
4 """CIA notification
4 """integrate Mercurial with a CIA notification service
5
5
6 This is meant to be run as a changegroup or incoming hook.
6 This is meant to be run as a changegroup or incoming hook.
7 To configure it, set the following options in your hgrc:
7 To configure it, set the following options in your hgrc:
8
8
9 [cia]
9 [cia]
10 # your registered CIA user name
10 # your registered CIA user name
11 user = foo
11 user = foo
12 # the name of the project in CIA
12 # the name of the project in CIA
13 project = foo
13 project = foo
14 # the module (subproject) (optional)
14 # the module (subproject) (optional)
15 #module = foo
15 #module = foo
16 # Append a diffstat to the log message (optional)
16 # Append a diffstat to the log message (optional)
17 #diffstat = False
17 #diffstat = False
18 # Template to use for log messages (optional)
18 # Template to use for log messages (optional)
19 #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat}
19 #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat}
20 # Style to use (optional)
20 # Style to use (optional)
21 #style = foo
21 #style = foo
22 # The URL of the CIA notification service (optional)
22 # The URL of the CIA notification service (optional)
23 # You can use mailto: URLs to send by email, eg
23 # You can use mailto: URLs to send by email, eg
24 # mailto:cia@cia.vc
24 # mailto:cia@cia.vc
25 # Make sure to set email.from if you do this.
25 # Make sure to set email.from if you do this.
26 #url = http://cia.vc/
26 #url = http://cia.vc/
27 # print message instead of sending it (optional)
27 # print message instead of sending it (optional)
28 #test = False
28 #test = False
29
29
30 [hooks]
30 [hooks]
31 # one of these:
31 # one of these:
32 changegroup.cia = python:hgcia.hook
32 changegroup.cia = python:hgcia.hook
33 #incoming.cia = python:hgcia.hook
33 #incoming.cia = python:hgcia.hook
34
34
35 [web]
35 [web]
36 # If you want hyperlinks (optional)
36 # If you want hyperlinks (optional)
37 baseurl = http://server/path/to/repo
37 baseurl = http://server/path/to/repo
38 """
38 """
39
39
40 from mercurial.i18n import _
40 from mercurial.i18n import _
41 from mercurial.node import *
41 from mercurial.node import *
42 from mercurial import cmdutil, patch, templater, util, mail
42 from mercurial import cmdutil, patch, templater, util, mail
43 import email.Parser
43 import email.Parser
44
44
45 import xmlrpclib
45 import xmlrpclib
46 from xml.sax import saxutils
46 from xml.sax import saxutils
47
47
48 socket_timeout = 30 # seconds
48 socket_timeout = 30 # seconds
49 try:
49 try:
50 # set a timeout for the socket so you don't have to wait so long
50 # set a timeout for the socket so you don't have to wait so long
51 # when cia.vc is having problems. Requires Python >= 2.3:
51 # when cia.vc is having problems. Requires Python >= 2.3:
52 import socket
52 import socket
53 socket.setdefaulttimeout(socket_timeout)
53 socket.setdefaulttimeout(socket_timeout)
54 except:
54 except:
55 pass
55 pass
56
56
57 HGCIA_VERSION = '0.1'
57 HGCIA_VERSION = '0.1'
58 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
58 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
59
59
60
60
61 class ciamsg(object):
61 class ciamsg(object):
62 """ A CIA message """
62 """ A CIA message """
63 def __init__(self, cia, ctx):
63 def __init__(self, cia, ctx):
64 self.cia = cia
64 self.cia = cia
65 self.ctx = ctx
65 self.ctx = ctx
66 self.url = self.cia.url
66 self.url = self.cia.url
67
67
68 def fileelem(self, path, uri, action):
68 def fileelem(self, path, uri, action):
69 if uri:
69 if uri:
70 uri = ' uri=%s' % saxutils.quoteattr(uri)
70 uri = ' uri=%s' % saxutils.quoteattr(uri)
71 return '<file%s action=%s>%s</file>' % (
71 return '<file%s action=%s>%s</file>' % (
72 uri, saxutils.quoteattr(action), saxutils.escape(path))
72 uri, saxutils.quoteattr(action), saxutils.escape(path))
73
73
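# Illustrative result for a modified file (hypothetical URL, hash and path,
# not from the original source):
#   <file uri="http://server/repo/diff/0123456789ab/foo.c" action="modify">foo.c</file>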
74 def fileelems(self):
74 def fileelems(self):
75 n = self.ctx.node()
75 n = self.ctx.node()
76 f = self.cia.repo.status(self.ctx.parents()[0].node(), n)
76 f = self.cia.repo.status(self.ctx.parents()[0].node(), n)
77 url = self.url or ''
77 url = self.url or ''
78 elems = []
78 elems = []
79 for path in f[0]:
79 for path in f[0]:
80 uri = '%s/diff/%s/%s' % (url, short(n), path)
80 uri = '%s/diff/%s/%s' % (url, short(n), path)
81 elems.append(self.fileelem(path, url and uri, 'modify'))
81 elems.append(self.fileelem(path, url and uri, 'modify'))
82 for path in f[1]:
82 for path in f[1]:
83 # TODO: copy/rename ?
83 # TODO: copy/rename ?
84 uri = '%s/file/%s/%s' % (url, short(n), path)
84 uri = '%s/file/%s/%s' % (url, short(n), path)
85 elems.append(self.fileelem(path, url and uri, 'add'))
85 elems.append(self.fileelem(path, url and uri, 'add'))
86 for path in f[2]:
86 for path in f[2]:
87 elems.append(self.fileelem(path, '', 'remove'))
87 elems.append(self.fileelem(path, '', 'remove'))
88
88
89 return '\n'.join(elems)
89 return '\n'.join(elems)
90
90
91 def sourceelem(self, project, module=None, branch=None):
91 def sourceelem(self, project, module=None, branch=None):
92 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
92 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
93 if module:
93 if module:
94 msg.append('<module>%s</module>' % saxutils.escape(module))
94 msg.append('<module>%s</module>' % saxutils.escape(module))
95 if branch:
95 if branch:
96 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
96 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
97 msg.append('</source>')
97 msg.append('</source>')
98
98
99 return '\n'.join(msg)
99 return '\n'.join(msg)
100
100
101 def diffstat(self):
101 def diffstat(self):
102 class patchbuf(object):
102 class patchbuf(object):
103 def __init__(self):
103 def __init__(self):
104 self.lines = []
104 self.lines = []
105 # diffstat is stupid
105 # diffstat is stupid
106 self.name = 'cia'
106 self.name = 'cia'
107 def write(self, data):
107 def write(self, data):
108 self.lines.append(data)
108 self.lines.append(data)
109 def close(self):
109 def close(self):
110 pass
110 pass
111
111
112 n = self.ctx.node()
112 n = self.ctx.node()
113 pbuf = patchbuf()
113 pbuf = patchbuf()
114 patch.export(self.cia.repo, [n], fp=pbuf)
114 patch.export(self.cia.repo, [n], fp=pbuf)
115 return patch.diffstat(pbuf.lines) or ''
115 return patch.diffstat(pbuf.lines) or ''
116
116
117 def logmsg(self):
117 def logmsg(self):
118 diffstat = self.cia.diffstat and self.diffstat() or ''
118 diffstat = self.cia.diffstat and self.diffstat() or ''
119 self.cia.ui.pushbuffer()
119 self.cia.ui.pushbuffer()
120 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
120 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
121 url=self.cia.url, diffstat=diffstat)
121 url=self.cia.url, diffstat=diffstat)
122 return self.cia.ui.popbuffer()
122 return self.cia.ui.popbuffer()
123
123
124 def xml(self):
124 def xml(self):
125 n = short(self.ctx.node())
125 n = short(self.ctx.node())
126 src = self.sourceelem(self.cia.project, module=self.cia.module,
126 src = self.sourceelem(self.cia.project, module=self.cia.module,
127 branch=self.ctx.branch())
127 branch=self.ctx.branch())
128 # unix timestamp
128 # unix timestamp
129 dt = self.ctx.date()
129 dt = self.ctx.date()
130 timestamp = dt[0]
130 timestamp = dt[0]
131
131
132 author = saxutils.escape(self.ctx.user())
132 author = saxutils.escape(self.ctx.user())
133 rev = '%d:%s' % (self.ctx.rev(), n)
133 rev = '%d:%s' % (self.ctx.rev(), n)
134 log = saxutils.escape(self.logmsg())
134 log = saxutils.escape(self.logmsg())
135
135
136 url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url),
136 url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url),
137 n) or ''
137 n) or ''
138
138
139 msg = """
139 msg = """
140 <message>
140 <message>
141 <generator>
141 <generator>
142 <name>Mercurial (hgcia)</name>
142 <name>Mercurial (hgcia)</name>
143 <version>%s</version>
143 <version>%s</version>
144 <url>%s</url>
144 <url>%s</url>
145 <user>%s</user>
145 <user>%s</user>
146 </generator>
146 </generator>
147 %s
147 %s
148 <body>
148 <body>
149 <commit>
149 <commit>
150 <author>%s</author>
150 <author>%s</author>
151 <version>%s</version>
151 <version>%s</version>
152 <log>%s</log>
152 <log>%s</log>
153 %s
153 %s
154 <files>%s</files>
154 <files>%s</files>
155 </commit>
155 </commit>
156 </body>
156 </body>
157 <timestamp>%d</timestamp>
157 <timestamp>%d</timestamp>
158 </message>
158 </message>
159 """ % \
159 """ % \
160 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
160 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
161 saxutils.escape(self.cia.user), src, author, rev, log, url,
161 saxutils.escape(self.cia.user), src, author, rev, log, url,
162 self.fileelems(), timestamp)
162 self.fileelems(), timestamp)
163
163
164 return msg
164 return msg
165
165
166
166
167 class hgcia(object):
167 class hgcia(object):
168 """ CIA notification class """
168 """ CIA notification class """
169
169
170 deftemplate = '{desc}'
170 deftemplate = '{desc}'
171 dstemplate = '{desc}\n-- \n{diffstat}'
171 dstemplate = '{desc}\n-- \n{diffstat}'
172
172
173 def __init__(self, ui, repo):
173 def __init__(self, ui, repo):
174 self.ui = ui
174 self.ui = ui
175 self.repo = repo
175 self.repo = repo
176
176
177 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
177 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
178 self.user = self.ui.config('cia', 'user')
178 self.user = self.ui.config('cia', 'user')
179 self.project = self.ui.config('cia', 'project')
179 self.project = self.ui.config('cia', 'project')
180 self.module = self.ui.config('cia', 'module')
180 self.module = self.ui.config('cia', 'module')
181 self.diffstat = self.ui.configbool('cia', 'diffstat')
181 self.diffstat = self.ui.configbool('cia', 'diffstat')
182 self.emailfrom = self.ui.config('email', 'from')
182 self.emailfrom = self.ui.config('email', 'from')
183 self.dryrun = self.ui.configbool('cia', 'test')
183 self.dryrun = self.ui.configbool('cia', 'test')
184 self.url = self.ui.config('web', 'baseurl')
184 self.url = self.ui.config('web', 'baseurl')
185
185
186 style = self.ui.config('cia', 'style')
186 style = self.ui.config('cia', 'style')
187 template = self.ui.config('cia', 'template')
187 template = self.ui.config('cia', 'template')
188 if not template:
188 if not template:
189 template = self.diffstat and self.dstemplate or self.deftemplate
189 template = self.diffstat and self.dstemplate or self.deftemplate
190 template = templater.parsestring(template, quoted=False)
190 template = templater.parsestring(template, quoted=False)
191 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
191 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
192 style, False)
192 style, False)
193 t.use_template(template)
193 t.use_template(template)
194 self.templater = t
194 self.templater = t
195
195
196 def sendrpc(self, msg):
196 def sendrpc(self, msg):
197 srv = xmlrpclib.Server(self.ciaurl)
197 srv = xmlrpclib.Server(self.ciaurl)
198 srv.hub.deliver(msg)
198 srv.hub.deliver(msg)
199
199
200 def sendemail(self, address, data):
200 def sendemail(self, address, data):
201 p = email.Parser.Parser()
201 p = email.Parser.Parser()
202 msg = p.parsestr(data)
202 msg = p.parsestr(data)
203 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
203 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
204 msg['To'] = address
204 msg['To'] = address
205 msg['From'] = self.emailfrom
205 msg['From'] = self.emailfrom
206 msg['Subject'] = 'DeliverXML'
206 msg['Subject'] = 'DeliverXML'
207 msg['Content-type'] = 'text/xml'
207 msg['Content-type'] = 'text/xml'
208 msgtext = msg.as_string(0)
208 msgtext = msg.as_string(0)
209
209
210 self.ui.status(_('hgcia: sending update to %s\n') % address)
210 self.ui.status(_('hgcia: sending update to %s\n') % address)
211 mail.sendmail(self.ui, util.email(self.emailfrom),
211 mail.sendmail(self.ui, util.email(self.emailfrom),
212 [address], msgtext)
212 [address], msgtext)
213
213
214
214
215 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
215 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
216 """ send CIA notification """
216 """ send CIA notification """
217 def sendmsg(cia, ctx):
217 def sendmsg(cia, ctx):
218 msg = ciamsg(cia, ctx).xml()
218 msg = ciamsg(cia, ctx).xml()
219 if cia.dryrun:
219 if cia.dryrun:
220 ui.write(msg)
220 ui.write(msg)
221 elif cia.ciaurl.startswith('mailto:'):
221 elif cia.ciaurl.startswith('mailto:'):
222 if not cia.emailfrom:
222 if not cia.emailfrom:
223 raise util.Abort(_('email.from must be defined when '
223 raise util.Abort(_('email.from must be defined when '
224 'sending by email'))
224 'sending by email'))
225 cia.sendemail(cia.ciaurl[7:], msg)
225 cia.sendemail(cia.ciaurl[7:], msg)
226 else:
226 else:
227 cia.sendrpc(msg)
227 cia.sendrpc(msg)
228
228
229 n = bin(node)
229 n = bin(node)
230 cia = hgcia(ui, repo)
230 cia = hgcia(ui, repo)
231 if not cia.user:
231 if not cia.user:
232 ui.debug(_('cia: no user specified'))
232 ui.debug(_('cia: no user specified'))
233 return
233 return
234 if not cia.project:
234 if not cia.project:
235 ui.debug(_('cia: no project specified'))
235 ui.debug(_('cia: no project specified'))
236 return
236 return
237 if hooktype == 'changegroup':
237 if hooktype == 'changegroup':
238 start = repo.changelog.rev(n)
238 start = repo.changelog.rev(n)
239 end = len(repo.changelog)
239 end = len(repo.changelog)
240 for rev in xrange(start, end):
240 for rev in xrange(start, end):
241 n = repo.changelog.node(rev)
241 n = repo.changelog.node(rev)
242 ctx = repo.changectx(n)
242 ctx = repo.changectx(n)
243 sendmsg(cia, ctx)
243 sendmsg(cia, ctx)
244 else:
244 else:
245 ctx = repo.changectx(n)
245 ctx = repo.changectx(n)
246 sendmsg(cia, ctx)
246 sendmsg(cia, ctx)
@@ -1,346 +1,346 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''browsing the repository in a graphical way
8 '''browse the repository in a graphical way
9
9
10 The hgk extension allows browsing the history of a repository in a
10 The hgk extension allows browsing the history of a repository in a
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
12 distributed with Mercurial.)
12 distributed with Mercurial.)
13
13
14 hgk consists of two parts: a Tcl script that does the displaying and
14 hgk consists of two parts: a Tcl script that does the displaying and
15 querying of information, and an extension to Mercurial named hgk.py,
15 querying of information, and an extension to Mercurial named hgk.py,
16 which provides hooks for hgk to get information. hgk can be found in
16 which provides hooks for hgk to get information. hgk can be found in
17 the contrib directory, and the extension is shipped in the hgext
17 the contrib directory, and the extension is shipped in the hgext
18 repository, and needs to be enabled.
18 repository, and needs to be enabled.
19
19
20 The hg view command will launch the hgk Tcl script. For this command
20 The hg view command will launch the hgk Tcl script. For this command
21 to work, hgk must be in your search path. Alternatively, you can specify
21 to work, hgk must be in your search path. Alternatively, you can specify
22 the path to hgk in your .hgrc file:
22 the path to hgk in your .hgrc file:
23
23
24 [hgk]
24 [hgk]
25 path=/location/of/hgk
25 path=/location/of/hgk
26
26
27 hgk can make use of the extdiff extension to visualize revisions.
27 hgk can make use of the extdiff extension to visualize revisions.
28 Assuming you have already configured the extdiff vdiff command, just add:
28 Assuming you have already configured the extdiff vdiff command, just add:
29
29
30 [hgk]
30 [hgk]
31 vdiff=vdiff
31 vdiff=vdiff
32
32
33 The revisions context menu will now display additional entries to run
33 The revisions context menu will now display additional entries to run
34 vdiff on the hovered and selected revisions.'''
34 vdiff on the hovered and selected revisions.'''
35
35
36 import os
36 import os
37 from mercurial import commands, util, patch, revlog, cmdutil
37 from mercurial import commands, util, patch, revlog, cmdutil
38 from mercurial.node import nullid, nullrev, short
38 from mercurial.node import nullid, nullrev, short
39 from mercurial.i18n import _
39 from mercurial.i18n import _
40
40
41 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
41 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
42 """diff trees from two commits"""
42 """diff trees from two commits"""
43 def __difftree(repo, node1, node2, files=[]):
43 def __difftree(repo, node1, node2, files=[]):
44 assert node2 is not None
44 assert node2 is not None
45 mmap = repo[node1].manifest()
45 mmap = repo[node1].manifest()
46 mmap2 = repo[node2].manifest()
46 mmap2 = repo[node2].manifest()
47 m = cmdutil.match(repo, files)
47 m = cmdutil.match(repo, files)
48 modified, added, removed = repo.status(node1, node2, m)[:3]
48 modified, added, removed = repo.status(node1, node2, m)[:3]
49 empty = short(nullid)
49 empty = short(nullid)
50
50
51 for f in modified:
51 for f in modified:
52 # TODO get file permissions
52 # TODO get file permissions
53 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
53 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
54 (short(mmap[f]), short(mmap2[f]), f, f))
54 (short(mmap[f]), short(mmap2[f]), f, f))
55 for f in added:
55 for f in added:
56 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
56 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
57 (empty, short(mmap2[f]), f, f))
57 (empty, short(mmap2[f]), f, f))
58 for f in removed:
58 for f in removed:
59 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
59 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
60 (short(mmap[f]), empty, f, f))
60 (short(mmap[f]), empty, f, f))
61 ##
61 ##
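# Illustrative output line for a modified file (hypothetical hashes, not
# from the original source):
#   :100664 100664 0123456789ab 456789abcdef M  foo.c  foo.c
# (the last three fields are separated by tabs in the real output)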
62
62
63 while True:
63 while True:
64 if opts['stdin']:
64 if opts['stdin']:
65 try:
65 try:
66 line = raw_input().split(' ')
66 line = raw_input().split(' ')
67 node1 = line[0]
67 node1 = line[0]
68 if len(line) > 1:
68 if len(line) > 1:
69 node2 = line[1]
69 node2 = line[1]
70 else:
70 else:
71 node2 = None
71 node2 = None
72 except EOFError:
72 except EOFError:
73 break
73 break
74 node1 = repo.lookup(node1)
74 node1 = repo.lookup(node1)
75 if node2:
75 if node2:
76 node2 = repo.lookup(node2)
76 node2 = repo.lookup(node2)
77 else:
77 else:
78 node2 = node1
78 node2 = node1
79 node1 = repo.changelog.parents(node1)[0]
79 node1 = repo.changelog.parents(node1)[0]
80 if opts['patch']:
80 if opts['patch']:
81 if opts['pretty']:
81 if opts['pretty']:
82 catcommit(ui, repo, node2, "")
82 catcommit(ui, repo, node2, "")
83 m = cmdutil.match(repo, files)
83 m = cmdutil.match(repo, files)
84 chunks = patch.diff(repo, node1, node2, match=m,
84 chunks = patch.diff(repo, node1, node2, match=m,
85 opts=patch.diffopts(ui, {'git': True}))
85 opts=patch.diffopts(ui, {'git': True}))
86 for chunk in chunks:
86 for chunk in chunks:
87 ui.write(chunk)
87 ui.write(chunk)
88 else:
88 else:
89 __difftree(repo, node1, node2, files=files)
89 __difftree(repo, node1, node2, files=files)
90 if not opts['stdin']:
90 if not opts['stdin']:
91 break
91 break
92
92
93 def catcommit(ui, repo, n, prefix, ctx=None):
93 def catcommit(ui, repo, n, prefix, ctx=None):
94 nlprefix = '\n' + prefix
94 nlprefix = '\n' + prefix
95 if ctx is None:
95 if ctx is None:
96 ctx = repo[n]
96 ctx = repo[n]
97 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
97 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
98 for p in ctx.parents():
98 for p in ctx.parents():
99 ui.write("parent %s\n" % p)
99 ui.write("parent %s\n" % p)
100
100
101 date = ctx.date()
101 date = ctx.date()
102 description = ctx.description().replace("\0", "")
102 description = ctx.description().replace("\0", "")
103 lines = description.splitlines()
103 lines = description.splitlines()
104 if lines and lines[-1].startswith('committer:'):
104 if lines and lines[-1].startswith('committer:'):
105 committer = lines[-1].split(': ')[1].rstrip()
105 committer = lines[-1].split(': ')[1].rstrip()
106 else:
106 else:
107 committer = ctx.user()
107 committer = ctx.user()
108
108
109 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
109 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
110 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
110 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
111 ui.write("revision %d\n" % ctx.rev())
111 ui.write("revision %d\n" % ctx.rev())
112 ui.write("branch %s\n\n" % ctx.branch())
112 ui.write("branch %s\n\n" % ctx.branch())
113
113
114 if prefix != "":
114 if prefix != "":
115 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
115 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
116 else:
116 else:
117 ui.write(description + "\n")
117 ui.write(description + "\n")
118 if prefix:
118 if prefix:
119 ui.write('\0')
119 ui.write('\0')
120
120
121 def base(ui, repo, node1, node2):
121 def base(ui, repo, node1, node2):
122 """output common ancestor information"""
122 """output common ancestor information"""
123 node1 = repo.lookup(node1)
123 node1 = repo.lookup(node1)
124 node2 = repo.lookup(node2)
124 node2 = repo.lookup(node2)
125 n = repo.changelog.ancestor(node1, node2)
125 n = repo.changelog.ancestor(node1, node2)
126 ui.write(short(n) + "\n")
126 ui.write(short(n) + "\n")
127
127
128 def catfile(ui, repo, type=None, r=None, **opts):
128 def catfile(ui, repo, type=None, r=None, **opts):
129 """cat a specific revision"""
129 """cat a specific revision"""
130 # in stdin mode, every line except the commit is prefixed with two
130 # in stdin mode, every line except the commit is prefixed with two
131 # spaces. This way our caller can find the commit without magic
131 # spaces. This way our caller can find the commit without magic
132 # strings
132 # strings
133 #
133 #
134 prefix = ""
134 prefix = ""
135 if opts['stdin']:
135 if opts['stdin']:
136 try:
136 try:
137 (type, r) = raw_input().split(' ')
137 (type, r) = raw_input().split(' ')
138 prefix = " "
138 prefix = " "
139 except EOFError:
139 except EOFError:
140 return
140 return
141
141
142 else:
142 else:
143 if not type or not r:
143 if not type or not r:
144 ui.warn(_("cat-file: type or revision not supplied\n"))
144 ui.warn(_("cat-file: type or revision not supplied\n"))
145 commands.help_(ui, 'cat-file')
145 commands.help_(ui, 'cat-file')
146
146
147 while r:
147 while r:
148 if type != "commit":
148 if type != "commit":
149 ui.warn(_("aborting hg cat-file only understands commits\n"))
149 ui.warn(_("aborting hg cat-file only understands commits\n"))
150 return 1
150 return 1
151 n = repo.lookup(r)
151 n = repo.lookup(r)
152 catcommit(ui, repo, n, prefix)
152 catcommit(ui, repo, n, prefix)
153 if opts['stdin']:
153 if opts['stdin']:
154 try:
154 try:
155 (type, r) = raw_input().split(' ')
155 (type, r) = raw_input().split(' ')
156 except EOFError:
156 except EOFError:
157 break
157 break
158 else:
158 else:
159 break
159 break
160
160
161 # git rev-tree is a confusing thing. You can supply a number of
161 # git rev-tree is a confusing thing. You can supply a number of
162 # commit sha1s on the command line, and it walks the commit history
162 # commit sha1s on the command line, and it walks the commit history
163 # telling you which commits are reachable from the supplied ones via
163 # telling you which commits are reachable from the supplied ones via
164 # a bitmask based on arg position.
164 # a bitmask based on arg position.
165 # You can specify a commit to stop at by starting the sha1 with ^
165 # You can specify a commit to stop at by starting the sha1 with ^
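# A hypothetical illustration (not part of the original source): with two
# arguments A and B, a commit reachable only from A is reported with mask 1,
# only from B with mask 2, and from both with mask 3, since bit i of the mask
# is set when the commit is reachable from the i-th argument.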
166 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
166 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
167 def chlogwalk():
167 def chlogwalk():
168 count = len(repo)
168 count = len(repo)
169 i = count
169 i = count
170 l = [0] * 100
170 l = [0] * 100
171 chunk = 100
171 chunk = 100
172 while True:
172 while True:
173 if chunk > i:
173 if chunk > i:
174 chunk = i
174 chunk = i
175 i = 0
175 i = 0
176 else:
176 else:
177 i -= chunk
177 i -= chunk
178
178
179 for x in xrange(chunk):
179 for x in xrange(chunk):
180 if i + x >= count:
180 if i + x >= count:
181 l[chunk - x:] = [0] * (chunk - x)
181 l[chunk - x:] = [0] * (chunk - x)
182 break
182 break
183 if full is not None:
183 if full is not None:
184 l[x] = repo[i + x]
184 l[x] = repo[i + x]
185 l[x].changeset() # force reading
185 l[x].changeset() # force reading
186 else:
186 else:
187 l[x] = 1
187 l[x] = 1
188 for x in xrange(chunk-1, -1, -1):
188 for x in xrange(chunk-1, -1, -1):
189 if l[x] != 0:
189 if l[x] != 0:
190 yield (i + x, full is not None and l[x] or None)
190 yield (i + x, full is not None and l[x] or None)
191 if i == 0:
191 if i == 0:
192 break
192 break
193
193
194 # calculate and return the reachability bitmask for sha
194 # calculate and return the reachability bitmask for sha
195 def is_reachable(ar, reachable, sha):
195 def is_reachable(ar, reachable, sha):
196 if len(ar) == 0:
196 if len(ar) == 0:
197 return 1
197 return 1
198 mask = 0
198 mask = 0
199 for i in xrange(len(ar)):
199 for i in xrange(len(ar)):
200 if sha in reachable[i]:
200 if sha in reachable[i]:
201 mask |= 1 << i
201 mask |= 1 << i
202
202
203 return mask
203 return mask
204
204
205 reachable = []
205 reachable = []
206 stop_sha1 = []
206 stop_sha1 = []
207 want_sha1 = []
207 want_sha1 = []
208 count = 0
208 count = 0
209
209
210 # figure out which commits they are asking for and which ones they
210 # figure out which commits they are asking for and which ones they
211 # want us to stop on
211 # want us to stop on
212 for i, arg in enumerate(args):
212 for i, arg in enumerate(args):
213 if arg.startswith('^'):
213 if arg.startswith('^'):
214 s = repo.lookup(arg[1:])
214 s = repo.lookup(arg[1:])
215 stop_sha1.append(s)
215 stop_sha1.append(s)
216 want_sha1.append(s)
216 want_sha1.append(s)
217 elif arg != 'HEAD':
217 elif arg != 'HEAD':
218 want_sha1.append(repo.lookup(arg))
218 want_sha1.append(repo.lookup(arg))
219
219
220 # calculate the graph for the supplied commits
220 # calculate the graph for the supplied commits
221 for i, n in enumerate(want_sha1):
221 for i, n in enumerate(want_sha1):
222 reachable.append(set())
222 reachable.append(set())
223 visit = [n]
223 visit = [n]
224 reachable[i].add(n)
224 reachable[i].add(n)
225 while visit:
225 while visit:
226 n = visit.pop(0)
226 n = visit.pop(0)
227 if n in stop_sha1:
227 if n in stop_sha1:
228 continue
228 continue
229 for p in repo.changelog.parents(n):
229 for p in repo.changelog.parents(n):
230 if p not in reachable[i]:
230 if p not in reachable[i]:
231 reachable[i].add(p)
231 reachable[i].add(p)
232 visit.append(p)
232 visit.append(p)
233 if p in stop_sha1:
233 if p in stop_sha1:
234 continue
234 continue
235
235
236 # walk the repository looking for commits that are in our
236 # walk the repository looking for commits that are in our
237 # reachability graph
237 # reachability graph
238 for i, ctx in chlogwalk():
238 for i, ctx in chlogwalk():
239 n = repo.changelog.node(i)
239 n = repo.changelog.node(i)
240 mask = is_reachable(want_sha1, reachable, n)
240 mask = is_reachable(want_sha1, reachable, n)
241 if mask:
241 if mask:
242 parentstr = ""
242 parentstr = ""
243 if parents:
243 if parents:
244 pp = repo.changelog.parents(n)
244 pp = repo.changelog.parents(n)
245 if pp[0] != nullid:
245 if pp[0] != nullid:
246 parentstr += " " + short(pp[0])
246 parentstr += " " + short(pp[0])
247 if pp[1] != nullid:
247 if pp[1] != nullid:
248 parentstr += " " + short(pp[1])
248 parentstr += " " + short(pp[1])
249 if not full:
249 if not full:
250 ui.write("%s%s\n" % (short(n), parentstr))
250 ui.write("%s%s\n" % (short(n), parentstr))
251 elif full == "commit":
251 elif full == "commit":
252 ui.write("%s%s\n" % (short(n), parentstr))
252 ui.write("%s%s\n" % (short(n), parentstr))
253 catcommit(ui, repo, n, ' ', ctx)
253 catcommit(ui, repo, n, ' ', ctx)
254 else:
254 else:
255 (p1, p2) = repo.changelog.parents(n)
255 (p1, p2) = repo.changelog.parents(n)
256 (h, h1, h2) = map(short, (n, p1, p2))
256 (h, h1, h2) = map(short, (n, p1, p2))
257 (i1, i2) = map(repo.changelog.rev, (p1, p2))
257 (i1, i2) = map(repo.changelog.rev, (p1, p2))
258
258
259 date = ctx.date()[0]
259 date = ctx.date()[0]
260 ui.write("%s %s:%s" % (date, h, mask))
260 ui.write("%s %s:%s" % (date, h, mask))
261 mask = is_reachable(want_sha1, reachable, p1)
261 mask = is_reachable(want_sha1, reachable, p1)
262 if i1 != nullrev and mask > 0:
262 if i1 != nullrev and mask > 0:
263 ui.write("%s:%s " % (h1, mask)),
263 ui.write("%s:%s " % (h1, mask)),
264 mask = is_reachable(want_sha1, reachable, p2)
264 mask = is_reachable(want_sha1, reachable, p2)
265 if i2 != nullrev and mask > 0:
265 if i2 != nullrev and mask > 0:
266 ui.write("%s:%s " % (h2, mask))
266 ui.write("%s:%s " % (h2, mask))
267 ui.write("\n")
267 ui.write("\n")
268 if maxnr and count >= maxnr:
268 if maxnr and count >= maxnr:
269 break
269 break
270 count += 1
270 count += 1
271
271
272 def revparse(ui, repo, *revs, **opts):
272 def revparse(ui, repo, *revs, **opts):
273 """parse given revisions"""
273 """parse given revisions"""
274 def revstr(rev):
274 def revstr(rev):
275 if rev == 'HEAD':
275 if rev == 'HEAD':
276 rev = 'tip'
276 rev = 'tip'
277 return revlog.hex(repo.lookup(rev))
277 return revlog.hex(repo.lookup(rev))
278
278
279 for r in revs:
279 for r in revs:
280 revrange = r.split(':', 1)
280 revrange = r.split(':', 1)
281 ui.write('%s\n' % revstr(revrange[0]))
281 ui.write('%s\n' % revstr(revrange[0]))
282 if len(revrange) == 2:
282 if len(revrange) == 2:
283 ui.write('^%s\n' % revstr(revrange[1]))
283 ui.write('^%s\n' % revstr(revrange[1]))
284
284
285 # git rev-list tries to order things by date, and has the ability to stop
285 # git rev-list tries to order things by date, and has the ability to stop
286 # at a given commit without walking the whole repo. TODO add the stop
286 # at a given commit without walking the whole repo. TODO add the stop
287 # parameter
287 # parameter
288 def revlist(ui, repo, *revs, **opts):
288 def revlist(ui, repo, *revs, **opts):
289 """print revisions"""
289 """print revisions"""
290 if opts['header']:
290 if opts['header']:
291 full = "commit"
291 full = "commit"
292 else:
292 else:
293 full = None
293 full = None
294 copy = [x for x in revs]
294 copy = [x for x in revs]
295 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
295 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
296
296
297 def config(ui, repo, **opts):
297 def config(ui, repo, **opts):
298 """print extension options"""
298 """print extension options"""
299 def writeopt(name, value):
299 def writeopt(name, value):
300 ui.write('k=%s\nv=%s\n' % (name, value))
300 ui.write('k=%s\nv=%s\n' % (name, value))
301
301
302 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
302 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
303
303
304
304
305 def view(ui, repo, *etc, **opts):
305 def view(ui, repo, *etc, **opts):
306 "start interactive history viewer"
306 "start interactive history viewer"
307 os.chdir(repo.root)
307 os.chdir(repo.root)
308 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
308 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
309 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
309 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
310 ui.debug(_("running %s\n") % cmd)
310 ui.debug(_("running %s\n") % cmd)
311 util.system(cmd)
311 util.system(cmd)
312
312
313 cmdtable = {
313 cmdtable = {
314 "^view":
314 "^view":
315 (view,
315 (view,
316 [('l', 'limit', '', _('limit number of changes displayed'))],
316 [('l', 'limit', '', _('limit number of changes displayed'))],
317 _('hg view [-l LIMIT] [REVRANGE]')),
317 _('hg view [-l LIMIT] [REVRANGE]')),
318 "debug-diff-tree":
318 "debug-diff-tree":
319 (difftree,
319 (difftree,
320 [('p', 'patch', None, _('generate patch')),
320 [('p', 'patch', None, _('generate patch')),
321 ('r', 'recursive', None, _('recursive')),
321 ('r', 'recursive', None, _('recursive')),
322 ('P', 'pretty', None, _('pretty')),
322 ('P', 'pretty', None, _('pretty')),
323 ('s', 'stdin', None, _('stdin')),
323 ('s', 'stdin', None, _('stdin')),
324 ('C', 'copy', None, _('detect copies')),
324 ('C', 'copy', None, _('detect copies')),
325 ('S', 'search', "", _('search'))],
325 ('S', 'search', "", _('search'))],
326 _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
326 _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
327 "debug-cat-file":
327 "debug-cat-file":
328 (catfile,
328 (catfile,
329 [('s', 'stdin', None, _('stdin'))],
329 [('s', 'stdin', None, _('stdin'))],
330 _('hg debug-cat-file [OPTION]... TYPE FILE')),
330 _('hg debug-cat-file [OPTION]... TYPE FILE')),
331 "debug-config":
331 "debug-config":
332 (config, [], _('hg debug-config')),
332 (config, [], _('hg debug-config')),
333 "debug-merge-base":
333 "debug-merge-base":
334 (base, [], _('hg debug-merge-base node node')),
334 (base, [], _('hg debug-merge-base node node')),
335 "debug-rev-parse":
335 "debug-rev-parse":
336 (revparse,
336 (revparse,
337 [('', 'default', '', _('ignored'))],
337 [('', 'default', '', _('ignored'))],
338 _('hg debug-rev-parse REV')),
338 _('hg debug-rev-parse REV')),
339 "debug-rev-list":
339 "debug-rev-list":
340 (revlist,
340 (revlist,
341 [('H', 'header', None, _('header')),
341 [('H', 'header', None, _('header')),
342 ('t', 'topo-order', None, _('topo-order')),
342 ('t', 'topo-order', None, _('topo-order')),
343 ('p', 'parents', None, _('parents')),
343 ('p', 'parents', None, _('parents')),
344 ('n', 'max-count', 0, _('max-count'))],
344 ('n', 'max-count', 0, _('max-count'))],
345 _('hg debug-rev-list [options] revs')),
345 _('hg debug-rev-list [options] revs')),
346 }
346 }
@@ -1,62 +1,62 @@
1 # highlight - syntax highlighting in hgweb, based on Pygments
1 # highlight - syntax highlighting in hgweb, based on Pygments
2 #
2 #
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7 #
7 #
8 # The original module was split in an interface and an implementation
8 # The original module was split in an interface and an implementation
9 # file to defer pygments loading and speedup extension setup.
9 # file to defer pygments loading and speedup extension setup.
10
10
11 """syntax highlighting in hgweb, based on Pygments
11 """syntax highlighting for hgweb
12
12
13 It depends on the Pygments syntax highlighting library:
13 It depends on the Pygments syntax highlighting library:
14 http://pygments.org/
14 http://pygments.org/
15
15
16 There is a single configuration option:
16 There is a single configuration option:
17
17
18 [web]
18 [web]
19 pygments_style = <style>
19 pygments_style = <style>
20
20
21 The default is 'colorful'.
21 The default is 'colorful'.
22
22
23 -- Adam Hupp <adam@hupp.org>
23 -- Adam Hupp <adam@hupp.org>
24 """
24 """
25
25
26 import highlight
26 import highlight
27 from mercurial.hgweb import webcommands, webutil, common
27 from mercurial.hgweb import webcommands, webutil, common
28 from mercurial import extensions, encoding
28 from mercurial import extensions, encoding
29
29
30 def filerevision_highlight(orig, web, tmpl, fctx):
30 def filerevision_highlight(orig, web, tmpl, fctx):
31 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
31 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
32 # only pygmentize for mimetype containing 'html' so we both match
32 # only pygmentize for mimetype containing 'html' so we both match
33 # 'text/html' and possibly 'application/xhtml+xml' in the future
33 # 'text/html' and possibly 'application/xhtml+xml' in the future
34 # so that we don't have to touch the extension when the mimetype
34 # so that we don't have to touch the extension when the mimetype
35 # for a template changes; also hgweb optimizes the case that a
35 # for a template changes; also hgweb optimizes the case that a
36 # raw file is sent using rawfile() and doesn't call us, so we
36 # raw file is sent using rawfile() and doesn't call us, so we
37 # can't clash with the file's content-type here in case we
37 # can't clash with the file's content-type here in case we
38 # pygmentize a html file
38 # pygmentize a html file
39 if 'html' in mt:
39 if 'html' in mt:
40 style = web.config('web', 'pygments_style', 'colorful')
40 style = web.config('web', 'pygments_style', 'colorful')
41 highlight.pygmentize('fileline', fctx, style, tmpl)
41 highlight.pygmentize('fileline', fctx, style, tmpl)
42 return orig(web, tmpl, fctx)
42 return orig(web, tmpl, fctx)
43
43
44 def annotate_highlight(orig, web, req, tmpl):
44 def annotate_highlight(orig, web, req, tmpl):
45 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
45 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
46 if 'html' in mt:
46 if 'html' in mt:
47 fctx = webutil.filectx(web.repo, req)
47 fctx = webutil.filectx(web.repo, req)
48 style = web.config('web', 'pygments_style', 'colorful')
48 style = web.config('web', 'pygments_style', 'colorful')
49 highlight.pygmentize('annotateline', fctx, style, tmpl)
49 highlight.pygmentize('annotateline', fctx, style, tmpl)
50 return orig(web, req, tmpl)
50 return orig(web, req, tmpl)
51
51
52 def generate_css(web, req, tmpl):
52 def generate_css(web, req, tmpl):
53 pg_style = web.config('web', 'pygments_style', 'colorful')
53 pg_style = web.config('web', 'pygments_style', 'colorful')
54 fmter = highlight.HtmlFormatter(style = pg_style)
54 fmter = highlight.HtmlFormatter(style = pg_style)
55 req.respond(common.HTTP_OK, 'text/css')
55 req.respond(common.HTTP_OK, 'text/css')
56 return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
56 return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
57
57
58 # monkeypatch in the new version
58 # monkeypatch in the new version
59 extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
59 extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
60 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
60 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
61 webcommands.highlightcss = generate_css
61 webcommands.highlightcss = generate_css
62 webcommands.__all__.append('highlightcss')
62 webcommands.__all__.append('highlightcss')
@@ -1,112 +1,111 @@
1 # __init__.py - inotify-based status acceleration for Linux
1 # __init__.py - inotify-based status acceleration for Linux
2 #
2 #
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 '''inotify-based status acceleration for Linux systems
9 '''accelerate status report using system level services'''
10 '''
11
10
12 # todo: socket permissions
11 # todo: socket permissions
13
12
14 from mercurial.i18n import _
13 from mercurial.i18n import _
15 from mercurial import cmdutil, util
14 from mercurial import cmdutil, util
16 import server
15 import server
17 from weakref import proxy
16 from weakref import proxy
18 from client import client, QueryFailed
17 from client import client, QueryFailed
19
18
20 def serve(ui, repo, **opts):
19 def serve(ui, repo, **opts):
21 '''start an inotify server for this repository'''
20 '''start an inotify server for this repository'''
22 timeout = opts.get('timeout')
21 timeout = opts.get('timeout')
23 if timeout:
22 if timeout:
24 timeout = float(timeout) * 1e3
23 timeout = float(timeout) * 1e3
25
24
26 class service(object):
25 class service(object):
27 def init(self):
26 def init(self):
28 try:
27 try:
29 self.master = server.master(ui, repo, timeout)
28 self.master = server.master(ui, repo, timeout)
30 except server.AlreadyStartedException, inst:
29 except server.AlreadyStartedException, inst:
31 raise util.Abort(str(inst))
30 raise util.Abort(str(inst))
32
31
33 def run(self):
32 def run(self):
34 try:
33 try:
35 self.master.run()
34 self.master.run()
36 finally:
35 finally:
37 self.master.shutdown()
36 self.master.shutdown()
38
37
39 service = service()
38 service = service()
40 logfile = ui.config('inotify', 'log')
39 logfile = ui.config('inotify', 'log')
41 cmdutil.service(opts, initfn=service.init, runfn=service.run,
40 cmdutil.service(opts, initfn=service.init, runfn=service.run,
42 logfile=logfile)
41 logfile=logfile)
43
42
44 def debuginotify(ui, repo, **opts):
43 def debuginotify(ui, repo, **opts):
45 '''debugging information for inotify extension
44 '''debugging information for inotify extension
46
45
47 Prints the list of directories being watched by the inotify server.
46 Prints the list of directories being watched by the inotify server.
48 '''
47 '''
49 cli = client(ui, repo)
48 cli = client(ui, repo)
50 response = cli.debugquery()
49 response = cli.debugquery()
51
50
52 ui.write(_('directories being watched:\n'))
51 ui.write(_('directories being watched:\n'))
53 for path in response:
52 for path in response:
54 ui.write((' %s/\n') % path)
53 ui.write((' %s/\n') % path)
55
54
56 def reposetup(ui, repo):
55 def reposetup(ui, repo):
57 if not hasattr(repo, 'dirstate'):
56 if not hasattr(repo, 'dirstate'):
58 return
57 return
59
58
60 # XXX: weakref until hg stops relying on __del__
59 # XXX: weakref until hg stops relying on __del__
61 repo = proxy(repo)
60 repo = proxy(repo)
62
61
63 class inotifydirstate(repo.dirstate.__class__):
62 class inotifydirstate(repo.dirstate.__class__):
64
63
65 # We'll set this to false after an unsuccessful attempt so that
64 # We'll set this to false after an unsuccessful attempt so that
66 # next calls of status() within the same instance don't try again
65 # next calls of status() within the same instance don't try again
67 # to start an inotify server if it won't start.
66 # to start an inotify server if it won't start.
68 _inotifyon = True
67 _inotifyon = True
69
68
70 def status(self, match, ignored, clean, unknown=True):
69 def status(self, match, ignored, clean, unknown=True):
71 files = match.files()
70 files = match.files()
72 if '.' in files:
71 if '.' in files:
73 files = []
72 files = []
74 if self._inotifyon and not ignored:
73 if self._inotifyon and not ignored:
75 cli = client(ui, repo)
74 cli = client(ui, repo)
76 try:
75 try:
77 result = cli.statusquery(files, match, False,
76 result = cli.statusquery(files, match, False,
78 clean, unknown)
77 clean, unknown)
79 except QueryFailed, instr:
78 except QueryFailed, instr:
80 ui.debug(str(instr))
79 ui.debug(str(instr))
81 # don't retry within the same hg instance
80 # don't retry within the same hg instance
82 inotifydirstate._inotifyon = False
81 inotifydirstate._inotifyon = False
83 pass
82 pass
84 else:
83 else:
85 if ui.config('inotify', 'debug'):
84 if ui.config('inotify', 'debug'):
86 r2 = super(inotifydirstate, self).status(
85 r2 = super(inotifydirstate, self).status(
87 match, False, clean, unknown)
86 match, False, clean, unknown)
88 for c,a,b in zip('LMARDUIC', result, r2):
87 for c,a,b in zip('LMARDUIC', result, r2):
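# [editor's note] the letters above stand for the fields of the status
# tuple -- lookup, modified, added, removed, deleted, unknown, ignored,
# clean -- compared pairwise between the inotify and the plain results.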
89 for f in a:
88 for f in a:
90 if f not in b:
89 if f not in b:
91 ui.warn('*** inotify: %s +%s\n' % (c, f))
90 ui.warn('*** inotify: %s +%s\n' % (c, f))
92 for f in b:
91 for f in b:
93 if f not in a:
92 if f not in a:
94 ui.warn('*** inotify: %s -%s\n' % (c, f))
93 ui.warn('*** inotify: %s -%s\n' % (c, f))
95 result = r2
94 result = r2
96 return result
95 return result
97 return super(inotifydirstate, self).status(
96 return super(inotifydirstate, self).status(
98 match, ignored, clean, unknown)
97 match, ignored, clean, unknown)
99
98
100 repo.dirstate.__class__ = inotifydirstate
99 repo.dirstate.__class__ = inotifydirstate
101
100
102 cmdtable = {
101 cmdtable = {
103 'debuginotify':
102 'debuginotify':
104 (debuginotify, [], ('hg debuginotify')),
103 (debuginotify, [], ('hg debuginotify')),
105 '^inserve':
104 '^inserve':
106 (serve,
105 (serve,
107 [('d', 'daemon', None, _('run server in background')),
106 [('d', 'daemon', None, _('run server in background')),
108 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
107 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
109 ('t', 'idle-timeout', '', _('minutes to sit idle before exiting')),
108 ('t', 'idle-timeout', '', _('minutes to sit idle before exiting')),
110 ('', 'pid-file', '', _('name of file to write process ID to'))],
109 ('', 'pid-file', '', _('name of file to write process ID to'))],
111 _('hg inserve [OPT]...')),
110 _('hg inserve [OPT]...')),
112 }
111 }
@@ -1,528 +1,528 @@
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28
28
29 '''keyword expansion in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Configuration is done in the [keyword] and [keywordmaps] sections of
38 Configuration is done in the [keyword] and [keywordmaps] sections of
39 hgrc files.
39 hgrc files.
40
40
41 Example:
41 Example:
42
42
43 [keyword]
43 [keyword]
44 # expand keywords in every python file except those matching "x*"
44 # expand keywords in every python file except those matching "x*"
45 **.py =
45 **.py =
46 x* = ignore
46 x* = ignore
47
47
48 Note: the more specific you are in your filename patterns
48 Note: the more specific you are in your filename patterns
49 the less you lose speed in huge repositories.
49 the less you lose speed in huge repositories.
50
50
51 For [keywordmaps] template mapping and expansion demonstration and
51 For [keywordmaps] template mapping and expansion demonstration and
52 control run "hg kwdemo".
52 control run "hg kwdemo".
53
53
54 An additional date template filter {date|utcdate} is provided.
54 An additional date template filter {date|utcdate} is provided.
55
55
56 The default template mappings (view with "hg kwdemo -d") can be
56 The default template mappings (view with "hg kwdemo -d") can be
57 replaced with customized keywords and templates. Again, run "hg
57 replaced with customized keywords and templates. Again, run "hg
58 kwdemo" to control the results of your config changes.
58 kwdemo" to control the results of your config changes.
59
59
60 Before changing/disabling active keywords, run "hg kwshrink" to avoid
60 Before changing/disabling active keywords, run "hg kwshrink" to avoid
61 the risk of inadvertently storing expanded keywords in the change
61 the risk of inadvertently storing expanded keywords in the change
62 history.
62 history.
63
63
64 To force expansion after enabling it, or a configuration change, run
64 To force expansion after enabling it, or a configuration change, run
65 "hg kwexpand".
65 "hg kwexpand".
66
66
67 Also, when committing with the record extension or using mq's qrecord,
67 Also, when committing with the record extension or using mq's qrecord,
68 be aware that keywords cannot be updated. Again, run "hg kwexpand" on
68 be aware that keywords cannot be updated. Again, run "hg kwexpand" on
69 the files in question to update keyword expansions after all changes
69 the files in question to update keyword expansions after all changes
70 have been checked in.
70 have been checked in.
71
71
72 Expansions spanning more than one line and incremental expansions,
72 Expansions spanning more than one line and incremental expansions,
73 like CVS' $Log$, are not supported. A keyword template map
73 like CVS' $Log$, are not supported. A keyword template map
74 "Log = {desc}" expands to the first line of the changeset description.
74 "Log = {desc}" expands to the first line of the changeset description.
75 '''
75 '''
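# [editor's note -- illustrative only, not part of this changeset]
# A small hgrc sketch combining the sections described above; the file
# patterns and the customized map are invented for the example.
#
#   [extensions]
#   hgext.keyword =
#
#   [keyword]
#   # expand keywords in files under docs/, ignore generated ones
#   docs/** =
#   docs/gen-* = ignore
#
#   [keywordmaps]
#   # shorter replacement for the default $Id$ expansion
#   Id = {file|basename} {node|short} {author|user}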
76
76
77 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
77 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
78 from mercurial import patch, localrepo, templater, templatefilters, util, match
78 from mercurial import patch, localrepo, templater, templatefilters, util, match
79 from mercurial.hgweb import webcommands
79 from mercurial.hgweb import webcommands
80 from mercurial.lock import release
80 from mercurial.lock import release
81 from mercurial.node import nullid, hex
81 from mercurial.node import nullid, hex
82 from mercurial.i18n import _
82 from mercurial.i18n import _
83 import re, shutil, tempfile, time
83 import re, shutil, tempfile, time
84
84
85 commands.optionalrepo += ' kwdemo'
85 commands.optionalrepo += ' kwdemo'
86
86
87 # hg commands that do not act on keywords
87 # hg commands that do not act on keywords
88 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
88 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
89 ' log outgoing push rename rollback tip verify'
89 ' log outgoing push rename rollback tip verify'
90 ' convert email glog')
90 ' convert email glog')
91
91
92 # hg commands that trigger expansion only when writing to working dir,
92 # hg commands that trigger expansion only when writing to working dir,
93 # not when reading filelog, and unexpand when reading from working dir
93 # not when reading filelog, and unexpand when reading from working dir
94 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
94 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
95
95
96 def utcdate(date):
96 def utcdate(date):
97 '''Returns hgdate in cvs-like UTC format.'''
97 '''Returns hgdate in cvs-like UTC format.'''
98 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
98 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
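# [editor's note] e.g. a changeset dated 1 Feb 2006 13:25:57 UTC comes out
# as "2006/02/01 13:25:57" (values invented to illustrate the format).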
99
99
100 # make keyword tools accessible
100 # make keyword tools accessible
101 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
101 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
102
102
103
103
104 class kwtemplater(object):
104 class kwtemplater(object):
105 '''
105 '''
106 Sets up keyword templates, corresponding keyword regex, and
106 Sets up keyword templates, corresponding keyword regex, and
107 provides keyword substitution functions.
107 provides keyword substitution functions.
108 '''
108 '''
109 templates = {
109 templates = {
110 'Revision': '{node|short}',
110 'Revision': '{node|short}',
111 'Author': '{author|user}',
111 'Author': '{author|user}',
112 'Date': '{date|utcdate}',
112 'Date': '{date|utcdate}',
113 'RCSFile': '{file|basename},v',
113 'RCSFile': '{file|basename},v',
114 'Source': '{root}/{file},v',
114 'Source': '{root}/{file},v',
115 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
115 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
116 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
116 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
117 }
117 }
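# [editor's illustration -- not part of this changeset] with the default
# maps above, a tracked file containing "$Id$" would expand to something
# like "$Id: demo.txt,v 1a2b3c4d5e6f 2009/06/27 10:00:00 alice $";
# the file name, hash, date and user shown here are invented.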
118
118
119 def __init__(self, ui, repo):
119 def __init__(self, ui, repo):
120 self.ui = ui
120 self.ui = ui
121 self.repo = repo
121 self.repo = repo
122 self.match = match.match(repo.root, '', [],
122 self.match = match.match(repo.root, '', [],
123 kwtools['inc'], kwtools['exc'])
123 kwtools['inc'], kwtools['exc'])
124 self.restrict = kwtools['hgcmd'] in restricted.split()
124 self.restrict = kwtools['hgcmd'] in restricted.split()
125
125
126 kwmaps = self.ui.configitems('keywordmaps')
126 kwmaps = self.ui.configitems('keywordmaps')
127 if kwmaps: # override default templates
127 if kwmaps: # override default templates
128 kwmaps = [(k, templater.parsestring(v, False))
128 kwmaps = [(k, templater.parsestring(v, False))
129 for (k, v) in kwmaps]
129 for (k, v) in kwmaps]
130 self.templates = dict(kwmaps)
130 self.templates = dict(kwmaps)
131 escaped = map(re.escape, self.templates.keys())
131 escaped = map(re.escape, self.templates.keys())
132 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
132 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
133 self.re_kw = re.compile(kwpat)
133 self.re_kw = re.compile(kwpat)
134
134
135 templatefilters.filters['utcdate'] = utcdate
135 templatefilters.filters['utcdate'] = utcdate
136 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
136 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
137 False, None, '', False)
137 False, None, '', False)
138
138
139 def substitute(self, data, path, ctx, subfunc):
139 def substitute(self, data, path, ctx, subfunc):
140 '''Replaces keywords in data with expanded template.'''
140 '''Replaces keywords in data with expanded template.'''
141 def kwsub(mobj):
141 def kwsub(mobj):
142 kw = mobj.group(1)
142 kw = mobj.group(1)
143 self.ct.use_template(self.templates[kw])
143 self.ct.use_template(self.templates[kw])
144 self.ui.pushbuffer()
144 self.ui.pushbuffer()
145 self.ct.show(ctx, root=self.repo.root, file=path)
145 self.ct.show(ctx, root=self.repo.root, file=path)
146 ekw = templatefilters.firstline(self.ui.popbuffer())
146 ekw = templatefilters.firstline(self.ui.popbuffer())
147 return '$%s: %s $' % (kw, ekw)
147 return '$%s: %s $' % (kw, ekw)
148 return subfunc(kwsub, data)
148 return subfunc(kwsub, data)
149
149
150 def expand(self, path, node, data):
150 def expand(self, path, node, data):
151 '''Returns data with keywords expanded.'''
151 '''Returns data with keywords expanded.'''
152 if not self.restrict and self.match(path) and not util.binary(data):
152 if not self.restrict and self.match(path) and not util.binary(data):
153 ctx = self.repo.filectx(path, fileid=node).changectx()
153 ctx = self.repo.filectx(path, fileid=node).changectx()
154 return self.substitute(data, path, ctx, self.re_kw.sub)
154 return self.substitute(data, path, ctx, self.re_kw.sub)
155 return data
155 return data
156
156
157 def iskwfile(self, path, flagfunc):
157 def iskwfile(self, path, flagfunc):
158 '''Returns true if path matches [keyword] pattern
158 '''Returns true if path matches [keyword] pattern
159 and is not a symbolic link.
159 and is not a symbolic link.
160 Caveat: localrepository._link fails on Windows.'''
160 Caveat: localrepository._link fails on Windows.'''
161 return self.match(path) and not 'l' in flagfunc(path)
161 return self.match(path) and not 'l' in flagfunc(path)
162
162
163 def overwrite(self, node, expand, files):
163 def overwrite(self, node, expand, files):
164 '''Overwrites selected files expanding/shrinking keywords.'''
164 '''Overwrites selected files expanding/shrinking keywords.'''
165 ctx = self.repo[node]
165 ctx = self.repo[node]
166 mf = ctx.manifest()
166 mf = ctx.manifest()
167 if node is not None: # commit
167 if node is not None: # commit
168 files = [f for f in ctx.files() if f in mf]
168 files = [f for f in ctx.files() if f in mf]
169 notify = self.ui.debug
169 notify = self.ui.debug
170 else: # kwexpand/kwshrink
170 else: # kwexpand/kwshrink
171 notify = self.ui.note
171 notify = self.ui.note
172 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
172 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
173 if candidates:
173 if candidates:
174 self.restrict = True # do not expand when reading
174 self.restrict = True # do not expand when reading
175 msg = (expand and _('overwriting %s expanding keywords\n')
175 msg = (expand and _('overwriting %s expanding keywords\n')
176 or _('overwriting %s shrinking keywords\n'))
176 or _('overwriting %s shrinking keywords\n'))
177 for f in candidates:
177 for f in candidates:
178 fp = self.repo.file(f)
178 fp = self.repo.file(f)
179 data = fp.read(mf[f])
179 data = fp.read(mf[f])
180 if util.binary(data):
180 if util.binary(data):
181 continue
181 continue
182 if expand:
182 if expand:
183 if node is None:
183 if node is None:
184 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
184 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
185 data, found = self.substitute(data, f, ctx,
185 data, found = self.substitute(data, f, ctx,
186 self.re_kw.subn)
186 self.re_kw.subn)
187 else:
187 else:
188 found = self.re_kw.search(data)
188 found = self.re_kw.search(data)
189 if found:
189 if found:
190 notify(msg % f)
190 notify(msg % f)
191 self.repo.wwrite(f, data, mf.flags(f))
191 self.repo.wwrite(f, data, mf.flags(f))
192 self.repo.dirstate.normal(f)
192 self.repo.dirstate.normal(f)
193 self.restrict = False
193 self.restrict = False
194
194
195 def shrinktext(self, text):
195 def shrinktext(self, text):
196 '''Unconditionally removes all keyword substitutions from text.'''
196 '''Unconditionally removes all keyword substitutions from text.'''
197 return self.re_kw.sub(r'$\1$', text)
197 return self.re_kw.sub(r'$\1$', text)
198
198
199 def shrink(self, fname, text):
199 def shrink(self, fname, text):
200 '''Returns text with all keyword substitutions removed.'''
200 '''Returns text with all keyword substitutions removed.'''
201 if self.match(fname) and not util.binary(text):
201 if self.match(fname) and not util.binary(text):
202 return self.shrinktext(text)
202 return self.shrinktext(text)
203 return text
203 return text
204
204
205 def shrinklines(self, fname, lines):
205 def shrinklines(self, fname, lines):
206 '''Returns lines with keyword substitutions removed.'''
206 '''Returns lines with keyword substitutions removed.'''
207 if self.match(fname):
207 if self.match(fname):
208 text = ''.join(lines)
208 text = ''.join(lines)
209 if not util.binary(text):
209 if not util.binary(text):
210 return self.shrinktext(text).splitlines(True)
210 return self.shrinktext(text).splitlines(True)
211 return lines
211 return lines
212
212
213 def wread(self, fname, data):
213 def wread(self, fname, data):
214 '''If in restricted mode returns data read from wdir with
214 '''If in restricted mode returns data read from wdir with
215 keyword substitutions removed.'''
215 keyword substitutions removed.'''
216 return self.restrict and self.shrink(fname, data) or data
216 return self.restrict and self.shrink(fname, data) or data
217
217
218 class kwfilelog(filelog.filelog):
218 class kwfilelog(filelog.filelog):
219 '''
219 '''
220 Subclass of filelog to hook into its read, add, cmp methods.
220 Subclass of filelog to hook into its read, add, cmp methods.
221 Keywords are "stored" unexpanded, and processed on reading.
221 Keywords are "stored" unexpanded, and processed on reading.
222 '''
222 '''
223 def __init__(self, opener, kwt, path):
223 def __init__(self, opener, kwt, path):
224 super(kwfilelog, self).__init__(opener, path)
224 super(kwfilelog, self).__init__(opener, path)
225 self.kwt = kwt
225 self.kwt = kwt
226 self.path = path
226 self.path = path
227
227
228 def read(self, node):
228 def read(self, node):
229 '''Expands keywords when reading filelog.'''
229 '''Expands keywords when reading filelog.'''
230 data = super(kwfilelog, self).read(node)
230 data = super(kwfilelog, self).read(node)
231 return self.kwt.expand(self.path, node, data)
231 return self.kwt.expand(self.path, node, data)
232
232
233 def add(self, text, meta, tr, link, p1=None, p2=None):
233 def add(self, text, meta, tr, link, p1=None, p2=None):
234 '''Removes keyword substitutions when adding to filelog.'''
234 '''Removes keyword substitutions when adding to filelog.'''
235 text = self.kwt.shrink(self.path, text)
235 text = self.kwt.shrink(self.path, text)
236 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
236 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
237
237
238 def cmp(self, node, text):
238 def cmp(self, node, text):
239 '''Removes keyword substitutions for comparison.'''
239 '''Removes keyword substitutions for comparison.'''
240 text = self.kwt.shrink(self.path, text)
240 text = self.kwt.shrink(self.path, text)
241 if self.renamed(node):
241 if self.renamed(node):
242 t2 = super(kwfilelog, self).read(node)
242 t2 = super(kwfilelog, self).read(node)
243 return t2 != text
243 return t2 != text
244 return revlog.revlog.cmp(self, node, text)
244 return revlog.revlog.cmp(self, node, text)
245
245
246 def _status(ui, repo, kwt, unknown, *pats, **opts):
246 def _status(ui, repo, kwt, unknown, *pats, **opts):
247 '''Bails out if [keyword] configuration is not active.
247 '''Bails out if [keyword] configuration is not active.
248 Returns status of working directory.'''
248 Returns status of working directory.'''
249 if kwt:
249 if kwt:
250 match = cmdutil.match(repo, pats, opts)
250 match = cmdutil.match(repo, pats, opts)
251 return repo.status(match=match, unknown=unknown, clean=True)
251 return repo.status(match=match, unknown=unknown, clean=True)
252 if ui.configitems('keyword'):
252 if ui.configitems('keyword'):
253 raise util.Abort(_('[keyword] patterns cannot match'))
253 raise util.Abort(_('[keyword] patterns cannot match'))
254 raise util.Abort(_('no [keyword] patterns configured'))
254 raise util.Abort(_('no [keyword] patterns configured'))
255
255
256 def _kwfwrite(ui, repo, expand, *pats, **opts):
256 def _kwfwrite(ui, repo, expand, *pats, **opts):
257 '''Selects files and passes them to kwtemplater.overwrite.'''
257 '''Selects files and passes them to kwtemplater.overwrite.'''
258 if repo.dirstate.parents()[1] != nullid:
258 if repo.dirstate.parents()[1] != nullid:
259 raise util.Abort(_('outstanding uncommitted merge'))
259 raise util.Abort(_('outstanding uncommitted merge'))
260 kwt = kwtools['templater']
260 kwt = kwtools['templater']
261 status = _status(ui, repo, kwt, False, *pats, **opts)
261 status = _status(ui, repo, kwt, False, *pats, **opts)
262 modified, added, removed, deleted = status[:4]
262 modified, added, removed, deleted = status[:4]
263 if modified or added or removed or deleted:
263 if modified or added or removed or deleted:
264 raise util.Abort(_('outstanding uncommitted changes'))
264 raise util.Abort(_('outstanding uncommitted changes'))
265 wlock = lock = None
265 wlock = lock = None
266 try:
266 try:
267 wlock = repo.wlock()
267 wlock = repo.wlock()
268 lock = repo.lock()
268 lock = repo.lock()
269 kwt.overwrite(None, expand, status[6])
269 kwt.overwrite(None, expand, status[6])
270 finally:
270 finally:
271 release(lock, wlock)
271 release(lock, wlock)
272
272
273 def demo(ui, repo, *args, **opts):
273 def demo(ui, repo, *args, **opts):
274 '''print [keywordmaps] configuration and an expansion example
274 '''print [keywordmaps] configuration and an expansion example
275
275
276 Show current, custom, or default keyword template maps and their
276 Show current, custom, or default keyword template maps and their
277 expansions.
277 expansions.
278
278
279 Extend current configuration by specifying maps as arguments and
279 Extend current configuration by specifying maps as arguments and
280 optionally by reading from an additional hgrc file.
280 optionally by reading from an additional hgrc file.
281
281
282 Override current keyword template maps with "default" option.
282 Override current keyword template maps with "default" option.
283 '''
283 '''
284 def demostatus(stat):
284 def demostatus(stat):
285 ui.status(_('\n\t%s\n') % stat)
285 ui.status(_('\n\t%s\n') % stat)
286
286
287 def demoitems(section, items):
287 def demoitems(section, items):
288 ui.write('[%s]\n' % section)
288 ui.write('[%s]\n' % section)
289 for k, v in items:
289 for k, v in items:
290 ui.write('%s = %s\n' % (k, v))
290 ui.write('%s = %s\n' % (k, v))
291
291
292 msg = 'hg keyword config and expansion example'
292 msg = 'hg keyword config and expansion example'
293 kwstatus = 'current'
293 kwstatus = 'current'
294 fn = 'demo.txt'
294 fn = 'demo.txt'
295 branchname = 'demobranch'
295 branchname = 'demobranch'
296 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
296 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
297 ui.note(_('creating temporary repository at %s\n') % tmpdir)
297 ui.note(_('creating temporary repository at %s\n') % tmpdir)
298 repo = localrepo.localrepository(ui, tmpdir, True)
298 repo = localrepo.localrepository(ui, tmpdir, True)
299 ui.setconfig('keyword', fn, '')
299 ui.setconfig('keyword', fn, '')
300 if args or opts.get('rcfile'):
300 if args or opts.get('rcfile'):
301 kwstatus = 'custom'
301 kwstatus = 'custom'
302 if opts.get('rcfile'):
302 if opts.get('rcfile'):
303 ui.readconfig(opts.get('rcfile'))
303 ui.readconfig(opts.get('rcfile'))
304 if opts.get('default'):
304 if opts.get('default'):
305 kwstatus = 'default'
305 kwstatus = 'default'
306 kwmaps = kwtemplater.templates
306 kwmaps = kwtemplater.templates
307 if ui.configitems('keywordmaps'):
307 if ui.configitems('keywordmaps'):
308 # override maps from optional rcfile
308 # override maps from optional rcfile
309 for k, v in kwmaps.iteritems():
309 for k, v in kwmaps.iteritems():
310 ui.setconfig('keywordmaps', k, v)
310 ui.setconfig('keywordmaps', k, v)
311 elif args:
311 elif args:
312 # simulate hgrc parsing
312 # simulate hgrc parsing
313 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
313 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
314 fp = repo.opener('hgrc', 'w')
314 fp = repo.opener('hgrc', 'w')
315 fp.writelines(rcmaps)
315 fp.writelines(rcmaps)
316 fp.close()
316 fp.close()
317 ui.readconfig(repo.join('hgrc'))
317 ui.readconfig(repo.join('hgrc'))
318 if not opts.get('default'):
318 if not opts.get('default'):
319 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
319 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
320 uisetup(ui)
320 uisetup(ui)
321 reposetup(ui, repo)
321 reposetup(ui, repo)
322 for k, v in ui.configitems('extensions'):
322 for k, v in ui.configitems('extensions'):
323 if k.endswith('keyword'):
323 if k.endswith('keyword'):
324 extension = '%s = %s' % (k, v)
324 extension = '%s = %s' % (k, v)
325 break
325 break
326 demostatus('config using %s keyword template maps' % kwstatus)
326 demostatus('config using %s keyword template maps' % kwstatus)
327 ui.write('[extensions]\n%s\n' % extension)
327 ui.write('[extensions]\n%s\n' % extension)
328 demoitems('keyword', ui.configitems('keyword'))
328 demoitems('keyword', ui.configitems('keyword'))
329 demoitems('keywordmaps', kwmaps.iteritems())
329 demoitems('keywordmaps', kwmaps.iteritems())
330 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
330 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
331 repo.wopener(fn, 'w').write(keywords)
331 repo.wopener(fn, 'w').write(keywords)
332 repo.add([fn])
332 repo.add([fn])
333 path = repo.wjoin(fn)
333 path = repo.wjoin(fn)
334 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
334 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
335 ui.note(keywords)
335 ui.note(keywords)
336 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
336 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
337 # silence branch command if not verbose
337 # silence branch command if not verbose
338 quiet = ui.quiet
338 quiet = ui.quiet
339 ui.quiet = not ui.verbose
339 ui.quiet = not ui.verbose
340 commands.branch(ui, repo, branchname)
340 commands.branch(ui, repo, branchname)
341 ui.quiet = quiet
341 ui.quiet = quiet
342 for name, cmd in ui.configitems('hooks'):
342 for name, cmd in ui.configitems('hooks'):
343 if name.split('.', 1)[0].find('commit') > -1:
343 if name.split('.', 1)[0].find('commit') > -1:
344 repo.ui.setconfig('hooks', name, '')
344 repo.ui.setconfig('hooks', name, '')
345 ui.note(_('unhooked all commit hooks\n'))
345 ui.note(_('unhooked all commit hooks\n'))
346 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
346 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
347 repo.commit(text=msg)
347 repo.commit(text=msg)
348 fmt = ui.verbose and ' in %s' % path or ''
348 fmt = ui.verbose and ' in %s' % path or ''
349 demostatus('%s keywords expanded%s' % (kwstatus, fmt))
349 demostatus('%s keywords expanded%s' % (kwstatus, fmt))
350 ui.write(repo.wread(fn))
350 ui.write(repo.wread(fn))
351 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
351 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
352 shutil.rmtree(tmpdir, ignore_errors=True)
352 shutil.rmtree(tmpdir, ignore_errors=True)
353
353
354 def expand(ui, repo, *pats, **opts):
354 def expand(ui, repo, *pats, **opts):
355 '''expand keywords in the working directory
355 '''expand keywords in the working directory
356
356
357 Run after (re)enabling keyword expansion.
357 Run after (re)enabling keyword expansion.
358
358
359 kwexpand refuses to run if given files contain local changes.
359 kwexpand refuses to run if given files contain local changes.
360 '''
360 '''
361 # 3rd argument sets expansion to True
361 # 3rd argument sets expansion to True
362 _kwfwrite(ui, repo, True, *pats, **opts)
362 _kwfwrite(ui, repo, True, *pats, **opts)
363
363
364 def files(ui, repo, *pats, **opts):
364 def files(ui, repo, *pats, **opts):
365 '''print files currently configured for keyword expansion
365 '''print files currently configured for keyword expansion
366
366
367 Crosscheck which files in working directory are potential targets
367 Crosscheck which files in working directory are potential targets
368 for keyword expansion. That is, files matched by [keyword] config
368 for keyword expansion. That is, files matched by [keyword] config
369 patterns but not symlinks.
369 patterns but not symlinks.
370 '''
370 '''
371 kwt = kwtools['templater']
371 kwt = kwtools['templater']
372 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
372 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
373 modified, added, removed, deleted, unknown, ignored, clean = status
373 modified, added, removed, deleted, unknown, ignored, clean = status
374 files = sorted(modified + added + clean + unknown)
374 files = sorted(modified + added + clean + unknown)
375 wctx = repo[None]
375 wctx = repo[None]
376 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
376 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
377 cwd = pats and repo.getcwd() or ''
377 cwd = pats and repo.getcwd() or ''
378 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
378 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
379 if opts.get('all') or opts.get('ignore'):
379 if opts.get('all') or opts.get('ignore'):
380 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
380 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
381 for char, filenames in kwfstats:
381 for char, filenames in kwfstats:
382 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
382 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
383 for f in filenames:
383 for f in filenames:
384 ui.write(fmt % repo.pathto(f, cwd))
384 ui.write(fmt % repo.pathto(f, cwd))
385
385
386 def shrink(ui, repo, *pats, **opts):
386 def shrink(ui, repo, *pats, **opts):
387 '''revert expanded keywords in the working directory
387 '''revert expanded keywords in the working directory
388
388
389 Run before changing/disabling active keywords or if you experience
389 Run before changing/disabling active keywords or if you experience
390 problems with "hg import" or "hg merge".
390 problems with "hg import" or "hg merge".
391
391
392 kwshrink refuses to run if given files contain local changes.
392 kwshrink refuses to run if given files contain local changes.
393 '''
393 '''
394 # 3rd argument sets expansion to False
394 # 3rd argument sets expansion to False
395 _kwfwrite(ui, repo, False, *pats, **opts)
395 _kwfwrite(ui, repo, False, *pats, **opts)
396
396
397
397
398 def uisetup(ui):
398 def uisetup(ui):
399 '''Collects [keyword] config in kwtools.
399 '''Collects [keyword] config in kwtools.
400 Monkeypatches dispatch._parse if needed.'''
400 Monkeypatches dispatch._parse if needed.'''
401
401
402 for pat, opt in ui.configitems('keyword'):
402 for pat, opt in ui.configitems('keyword'):
403 if opt != 'ignore':
403 if opt != 'ignore':
404 kwtools['inc'].append(pat)
404 kwtools['inc'].append(pat)
405 else:
405 else:
406 kwtools['exc'].append(pat)
406 kwtools['exc'].append(pat)
407
407
408 if kwtools['inc']:
408 if kwtools['inc']:
409 def kwdispatch_parse(orig, ui, args):
409 def kwdispatch_parse(orig, ui, args):
410 '''Monkeypatch dispatch._parse to obtain running hg command.'''
410 '''Monkeypatch dispatch._parse to obtain running hg command.'''
411 cmd, func, args, options, cmdoptions = orig(ui, args)
411 cmd, func, args, options, cmdoptions = orig(ui, args)
412 kwtools['hgcmd'] = cmd
412 kwtools['hgcmd'] = cmd
413 return cmd, func, args, options, cmdoptions
413 return cmd, func, args, options, cmdoptions
414
414
415 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
415 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
416
416
417 def reposetup(ui, repo):
417 def reposetup(ui, repo):
418 '''Sets up repo as kwrepo for keyword substitution.
418 '''Sets up repo as kwrepo for keyword substitution.
419 Overrides file method to return kwfilelog instead of filelog
419 Overrides file method to return kwfilelog instead of filelog
420 if file matches user configuration.
420 if file matches user configuration.
421 Wraps commit to overwrite configured files with updated
421 Wraps commit to overwrite configured files with updated
422 keyword substitutions.
422 keyword substitutions.
423 Monkeypatches patch and webcommands.'''
423 Monkeypatches patch and webcommands.'''
424
424
425 try:
425 try:
426 if (not repo.local() or not kwtools['inc']
426 if (not repo.local() or not kwtools['inc']
427 or kwtools['hgcmd'] in nokwcommands.split()
427 or kwtools['hgcmd'] in nokwcommands.split()
428 or '.hg' in util.splitpath(repo.root)
428 or '.hg' in util.splitpath(repo.root)
429 or repo._url.startswith('bundle:')):
429 or repo._url.startswith('bundle:')):
430 return
430 return
431 except AttributeError:
431 except AttributeError:
432 pass
432 pass
433
433
434 kwtools['templater'] = kwt = kwtemplater(ui, repo)
434 kwtools['templater'] = kwt = kwtemplater(ui, repo)
435
435
436 class kwrepo(repo.__class__):
436 class kwrepo(repo.__class__):
437 def file(self, f):
437 def file(self, f):
438 if f[0] == '/':
438 if f[0] == '/':
439 f = f[1:]
439 f = f[1:]
440 return kwfilelog(self.sopener, kwt, f)
440 return kwfilelog(self.sopener, kwt, f)
441
441
442 def wread(self, filename):
442 def wread(self, filename):
443 data = super(kwrepo, self).wread(filename)
443 data = super(kwrepo, self).wread(filename)
444 return kwt.wread(filename, data)
444 return kwt.wread(filename, data)
445
445
446 def commit(self, text='', user=None, date=None, match=None,
446 def commit(self, text='', user=None, date=None, match=None,
447 force=False, editor=None, extra={}):
447 force=False, editor=None, extra={}):
448 wlock = lock = None
448 wlock = lock = None
449 _p1 = _p2 = None
449 _p1 = _p2 = None
450 try:
450 try:
451 wlock = self.wlock()
451 wlock = self.wlock()
452 lock = self.lock()
452 lock = self.lock()
453 # store and postpone commit hooks
453 # store and postpone commit hooks
454 commithooks = {}
454 commithooks = {}
455 for name, cmd in ui.configitems('hooks'):
455 for name, cmd in ui.configitems('hooks'):
456 if name.split('.', 1)[0] == 'commit':
456 if name.split('.', 1)[0] == 'commit':
457 commithooks[name] = cmd
457 commithooks[name] = cmd
458 ui.setconfig('hooks', name, None)
458 ui.setconfig('hooks', name, None)
459 if commithooks:
459 if commithooks:
460 # store parents for commit hook environment
460 # store parents for commit hook environment
461 _p1, _p2 = repo.dirstate.parents()
461 _p1, _p2 = repo.dirstate.parents()
462 _p1 = hex(_p1)
462 _p1 = hex(_p1)
463 if _p2 == nullid:
463 if _p2 == nullid:
464 _p2 = ''
464 _p2 = ''
465 else:
465 else:
466 _p2 = hex(_p2)
466 _p2 = hex(_p2)
467
467
468 n = super(kwrepo, self).commit(text, user, date, match, force,
468 n = super(kwrepo, self).commit(text, user, date, match, force,
469 editor, extra)
469 editor, extra)
470
470
471 # restore commit hooks
471 # restore commit hooks
472 for name, cmd in commithooks.iteritems():
472 for name, cmd in commithooks.iteritems():
473 ui.setconfig('hooks', name, cmd)
473 ui.setconfig('hooks', name, cmd)
474 if n is not None:
474 if n is not None:
475 kwt.overwrite(n, True, None)
475 kwt.overwrite(n, True, None)
476 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
476 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
477 return n
477 return n
478 finally:
478 finally:
479 release(lock, wlock)
479 release(lock, wlock)
480
480
481 # monkeypatches
481 # monkeypatches
482 def kwpatchfile_init(orig, self, ui, fname, opener, missing=False, eol=None):
482 def kwpatchfile_init(orig, self, ui, fname, opener, missing=False, eol=None):
483 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
483 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
484 rejects or conflicts due to expanded keywords in working dir.'''
484 rejects or conflicts due to expanded keywords in working dir.'''
485 orig(self, ui, fname, opener, missing, eol)
485 orig(self, ui, fname, opener, missing, eol)
486 # shrink keywords read from working dir
486 # shrink keywords read from working dir
487 self.lines = kwt.shrinklines(self.fname, self.lines)
487 self.lines = kwt.shrinklines(self.fname, self.lines)
488
488
489 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
489 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
490 opts=None):
490 opts=None):
491 '''Monkeypatch patch.diff to avoid expansion except when
491 '''Monkeypatch patch.diff to avoid expansion except when
492 comparing against working dir.'''
492 comparing against working dir.'''
493 if node2 is not None:
493 if node2 is not None:
494 kwt.match = util.never
494 kwt.match = util.never
495 elif node1 is not None and node1 != repo['.'].node():
495 elif node1 is not None and node1 != repo['.'].node():
496 kwt.restrict = True
496 kwt.restrict = True
497 return orig(repo, node1, node2, match, changes, opts)
497 return orig(repo, node1, node2, match, changes, opts)
498
498
499 def kwweb_skip(orig, web, req, tmpl):
499 def kwweb_skip(orig, web, req, tmpl):
500 '''Wraps webcommands.x turning off keyword expansion.'''
500 '''Wraps webcommands.x turning off keyword expansion.'''
501 kwt.match = util.never
501 kwt.match = util.never
502 return orig(web, req, tmpl)
502 return orig(web, req, tmpl)
503
503
504 repo.__class__ = kwrepo
504 repo.__class__ = kwrepo
505
505
506 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
506 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
507 extensions.wrapfunction(patch, 'diff', kw_diff)
507 extensions.wrapfunction(patch, 'diff', kw_diff)
508 for c in 'annotate changeset rev filediff diff'.split():
508 for c in 'annotate changeset rev filediff diff'.split():
509 extensions.wrapfunction(webcommands, c, kwweb_skip)
509 extensions.wrapfunction(webcommands, c, kwweb_skip)
510
510
511 cmdtable = {
511 cmdtable = {
512 'kwdemo':
512 'kwdemo':
513 (demo,
513 (demo,
514 [('d', 'default', None, _('show default keyword template maps')),
514 [('d', 'default', None, _('show default keyword template maps')),
515 ('f', 'rcfile', [], _('read maps from rcfile'))],
515 ('f', 'rcfile', [], _('read maps from rcfile'))],
516 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
516 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
517 'kwexpand': (expand, commands.walkopts,
517 'kwexpand': (expand, commands.walkopts,
518 _('hg kwexpand [OPTION]... [FILE]...')),
518 _('hg kwexpand [OPTION]... [FILE]...')),
519 'kwfiles':
519 'kwfiles':
520 (files,
520 (files,
521 [('a', 'all', None, _('show keyword status flags of all files')),
521 [('a', 'all', None, _('show keyword status flags of all files')),
522 ('i', 'ignore', None, _('show files excluded from expansion')),
522 ('i', 'ignore', None, _('show files excluded from expansion')),
523 ('u', 'untracked', None, _('additionally show untracked files')),
523 ('u', 'untracked', None, _('additionally show untracked files')),
524 ] + commands.walkopts,
524 ] + commands.walkopts,
525 _('hg kwfiles [OPTION]... [FILE]...')),
525 _('hg kwfiles [OPTION]... [FILE]...')),
526 'kwshrink': (shrink, commands.walkopts,
526 'kwshrink': (shrink, commands.walkopts,
527 _('hg kwshrink [OPTION]... [FILE]...')),
527 _('hg kwshrink [OPTION]... [FILE]...')),
528 }
528 }
@@ -1,2630 +1,2630 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''work with a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
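# Illustrative session (command names come from the task list above;
# "fix-frobnicator" is a made-up patch name):
#
#   hg qinit                  # prepare the repository for patches
#   hg qnew fix-frobnicator   # start a new patch on top of the stack
#   (edit some files)
#   hg qrefresh               # record the working dir changes in the patch
#   hg qseries                # list all known patches
#   hg qpop                   # unapply the top patch again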
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial.node import bin, hex, short, nullid, nullrev
33 from mercurial.node import bin, hex, short, nullid, nullrev
34 from mercurial.lock import release
34 from mercurial.lock import release
35 from mercurial import commands, cmdutil, hg, patch, util
35 from mercurial import commands, cmdutil, hg, patch, util
36 from mercurial import repair, extensions, url, error
36 from mercurial import repair, extensions, url, error
37 import os, sys, re, errno
37 import os, sys, re, errno
38
38
39 commands.norepo += " qclone"
39 commands.norepo += " qclone"
40
40
41 # Patch names look like Unix file names.
41 # Patch names look like Unix file names.
42 # They must be joinable with queue directory and result in the patch path.
42 # They must be joinable with queue directory and result in the patch path.
43 normname = util.normpath
43 normname = util.normpath
44
44
45 class statusentry(object):
45 class statusentry(object):
46 def __init__(self, rev, name=None):
46 def __init__(self, rev, name=None):
47 if not name:
47 if not name:
48 fields = rev.split(':', 1)
48 fields = rev.split(':', 1)
49 if len(fields) == 2:
49 if len(fields) == 2:
50 self.rev, self.name = fields
50 self.rev, self.name = fields
51 else:
51 else:
52 self.rev, self.name = None, None
52 self.rev, self.name = None, None
53 else:
53 else:
54 self.rev, self.name = rev, name
54 self.rev, self.name = rev, name
55
55
56 def __str__(self):
56 def __str__(self):
57 return self.rev + ':' + self.name
57 return self.rev + ':' + self.name
58
58
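# Illustrative note (values are made up): each status file line has the
# form "<changeset-hex>:<patchname>", so
#   statusentry('0a1b2c...:fix-frobnicator.patch')
# and
#   statusentry('0a1b2c...', 'fix-frobnicator.patch')
# produce equivalent entries, and str() reproduces the "rev:name" form.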
59 class patchheader(object):
59 class patchheader(object):
60 def __init__(self, pf):
60 def __init__(self, pf):
61 def eatdiff(lines):
61 def eatdiff(lines):
62 while lines:
62 while lines:
63 l = lines[-1]
63 l = lines[-1]
64 if (l.startswith("diff -") or
64 if (l.startswith("diff -") or
65 l.startswith("Index:") or
65 l.startswith("Index:") or
66 l.startswith("===========")):
66 l.startswith("===========")):
67 del lines[-1]
67 del lines[-1]
68 else:
68 else:
69 break
69 break
70 def eatempty(lines):
70 def eatempty(lines):
71 while lines:
71 while lines:
72 l = lines[-1]
72 l = lines[-1]
73 if re.match('\s*$', l):
73 if re.match('\s*$', l):
74 del lines[-1]
74 del lines[-1]
75 else:
75 else:
76 break
76 break
77
77
78 message = []
78 message = []
79 comments = []
79 comments = []
80 user = None
80 user = None
81 date = None
81 date = None
82 format = None
82 format = None
83 subject = None
83 subject = None
84 diffstart = 0
84 diffstart = 0
85
85
86 for line in file(pf):
86 for line in file(pf):
87 line = line.rstrip()
87 line = line.rstrip()
88 if line.startswith('diff --git'):
88 if line.startswith('diff --git'):
89 diffstart = 2
89 diffstart = 2
90 break
90 break
91 if diffstart:
91 if diffstart:
92 if line.startswith('+++ '):
92 if line.startswith('+++ '):
93 diffstart = 2
93 diffstart = 2
94 break
94 break
95 if line.startswith("--- "):
95 if line.startswith("--- "):
96 diffstart = 1
96 diffstart = 1
97 continue
97 continue
98 elif format == "hgpatch":
98 elif format == "hgpatch":
99 # parse values when importing the result of an hg export
99 # parse values when importing the result of an hg export
100 if line.startswith("# User "):
100 if line.startswith("# User "):
101 user = line[7:]
101 user = line[7:]
102 elif line.startswith("# Date "):
102 elif line.startswith("# Date "):
103 date = line[7:]
103 date = line[7:]
104 elif not line.startswith("# ") and line:
104 elif not line.startswith("# ") and line:
105 message.append(line)
105 message.append(line)
106 format = None
106 format = None
107 elif line == '# HG changeset patch':
107 elif line == '# HG changeset patch':
108 format = "hgpatch"
108 format = "hgpatch"
109 elif (format != "tagdone" and (line.startswith("Subject: ") or
109 elif (format != "tagdone" and (line.startswith("Subject: ") or
110 line.startswith("subject: "))):
110 line.startswith("subject: "))):
111 subject = line[9:]
111 subject = line[9:]
112 format = "tag"
112 format = "tag"
113 elif (format != "tagdone" and (line.startswith("From: ") or
113 elif (format != "tagdone" and (line.startswith("From: ") or
114 line.startswith("from: "))):
114 line.startswith("from: "))):
115 user = line[6:]
115 user = line[6:]
116 format = "tag"
116 format = "tag"
117 elif format == "tag" and line == "":
117 elif format == "tag" and line == "":
118 # when looking for tags (subject:, from:, etc.), they
118 # when looking for tags (subject:, from:, etc.), they
119 # end once a blank line is found in the source
119 # end once a blank line is found in the source
120 format = "tagdone"
120 format = "tagdone"
121 elif message or line:
121 elif message or line:
122 message.append(line)
122 message.append(line)
123 comments.append(line)
123 comments.append(line)
124
124
125 eatdiff(message)
125 eatdiff(message)
126 eatdiff(comments)
126 eatdiff(comments)
127 eatempty(message)
127 eatempty(message)
128 eatempty(comments)
128 eatempty(comments)
129
129
130 # make sure message isn't empty
130 # make sure message isn't empty
131 if format and format.startswith("tag") and subject:
131 if format and format.startswith("tag") and subject:
132 message.insert(0, "")
132 message.insert(0, "")
133 message.insert(0, subject)
133 message.insert(0, subject)
134
134
135 self.message = message
135 self.message = message
136 self.comments = comments
136 self.comments = comments
137 self.user = user
137 self.user = user
138 self.date = date
138 self.date = date
139 self.haspatch = diffstart > 1
139 self.haspatch = diffstart > 1
140
140
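# Sketch of a header this parser recognizes (hg export style; all values
# are made up):
#
#   # HG changeset patch
#   # User Jane Doe <jane@example.com>
#   # Date 1234567890 0
#   fix the frobnicator
#
#   diff -r aaaaaaaaaaaa -r bbbbbbbbbbbb somefile
#   --- a/somefile
#   +++ b/somefile
#
# The "# User" and "# Date" lines fill in self.user and self.date, the
# free-form lines become self.message, and haspatch turns True once the
# "--- " / "+++ " pair that starts the diff body has been seen.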
141 def setuser(self, user):
141 def setuser(self, user):
142 if not self.updateheader(['From: ', '# User '], user):
142 if not self.updateheader(['From: ', '# User '], user):
143 try:
143 try:
144 patchheaderat = self.comments.index('# HG changeset patch')
144 patchheaderat = self.comments.index('# HG changeset patch')
145 self.comments.insert(patchheaderat + 1,'# User ' + user)
145 self.comments.insert(patchheaderat + 1,'# User ' + user)
146 except ValueError:
146 except ValueError:
147 self.comments = ['From: ' + user, ''] + self.comments
147 self.comments = ['From: ' + user, ''] + self.comments
148 self.user = user
148 self.user = user
149
149
150 def setdate(self, date):
150 def setdate(self, date):
151 if self.updateheader(['# Date '], date):
151 if self.updateheader(['# Date '], date):
152 self.date = date
152 self.date = date
153
153
154 def setmessage(self, message):
154 def setmessage(self, message):
155 if self.comments:
155 if self.comments:
156 self._delmsg()
156 self._delmsg()
157 self.message = [message]
157 self.message = [message]
158 self.comments += self.message
158 self.comments += self.message
159
159
160 def updateheader(self, prefixes, new):
160 def updateheader(self, prefixes, new):
161 '''Update all references to a field in the patch header.
161 '''Update all references to a field in the patch header.
162 Return whether the field is present.'''
162 Return whether the field is present.'''
163 res = False
163 res = False
164 for prefix in prefixes:
164 for prefix in prefixes:
165 for i in xrange(len(self.comments)):
165 for i in xrange(len(self.comments)):
166 if self.comments[i].startswith(prefix):
166 if self.comments[i].startswith(prefix):
167 self.comments[i] = prefix + new
167 self.comments[i] = prefix + new
168 res = True
168 res = True
169 break
169 break
170 return res
170 return res
171
171
172 def __str__(self):
172 def __str__(self):
173 if not self.comments:
173 if not self.comments:
174 return ''
174 return ''
175 return '\n'.join(self.comments) + '\n\n'
175 return '\n'.join(self.comments) + '\n\n'
176
176
177 def _delmsg(self):
177 def _delmsg(self):
178 '''Remove existing message, keeping the rest of the comments fields.
178 '''Remove existing message, keeping the rest of the comments fields.
179 If comments contains 'subject: ', message will prepend
179 If comments contains 'subject: ', message will prepend
180 the field and a blank line.'''
180 the field and a blank line.'''
181 if self.message:
181 if self.message:
182 subj = 'subject: ' + self.message[0].lower()
182 subj = 'subject: ' + self.message[0].lower()
183 for i in xrange(len(self.comments)):
183 for i in xrange(len(self.comments)):
184 if subj == self.comments[i].lower():
184 if subj == self.comments[i].lower():
185 del self.comments[i]
185 del self.comments[i]
186 self.message = self.message[2:]
186 self.message = self.message[2:]
187 break
187 break
188 ci = 0
188 ci = 0
189 for mi in self.message:
189 for mi in self.message:
190 while mi != self.comments[ci]:
190 while mi != self.comments[ci]:
191 ci += 1
191 ci += 1
192 del self.comments[ci]
192 del self.comments[ci]
193
193
194 class queue(object):
194 class queue(object):
195 def __init__(self, ui, path, patchdir=None):
195 def __init__(self, ui, path, patchdir=None):
196 self.basepath = path
196 self.basepath = path
197 self.path = patchdir or os.path.join(path, "patches")
197 self.path = patchdir or os.path.join(path, "patches")
198 self.opener = util.opener(self.path)
198 self.opener = util.opener(self.path)
199 self.ui = ui
199 self.ui = ui
200 self.applied_dirty = 0
200 self.applied_dirty = 0
201 self.series_dirty = 0
201 self.series_dirty = 0
202 self.series_path = "series"
202 self.series_path = "series"
203 self.status_path = "status"
203 self.status_path = "status"
204 self.guards_path = "guards"
204 self.guards_path = "guards"
205 self.active_guards = None
205 self.active_guards = None
206 self.guards_dirty = False
206 self.guards_dirty = False
207 self._diffopts = None
207 self._diffopts = None
208
208
209 @util.propertycache
209 @util.propertycache
210 def applied(self):
210 def applied(self):
211 if os.path.exists(self.join(self.status_path)):
211 if os.path.exists(self.join(self.status_path)):
212 lines = self.opener(self.status_path).read().splitlines()
212 lines = self.opener(self.status_path).read().splitlines()
213 return [statusentry(l) for l in lines]
213 return [statusentry(l) for l in lines]
214 return []
214 return []
215
215
216 @util.propertycache
216 @util.propertycache
217 def full_series(self):
217 def full_series(self):
218 if os.path.exists(self.join(self.series_path)):
218 if os.path.exists(self.join(self.series_path)):
219 return self.opener(self.series_path).read().splitlines()
219 return self.opener(self.series_path).read().splitlines()
220 return []
220 return []
221
221
222 @util.propertycache
222 @util.propertycache
223 def series(self):
223 def series(self):
224 self.parse_series()
224 self.parse_series()
225 return self.series
225 return self.series
226
226
227 @util.propertycache
227 @util.propertycache
228 def series_guards(self):
228 def series_guards(self):
229 self.parse_series()
229 self.parse_series()
230 return self.series_guards
230 return self.series_guards
231
231
232 def invalidate(self):
232 def invalidate(self):
233 for a in 'applied full_series series series_guards'.split():
233 for a in 'applied full_series series series_guards'.split():
234 if a in self.__dict__:
234 if a in self.__dict__:
235 delattr(self, a)
235 delattr(self, a)
236 self.applied_dirty = 0
236 self.applied_dirty = 0
237 self.series_dirty = 0
237 self.series_dirty = 0
238 self.guards_dirty = False
238 self.guards_dirty = False
239 self.active_guards = None
239 self.active_guards = None
240
240
241 def diffopts(self):
241 def diffopts(self):
242 if self._diffopts is None:
242 if self._diffopts is None:
243 self._diffopts = patch.diffopts(self.ui)
243 self._diffopts = patch.diffopts(self.ui)
244 return self._diffopts
244 return self._diffopts
245
245
246 def join(self, *p):
246 def join(self, *p):
247 return os.path.join(self.path, *p)
247 return os.path.join(self.path, *p)
248
248
249 def find_series(self, patch):
249 def find_series(self, patch):
250 pre = re.compile("(\s*)([^#]+)")
250 pre = re.compile("(\s*)([^#]+)")
251 index = 0
251 index = 0
252 for l in self.full_series:
252 for l in self.full_series:
253 m = pre.match(l)
253 m = pre.match(l)
254 if m:
254 if m:
255 s = m.group(2)
255 s = m.group(2)
256 s = s.rstrip()
256 s = s.rstrip()
257 if s == patch:
257 if s == patch:
258 return index
258 return index
259 index += 1
259 index += 1
260 return None
260 return None
261
261
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
263
263
264 def parse_series(self):
264 def parse_series(self):
265 self.series = []
265 self.series = []
266 self.series_guards = []
266 self.series_guards = []
267 for l in self.full_series:
267 for l in self.full_series:
268 h = l.find('#')
268 h = l.find('#')
269 if h == -1:
269 if h == -1:
270 patch = l
270 patch = l
271 comment = ''
271 comment = ''
272 elif h == 0:
272 elif h == 0:
273 continue
273 continue
274 else:
274 else:
275 patch = l[:h]
275 patch = l[:h]
276 comment = l[h:]
276 comment = l[h:]
277 patch = patch.strip()
277 patch = patch.strip()
278 if patch:
278 if patch:
279 if patch in self.series:
279 if patch in self.series:
280 raise util.Abort(_('%s appears more than once in %s') %
280 raise util.Abort(_('%s appears more than once in %s') %
281 (patch, self.join(self.series_path)))
281 (patch, self.join(self.series_path)))
282 self.series.append(patch)
282 self.series.append(patch)
283 self.series_guards.append(self.guard_re.findall(comment))
283 self.series_guards.append(self.guard_re.findall(comment))
284
284
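# Illustrative example (made-up names): a series file entry such as
#   fix-frobnicator.patch  #+experimental #-stable
# yields the patch name 'fix-frobnicator.patch' and, via guard_re,
# the guard list ['+experimental', '-stable'].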
285 def check_guard(self, guard):
285 def check_guard(self, guard):
286 if not guard:
286 if not guard:
287 return _('guard cannot be an empty string')
287 return _('guard cannot be an empty string')
288 bad_chars = '# \t\r\n\f'
288 bad_chars = '# \t\r\n\f'
289 first = guard[0]
289 first = guard[0]
290 if first in '-+':
290 if first in '-+':
291 return (_('guard %r starts with invalid character: %r') %
291 return (_('guard %r starts with invalid character: %r') %
292 (guard, first))
292 (guard, first))
293 for c in bad_chars:
293 for c in bad_chars:
294 if c in guard:
294 if c in guard:
295 return _('invalid character in guard %r: %r') % (guard, c)
295 return _('invalid character in guard %r: %r') % (guard, c)
296
296
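# Illustrative checks (made-up guard names): check_guard('stable')
# returns None (valid), while check_guard(''), check_guard('+stable')
# and check_guard('a b') each return an error message (empty guard,
# leading '+'/'-', and whitespace in the name, respectively).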
297 def set_active(self, guards):
297 def set_active(self, guards):
298 for guard in guards:
298 for guard in guards:
299 bad = self.check_guard(guard)
299 bad = self.check_guard(guard)
300 if bad:
300 if bad:
301 raise util.Abort(bad)
301 raise util.Abort(bad)
302 guards = sorted(set(guards))
302 guards = sorted(set(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
304 self.active_guards = guards
304 self.active_guards = guards
305 self.guards_dirty = True
305 self.guards_dirty = True
306
306
307 def active(self):
307 def active(self):
308 if self.active_guards is None:
308 if self.active_guards is None:
309 self.active_guards = []
309 self.active_guards = []
310 try:
310 try:
311 guards = self.opener(self.guards_path).read().split()
311 guards = self.opener(self.guards_path).read().split()
312 except IOError, err:
312 except IOError, err:
313 if err.errno != errno.ENOENT: raise
313 if err.errno != errno.ENOENT: raise
314 guards = []
314 guards = []
315 for i, guard in enumerate(guards):
315 for i, guard in enumerate(guards):
316 bad = self.check_guard(guard)
316 bad = self.check_guard(guard)
317 if bad:
317 if bad:
318 self.ui.warn('%s:%d: %s\n' %
318 self.ui.warn('%s:%d: %s\n' %
319 (self.join(self.guards_path), i + 1, bad))
319 (self.join(self.guards_path), i + 1, bad))
320 else:
320 else:
321 self.active_guards.append(guard)
321 self.active_guards.append(guard)
322 return self.active_guards
322 return self.active_guards
323
323
324 def set_guards(self, idx, guards):
324 def set_guards(self, idx, guards):
325 for g in guards:
325 for g in guards:
326 if len(g) < 2:
326 if len(g) < 2:
327 raise util.Abort(_('guard %r too short') % g)
327 raise util.Abort(_('guard %r too short') % g)
328 if g[0] not in '-+':
328 if g[0] not in '-+':
329 raise util.Abort(_('guard %r starts with invalid char') % g)
329 raise util.Abort(_('guard %r starts with invalid char') % g)
330 bad = self.check_guard(g[1:])
330 bad = self.check_guard(g[1:])
331 if bad:
331 if bad:
332 raise util.Abort(bad)
332 raise util.Abort(bad)
333 drop = self.guard_re.sub('', self.full_series[idx])
333 drop = self.guard_re.sub('', self.full_series[idx])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
335 self.parse_series()
335 self.parse_series()
336 self.series_dirty = True
336 self.series_dirty = True
337
337
338 def pushable(self, idx):
338 def pushable(self, idx):
339 if isinstance(idx, str):
339 if isinstance(idx, str):
340 idx = self.series.index(idx)
340 idx = self.series.index(idx)
341 patchguards = self.series_guards[idx]
341 patchguards = self.series_guards[idx]
342 if not patchguards:
342 if not patchguards:
343 return True, None
343 return True, None
344 guards = self.active()
344 guards = self.active()
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
346 if exactneg:
346 if exactneg:
347 return False, exactneg[0]
347 return False, exactneg[0]
348 pos = [g for g in patchguards if g[0] == '+']
348 pos = [g for g in patchguards if g[0] == '+']
349 exactpos = [g for g in pos if g[1:] in guards]
349 exactpos = [g for g in pos if g[1:] in guards]
350 if pos:
350 if pos:
351 if exactpos:
351 if exactpos:
352 return True, exactpos[0]
352 return True, exactpos[0]
353 return False, pos
353 return False, pos
354 return True, ''
354 return True, ''
355
355
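# Illustration (made-up guard names), assuming the active guards are
# ['stable']:
#   patch guarded '#-stable' -> (False, '-stable')   exact negative match
#   patch guarded '#+stable' -> (True, '+stable')    exact positive match
#   patch guarded '#+exp'    -> (False, ['+exp'])    no positive guard active
#   patch with no guards     -> (True, None)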
356 def explain_pushable(self, idx, all_patches=False):
356 def explain_pushable(self, idx, all_patches=False):
357 write = all_patches and self.ui.write or self.ui.warn
357 write = all_patches and self.ui.write or self.ui.warn
358 if all_patches or self.ui.verbose:
358 if all_patches or self.ui.verbose:
359 if isinstance(idx, str):
359 if isinstance(idx, str):
360 idx = self.series.index(idx)
360 idx = self.series.index(idx)
361 pushable, why = self.pushable(idx)
361 pushable, why = self.pushable(idx)
362 if all_patches and pushable:
362 if all_patches and pushable:
363 if why is None:
363 if why is None:
364 write(_('allowing %s - no guards in effect\n') %
364 write(_('allowing %s - no guards in effect\n') %
365 self.series[idx])
365 self.series[idx])
366 else:
366 else:
367 if not why:
367 if not why:
368 write(_('allowing %s - no matching negative guards\n') %
368 write(_('allowing %s - no matching negative guards\n') %
369 self.series[idx])
369 self.series[idx])
370 else:
370 else:
371 write(_('allowing %s - guarded by %r\n') %
371 write(_('allowing %s - guarded by %r\n') %
372 (self.series[idx], why))
372 (self.series[idx], why))
373 if not pushable:
373 if not pushable:
374 if why:
374 if why:
375 write(_('skipping %s - guarded by %r\n') %
375 write(_('skipping %s - guarded by %r\n') %
376 (self.series[idx], why))
376 (self.series[idx], why))
377 else:
377 else:
378 write(_('skipping %s - no matching guards\n') %
378 write(_('skipping %s - no matching guards\n') %
379 self.series[idx])
379 self.series[idx])
380
380
381 def save_dirty(self):
381 def save_dirty(self):
382 def write_list(items, path):
382 def write_list(items, path):
383 fp = self.opener(path, 'w')
383 fp = self.opener(path, 'w')
384 for i in items:
384 for i in items:
385 fp.write("%s\n" % i)
385 fp.write("%s\n" % i)
386 fp.close()
386 fp.close()
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
390
390
391 def removeundo(self, repo):
391 def removeundo(self, repo):
392 undo = repo.sjoin('undo')
392 undo = repo.sjoin('undo')
393 if not os.path.exists(undo):
393 if not os.path.exists(undo):
394 return
394 return
395 try:
395 try:
396 os.unlink(undo)
396 os.unlink(undo)
397 except OSError, inst:
397 except OSError, inst:
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
399
399
400 def printdiff(self, repo, node1, node2=None, files=None,
400 def printdiff(self, repo, node1, node2=None, files=None,
401 fp=None, changes=None, opts={}):
401 fp=None, changes=None, opts={}):
402 m = cmdutil.match(repo, files, opts)
402 m = cmdutil.match(repo, files, opts)
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
404 write = fp is None and repo.ui.write or fp.write
404 write = fp is None and repo.ui.write or fp.write
405 for chunk in chunks:
405 for chunk in chunks:
406 write(chunk)
406 write(chunk)
407
407
408 def mergeone(self, repo, mergeq, head, patch, rev):
408 def mergeone(self, repo, mergeq, head, patch, rev):
409 # first try just applying the patch
409 # first try just applying the patch
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
411 strict=True, merge=rev)
411 strict=True, merge=rev)
412
412
413 if err == 0:
413 if err == 0:
414 return (err, n)
414 return (err, n)
415
415
416 if n is None:
416 if n is None:
417 raise util.Abort(_("apply failed for patch %s") % patch)
417 raise util.Abort(_("apply failed for patch %s") % patch)
418
418
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
420
420
421 # apply failed, strip away that rev and merge.
421 # apply failed, strip away that rev and merge.
422 hg.clean(repo, head)
422 hg.clean(repo, head)
423 self.strip(repo, n, update=False, backup='strip')
423 self.strip(repo, n, update=False, backup='strip')
424
424
425 ctx = repo[rev]
425 ctx = repo[rev]
426 ret = hg.merge(repo, rev)
426 ret = hg.merge(repo, rev)
427 if ret:
427 if ret:
428 raise util.Abort(_("update returned %d") % ret)
428 raise util.Abort(_("update returned %d") % ret)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
430 if n is None:
430 if n is None:
431 raise util.Abort(_("repo commit failed"))
431 raise util.Abort(_("repo commit failed"))
432 try:
432 try:
433 ph = patchheader(mergeq.join(patch))
433 ph = patchheader(mergeq.join(patch))
434 except:
434 except:
435 raise util.Abort(_("unable to read %s") % patch)
435 raise util.Abort(_("unable to read %s") % patch)
436
436
437 patchf = self.opener(patch, "w")
437 patchf = self.opener(patch, "w")
438 comments = str(ph)
438 comments = str(ph)
439 if comments:
439 if comments:
440 patchf.write(comments)
440 patchf.write(comments)
441 self.printdiff(repo, head, n, fp=patchf)
441 self.printdiff(repo, head, n, fp=patchf)
442 patchf.close()
442 patchf.close()
443 self.removeundo(repo)
443 self.removeundo(repo)
444 return (0, n)
444 return (0, n)
445
445
446 def qparents(self, repo, rev=None):
446 def qparents(self, repo, rev=None):
447 if rev is None:
447 if rev is None:
448 (p1, p2) = repo.dirstate.parents()
448 (p1, p2) = repo.dirstate.parents()
449 if p2 == nullid:
449 if p2 == nullid:
450 return p1
450 return p1
451 if len(self.applied) == 0:
451 if len(self.applied) == 0:
452 return None
452 return None
453 return bin(self.applied[-1].rev)
453 return bin(self.applied[-1].rev)
454 pp = repo.changelog.parents(rev)
454 pp = repo.changelog.parents(rev)
455 if pp[1] != nullid:
455 if pp[1] != nullid:
456 arevs = [ x.rev for x in self.applied ]
456 arevs = [ x.rev for x in self.applied ]
457 p0 = hex(pp[0])
457 p0 = hex(pp[0])
458 p1 = hex(pp[1])
458 p1 = hex(pp[1])
459 if p0 in arevs:
459 if p0 in arevs:
460 return pp[0]
460 return pp[0]
461 if p1 in arevs:
461 if p1 in arevs:
462 return pp[1]
462 return pp[1]
463 return pp[0]
463 return pp[0]
464
464
465 def mergepatch(self, repo, mergeq, series):
465 def mergepatch(self, repo, mergeq, series):
466 if len(self.applied) == 0:
466 if len(self.applied) == 0:
467 # each of the patches merged in will have two parents. This
467 # each of the patches merged in will have two parents. This
468 # can confuse the qrefresh, qdiff, and strip code because it
468 # can confuse the qrefresh, qdiff, and strip code because it
469 # needs to know which parent is actually in the patch queue.
469 # needs to know which parent is actually in the patch queue.
470 # So, we insert a merge marker with only one parent. This way
470 # So, we insert a merge marker with only one parent. This way
471 # the first patch in the queue is never a merge patch.
471 # the first patch in the queue is never a merge patch.
472 #
472 #
473 pname = ".hg.patches.merge.marker"
473 pname = ".hg.patches.merge.marker"
474 n = repo.commit('[mq]: merge marker', force=True)
474 n = repo.commit('[mq]: merge marker', force=True)
475 self.removeundo(repo)
475 self.removeundo(repo)
476 self.applied.append(statusentry(hex(n), pname))
476 self.applied.append(statusentry(hex(n), pname))
477 self.applied_dirty = 1
477 self.applied_dirty = 1
478
478
479 head = self.qparents(repo)
479 head = self.qparents(repo)
480
480
481 for patch in series:
481 for patch in series:
482 patch = mergeq.lookup(patch, strict=True)
482 patch = mergeq.lookup(patch, strict=True)
483 if not patch:
483 if not patch:
484 self.ui.warn(_("patch %s does not exist\n") % patch)
484 self.ui.warn(_("patch %s does not exist\n") % patch)
485 return (1, None)
485 return (1, None)
486 pushable, reason = self.pushable(patch)
486 pushable, reason = self.pushable(patch)
487 if not pushable:
487 if not pushable:
488 self.explain_pushable(patch, all_patches=True)
488 self.explain_pushable(patch, all_patches=True)
489 continue
489 continue
490 info = mergeq.isapplied(patch)
490 info = mergeq.isapplied(patch)
491 if not info:
491 if not info:
492 self.ui.warn(_("patch %s is not applied\n") % patch)
492 self.ui.warn(_("patch %s is not applied\n") % patch)
493 return (1, None)
493 return (1, None)
494 rev = bin(info[1])
494 rev = bin(info[1])
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
496 if head:
496 if head:
497 self.applied.append(statusentry(hex(head), patch))
497 self.applied.append(statusentry(hex(head), patch))
498 self.applied_dirty = 1
498 self.applied_dirty = 1
499 if err:
499 if err:
500 return (err, head)
500 return (err, head)
501 self.save_dirty()
501 self.save_dirty()
502 return (0, head)
502 return (0, head)
503
503
504 def patch(self, repo, patchfile):
504 def patch(self, repo, patchfile):
505 '''Apply patchfile to the working directory.
505 '''Apply patchfile to the working directory.
506 patchfile: name of patch file'''
506 patchfile: name of patch file'''
507 files = {}
507 files = {}
508 try:
508 try:
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
510 files=files, eolmode=None)
510 files=files, eolmode=None)
511 except Exception, inst:
511 except Exception, inst:
512 self.ui.note(str(inst) + '\n')
512 self.ui.note(str(inst) + '\n')
513 if not self.ui.verbose:
513 if not self.ui.verbose:
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
515 return (False, files, False)
515 return (False, files, False)
516
516
517 return (True, files, fuzz)
517 return (True, files, fuzz)
518
518
519 def apply(self, repo, series, list=False, update_status=True,
519 def apply(self, repo, series, list=False, update_status=True,
520 strict=False, patchdir=None, merge=None, all_files={}):
520 strict=False, patchdir=None, merge=None, all_files={}):
521 wlock = lock = tr = None
521 wlock = lock = tr = None
522 try:
522 try:
523 wlock = repo.wlock()
523 wlock = repo.wlock()
524 lock = repo.lock()
524 lock = repo.lock()
525 tr = repo.transaction()
525 tr = repo.transaction()
526 try:
526 try:
527 ret = self._apply(repo, series, list, update_status,
527 ret = self._apply(repo, series, list, update_status,
528 strict, patchdir, merge, all_files=all_files)
528 strict, patchdir, merge, all_files=all_files)
529 tr.close()
529 tr.close()
530 self.save_dirty()
530 self.save_dirty()
531 return ret
531 return ret
532 except:
532 except:
533 try:
533 try:
534 tr.abort()
534 tr.abort()
535 finally:
535 finally:
536 repo.invalidate()
536 repo.invalidate()
537 repo.dirstate.invalidate()
537 repo.dirstate.invalidate()
538 raise
538 raise
539 finally:
539 finally:
540 del tr
540 del tr
541 release(lock, wlock)
541 release(lock, wlock)
542 self.removeundo(repo)
542 self.removeundo(repo)
543
543
544 def _apply(self, repo, series, list=False, update_status=True,
544 def _apply(self, repo, series, list=False, update_status=True,
545 strict=False, patchdir=None, merge=None, all_files={}):
545 strict=False, patchdir=None, merge=None, all_files={}):
546 '''returns (error, hash)
546 '''returns (error, hash)
547 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
547 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
548 # TODO unify with commands.py
548 # TODO unify with commands.py
549 if not patchdir:
549 if not patchdir:
550 patchdir = self.path
550 patchdir = self.path
551 err = 0
551 err = 0
552 n = None
552 n = None
553 for patchname in series:
553 for patchname in series:
554 pushable, reason = self.pushable(patchname)
554 pushable, reason = self.pushable(patchname)
555 if not pushable:
555 if not pushable:
556 self.explain_pushable(patchname, all_patches=True)
556 self.explain_pushable(patchname, all_patches=True)
557 continue
557 continue
558 self.ui.warn(_("applying %s\n") % patchname)
558 self.ui.warn(_("applying %s\n") % patchname)
559 pf = os.path.join(patchdir, patchname)
559 pf = os.path.join(patchdir, patchname)
560
560
561 try:
561 try:
562 ph = patchheader(self.join(patchname))
562 ph = patchheader(self.join(patchname))
563 except:
563 except:
564 self.ui.warn(_("unable to read %s\n") % patchname)
564 self.ui.warn(_("unable to read %s\n") % patchname)
565 err = 1
565 err = 1
566 break
566 break
567
567
568 message = ph.message
568 message = ph.message
569 if not message:
569 if not message:
570 message = _("imported patch %s\n") % patchname
570 message = _("imported patch %s\n") % patchname
571 else:
571 else:
572 if list:
572 if list:
573 message.append(_("\nimported patch %s") % patchname)
573 message.append(_("\nimported patch %s") % patchname)
574 message = '\n'.join(message)
574 message = '\n'.join(message)
575
575
576 if ph.haspatch:
576 if ph.haspatch:
577 (patcherr, files, fuzz) = self.patch(repo, pf)
577 (patcherr, files, fuzz) = self.patch(repo, pf)
578 all_files.update(files)
578 all_files.update(files)
579 patcherr = not patcherr
579 patcherr = not patcherr
580 else:
580 else:
581 self.ui.warn(_("patch %s is empty\n") % patchname)
581 self.ui.warn(_("patch %s is empty\n") % patchname)
582 patcherr, files, fuzz = 0, [], 0
582 patcherr, files, fuzz = 0, [], 0
583
583
584 if merge and files:
584 if merge and files:
585 # Mark as removed/merged and update dirstate parent info
585 # Mark as removed/merged and update dirstate parent info
586 removed = []
586 removed = []
587 merged = []
587 merged = []
588 for f in files:
588 for f in files:
589 if os.path.exists(repo.wjoin(f)):
589 if os.path.exists(repo.wjoin(f)):
590 merged.append(f)
590 merged.append(f)
591 else:
591 else:
592 removed.append(f)
592 removed.append(f)
593 for f in removed:
593 for f in removed:
594 repo.dirstate.remove(f)
594 repo.dirstate.remove(f)
595 for f in merged:
595 for f in merged:
596 repo.dirstate.merge(f)
596 repo.dirstate.merge(f)
597 p1, p2 = repo.dirstate.parents()
597 p1, p2 = repo.dirstate.parents()
598 repo.dirstate.setparents(p1, merge)
598 repo.dirstate.setparents(p1, merge)
599
599
600 files = patch.updatedir(self.ui, repo, files)
600 files = patch.updatedir(self.ui, repo, files)
601 match = cmdutil.matchfiles(repo, files or [])
601 match = cmdutil.matchfiles(repo, files or [])
602 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
602 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
603
603
604 if n is None:
604 if n is None:
605 raise util.Abort(_("repo commit failed"))
605 raise util.Abort(_("repo commit failed"))
606
606
607 if update_status:
607 if update_status:
608 self.applied.append(statusentry(hex(n), patchname))
608 self.applied.append(statusentry(hex(n), patchname))
609
609
610 if patcherr:
610 if patcherr:
611 self.ui.warn(_("patch failed, rejects left in working dir\n"))
611 self.ui.warn(_("patch failed, rejects left in working dir\n"))
612 err = 2
612 err = 2
613 break
613 break
614
614
615 if fuzz and strict:
615 if fuzz and strict:
616 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
616 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
617 err = 3
617 err = 3
618 break
618 break
619 return (err, n)
619 return (err, n)
620
620
621 def _cleanup(self, patches, numrevs, keep=False):
621 def _cleanup(self, patches, numrevs, keep=False):
622 if not keep:
622 if not keep:
623 r = self.qrepo()
623 r = self.qrepo()
624 if r:
624 if r:
625 r.remove(patches, True)
625 r.remove(patches, True)
626 else:
626 else:
627 for p in patches:
627 for p in patches:
628 os.unlink(self.join(p))
628 os.unlink(self.join(p))
629
629
630 if numrevs:
630 if numrevs:
631 del self.applied[:numrevs]
631 del self.applied[:numrevs]
632 self.applied_dirty = 1
632 self.applied_dirty = 1
633
633
634 for i in sorted([self.find_series(p) for p in patches], reverse=True):
634 for i in sorted([self.find_series(p) for p in patches], reverse=True):
635 del self.full_series[i]
635 del self.full_series[i]
636 self.parse_series()
636 self.parse_series()
637 self.series_dirty = 1
637 self.series_dirty = 1
638
638
639 def _revpatches(self, repo, revs):
639 def _revpatches(self, repo, revs):
640 firstrev = repo[self.applied[0].rev].rev()
640 firstrev = repo[self.applied[0].rev].rev()
641 patches = []
641 patches = []
642 for i, rev in enumerate(revs):
642 for i, rev in enumerate(revs):
643
643
644 if rev < firstrev:
644 if rev < firstrev:
645 raise util.Abort(_('revision %d is not managed') % rev)
645 raise util.Abort(_('revision %d is not managed') % rev)
646
646
647 ctx = repo[rev]
647 ctx = repo[rev]
648 base = bin(self.applied[i].rev)
648 base = bin(self.applied[i].rev)
649 if ctx.node() != base:
649 if ctx.node() != base:
650 msg = _('cannot delete revision %d above applied patches')
650 msg = _('cannot delete revision %d above applied patches')
651 raise util.Abort(msg % rev)
651 raise util.Abort(msg % rev)
652
652
653 patch = self.applied[i].name
653 patch = self.applied[i].name
654 for fmt in ('[mq]: %s', 'imported patch %s'):
654 for fmt in ('[mq]: %s', 'imported patch %s'):
655 if ctx.description() == fmt % patch:
655 if ctx.description() == fmt % patch:
656 msg = _('patch %s finalized without changeset message\n')
656 msg = _('patch %s finalized without changeset message\n')
657 repo.ui.status(msg % patch)
657 repo.ui.status(msg % patch)
658 break
658 break
659
659
660 patches.append(patch)
660 patches.append(patch)
661 return patches
661 return patches
662
662
663 def finish(self, repo, revs):
663 def finish(self, repo, revs):
664 patches = self._revpatches(repo, sorted(revs))
664 patches = self._revpatches(repo, sorted(revs))
665 self._cleanup(patches, len(patches))
665 self._cleanup(patches, len(patches))
666
666
667 def delete(self, repo, patches, opts):
667 def delete(self, repo, patches, opts):
668 if not patches and not opts.get('rev'):
668 if not patches and not opts.get('rev'):
669 raise util.Abort(_('qdelete requires at least one revision or '
669 raise util.Abort(_('qdelete requires at least one revision or '
670 'patch name'))
670 'patch name'))
671
671
672 realpatches = []
672 realpatches = []
673 for patch in patches:
673 for patch in patches:
674 patch = self.lookup(patch, strict=True)
674 patch = self.lookup(patch, strict=True)
675 info = self.isapplied(patch)
675 info = self.isapplied(patch)
676 if info:
676 if info:
677 raise util.Abort(_("cannot delete applied patch %s") % patch)
677 raise util.Abort(_("cannot delete applied patch %s") % patch)
678 if patch not in self.series:
678 if patch not in self.series:
679 raise util.Abort(_("patch %s not in series file") % patch)
679 raise util.Abort(_("patch %s not in series file") % patch)
680 realpatches.append(patch)
680 realpatches.append(patch)
681
681
682 numrevs = 0
682 numrevs = 0
683 if opts.get('rev'):
683 if opts.get('rev'):
684 if not self.applied:
684 if not self.applied:
685 raise util.Abort(_('no patches applied'))
685 raise util.Abort(_('no patches applied'))
686 revs = cmdutil.revrange(repo, opts['rev'])
686 revs = cmdutil.revrange(repo, opts['rev'])
687 if len(revs) > 1 and revs[0] > revs[1]:
687 if len(revs) > 1 and revs[0] > revs[1]:
688 revs.reverse()
688 revs.reverse()
689 revpatches = self._revpatches(repo, revs)
689 revpatches = self._revpatches(repo, revs)
690 realpatches += revpatches
690 realpatches += revpatches
691 numrevs = len(revpatches)
691 numrevs = len(revpatches)
692
692
693 self._cleanup(realpatches, numrevs, opts.get('keep'))
693 self._cleanup(realpatches, numrevs, opts.get('keep'))
694
694
695 def check_toppatch(self, repo):
695 def check_toppatch(self, repo):
696 if len(self.applied) > 0:
696 if len(self.applied) > 0:
697 top = bin(self.applied[-1].rev)
697 top = bin(self.applied[-1].rev)
698 pp = repo.dirstate.parents()
698 pp = repo.dirstate.parents()
699 if top not in pp:
699 if top not in pp:
700 raise util.Abort(_("working directory revision is not qtip"))
700 raise util.Abort(_("working directory revision is not qtip"))
701 return top
701 return top
702 return None
702 return None
703 def check_localchanges(self, repo, force=False, refresh=True):
703 def check_localchanges(self, repo, force=False, refresh=True):
704 m, a, r, d = repo.status()[:4]
704 m, a, r, d = repo.status()[:4]
705 if m or a or r or d:
705 if m or a or r or d:
706 if not force:
706 if not force:
707 if refresh:
707 if refresh:
708 raise util.Abort(_("local changes found, refresh first"))
708 raise util.Abort(_("local changes found, refresh first"))
709 else:
709 else:
710 raise util.Abort(_("local changes found"))
710 raise util.Abort(_("local changes found"))
711 return m, a, r, d
711 return m, a, r, d
712
712
713 _reserved = ('series', 'status', 'guards')
713 _reserved = ('series', 'status', 'guards')
714 def check_reserved_name(self, name):
714 def check_reserved_name(self, name):
715 if (name in self._reserved or name.startswith('.hg')
715 if (name in self._reserved or name.startswith('.hg')
716 or name.startswith('.mq')):
716 or name.startswith('.mq')):
717 raise util.Abort(_('"%s" cannot be used as the name of a patch')
717 raise util.Abort(_('"%s" cannot be used as the name of a patch')
718 % name)
718 % name)
719
719
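# Illustrative checks: check_reserved_name('series') and
# check_reserved_name('.hg-fix') both abort, while an ordinary name such
# as 'fix-frobnicator.patch' (made up) passes.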
720 def new(self, repo, patchfn, *pats, **opts):
720 def new(self, repo, patchfn, *pats, **opts):
721 """options:
721 """options:
722 msg: a string or a no-argument function returning a string
722 msg: a string or a no-argument function returning a string
723 """
723 """
724 msg = opts.get('msg')
724 msg = opts.get('msg')
725 force = opts.get('force')
725 force = opts.get('force')
726 user = opts.get('user')
726 user = opts.get('user')
727 date = opts.get('date')
727 date = opts.get('date')
728 if date:
728 if date:
729 date = util.parsedate(date)
729 date = util.parsedate(date)
730 self.check_reserved_name(patchfn)
730 self.check_reserved_name(patchfn)
731 if os.path.exists(self.join(patchfn)):
731 if os.path.exists(self.join(patchfn)):
732 raise util.Abort(_('patch "%s" already exists') % patchfn)
732 raise util.Abort(_('patch "%s" already exists') % patchfn)
733 if opts.get('include') or opts.get('exclude') or pats:
733 if opts.get('include') or opts.get('exclude') or pats:
734 match = cmdutil.match(repo, pats, opts)
734 match = cmdutil.match(repo, pats, opts)
735 # detect missing files in pats
735 # detect missing files in pats
736 def badfn(f, msg):
736 def badfn(f, msg):
737 raise util.Abort('%s: %s' % (f, msg))
737 raise util.Abort('%s: %s' % (f, msg))
738 match.bad = badfn
738 match.bad = badfn
739 m, a, r, d = repo.status(match=match)[:4]
739 m, a, r, d = repo.status(match=match)[:4]
740 else:
740 else:
741 m, a, r, d = self.check_localchanges(repo, force)
741 m, a, r, d = self.check_localchanges(repo, force)
742 match = cmdutil.matchfiles(repo, m + a + r)
742 match = cmdutil.matchfiles(repo, m + a + r)
743 commitfiles = m + a + r
743 commitfiles = m + a + r
744 self.check_toppatch(repo)
744 self.check_toppatch(repo)
745 insert = self.full_series_end()
745 insert = self.full_series_end()
746 wlock = repo.wlock()
746 wlock = repo.wlock()
747 try:
747 try:
748 # if patch file write fails, abort early
748 # if patch file write fails, abort early
749 p = self.opener(patchfn, "w")
749 p = self.opener(patchfn, "w")
750 try:
750 try:
751 if date:
751 if date:
752 p.write("# HG changeset patch\n")
752 p.write("# HG changeset patch\n")
753 if user:
753 if user:
754 p.write("# User " + user + "\n")
754 p.write("# User " + user + "\n")
755 p.write("# Date %d %d\n\n" % date)
755 p.write("# Date %d %d\n\n" % date)
756 elif user:
756 elif user:
757 p.write("From: " + user + "\n\n")
757 p.write("From: " + user + "\n\n")
758
758
759 if hasattr(msg, '__call__'):
759 if hasattr(msg, '__call__'):
760 msg = msg()
760 msg = msg()
761 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
761 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
762 n = repo.commit(commitmsg, user, date, match=match, force=True)
762 n = repo.commit(commitmsg, user, date, match=match, force=True)
763 if n is None:
763 if n is None:
764 raise util.Abort(_("repo commit failed"))
764 raise util.Abort(_("repo commit failed"))
765 try:
765 try:
766 self.full_series[insert:insert] = [patchfn]
766 self.full_series[insert:insert] = [patchfn]
767 self.applied.append(statusentry(hex(n), patchfn))
767 self.applied.append(statusentry(hex(n), patchfn))
768 self.parse_series()
768 self.parse_series()
769 self.series_dirty = 1
769 self.series_dirty = 1
770 self.applied_dirty = 1
770 self.applied_dirty = 1
771 if msg:
771 if msg:
772 msg = msg + "\n\n"
772 msg = msg + "\n\n"
773 p.write(msg)
773 p.write(msg)
774 if commitfiles:
774 if commitfiles:
775 diffopts = self.diffopts()
775 diffopts = self.diffopts()
776 if opts.get('git'): diffopts.git = True
776 if opts.get('git'): diffopts.git = True
777 parent = self.qparents(repo, n)
777 parent = self.qparents(repo, n)
778 chunks = patch.diff(repo, node1=parent, node2=n,
778 chunks = patch.diff(repo, node1=parent, node2=n,
779 match=match, opts=diffopts)
779 match=match, opts=diffopts)
780 for chunk in chunks:
780 for chunk in chunks:
781 p.write(chunk)
781 p.write(chunk)
782 p.close()
782 p.close()
783 wlock.release()
783 wlock.release()
784 wlock = None
784 wlock = None
785 r = self.qrepo()
785 r = self.qrepo()
786 if r: r.add([patchfn])
786 if r: r.add([patchfn])
787 except:
787 except:
788 repo.rollback()
788 repo.rollback()
789 raise
789 raise
790 except Exception:
790 except Exception:
791 patchpath = self.join(patchfn)
791 patchpath = self.join(patchfn)
792 try:
792 try:
793 os.unlink(patchpath)
793 os.unlink(patchpath)
794 except:
794 except:
795 self.ui.warn(_('error unlinking %s\n') % patchpath)
795 self.ui.warn(_('error unlinking %s\n') % patchpath)
796 raise
796 raise
797 self.removeundo(repo)
797 self.removeundo(repo)
798 finally:
798 finally:
799 release(wlock)
799 release(wlock)
800
800
801 def strip(self, repo, rev, update=True, backup="all", force=None):
801 def strip(self, repo, rev, update=True, backup="all", force=None):
802 wlock = lock = None
802 wlock = lock = None
803 try:
803 try:
804 wlock = repo.wlock()
804 wlock = repo.wlock()
805 lock = repo.lock()
805 lock = repo.lock()
806
806
807 if update:
807 if update:
808 self.check_localchanges(repo, force=force, refresh=False)
808 self.check_localchanges(repo, force=force, refresh=False)
809 urev = self.qparents(repo, rev)
809 urev = self.qparents(repo, rev)
810 hg.clean(repo, urev)
810 hg.clean(repo, urev)
811 repo.dirstate.write()
811 repo.dirstate.write()
812
812
813 self.removeundo(repo)
813 self.removeundo(repo)
814 repair.strip(self.ui, repo, rev, backup)
814 repair.strip(self.ui, repo, rev, backup)
815 # strip may have unbundled a set of backed up revisions after
815 # strip may have unbundled a set of backed up revisions after
816 # the actual strip
816 # the actual strip
817 self.removeundo(repo)
817 self.removeundo(repo)
818 finally:
818 finally:
819 release(lock, wlock)
819 release(lock, wlock)
820
820
821 def isapplied(self, patch):
821 def isapplied(self, patch):
822 """returns (index, rev, patch)"""
822 """returns (index, rev, patch)"""
823 for i, a in enumerate(self.applied):
823 for i, a in enumerate(self.applied):
824 if a.name == patch:
824 if a.name == patch:
825 return (i, a.rev, a.name)
825 return (i, a.rev, a.name)
826 return None
826 return None
827
827
828 # if the exact patch name does not exist, we try a few
828 # if the exact patch name does not exist, we try a few
829 # variations. If strict is passed, we try only #1
829 # variations. If strict is passed, we try only #1
830 #
830 #
831 # 1) a number to indicate an offset in the series file
831 # 1) a number to indicate an offset in the series file
832 # 2) a unique substring of the patch name was given
832 # 2) a unique substring of the patch name was given
833 # 3) patchname[-+]num to indicate an offset in the series file
833 # 3) patchname[-+]num to indicate an offset in the series file
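# Illustrative lookups, assuming a hypothetical series
# ['first.patch', 'second.patch'] and no file of the same name in the
# patch directory:
#   lookup('1')              -> 'second.patch'  (offset into the series)
#   lookup('sec')            -> 'second.patch'  (unique substring)
#   lookup('second.patch-1') -> 'first.patch'   (relative offset)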
834 def lookup(self, patch, strict=False):
834 def lookup(self, patch, strict=False):
835 patch = patch and str(patch)
835 patch = patch and str(patch)
836
836
837 def partial_name(s):
837 def partial_name(s):
838 if s in self.series:
838 if s in self.series:
839 return s
839 return s
840 matches = [x for x in self.series if s in x]
840 matches = [x for x in self.series if s in x]
841 if len(matches) > 1:
841 if len(matches) > 1:
842 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
842 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
843 for m in matches:
843 for m in matches:
844 self.ui.warn(' %s\n' % m)
844 self.ui.warn(' %s\n' % m)
845 return None
845 return None
846 if matches:
846 if matches:
847 return matches[0]
847 return matches[0]
848 if len(self.series) > 0 and len(self.applied) > 0:
848 if len(self.series) > 0 and len(self.applied) > 0:
849 if s == 'qtip':
849 if s == 'qtip':
850 return self.series[self.series_end(True)-1]
850 return self.series[self.series_end(True)-1]
851 if s == 'qbase':
851 if s == 'qbase':
852 return self.series[0]
852 return self.series[0]
853 return None
853 return None
854
854
855 if patch is None:
855 if patch is None:
856 return None
856 return None
857 if patch in self.series:
857 if patch in self.series:
858 return patch
858 return patch
859
859
860 if not os.path.isfile(self.join(patch)):
860 if not os.path.isfile(self.join(patch)):
861 try:
861 try:
862 sno = int(patch)
862 sno = int(patch)
863 except(ValueError, OverflowError):
863 except(ValueError, OverflowError):
864 pass
864 pass
865 else:
865 else:
866 if -len(self.series) <= sno < len(self.series):
866 if -len(self.series) <= sno < len(self.series):
867 return self.series[sno]
867 return self.series[sno]
868
868
869 if not strict:
869 if not strict:
870 res = partial_name(patch)
870 res = partial_name(patch)
871 if res:
871 if res:
872 return res
872 return res
873 minus = patch.rfind('-')
873 minus = patch.rfind('-')
874 if minus >= 0:
874 if minus >= 0:
875 res = partial_name(patch[:minus])
875 res = partial_name(patch[:minus])
876 if res:
876 if res:
877 i = self.series.index(res)
877 i = self.series.index(res)
878 try:
878 try:
879 off = int(patch[minus+1:] or 1)
879 off = int(patch[minus+1:] or 1)
880 except(ValueError, OverflowError):
880 except(ValueError, OverflowError):
881 pass
881 pass
882 else:
882 else:
883 if i - off >= 0:
883 if i - off >= 0:
884 return self.series[i - off]
884 return self.series[i - off]
885 plus = patch.rfind('+')
885 plus = patch.rfind('+')
886 if plus >= 0:
886 if plus >= 0:
887 res = partial_name(patch[:plus])
887 res = partial_name(patch[:plus])
888 if res:
888 if res:
889 i = self.series.index(res)
889 i = self.series.index(res)
890 try:
890 try:
891 off = int(patch[plus+1:] or 1)
891 off = int(patch[plus+1:] or 1)
892 except(ValueError, OverflowError):
892 except(ValueError, OverflowError):
893 pass
893 pass
894 else:
894 else:
895 if i + off < len(self.series):
895 if i + off < len(self.series):
896 return self.series[i + off]
896 return self.series[i + off]
897 raise util.Abort(_("patch %s not in series") % patch)
897 raise util.Abort(_("patch %s not in series") % patch)
898
898
899 def push(self, repo, patch=None, force=False, list=False,
899 def push(self, repo, patch=None, force=False, list=False,
900 mergeq=None, all=False):
900 mergeq=None, all=False):
901 wlock = repo.wlock()
901 wlock = repo.wlock()
902 try:
902 try:
903 if repo.dirstate.parents()[0] not in repo.heads():
903 if repo.dirstate.parents()[0] not in repo.heads():
904 self.ui.status(_("(working directory not at a head)\n"))
904 self.ui.status(_("(working directory not at a head)\n"))
905
905
906 if not self.series:
906 if not self.series:
907 self.ui.warn(_('no patches in series\n'))
907 self.ui.warn(_('no patches in series\n'))
908 return 0
908 return 0
909
909
910 patch = self.lookup(patch)
910 patch = self.lookup(patch)
911 # Suppose our series file is: A B C and the current 'top'
911 # Suppose our series file is: A B C and the current 'top'
912 # patch is B. qpush C should be performed (moving forward),
912 # patch is B. qpush C should be performed (moving forward),
913 # qpush B is a NOP (no change), and qpush A is an error (you can't
913 # qpush B is a NOP (no change), and qpush A is an error (you can't
914 # go backwards with qpush).
914 # go backwards with qpush).
915 if patch:
915 if patch:
916 info = self.isapplied(patch)
916 info = self.isapplied(patch)
917 if info:
917 if info:
918 if info[0] < len(self.applied) - 1:
918 if info[0] < len(self.applied) - 1:
919 raise util.Abort(
919 raise util.Abort(
920 _("cannot push to a previous patch: %s") % patch)
920 _("cannot push to a previous patch: %s") % patch)
921 self.ui.warn(
921 self.ui.warn(
922 _('qpush: %s is already at the top\n') % patch)
922 _('qpush: %s is already at the top\n') % patch)
923 return
923 return
924 pushable, reason = self.pushable(patch)
924 pushable, reason = self.pushable(patch)
925 if not pushable:
925 if not pushable:
926 if reason:
926 if reason:
927 reason = _('guarded by %r') % reason
927 reason = _('guarded by %r') % reason
928 else:
928 else:
929 reason = _('no matching guards')
929 reason = _('no matching guards')
930 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
930 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
931 return 1
931 return 1
932 elif all:
932 elif all:
933 patch = self.series[-1]
933 patch = self.series[-1]
934 if self.isapplied(patch):
934 if self.isapplied(patch):
935 self.ui.warn(_('all patches are currently applied\n'))
935 self.ui.warn(_('all patches are currently applied\n'))
936 return 0
936 return 0
937
937
938 # Following the above example, starting at 'top' of B:
938 # Following the above example, starting at 'top' of B:
939 # qpush should be performed (pushes C), but a subsequent
939 # qpush should be performed (pushes C), but a subsequent
940 # qpush without an argument is an error (nothing to
940 # qpush without an argument is an error (nothing to
941 # apply). This allows a loop of "...while hg qpush..." to
941 # apply). This allows a loop of "...while hg qpush..." to
942 # work as it detects an error when done
942 # work as it detects an error when done
943 start = self.series_end()
943 start = self.series_end()
944 if start == len(self.series):
944 if start == len(self.series):
945 self.ui.warn(_('patch series already fully applied\n'))
945 self.ui.warn(_('patch series already fully applied\n'))
946 return 1
946 return 1
947 if not force:
947 if not force:
948 self.check_localchanges(repo)
948 self.check_localchanges(repo)
949
949
950 self.applied_dirty = 1
950 self.applied_dirty = 1
951 if start > 0:
951 if start > 0:
952 self.check_toppatch(repo)
952 self.check_toppatch(repo)
953 if not patch:
953 if not patch:
954 patch = self.series[start]
954 patch = self.series[start]
955 end = start + 1
955 end = start + 1
956 else:
956 else:
957 end = self.series.index(patch, start) + 1
957 end = self.series.index(patch, start) + 1
958
958
959 s = self.series[start:end]
959 s = self.series[start:end]
960 all_files = {}
960 all_files = {}
961 try:
961 try:
962 if mergeq:
962 if mergeq:
963 ret = self.mergepatch(repo, mergeq, s)
963 ret = self.mergepatch(repo, mergeq, s)
964 else:
964 else:
965 ret = self.apply(repo, s, list, all_files=all_files)
965 ret = self.apply(repo, s, list, all_files=all_files)
966 except:
966 except:
967 self.ui.warn(_('cleaning up working directory...'))
967 self.ui.warn(_('cleaning up working directory...'))
968 node = repo.dirstate.parents()[0]
968 node = repo.dirstate.parents()[0]
969 hg.revert(repo, node, None)
969 hg.revert(repo, node, None)
970 unknown = repo.status(unknown=True)[4]
970 unknown = repo.status(unknown=True)[4]
971 # only remove unknown files that we know we touched or
971 # only remove unknown files that we know we touched or
972 # created while patching
972 # created while patching
973 for f in unknown:
973 for f in unknown:
974 if f in all_files:
974 if f in all_files:
975 util.unlink(repo.wjoin(f))
975 util.unlink(repo.wjoin(f))
976 self.ui.warn(_('done\n'))
976 self.ui.warn(_('done\n'))
977 raise
977 raise
978
978
979 top = self.applied[-1].name
979 top = self.applied[-1].name
980 if ret[0] and ret[0] > 1:
980 if ret[0] and ret[0] > 1:
981 msg = _("errors during apply, please fix and refresh %s\n")
981 msg = _("errors during apply, please fix and refresh %s\n")
982 self.ui.write(msg % top)
982 self.ui.write(msg % top)
983 else:
983 else:
984 self.ui.write(_("now at: %s\n") % top)
984 self.ui.write(_("now at: %s\n") % top)
985 return ret[0]
985 return ret[0]
986
986
987 finally:
987 finally:
988 wlock.release()
988 wlock.release()
989
989
990 def pop(self, repo, patch=None, force=False, update=True, all=False):
990 def pop(self, repo, patch=None, force=False, update=True, all=False):
991 def getfile(f, rev, flags):
991 def getfile(f, rev, flags):
992 t = repo.file(f).read(rev)
992 t = repo.file(f).read(rev)
993 repo.wwrite(f, t, flags)
993 repo.wwrite(f, t, flags)
994
994
995 wlock = repo.wlock()
995 wlock = repo.wlock()
996 try:
996 try:
997 if patch:
997 if patch:
998 # index, rev, patch
998 # index, rev, patch
999 info = self.isapplied(patch)
999 info = self.isapplied(patch)
1000 if not info:
1000 if not info:
1001 patch = self.lookup(patch)
1001 patch = self.lookup(patch)
1002 info = self.isapplied(patch)
1002 info = self.isapplied(patch)
1003 if not info:
1003 if not info:
1004 raise util.Abort(_("patch %s is not applied") % patch)
1004 raise util.Abort(_("patch %s is not applied") % patch)
1005
1005
1006 if len(self.applied) == 0:
1006 if len(self.applied) == 0:
1007 # Allow qpop -a to work repeatedly,
1007 # Allow qpop -a to work repeatedly,
1008 # but not qpop without an argument
1008 # but not qpop without an argument
1009 self.ui.warn(_("no patches applied\n"))
1009 self.ui.warn(_("no patches applied\n"))
1010 return not all
1010 return not all
1011
1011
1012 if all:
1012 if all:
1013 start = 0
1013 start = 0
1014 elif patch:
1014 elif patch:
1015 start = info[0] + 1
1015 start = info[0] + 1
1016 else:
1016 else:
1017 start = len(self.applied) - 1
1017 start = len(self.applied) - 1
1018
1018
1019 if start >= len(self.applied):
1019 if start >= len(self.applied):
1020 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1020 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1021 return
1021 return
1022
1022
1023 if not update:
1023 if not update:
1024 parents = repo.dirstate.parents()
1024 parents = repo.dirstate.parents()
1025 rr = [ bin(x.rev) for x in self.applied ]
1025 rr = [ bin(x.rev) for x in self.applied ]
1026 for p in parents:
1026 for p in parents:
1027 if p in rr:
1027 if p in rr:
1028 self.ui.warn(_("qpop: forcing dirstate update\n"))
1028 self.ui.warn(_("qpop: forcing dirstate update\n"))
1029 update = True
1029 update = True
1030 else:
1030 else:
1031 parents = [p.hex() for p in repo[None].parents()]
1031 parents = [p.hex() for p in repo[None].parents()]
1032 needupdate = False
1032 needupdate = False
1033 for entry in self.applied[start:]:
1033 for entry in self.applied[start:]:
1034 if entry.rev in parents:
1034 if entry.rev in parents:
1035 needupdate = True
1035 needupdate = True
1036 break
1036 break
1037 update = needupdate
1037 update = needupdate
1038
1038
1039 if not force and update:
1039 if not force and update:
1040 self.check_localchanges(repo)
1040 self.check_localchanges(repo)
1041
1041
1042 self.applied_dirty = 1
1042 self.applied_dirty = 1
1043 end = len(self.applied)
1043 end = len(self.applied)
1044 rev = bin(self.applied[start].rev)
1044 rev = bin(self.applied[start].rev)
1045 if update:
1045 if update:
1046 top = self.check_toppatch(repo)
1046 top = self.check_toppatch(repo)
1047
1047
1048 try:
1048 try:
1049 heads = repo.changelog.heads(rev)
1049 heads = repo.changelog.heads(rev)
1050 except error.LookupError:
1050 except error.LookupError:
1051 node = short(rev)
1051 node = short(rev)
1052 raise util.Abort(_('trying to pop unknown node %s') % node)
1052 raise util.Abort(_('trying to pop unknown node %s') % node)
1053
1053
1054 if heads != [bin(self.applied[-1].rev)]:
1054 if heads != [bin(self.applied[-1].rev)]:
1055 raise util.Abort(_("popping would remove a revision not "
1055 raise util.Abort(_("popping would remove a revision not "
1056 "managed by this patch queue"))
1056 "managed by this patch queue"))
1057
1057
1058 # we know there are no local changes, so we can make a simplified
1058 # we know there are no local changes, so we can make a simplified
1059 # form of hg.update.
1059 # form of hg.update.
1060 if update:
1060 if update:
1061 qp = self.qparents(repo, rev)
1061 qp = self.qparents(repo, rev)
1062 changes = repo.changelog.read(qp)
1062 changes = repo.changelog.read(qp)
1063 mmap = repo.manifest.read(changes[0])
1063 mmap = repo.manifest.read(changes[0])
1064 m, a, r, d = repo.status(qp, top)[:4]
1064 m, a, r, d = repo.status(qp, top)[:4]
1065 if d:
1065 if d:
1066 raise util.Abort(_("deletions found between repo revs"))
1066 raise util.Abort(_("deletions found between repo revs"))
1067 for f in m:
1067 for f in m:
1068 getfile(f, mmap[f], mmap.flags(f))
1068 getfile(f, mmap[f], mmap.flags(f))
1069 for f in r:
1069 for f in r:
1070 getfile(f, mmap[f], mmap.flags(f))
1070 getfile(f, mmap[f], mmap.flags(f))
1071 for f in m + r:
1071 for f in m + r:
1072 repo.dirstate.normal(f)
1072 repo.dirstate.normal(f)
1073 for f in a:
1073 for f in a:
1074 try:
1074 try:
1075 os.unlink(repo.wjoin(f))
1075 os.unlink(repo.wjoin(f))
1076 except OSError, e:
1076 except OSError, e:
1077 if e.errno != errno.ENOENT:
1077 if e.errno != errno.ENOENT:
1078 raise
1078 raise
1079 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
1079 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
1080 except: pass
1080 except: pass
1081 repo.dirstate.forget(f)
1081 repo.dirstate.forget(f)
1082 repo.dirstate.setparents(qp, nullid)
1082 repo.dirstate.setparents(qp, nullid)
1083 del self.applied[start:end]
1083 del self.applied[start:end]
1084 self.strip(repo, rev, update=False, backup='strip')
1084 self.strip(repo, rev, update=False, backup='strip')
1085 if len(self.applied):
1085 if len(self.applied):
1086 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1086 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1087 else:
1087 else:
1088 self.ui.write(_("patch queue now empty\n"))
1088 self.ui.write(_("patch queue now empty\n"))
1089 finally:
1089 finally:
1090 wlock.release()
1090 wlock.release()
1091
1091
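# Note on the "simplified form of hg.update" used in pop() above: since
# qpop has already checked for local changes, files modified or removed
# by the popped patches can be restored straight from the new queue
# parent's manifest, files those patches added are unlinked and
# forgotten, the dirstate parents are reset to the queue parent, and the
# popped changesets are then stripped.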
1092 def diff(self, repo, pats, opts):
1092 def diff(self, repo, pats, opts):
1093 top = self.check_toppatch(repo)
1093 top = self.check_toppatch(repo)
1094 if not top:
1094 if not top:
1095 self.ui.write(_("no patches applied\n"))
1095 self.ui.write(_("no patches applied\n"))
1096 return
1096 return
1097 qp = self.qparents(repo, top)
1097 qp = self.qparents(repo, top)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1100
1100
1101 def refresh(self, repo, pats=None, **opts):
1101 def refresh(self, repo, pats=None, **opts):
1102 if len(self.applied) == 0:
1102 if len(self.applied) == 0:
1103 self.ui.write(_("no patches applied\n"))
1103 self.ui.write(_("no patches applied\n"))
1104 return 1
1104 return 1
1105 msg = opts.get('msg', '').rstrip()
1105 msg = opts.get('msg', '').rstrip()
1106 newuser = opts.get('user')
1106 newuser = opts.get('user')
1107 newdate = opts.get('date')
1107 newdate = opts.get('date')
1108 if newdate:
1108 if newdate:
1109 newdate = '%d %d' % util.parsedate(newdate)
1109 newdate = '%d %d' % util.parsedate(newdate)
1110 wlock = repo.wlock()
1110 wlock = repo.wlock()
1111 try:
1111 try:
1112 self.check_toppatch(repo)
1112 self.check_toppatch(repo)
1113 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1113 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1114 top = bin(top)
1114 top = bin(top)
1115 if repo.changelog.heads(top) != [top]:
1115 if repo.changelog.heads(top) != [top]:
1116 raise util.Abort(_("cannot refresh a revision with children"))
1116 raise util.Abort(_("cannot refresh a revision with children"))
1117 cparents = repo.changelog.parents(top)
1117 cparents = repo.changelog.parents(top)
1118 patchparent = self.qparents(repo, top)
1118 patchparent = self.qparents(repo, top)
1119 ph = patchheader(self.join(patchfn))
1119 ph = patchheader(self.join(patchfn))
1120
1120
1121 patchf = self.opener(patchfn, 'r')
1121 patchf = self.opener(patchfn, 'r')
1122
1122
1123 # if the patch was a git patch, refresh it as a git patch
1123 # if the patch was a git patch, refresh it as a git patch
1124 for line in patchf:
1124 for line in patchf:
1125 if line.startswith('diff --git'):
1125 if line.startswith('diff --git'):
1126 self.diffopts().git = True
1126 self.diffopts().git = True
1127 break
1127 break
1128
1128
1129 if msg:
1129 if msg:
1130 ph.setmessage(msg)
1130 ph.setmessage(msg)
1131 if newuser:
1131 if newuser:
1132 ph.setuser(newuser)
1132 ph.setuser(newuser)
1133 if newdate:
1133 if newdate:
1134 ph.setdate(newdate)
1134 ph.setdate(newdate)
1135
1135
1136 # only commit new patch when write is complete
1136 # only commit new patch when write is complete
1137 patchf = self.opener(patchfn, 'w', atomictemp=True)
1137 patchf = self.opener(patchfn, 'w', atomictemp=True)
1138
1138
1139 patchf.seek(0)
1139 patchf.seek(0)
1140 patchf.truncate()
1140 patchf.truncate()
1141
1141
1142 comments = str(ph)
1142 comments = str(ph)
1143 if comments:
1143 if comments:
1144 patchf.write(comments)
1144 patchf.write(comments)
1145
1145
1146 if opts.get('git'):
1146 if opts.get('git'):
1147 self.diffopts().git = True
1147 self.diffopts().git = True
1148 tip = repo.changelog.tip()
1148 tip = repo.changelog.tip()
1149 if top == tip:
1149 if top == tip:
1150 # if the top of our patch queue is also the tip, there is an
1150 # if the top of our patch queue is also the tip, there is an
1151 # optimization here. We update the dirstate in place and strip
1151 # optimization here. We update the dirstate in place and strip
1152 # off the tip commit. Then just commit the current directory
1152 # off the tip commit. Then just commit the current directory
1153 # tree. We can also send repo.commit the list of files
1153 # tree. We can also send repo.commit the list of files
1154 # changed to speed up the diff
1154 # changed to speed up the diff
1155 #
1155 #
1156 # in short mode, we only diff the files included in the
1156 # in short mode, we only diff the files included in the
1157 # patch already plus specified files
1157 # patch already plus specified files
1158 #
1158 #
1159 # this should really read:
1159 # this should really read:
1160 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1160 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1161 # but we do it backwards to take advantage of manifest/chlog
1161 # but we do it backwards to take advantage of manifest/chlog
1162 # caching against the next repo.status call
1162 # caching against the next repo.status call
1163 #
1163 #
1164 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1164 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1165 changes = repo.changelog.read(tip)
1165 changes = repo.changelog.read(tip)
1166 man = repo.manifest.read(changes[0])
1166 man = repo.manifest.read(changes[0])
1167 aaa = aa[:]
1167 aaa = aa[:]
1168 matchfn = cmdutil.match(repo, pats, opts)
1168 matchfn = cmdutil.match(repo, pats, opts)
1169 if opts.get('short'):
1169 if opts.get('short'):
1170 # if amending a patch, we start with existing
1170 # if amending a patch, we start with existing
1171 # files plus specified files - unfiltered
1171 # files plus specified files - unfiltered
1172 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1172 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1173 # filter with inc/excl options
1173 # filter with inc/excl options
1174 matchfn = cmdutil.match(repo, opts=opts)
1174 matchfn = cmdutil.match(repo, opts=opts)
1175 else:
1175 else:
1176 match = cmdutil.matchall(repo)
1176 match = cmdutil.matchall(repo)
1177 m, a, r, d = repo.status(match=match)[:4]
1177 m, a, r, d = repo.status(match=match)[:4]
1178
1178
1179 # we might end up with files that were added between
1179 # we might end up with files that were added between
1180 # tip and the dirstate parent, but then changed in the
1180 # tip and the dirstate parent, but then changed in the
1181 # local dirstate. in this case, we want them to only
1181 # local dirstate. in this case, we want them to only
1182 # show up in the added section
1182 # show up in the added section
1183 for x in m:
1183 for x in m:
1184 if x not in aa:
1184 if x not in aa:
1185 mm.append(x)
1185 mm.append(x)
1186 # we might end up with files added by the local dirstate that
1186 # we might end up with files added by the local dirstate that
1187 # were deleted by the patch. In this case, they should only
1187 # were deleted by the patch. In this case, they should only
1188 # show up in the changed section.
1188 # show up in the changed section.
1189 for x in a:
1189 for x in a:
1190 if x in dd:
1190 if x in dd:
1191 del dd[dd.index(x)]
1191 del dd[dd.index(x)]
1192 mm.append(x)
1192 mm.append(x)
1193 else:
1193 else:
1194 aa.append(x)
1194 aa.append(x)
1195 # make sure any files deleted in the local dirstate
1195 # make sure any files deleted in the local dirstate
1196 # are not in the add or change column of the patch
1196 # are not in the add or change column of the patch
1197 forget = []
1197 forget = []
1198 for x in d + r:
1198 for x in d + r:
1199 if x in aa:
1199 if x in aa:
1200 del aa[aa.index(x)]
1200 del aa[aa.index(x)]
1201 forget.append(x)
1201 forget.append(x)
1202 continue
1202 continue
1203 elif x in mm:
1203 elif x in mm:
1204 del mm[mm.index(x)]
1204 del mm[mm.index(x)]
1205 dd.append(x)
1205 dd.append(x)
1206
1206
1207 m = list(set(mm))
1207 m = list(set(mm))
1208 r = list(set(dd))
1208 r = list(set(dd))
1209 a = list(set(aa))
1209 a = list(set(aa))
1210 c = [filter(matchfn, l) for l in (m, a, r)]
1210 c = [filter(matchfn, l) for l in (m, a, r)]
1211 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1211 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1212 chunks = patch.diff(repo, patchparent, match=match,
1212 chunks = patch.diff(repo, patchparent, match=match,
1213 changes=c, opts=self.diffopts())
1213 changes=c, opts=self.diffopts())
1214 for chunk in chunks:
1214 for chunk in chunks:
1215 patchf.write(chunk)
1215 patchf.write(chunk)
1216
1216
1217 try:
1217 try:
1218 if self.diffopts().git:
1218 if self.diffopts().git:
1219 copies = {}
1219 copies = {}
1220 for dst in a:
1220 for dst in a:
1221 src = repo.dirstate.copied(dst)
1221 src = repo.dirstate.copied(dst)
1222 # during qfold, the source file for copies may
1222 # during qfold, the source file for copies may
1223 # be removed. Treat this as a simple add.
1223 # be removed. Treat this as a simple add.
1224 if src is not None and src in repo.dirstate:
1224 if src is not None and src in repo.dirstate:
1225 copies.setdefault(src, []).append(dst)
1225 copies.setdefault(src, []).append(dst)
1226 repo.dirstate.add(dst)
1226 repo.dirstate.add(dst)
1227 # remember the copies between patchparent and tip
1227 # remember the copies between patchparent and tip
1228 for dst in aaa:
1228 for dst in aaa:
1229 f = repo.file(dst)
1229 f = repo.file(dst)
1230 src = f.renamed(man[dst])
1230 src = f.renamed(man[dst])
1231 if src:
1231 if src:
1232 copies.setdefault(src[0], []).extend(copies.get(dst, []))
1232 copies.setdefault(src[0], []).extend(copies.get(dst, []))
1233 if dst in a:
1233 if dst in a:
1234 copies[src[0]].append(dst)
1234 copies[src[0]].append(dst)
1235 # we can't copy a file created by the patch itself
1235 # we can't copy a file created by the patch itself
1236 if dst in copies:
1236 if dst in copies:
1237 del copies[dst]
1237 del copies[dst]
1238 for src, dsts in copies.iteritems():
1238 for src, dsts in copies.iteritems():
1239 for dst in dsts:
1239 for dst in dsts:
1240 repo.dirstate.copy(src, dst)
1240 repo.dirstate.copy(src, dst)
1241 else:
1241 else:
1242 for dst in a:
1242 for dst in a:
1243 repo.dirstate.add(dst)
1243 repo.dirstate.add(dst)
1244 # Drop useless copy information
1244 # Drop useless copy information
1245 for f in list(repo.dirstate.copies()):
1245 for f in list(repo.dirstate.copies()):
1246 repo.dirstate.copy(None, f)
1246 repo.dirstate.copy(None, f)
1247 for f in r:
1247 for f in r:
1248 repo.dirstate.remove(f)
1248 repo.dirstate.remove(f)
1249 # if the patch excludes a modified file, mark that
1249 # if the patch excludes a modified file, mark that
1250 # file with mtime=0 so status can see it.
1250 # file with mtime=0 so status can see it.
1251 mm = []
1251 mm = []
1252 for i in xrange(len(m)-1, -1, -1):
1252 for i in xrange(len(m)-1, -1, -1):
1253 if not matchfn(m[i]):
1253 if not matchfn(m[i]):
1254 mm.append(m[i])
1254 mm.append(m[i])
1255 del m[i]
1255 del m[i]
1256 for f in m:
1256 for f in m:
1257 repo.dirstate.normal(f)
1257 repo.dirstate.normal(f)
1258 for f in mm:
1258 for f in mm:
1259 repo.dirstate.normallookup(f)
1259 repo.dirstate.normallookup(f)
1260 for f in forget:
1260 for f in forget:
1261 repo.dirstate.forget(f)
1261 repo.dirstate.forget(f)
1262
1262
1263 if not msg:
1263 if not msg:
1264 if not ph.message:
1264 if not ph.message:
1265 message = "[mq]: %s\n" % patchfn
1265 message = "[mq]: %s\n" % patchfn
1266 else:
1266 else:
1267 message = "\n".join(ph.message)
1267 message = "\n".join(ph.message)
1268 else:
1268 else:
1269 message = msg
1269 message = msg
1270
1270
1271 user = ph.user or changes[1]
1271 user = ph.user or changes[1]
1272
1272
1273 # assumes strip can roll itself back if interrupted
1273 # assumes strip can roll itself back if interrupted
1274 repo.dirstate.setparents(*cparents)
1274 repo.dirstate.setparents(*cparents)
1275 self.applied.pop()
1275 self.applied.pop()
1276 self.applied_dirty = 1
1276 self.applied_dirty = 1
1277 self.strip(repo, top, update=False,
1277 self.strip(repo, top, update=False,
1278 backup='strip')
1278 backup='strip')
1279 except:
1279 except:
1280 repo.dirstate.invalidate()
1280 repo.dirstate.invalidate()
1281 raise
1281 raise
1282
1282
1283 try:
1283 try:
1284 # might be nice to attempt to roll back strip after this
1284 # might be nice to attempt to roll back strip after this
1285 patchf.rename()
1285 patchf.rename()
1286 n = repo.commit(message, user, ph.date, match=match,
1286 n = repo.commit(message, user, ph.date, match=match,
1287 force=True)
1287 force=True)
1288 self.applied.append(statusentry(hex(n), patchfn))
1288 self.applied.append(statusentry(hex(n), patchfn))
1289 except:
1289 except:
1290 ctx = repo[cparents[0]]
1290 ctx = repo[cparents[0]]
1291 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1291 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1292 self.save_dirty()
1292 self.save_dirty()
1293 self.ui.warn(_('refresh interrupted while patch was popped! '
1293 self.ui.warn(_('refresh interrupted while patch was popped! '
1294 '(revert --all, qpush to recover)\n'))
1294 '(revert --all, qpush to recover)\n'))
1295 raise
1295 raise
1296 else:
1296 else:
1297 self.printdiff(repo, patchparent, fp=patchf)
1297 self.printdiff(repo, patchparent, fp=patchf)
1298 patchf.rename()
1298 patchf.rename()
1299 added = repo.status()[1]
1299 added = repo.status()[1]
1300 for a in added:
1300 for a in added:
1301 f = repo.wjoin(a)
1301 f = repo.wjoin(a)
1302 try:
1302 try:
1303 os.unlink(f)
1303 os.unlink(f)
1304 except OSError, e:
1304 except OSError, e:
1305 if e.errno != errno.ENOENT:
1305 if e.errno != errno.ENOENT:
1306 raise
1306 raise
1307 try: os.removedirs(os.path.dirname(f))
1307 try: os.removedirs(os.path.dirname(f))
1308 except: pass
1308 except: pass
1309 # forget the file copies in the dirstate
1309 # forget the file copies in the dirstate
1310 # push should re-add the files later on
1310 # push should re-add the files later on
1311 repo.dirstate.forget(a)
1311 repo.dirstate.forget(a)
1312 self.pop(repo, force=True)
1312 self.pop(repo, force=True)
1313 self.push(repo, force=True)
1313 self.push(repo, force=True)
1314 finally:
1314 finally:
1315 wlock.release()
1315 wlock.release()
1316 self.removeundo(repo)
1316 self.removeundo(repo)
1317
1317
1318 def init(self, repo, create=False):
1318 def init(self, repo, create=False):
1319 if not create and os.path.isdir(self.path):
1319 if not create and os.path.isdir(self.path):
1320 raise util.Abort(_("patch queue directory already exists"))
1320 raise util.Abort(_("patch queue directory already exists"))
1321 try:
1321 try:
1322 os.mkdir(self.path)
1322 os.mkdir(self.path)
1323 except OSError, inst:
1323 except OSError, inst:
1324 if inst.errno != errno.EEXIST or not create:
1324 if inst.errno != errno.EEXIST or not create:
1325 raise
1325 raise
1326 if create:
1326 if create:
1327 return self.qrepo(create=True)
1327 return self.qrepo(create=True)
1328
1328
1329 def unapplied(self, repo, patch=None):
1329 def unapplied(self, repo, patch=None):
1330 if patch and patch not in self.series:
1330 if patch and patch not in self.series:
1331 raise util.Abort(_("patch %s is not in series file") % patch)
1331 raise util.Abort(_("patch %s is not in series file") % patch)
1332 if not patch:
1332 if not patch:
1333 start = self.series_end()
1333 start = self.series_end()
1334 else:
1334 else:
1335 start = self.series.index(patch) + 1
1335 start = self.series.index(patch) + 1
1336 unapplied = []
1336 unapplied = []
1337 for i in xrange(start, len(self.series)):
1337 for i in xrange(start, len(self.series)):
1338 pushable, reason = self.pushable(i)
1338 pushable, reason = self.pushable(i)
1339 if pushable:
1339 if pushable:
1340 unapplied.append((i, self.series[i]))
1340 unapplied.append((i, self.series[i]))
1341 self.explain_pushable(i)
1341 self.explain_pushable(i)
1342 return unapplied
1342 return unapplied
1343
1343
1344 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1344 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1345 summary=False):
1345 summary=False):
1346 def displayname(patchname):
1346 def displayname(patchname):
1347 if summary:
1347 if summary:
1348 ph = patchheader(self.join(patchname))
1348 ph = patchheader(self.join(patchname))
1349 msg = ph.message
1349 msg = ph.message
1350 msg = msg and ': ' + msg[0] or ': '
1350 msg = msg and ': ' + msg[0] or ': '
1351 else:
1351 else:
1352 msg = ''
1352 msg = ''
1353 return '%s%s' % (patchname, msg)
1353 return '%s%s' % (patchname, msg)
1354
1354
1355 applied = set([p.name for p in self.applied])
1355 applied = set([p.name for p in self.applied])
1356 if length is None:
1356 if length is None:
1357 length = len(self.series) - start
1357 length = len(self.series) - start
1358 if not missing:
1358 if not missing:
1359 for i in xrange(start, start+length):
1359 for i in xrange(start, start+length):
1360 patch = self.series[i]
1360 patch = self.series[i]
1361 if patch in applied:
1361 if patch in applied:
1362 stat = 'A'
1362 stat = 'A'
1363 elif self.pushable(i)[0]:
1363 elif self.pushable(i)[0]:
1364 stat = 'U'
1364 stat = 'U'
1365 else:
1365 else:
1366 stat = 'G'
1366 stat = 'G'
1367 pfx = ''
1367 pfx = ''
1368 if self.ui.verbose:
1368 if self.ui.verbose:
1369 pfx = '%d %s ' % (i, stat)
1369 pfx = '%d %s ' % (i, stat)
1370 elif status and status != stat:
1370 elif status and status != stat:
1371 continue
1371 continue
1372 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1372 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1373 else:
1373 else:
1374 msng_list = []
1374 msng_list = []
1375 for root, dirs, files in os.walk(self.path):
1375 for root, dirs, files in os.walk(self.path):
1376 d = root[len(self.path) + 1:]
1376 d = root[len(self.path) + 1:]
1377 for f in files:
1377 for f in files:
1378 fl = os.path.join(d, f)
1378 fl = os.path.join(d, f)
1379 if (fl not in self.series and
1379 if (fl not in self.series and
1380 fl not in (self.status_path, self.series_path,
1380 fl not in (self.status_path, self.series_path,
1381 self.guards_path)
1381 self.guards_path)
1382 and not fl.startswith('.')):
1382 and not fl.startswith('.')):
1383 msng_list.append(fl)
1383 msng_list.append(fl)
1384 for x in sorted(msng_list):
1384 for x in sorted(msng_list):
1385 pfx = self.ui.verbose and ('D ') or ''
1385 pfx = self.ui.verbose and ('D ') or ''
1386 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1386 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1387
1387
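# Status letters shown in verbose qseries output above: 'A' = applied,
# 'U' = unapplied but pushable, 'G' = unapplied and guarded; when
# listing missing patches, 'D' marks files that live in the patch
# directory but are absent from the series file.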
1388 def issaveline(self, l):
1388 def issaveline(self, l):
1389 if l.name == '.hg.patches.save.line':
1389 if l.name == '.hg.patches.save.line':
1390 return True
1390 return True
1391
1391
1392 def qrepo(self, create=False):
1392 def qrepo(self, create=False):
1393 if create or os.path.isdir(self.join(".hg")):
1393 if create or os.path.isdir(self.join(".hg")):
1394 return hg.repository(self.ui, path=self.path, create=create)
1394 return hg.repository(self.ui, path=self.path, create=create)
1395
1395
1396 def restore(self, repo, rev, delete=None, qupdate=None):
1396 def restore(self, repo, rev, delete=None, qupdate=None):
1397 c = repo.changelog.read(rev)
1397 c = repo.changelog.read(rev)
1398 desc = c[4].strip()
1398 desc = c[4].strip()
1399 lines = desc.splitlines()
1399 lines = desc.splitlines()
1400 i = 0
1400 i = 0
1401 datastart = None
1401 datastart = None
1402 series = []
1402 series = []
1403 applied = []
1403 applied = []
1404 qpp = None
1404 qpp = None
1405 for i, line in enumerate(lines):
1405 for i, line in enumerate(lines):
1406 if line == 'Patch Data:':
1406 if line == 'Patch Data:':
1407 datastart = i + 1
1407 datastart = i + 1
1408 elif line.startswith('Dirstate:'):
1408 elif line.startswith('Dirstate:'):
1409 l = line.rstrip()
1409 l = line.rstrip()
1410 l = l[10:].split(' ')
1410 l = l[10:].split(' ')
1411 qpp = [ bin(x) for x in l ]
1411 qpp = [ bin(x) for x in l ]
1412 elif datastart is not None:
1412 elif datastart is not None:
1413 l = line.rstrip()
1413 l = line.rstrip()
1414 se = statusentry(l)
1414 se = statusentry(l)
1415 file_ = se.name
1415 file_ = se.name
1416 if se.rev:
1416 if se.rev:
1417 applied.append(se)
1417 applied.append(se)
1418 else:
1418 else:
1419 series.append(file_)
1419 series.append(file_)
1420 if datastart is None:
1420 if datastart is None:
1421 self.ui.warn(_("No saved patch data found\n"))
1421 self.ui.warn(_("No saved patch data found\n"))
1422 return 1
1422 return 1
1423 self.ui.warn(_("restoring status: %s\n") % lines[0])
1423 self.ui.warn(_("restoring status: %s\n") % lines[0])
1424 self.full_series = series
1424 self.full_series = series
1425 self.applied = applied
1425 self.applied = applied
1426 self.parse_series()
1426 self.parse_series()
1427 self.series_dirty = 1
1427 self.series_dirty = 1
1428 self.applied_dirty = 1
1428 self.applied_dirty = 1
1429 heads = repo.changelog.heads()
1429 heads = repo.changelog.heads()
1430 if delete:
1430 if delete:
1431 if rev not in heads:
1431 if rev not in heads:
1432 self.ui.warn(_("save entry has children, leaving it alone\n"))
1432 self.ui.warn(_("save entry has children, leaving it alone\n"))
1433 else:
1433 else:
1434 self.ui.warn(_("removing save entry %s\n") % short(rev))
1434 self.ui.warn(_("removing save entry %s\n") % short(rev))
1435 pp = repo.dirstate.parents()
1435 pp = repo.dirstate.parents()
1436 if rev in pp:
1436 if rev in pp:
1437 update = True
1437 update = True
1438 else:
1438 else:
1439 update = False
1439 update = False
1440 self.strip(repo, rev, update=update, backup='strip')
1440 self.strip(repo, rev, update=update, backup='strip')
1441 if qpp:
1441 if qpp:
1442 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1442 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1443 (short(qpp[0]), short(qpp[1])))
1443 (short(qpp[0]), short(qpp[1])))
1444 if qupdate:
1444 if qupdate:
1445 self.ui.status(_("queue directory updating\n"))
1445 self.ui.status(_("queue directory updating\n"))
1446 r = self.qrepo()
1446 r = self.qrepo()
1447 if not r:
1447 if not r:
1448 self.ui.warn(_("Unable to load queue repository\n"))
1448 self.ui.warn(_("Unable to load queue repository\n"))
1449 return 1
1449 return 1
1450 hg.clean(r, qpp[0])
1450 hg.clean(r, qpp[0])
1451
1451
1452 def save(self, repo, msg=None):
1452 def save(self, repo, msg=None):
1453 if len(self.applied) == 0:
1453 if len(self.applied) == 0:
1454 self.ui.warn(_("save: no patches applied, exiting\n"))
1454 self.ui.warn(_("save: no patches applied, exiting\n"))
1455 return 1
1455 return 1
1456 if self.issaveline(self.applied[-1]):
1456 if self.issaveline(self.applied[-1]):
1457 self.ui.warn(_("status is already saved\n"))
1457 self.ui.warn(_("status is already saved\n"))
1458 return 1
1458 return 1
1459
1459
1460 ar = [ ':' + x for x in self.full_series ]
1460 ar = [ ':' + x for x in self.full_series ]
1461 if not msg:
1461 if not msg:
1462 msg = _("hg patches saved state")
1462 msg = _("hg patches saved state")
1463 else:
1463 else:
1464 msg = "hg patches: " + msg.rstrip('\r\n')
1464 msg = "hg patches: " + msg.rstrip('\r\n')
1465 r = self.qrepo()
1465 r = self.qrepo()
1466 if r:
1466 if r:
1467 pp = r.dirstate.parents()
1467 pp = r.dirstate.parents()
1468 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1468 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1469 msg += "\n\nPatch Data:\n"
1469 msg += "\n\nPatch Data:\n"
1470 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1470 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1471 "\n".join(ar) + '\n' or "")
1471 "\n".join(ar) + '\n' or "")
1472 n = repo.commit(text, force=True)
1472 n = repo.commit(text, force=True)
1473 if not n:
1473 if not n:
1474 self.ui.warn(_("repo commit failed\n"))
1474 self.ui.warn(_("repo commit failed\n"))
1475 return 1
1475 return 1
1476 self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
1476 self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
1477 self.applied_dirty = 1
1477 self.applied_dirty = 1
1478 self.removeundo(repo)
1478 self.removeundo(repo)
1479
1479
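# The description committed by save() above, and parsed back by
# restore(), looks roughly like this (names and hashes below are
# illustrative only):
#
#   hg patches saved state
#   Dirstate: <queue-repo-parent1-hex> <queue-repo-parent2-hex>
#
#   Patch Data:
#   <changeset-hex>:applied-patch.diff
#   :applied-patch.diff
#   :unapplied-patch.diff
#
# Lines carrying a revision hash before the colon rebuild the applied
# list; the ':'-prefixed lines repeat every series entry so the full
# series file can be reconstructed.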
1480 def full_series_end(self):
1480 def full_series_end(self):
1481 if len(self.applied) > 0:
1481 if len(self.applied) > 0:
1482 p = self.applied[-1].name
1482 p = self.applied[-1].name
1483 end = self.find_series(p)
1483 end = self.find_series(p)
1484 if end is None:
1484 if end is None:
1485 return len(self.full_series)
1485 return len(self.full_series)
1486 return end + 1
1486 return end + 1
1487 return 0
1487 return 0
1488
1488
1489 def series_end(self, all_patches=False):
1489 def series_end(self, all_patches=False):
1490 """If all_patches is False, return the index of the next pushable patch
1490 """If all_patches is False, return the index of the next pushable patch
1491 in the series, or the series length. If all_patches is True, return the
1491 in the series, or the series length. If all_patches is True, return the
1492 index of the first patch past the last applied one.
1492 index of the first patch past the last applied one.
1493 """
1493 """
1494 end = 0
1494 end = 0
1495 def next(start):
1495 def next(start):
1496 if all_patches:
1496 if all_patches:
1497 return start
1497 return start
1498 i = start
1498 i = start
1499 while i < len(self.series):
1499 while i < len(self.series):
1500 p, reason = self.pushable(i)
1500 p, reason = self.pushable(i)
1501 if p:
1501 if p:
1502 break
1502 break
1503 self.explain_pushable(i)
1503 self.explain_pushable(i)
1504 i += 1
1504 i += 1
1505 return i
1505 return i
1506 if len(self.applied) > 0:
1506 if len(self.applied) > 0:
1507 p = self.applied[-1].name
1507 p = self.applied[-1].name
1508 try:
1508 try:
1509 end = self.series.index(p)
1509 end = self.series.index(p)
1510 except ValueError:
1510 except ValueError:
1511 return 0
1511 return 0
1512 return next(end + 1)
1512 return next(end + 1)
1513 return next(end)
1513 return next(end)
1514
1514
1515 def appliedname(self, index):
1515 def appliedname(self, index):
1516 pname = self.applied[index].name
1516 pname = self.applied[index].name
1517 if not self.ui.verbose:
1517 if not self.ui.verbose:
1518 p = pname
1518 p = pname
1519 else:
1519 else:
1520 p = str(self.series.index(pname)) + " " + pname
1520 p = str(self.series.index(pname)) + " " + pname
1521 return p
1521 return p
1522
1522
1523 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1523 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1524 force=None, git=False):
1524 force=None, git=False):
1525 def checkseries(patchname):
1525 def checkseries(patchname):
1526 if patchname in self.series:
1526 if patchname in self.series:
1527 raise util.Abort(_('patch %s is already in the series file')
1527 raise util.Abort(_('patch %s is already in the series file')
1528 % patchname)
1528 % patchname)
1529 def checkfile(patchname):
1529 def checkfile(patchname):
1530 if not force and os.path.exists(self.join(patchname)):
1530 if not force and os.path.exists(self.join(patchname)):
1531 raise util.Abort(_('patch "%s" already exists')
1531 raise util.Abort(_('patch "%s" already exists')
1532 % patchname)
1532 % patchname)
1533
1533
1534 if rev:
1534 if rev:
1535 if files:
1535 if files:
1536 raise util.Abort(_('option "-r" not valid when importing '
1536 raise util.Abort(_('option "-r" not valid when importing '
1537 'files'))
1537 'files'))
1538 rev = cmdutil.revrange(repo, rev)
1538 rev = cmdutil.revrange(repo, rev)
1539 rev.sort(lambda x, y: cmp(y, x))
1539 rev.sort(lambda x, y: cmp(y, x))
1540 if (len(files) > 1 or len(rev) > 1) and patchname:
1540 if (len(files) > 1 or len(rev) > 1) and patchname:
1541 raise util.Abort(_('option "-n" not valid when importing multiple '
1541 raise util.Abort(_('option "-n" not valid when importing multiple '
1542 'patches'))
1542 'patches'))
1543 i = 0
1543 i = 0
1544 added = []
1544 added = []
1545 if rev:
1545 if rev:
1546 # If mq patches are applied, we can only import revisions
1546 # If mq patches are applied, we can only import revisions
1547 # that form a linear path to qbase.
1547 # that form a linear path to qbase.
1548 # Otherwise, they should form a linear path to a head.
1548 # Otherwise, they should form a linear path to a head.
1549 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1549 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1550 if len(heads) > 1:
1550 if len(heads) > 1:
1551 raise util.Abort(_('revision %d is the root of more than one '
1551 raise util.Abort(_('revision %d is the root of more than one '
1552 'branch') % rev[-1])
1552 'branch') % rev[-1])
1553 if self.applied:
1553 if self.applied:
1554 base = hex(repo.changelog.node(rev[0]))
1554 base = hex(repo.changelog.node(rev[0]))
1555 if base in [n.rev for n in self.applied]:
1555 if base in [n.rev for n in self.applied]:
1556 raise util.Abort(_('revision %d is already managed')
1556 raise util.Abort(_('revision %d is already managed')
1557 % rev[0])
1557 % rev[0])
1558 if heads != [bin(self.applied[-1].rev)]:
1558 if heads != [bin(self.applied[-1].rev)]:
1559 raise util.Abort(_('revision %d is not the parent of '
1559 raise util.Abort(_('revision %d is not the parent of '
1560 'the queue') % rev[0])
1560 'the queue') % rev[0])
1561 base = repo.changelog.rev(bin(self.applied[0].rev))
1561 base = repo.changelog.rev(bin(self.applied[0].rev))
1562 lastparent = repo.changelog.parentrevs(base)[0]
1562 lastparent = repo.changelog.parentrevs(base)[0]
1563 else:
1563 else:
1564 if heads != [repo.changelog.node(rev[0])]:
1564 if heads != [repo.changelog.node(rev[0])]:
1565 raise util.Abort(_('revision %d has unmanaged children')
1565 raise util.Abort(_('revision %d has unmanaged children')
1566 % rev[0])
1566 % rev[0])
1567 lastparent = None
1567 lastparent = None
1568
1568
1569 if git:
1569 if git:
1570 self.diffopts().git = True
1570 self.diffopts().git = True
1571
1571
1572 for r in rev:
1572 for r in rev:
1573 p1, p2 = repo.changelog.parentrevs(r)
1573 p1, p2 = repo.changelog.parentrevs(r)
1574 n = repo.changelog.node(r)
1574 n = repo.changelog.node(r)
1575 if p2 != nullrev:
1575 if p2 != nullrev:
1576 raise util.Abort(_('cannot import merge revision %d') % r)
1576 raise util.Abort(_('cannot import merge revision %d') % r)
1577 if lastparent and lastparent != r:
1577 if lastparent and lastparent != r:
1578 raise util.Abort(_('revision %d is not the parent of %d')
1578 raise util.Abort(_('revision %d is not the parent of %d')
1579 % (r, lastparent))
1579 % (r, lastparent))
1580 lastparent = p1
1580 lastparent = p1
1581
1581
1582 if not patchname:
1582 if not patchname:
1583 patchname = normname('%d.diff' % r)
1583 patchname = normname('%d.diff' % r)
1584 self.check_reserved_name(patchname)
1584 self.check_reserved_name(patchname)
1585 checkseries(patchname)
1585 checkseries(patchname)
1586 checkfile(patchname)
1586 checkfile(patchname)
1587 self.full_series.insert(0, patchname)
1587 self.full_series.insert(0, patchname)
1588
1588
1589 patchf = self.opener(patchname, "w")
1589 patchf = self.opener(patchname, "w")
1590 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1590 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1591 patchf.close()
1591 patchf.close()
1592
1592
1593 se = statusentry(hex(n), patchname)
1593 se = statusentry(hex(n), patchname)
1594 self.applied.insert(0, se)
1594 self.applied.insert(0, se)
1595
1595
1596 added.append(patchname)
1596 added.append(patchname)
1597 patchname = None
1597 patchname = None
1598 self.parse_series()
1598 self.parse_series()
1599 self.applied_dirty = 1
1599 self.applied_dirty = 1
1600
1600
1601 for filename in files:
1601 for filename in files:
1602 if existing:
1602 if existing:
1603 if filename == '-':
1603 if filename == '-':
1604 raise util.Abort(_('-e is incompatible with import from -'))
1604 raise util.Abort(_('-e is incompatible with import from -'))
1605 if not patchname:
1605 if not patchname:
1606 patchname = normname(filename)
1606 patchname = normname(filename)
1607 self.check_reserved_name(patchname)
1607 self.check_reserved_name(patchname)
1608 if not os.path.isfile(self.join(patchname)):
1608 if not os.path.isfile(self.join(patchname)):
1609 raise util.Abort(_("patch %s does not exist") % patchname)
1609 raise util.Abort(_("patch %s does not exist") % patchname)
1610 else:
1610 else:
1611 try:
1611 try:
1612 if filename == '-':
1612 if filename == '-':
1613 if not patchname:
1613 if not patchname:
1614 raise util.Abort(_('need --name to import a patch from -'))
1614 raise util.Abort(_('need --name to import a patch from -'))
1615 text = sys.stdin.read()
1615 text = sys.stdin.read()
1616 else:
1616 else:
1617 text = url.open(self.ui, filename).read()
1617 text = url.open(self.ui, filename).read()
1618 except (OSError, IOError):
1618 except (OSError, IOError):
1619 raise util.Abort(_("unable to read %s") % filename)
1619 raise util.Abort(_("unable to read %s") % filename)
1620 if not patchname:
1620 if not patchname:
1621 patchname = normname(os.path.basename(filename))
1621 patchname = normname(os.path.basename(filename))
1622 self.check_reserved_name(patchname)
1622 self.check_reserved_name(patchname)
1623 checkfile(patchname)
1623 checkfile(patchname)
1624 patchf = self.opener(patchname, "w")
1624 patchf = self.opener(patchname, "w")
1625 patchf.write(text)
1625 patchf.write(text)
1626 if not force:
1626 if not force:
1627 checkseries(patchname)
1627 checkseries(patchname)
1628 if patchname not in self.series:
1628 if patchname not in self.series:
1629 index = self.full_series_end() + i
1629 index = self.full_series_end() + i
1630 self.full_series[index:index] = [patchname]
1630 self.full_series[index:index] = [patchname]
1631 self.parse_series()
1631 self.parse_series()
1632 self.ui.warn(_("adding %s to series file\n") % patchname)
1632 self.ui.warn(_("adding %s to series file\n") % patchname)
1633 i += 1
1633 i += 1
1634 added.append(patchname)
1634 added.append(patchname)
1635 patchname = None
1635 patchname = None
1636 self.series_dirty = 1
1636 self.series_dirty = 1
1637 qrepo = self.qrepo()
1637 qrepo = self.qrepo()
1638 if qrepo:
1638 if qrepo:
1639 qrepo.add(added)
1639 qrepo.add(added)
1640
1640
1641 def delete(ui, repo, *patches, **opts):
1641 def delete(ui, repo, *patches, **opts):
1642 """remove patches from queue
1642 """remove patches from queue
1643
1643
1644 The patches must not be applied, unless they are arguments to the
1644 The patches must not be applied, unless they are arguments to the
1645 -r/--rev parameter. At least one patch or revision is required.
1645 -r/--rev parameter. At least one patch or revision is required.
1646
1646
1647 With --rev, mq will stop managing the named revisions (converting
1647 With --rev, mq will stop managing the named revisions (converting
1648 them to regular Mercurial changesets). The qfinish command should
1648 them to regular Mercurial changesets). The qfinish command should
1649 be used as an alternative for qdelete -r, as the latter option is
1649 be used as an alternative for qdelete -r, as the latter option is
1650 deprecated.
1650 deprecated.
1651
1651
1652 With -k/--keep, the patch files are preserved in the patch
1652 With -k/--keep, the patch files are preserved in the patch
1653 directory."""
1653 directory."""
1654 q = repo.mq
1654 q = repo.mq
1655 q.delete(repo, patches, opts)
1655 q.delete(repo, patches, opts)
1656 q.save_dirty()
1656 q.save_dirty()
1657 return 0
1657 return 0
1658
1658
1659 def applied(ui, repo, patch=None, **opts):
1659 def applied(ui, repo, patch=None, **opts):
1660 """print the patches already applied"""
1660 """print the patches already applied"""
1661 q = repo.mq
1661 q = repo.mq
1662 if patch:
1662 if patch:
1663 if patch not in q.series:
1663 if patch not in q.series:
1664 raise util.Abort(_("patch %s is not in series file") % patch)
1664 raise util.Abort(_("patch %s is not in series file") % patch)
1665 end = q.series.index(patch) + 1
1665 end = q.series.index(patch) + 1
1666 else:
1666 else:
1667 end = q.series_end(True)
1667 end = q.series_end(True)
1668 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1668 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1669
1669
1670 def unapplied(ui, repo, patch=None, **opts):
1670 def unapplied(ui, repo, patch=None, **opts):
1671 """print the patches not yet applied"""
1671 """print the patches not yet applied"""
1672 q = repo.mq
1672 q = repo.mq
1673 if patch:
1673 if patch:
1674 if patch not in q.series:
1674 if patch not in q.series:
1675 raise util.Abort(_("patch %s is not in series file") % patch)
1675 raise util.Abort(_("patch %s is not in series file") % patch)
1676 start = q.series.index(patch) + 1
1676 start = q.series.index(patch) + 1
1677 else:
1677 else:
1678 start = q.series_end(True)
1678 start = q.series_end(True)
1679 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1679 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1680
1680
1681 def qimport(ui, repo, *filename, **opts):
1681 def qimport(ui, repo, *filename, **opts):
1682 """import a patch
1682 """import a patch
1683
1683
1684 The patch is inserted into the series after the last applied
1684 The patch is inserted into the series after the last applied
1685 patch. If no patches have been applied, qimport prepends the patch
1685 patch. If no patches have been applied, qimport prepends the patch
1686 to the series.
1686 to the series.
1687
1687
1688 The patch will have the same name as its source file unless you
1688 The patch will have the same name as its source file unless you
1689 give it a new one with -n/--name.
1689 give it a new one with -n/--name.
1690
1690
1691 You can register an existing patch inside the patch directory with
1691 You can register an existing patch inside the patch directory with
1692 the -e/--existing flag.
1692 the -e/--existing flag.
1693
1693
1694 With -f/--force, an existing patch of the same name will be
1694 With -f/--force, an existing patch of the same name will be
1695 overwritten.
1695 overwritten.
1696
1696
1697 An existing changeset may be placed under mq control with -r/--rev
1697 An existing changeset may be placed under mq control with -r/--rev
1698 (e.g. qimport --rev tip -n patch will place tip under mq control).
1698 (e.g. qimport --rev tip -n patch will place tip under mq control).
1699 With -g/--git, patches imported with --rev will use the git diff
1699 With -g/--git, patches imported with --rev will use the git diff
1700 format. See the diffs help topic for information on why this is
1700 format. See the diffs help topic for information on why this is
1701 important for preserving rename/copy information and permission
1701 important for preserving rename/copy information and permission
1702 changes.
1702 changes.
1703
1703
1704 To import a patch from standard input, pass - as the patch file.
1704 To import a patch from standard input, pass - as the patch file.
1705 When importing from standard input, a patch name must be specified
1705 When importing from standard input, a patch name must be specified
1706 using the --name flag.
1706 using the --name flag.
1707 """
1707 """
1708 q = repo.mq
1708 q = repo.mq
1709 q.qimport(repo, filename, patchname=opts['name'],
1709 q.qimport(repo, filename, patchname=opts['name'],
1710 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1710 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1711 git=opts['git'])
1711 git=opts['git'])
1712 q.save_dirty()
1712 q.save_dirty()
1713
1713
1714 if opts.get('push') and not opts.get('rev'):
1714 if opts.get('push') and not opts.get('rev'):
1715 return q.push(repo, None)
1715 return q.push(repo, None)
1716 return 0
1716 return 0
1717
1717
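# A minimal standalone sketch (not mq code; the helper name is made up)
# of the patch-name rules qimport applies: an explicit -n/--name wins,
# importing from '-' (stdin) requires one, a revision import defaults to
# '<rev>.diff', and a file import falls back to the file's base name.
import os

def _example_patchname(source, name=None):
    if name:
        return name
    if source == '-':
        raise ValueError('need --name to import a patch from -')
    if isinstance(source, int):         # importing an existing revision
        return '%d.diff' % source
    return os.path.basename(source)     # importing a patch file

# _example_patchname('../fixes/encoding.diff') -> 'encoding.diff'
# _example_patchname(42) -> '42.diff'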
1718 def init(ui, repo, **opts):
1718 def init(ui, repo, **opts):
1719 """init a new queue repository
1719 """init a new queue repository
1720
1720
1721 The queue repository is unversioned by default. If
1721 The queue repository is unversioned by default. If
1722 -c/--create-repo is specified, qinit will create a separate nested
1722 -c/--create-repo is specified, qinit will create a separate nested
1723 repository for patches (qinit -c may also be run later to convert
1723 repository for patches (qinit -c may also be run later to convert
1724 an unversioned patch repository into a versioned one). You can use
1724 an unversioned patch repository into a versioned one). You can use
1725 qcommit to commit changes to this queue repository."""
1725 qcommit to commit changes to this queue repository."""
1726 q = repo.mq
1726 q = repo.mq
1727 r = q.init(repo, create=opts['create_repo'])
1727 r = q.init(repo, create=opts['create_repo'])
1728 q.save_dirty()
1728 q.save_dirty()
1729 if r:
1729 if r:
1730 if not os.path.exists(r.wjoin('.hgignore')):
1730 if not os.path.exists(r.wjoin('.hgignore')):
1731 fp = r.wopener('.hgignore', 'w')
1731 fp = r.wopener('.hgignore', 'w')
1732 fp.write('^\\.hg\n')
1732 fp.write('^\\.hg\n')
1733 fp.write('^\\.mq\n')
1733 fp.write('^\\.mq\n')
1734 fp.write('syntax: glob\n')
1734 fp.write('syntax: glob\n')
1735 fp.write('status\n')
1735 fp.write('status\n')
1736 fp.write('guards\n')
1736 fp.write('guards\n')
1737 fp.close()
1737 fp.close()
1738 if not os.path.exists(r.wjoin('series')):
1738 if not os.path.exists(r.wjoin('series')):
1739 r.wopener('series', 'w').close()
1739 r.wopener('series', 'w').close()
1740 r.add(['.hgignore', 'series'])
1740 r.add(['.hgignore', 'series'])
1741 commands.add(ui, r)
1741 commands.add(ui, r)
1742 return 0
1742 return 0
1743
1743
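# The .hgignore written by qinit -c above amounts to:
#
#   ^\.hg
#   ^\.mq
#   syntax: glob
#   status
#   guards
#
# keeping nested .hg/.mq directories and mq's own status and guards
# bookkeeping files out of the versioned patch repository.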
1744 def clone(ui, source, dest=None, **opts):
1744 def clone(ui, source, dest=None, **opts):
1745 '''clone main and patch repository at same time
1745 '''clone main and patch repository at same time
1746
1746
1747 If source is local, destination will have no patches applied. If
1747 If source is local, destination will have no patches applied. If
1748 source is remote, this command cannot check whether patches are
1748 source is remote, this command cannot check whether patches are
1749 applied there, so it cannot guarantee that patches are not
1749 applied there, so it cannot guarantee that patches are not
1750 applied in the destination. If you clone a remote repository,
1750 applied in the destination. If you clone a remote repository,
1751 make sure beforehand that it has no patches applied.
1751 make sure beforehand that it has no patches applied.
1752
1752
1753 The source patch repository is looked for in <src>/.hg/patches by
1753 The source patch repository is looked for in <src>/.hg/patches by
1754 default. Use -p <url> to change this.
1754 default. Use -p <url> to change this.
1755
1755
1756 The patch directory must be a nested Mercurial repository, as
1756 The patch directory must be a nested Mercurial repository, as
1757 would be created by qinit -c.
1757 would be created by qinit -c.
1758 '''
1758 '''
1759 def patchdir(repo):
1759 def patchdir(repo):
1760 url = repo.url()
1760 url = repo.url()
1761 if url.endswith('/'):
1761 if url.endswith('/'):
1762 url = url[:-1]
1762 url = url[:-1]
1763 return url + '/.hg/patches'
1763 return url + '/.hg/patches'
1764 if dest is None:
1764 if dest is None:
1765 dest = hg.defaultdest(source)
1765 dest = hg.defaultdest(source)
1766 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1766 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1767 if opts['patches']:
1767 if opts['patches']:
1768 patchespath = ui.expandpath(opts['patches'])
1768 patchespath = ui.expandpath(opts['patches'])
1769 else:
1769 else:
1770 patchespath = patchdir(sr)
1770 patchespath = patchdir(sr)
1771 try:
1771 try:
1772 hg.repository(ui, patchespath)
1772 hg.repository(ui, patchespath)
1773 except error.RepoError:
1773 except error.RepoError:
1774 raise util.Abort(_('versioned patch repository not found'
1774 raise util.Abort(_('versioned patch repository not found'
1775 ' (see qinit -c)'))
1775 ' (see qinit -c)'))
1776 qbase, destrev = None, None
1776 qbase, destrev = None, None
1777 if sr.local():
1777 if sr.local():
1778 if sr.mq.applied:
1778 if sr.mq.applied:
1779 qbase = bin(sr.mq.applied[0].rev)
1779 qbase = bin(sr.mq.applied[0].rev)
1780 if not hg.islocal(dest):
1780 if not hg.islocal(dest):
1781 heads = set(sr.heads())
1781 heads = set(sr.heads())
1782 destrev = list(heads.difference(sr.heads(qbase)))
1782 destrev = list(heads.difference(sr.heads(qbase)))
1783 destrev.append(sr.changelog.parents(qbase)[0])
1783 destrev.append(sr.changelog.parents(qbase)[0])
1784 elif sr.capable('lookup'):
1784 elif sr.capable('lookup'):
1785 try:
1785 try:
1786 qbase = sr.lookup('qbase')
1786 qbase = sr.lookup('qbase')
1787 except error.RepoError:
1787 except error.RepoError:
1788 pass
1788 pass
1789 ui.note(_('cloning main repository\n'))
1789 ui.note(_('cloning main repository\n'))
1790 sr, dr = hg.clone(ui, sr.url(), dest,
1790 sr, dr = hg.clone(ui, sr.url(), dest,
1791 pull=opts['pull'],
1791 pull=opts['pull'],
1792 rev=destrev,
1792 rev=destrev,
1793 update=False,
1793 update=False,
1794 stream=opts['uncompressed'])
1794 stream=opts['uncompressed'])
1795 ui.note(_('cloning patch repository\n'))
1795 ui.note(_('cloning patch repository\n'))
1796 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1796 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1797 pull=opts['pull'], update=not opts['noupdate'],
1797 pull=opts['pull'], update=not opts['noupdate'],
1798 stream=opts['uncompressed'])
1798 stream=opts['uncompressed'])
1799 if dr.local():
1799 if dr.local():
1800 if qbase:
1800 if qbase:
1801 ui.note(_('stripping applied patches from destination '
1801 ui.note(_('stripping applied patches from destination '
1802 'repository\n'))
1802 'repository\n'))
1803 dr.mq.strip(dr, qbase, update=False, backup=None)
1803 dr.mq.strip(dr, qbase, update=False, backup=None)
1804 if not opts['noupdate']:
1804 if not opts['noupdate']:
1805 ui.note(_('updating destination repository\n'))
1805 ui.note(_('updating destination repository\n'))
1806 hg.update(dr, dr.changelog.tip())
1806 hg.update(dr, dr.changelog.tip())
1807
1807
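# A small self-contained sketch mirroring the patchdir() helper in
# qclone above; it shows where the patch repository is looked up by
# default (the URL is hypothetical):
def _example_patchdir(url):
    if url.endswith('/'):
        url = url[:-1]
    return url + '/.hg/patches'

# _example_patchdir('http://example.com/repo/') -> 'http://example.com/repo/.hg/patches'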
1808 def commit(ui, repo, *pats, **opts):
1808 def commit(ui, repo, *pats, **opts):
1809 """commit changes in the queue repository"""
1809 """commit changes in the queue repository"""
1810 q = repo.mq
1810 q = repo.mq
1811 r = q.qrepo()
1811 r = q.qrepo()
1812 if not r: raise util.Abort('no queue repository')
1812 if not r: raise util.Abort('no queue repository')
1813 commands.commit(r.ui, r, *pats, **opts)
1813 commands.commit(r.ui, r, *pats, **opts)
1814
1814
1815 def series(ui, repo, **opts):
1815 def series(ui, repo, **opts):
1816 """print the entire series file"""
1816 """print the entire series file"""
1817 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1817 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1818 return 0
1818 return 0
1819
1819
1820 def top(ui, repo, **opts):
1820 def top(ui, repo, **opts):
1821 """print the name of the current patch"""
1821 """print the name of the current patch"""
1822 q = repo.mq
1822 q = repo.mq
1823 t = q.applied and q.series_end(True) or 0
1823 t = q.applied and q.series_end(True) or 0
1824 if t:
1824 if t:
1825 return q.qseries(repo, start=t-1, length=1, status='A',
1825 return q.qseries(repo, start=t-1, length=1, status='A',
1826 summary=opts.get('summary'))
1826 summary=opts.get('summary'))
1827 else:
1827 else:
1828 ui.write(_("no patches applied\n"))
1828 ui.write(_("no patches applied\n"))
1829 return 1
1829 return 1
1830
1830
1831 def next(ui, repo, **opts):
1831 def next(ui, repo, **opts):
1832 """print the name of the next patch"""
1832 """print the name of the next patch"""
1833 q = repo.mq
1833 q = repo.mq
1834 end = q.series_end()
1834 end = q.series_end()
1835 if end == len(q.series):
1835 if end == len(q.series):
1836 ui.write(_("all patches applied\n"))
1836 ui.write(_("all patches applied\n"))
1837 return 1
1837 return 1
1838 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1838 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1839
1839
1840 def prev(ui, repo, **opts):
1840 def prev(ui, repo, **opts):
1841 """print the name of the previous patch"""
1841 """print the name of the previous patch"""
1842 q = repo.mq
1842 q = repo.mq
1843 l = len(q.applied)
1843 l = len(q.applied)
1844 if l == 1:
1844 if l == 1:
1845 ui.write(_("only one patch applied\n"))
1845 ui.write(_("only one patch applied\n"))
1846 return 1
1846 return 1
1847 if not l:
1847 if not l:
1848 ui.write(_("no patches applied\n"))
1848 ui.write(_("no patches applied\n"))
1849 return 1
1849 return 1
1850 return q.qseries(repo, start=l-2, length=1, status='A',
1850 return q.qseries(repo, start=l-2, length=1, status='A',
1851 summary=opts.get('summary'))
1851 summary=opts.get('summary'))
1852
1852
1853 def setupheaderopts(ui, opts):
1853 def setupheaderopts(ui, opts):
1854 def do(opt,val):
1854 def do(opt,val):
1855 if not opts[opt] and opts['current' + opt]:
1855 if not opts[opt] and opts['current' + opt]:
1856 opts[opt] = val
1856 opts[opt] = val
1857 do('user', ui.username())
1857 do('user', ui.username())
1858 do('date', "%d %d" % util.makedate())
1858 do('date', "%d %d" % util.makedate())
1859
1859
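# A standalone sketch of what setupheaderopts() above does for the
# -U/--currentuser and -D/--currentdate flags: when no explicit value
# was given, the current user name and date are filled in (the helper
# name and sample values are hypothetical):
def _example_fill_defaults(opts, username, now):
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = username
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = '%d %d' % now
    return opts

# _example_fill_defaults({'user': '', 'currentuser': True,
#                         'date': '', 'currentdate': False},
#                        'alice', (1234567890, 0))['user'] == 'alice'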
1860 def new(ui, repo, patch, *args, **opts):
1860 def new(ui, repo, patch, *args, **opts):
1861 """create a new patch
1861 """create a new patch
1862
1862
1863 qnew creates a new patch on top of the currently-applied patch (if
1863 qnew creates a new patch on top of the currently-applied patch (if
1864 any). It will refuse to run if there are any outstanding changes
1864 any). It will refuse to run if there are any outstanding changes
1865 unless -f/--force is specified, in which case the patch will be
1865 unless -f/--force is specified, in which case the patch will be
1866 initialized with them. You may also use -I/--include,
1866 initialized with them. You may also use -I/--include,
1867 -X/--exclude, and/or a list of files after the patch name to add
1867 -X/--exclude, and/or a list of files after the patch name to add
1868 only changes to matching files to the new patch, leaving the rest
1868 only changes to matching files to the new patch, leaving the rest
1869 as uncommitted modifications.
1869 as uncommitted modifications.
1870
1870
1871 -u/--user and -d/--date can be used to set the (given) user and
1871 -u/--user and -d/--date can be used to set the (given) user and
1872 date, respectively. -U/--currentuser and -D/--currentdate set user
1872 date, respectively. -U/--currentuser and -D/--currentdate set user
1873 to current user and date to current date.
1873 to current user and date to current date.
1874
1874
1875 -e/--edit, -m/--message or -l/--logfile set the patch header as
1875 -e/--edit, -m/--message or -l/--logfile set the patch header as
1876 well as the commit message. If none is specified, the header is
1876 well as the commit message. If none is specified, the header is
1877 empty and the commit message is '[mq]: PATCH'.
1877 empty and the commit message is '[mq]: PATCH'.
1878
1878
1879 Use the -g/--git option to keep the patch in the git extended diff
1879 Use the -g/--git option to keep the patch in the git extended diff
1880 format. Read the diffs help topic for more information on why this
1880 format. Read the diffs help topic for more information on why this
1881 is important for preserving permission changes and copy/rename
1881 is important for preserving permission changes and copy/rename
1882 information.
1882 information.
1883 """
1883 """
1884 msg = cmdutil.logmessage(opts)
1884 msg = cmdutil.logmessage(opts)
1885 def getmsg(): return ui.edit(msg, ui.username())
1885 def getmsg(): return ui.edit(msg, ui.username())
1886 q = repo.mq
1886 q = repo.mq
1887 opts['msg'] = msg
1887 opts['msg'] = msg
1888 if opts.get('edit'):
1888 if opts.get('edit'):
1889 opts['msg'] = getmsg
1889 opts['msg'] = getmsg
1890 else:
1890 else:
1891 opts['msg'] = msg
1891 opts['msg'] = msg
1892 setupheaderopts(ui, opts)
1892 setupheaderopts(ui, opts)
1893 q.new(repo, patch, *args, **opts)
1893 q.new(repo, patch, *args, **opts)
1894 q.save_dirty()
1894 q.save_dirty()
1895 return 0
1895 return 0
1896
1896
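# Illustrative usage sketch (not part of the original file): the qnew options
# documented above, with hypothetical patch names:
#
#   hg qnew -U -e fix-widget.patch        # new patch, record current user, edit the message
#   hg qnew -f -m "wip" local-hacks.patch # -f folds outstanding working-dir changes into the patch
#   hg qnew -g binary-fix.patch           # -g keeps the patch in git extended diff format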
1897 def refresh(ui, repo, *pats, **opts):
1898 """update the current patch
1899
1900 If any file patterns are provided, the refreshed patch will
1901 contain only the modifications that match those patterns; the
1902 remaining modifications will remain in the working directory.
1903
1904 If -s/--short is specified, files currently included in the patch
1905 will be refreshed just like matched files and remain in the patch.
1906
1907 hg add/remove/copy/rename work as usual, though you might want to
1908 use git-style patches (-g/--git or [diff] git=1) to track copies
1909 and renames. See the diffs help topic for more information on the
1910 git diff format.
1911 """
1912 q = repo.mq
1913 message = cmdutil.logmessage(opts)
1914 if opts['edit']:
1915 if not q.applied:
1916 ui.write(_("no patches applied\n"))
1917 return 1
1918 if message:
1919 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1920 patch = q.applied[-1].name
1921 ph = patchheader(q.join(patch))
1922 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
1923 setupheaderopts(ui, opts)
1924 ret = q.refresh(repo, pats, msg=message, **opts)
1925 q.save_dirty()
1926 return ret
1927
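# Illustrative usage sketch (not part of the original file): refreshing with a
# file pattern, assuming hypothetical files a.c and b.c were both modified:
#
#   hg qrefresh a.c        # only changes to a.c enter the patch; b.c stays uncommitted
#   hg qrefresh -s a.c     # -s also re-refreshes files already tracked by the patch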
1928 def diff(ui, repo, *pats, **opts):
1929 """diff of the current patch and subsequent modifications
1930
1931 Shows a diff which includes the current patch as well as any
1932 changes which have been made in the working directory since the
1933 last refresh (thus showing what the current patch would become
1934 after a qrefresh).
1935
1936 Use 'hg diff' if you only want to see the changes made since the
1937 last qrefresh, or 'hg export qtip' if you want to see changes made
1938 by the current patch without including changes made since the
1939 qrefresh.
1940 """
1941 repo.mq.diff(repo, pats, opts)
1942 return 0
1943
1944 def fold(ui, repo, *files, **opts):
1945 """fold the named patches into the current patch
1946
1947 Patches must not yet be applied. Each patch will be successively
1948 applied to the current patch in the order given. If all the
1949 patches apply successfully, the current patch will be refreshed
1950 with the new cumulative patch, and the folded patches will be
1951 deleted. With -k/--keep, the folded patch files will not be
1952 removed afterwards.
1953
1954 The header for each folded patch will be concatenated with the
1955 current patch header, separated by a line of '* * *'."""
1956
1957 q = repo.mq
1958
1959 if not files:
1960 raise util.Abort(_('qfold requires at least one patch name'))
1961 if not q.check_toppatch(repo):
1962 raise util.Abort(_('No patches applied'))
1963 q.check_localchanges(repo)
1964
1965 message = cmdutil.logmessage(opts)
1966 if opts['edit']:
1967 if message:
1968 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1969
1970 parent = q.lookup('qtip')
1971 patches = []
1972 messages = []
1973 for f in files:
1974 p = q.lookup(f)
1975 if p in patches or p == parent:
1976 ui.warn(_('Skipping already folded patch %s') % p)
1977 if q.isapplied(p):
1978 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1979 patches.append(p)
1980
1981 for p in patches:
1982 if not message:
1983 ph = patchheader(q.join(p))
1984 if ph.message:
1985 messages.append(ph.message)
1986 pf = q.join(p)
1987 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1988 if not patchsuccess:
1989 raise util.Abort(_('Error folding patch %s') % p)
1990 patch.updatedir(ui, repo, files)
1991
1992 if not message:
1993 ph = patchheader(q.join(parent))
1994 message, user = ph.message, ph.user
1995 for msg in messages:
1996 message.append('* * *')
1997 message.extend(msg)
1998 message = '\n'.join(message)
1999
2000 if opts['edit']:
2001 message = ui.edit(message, user or ui.username())
2002
2003 q.refresh(repo, msg=message)
2004 q.delete(repo, patches, opts)
2005 q.save_dirty()
2006
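# Illustrative usage sketch (not part of the original file): folding unapplied
# patches into the current patch, with hypothetical patch names:
#
#   hg qfold fix-a.patch fix-b.patch   # apply both onto qtip, refresh, delete the folded files
#   hg qfold -e -k cleanup.patch       # -e edits the combined header, -k keeps cleanup.patch on disk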
2007 def goto(ui, repo, patch, **opts):
2008 '''push or pop patches until named patch is at top of stack'''
2009 q = repo.mq
2010 patch = q.lookup(patch)
2011 if q.isapplied(patch):
2012 ret = q.pop(repo, patch, force=opts['force'])
2013 else:
2014 ret = q.push(repo, patch, force=opts['force'])
2015 q.save_dirty()
2016 return ret
2017
2018 def guard(ui, repo, *args, **opts):
2019 '''set or print guards for a patch
2020
2021 Guards control whether a patch can be pushed. A patch with no
2022 guards is always pushed. A patch with a positive guard ("+foo") is
2023 pushed only if the qselect command has activated it. A patch with
2024 a negative guard ("-foo") is never pushed if the qselect command
2025 has activated it.
2026
2027 With no arguments, print the currently active guards.
2028 With arguments, set guards for the named patch.
2029 NOTE: Specifying negative guards now requires '--'.
2030
2031 To set guards on another patch:
2032 hg qguard -- other.patch +2.6.17 -stable
2033 '''
2034 def status(idx):
2035 guards = q.series_guards[idx] or ['unguarded']
2036 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
2037 q = repo.mq
2038 patch = None
2039 args = list(args)
2040 if opts['list']:
2041 if args or opts['none']:
2042 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2043 for i in xrange(len(q.series)):
2044 status(i)
2045 return
2046 if not args or args[0][0:1] in '-+':
2047 if not q.applied:
2048 raise util.Abort(_('no patches applied'))
2049 patch = q.applied[-1].name
2050 if patch is None and args[0][0:1] not in '-+':
2051 patch = args.pop(0)
2052 if patch is None:
2053 raise util.Abort(_('no patch to work with'))
2054 if args or opts['none']:
2055 idx = q.find_series(patch)
2056 if idx is None:
2057 raise util.Abort(_('no patch named %s') % patch)
2058 q.set_guards(idx, args)
2059 q.save_dirty()
2060 else:
2061 status(q.series.index(q.lookup(patch)))
2062
2063 def header(ui, repo, patch=None):
2064 """print the header of the topmost or specified patch"""
2065 q = repo.mq
2066
2067 if patch:
2068 patch = q.lookup(patch)
2069 else:
2070 if not q.applied:
2071 ui.write('no patches applied\n')
2072 return 1
2073 patch = q.lookup('qtip')
2074 ph = patchheader(repo.mq.join(patch))
2075
2076 ui.write('\n'.join(ph.message) + '\n')
2077
2078 def lastsavename(path):
2079 (directory, base) = os.path.split(path)
2080 names = os.listdir(directory)
2081 namere = re.compile("%s.([0-9]+)" % base)
2082 maxindex = None
2083 maxname = None
2084 for f in names:
2085 m = namere.match(f)
2086 if m:
2087 index = int(m.group(1))
2088 if maxindex is None or index > maxindex:
2089 maxindex = index
2090 maxname = f
2091 if maxname:
2092 return (os.path.join(directory, maxname), maxindex)
2093 return (None, None)
2094
2095 def savename(path):
2096 (last, index) = lastsavename(path)
2097 if last is None:
2098 index = 0
2099 newpath = path + ".%d" % (index + 1)
2100 return newpath
2101
2102 def push(ui, repo, patch=None, **opts):
2103 """push the next patch onto the stack
2104
2105 When -f/--force is applied, all local changes in patched files
2106 will be lost.
2107 """
2108 q = repo.mq
2109 mergeq = None
2110
2111 if opts['merge']:
2112 if opts['name']:
2113 newpath = repo.join(opts['name'])
2114 else:
2115 newpath, i = lastsavename(q.path)
2116 if not newpath:
2117 ui.warn(_("no saved queues found, please use -n\n"))
2118 return 1
2119 mergeq = queue(ui, repo.join(""), newpath)
2120 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2121 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2122 mergeq=mergeq, all=opts.get('all'))
2123 return ret
2124
2125 def pop(ui, repo, patch=None, **opts):
2126 """pop the current patch off the stack
2127
2128 By default, pops off the top of the patch stack. If given a patch
2129 name, keeps popping off patches until the named patch is at the
2130 top of the stack.
2131 """
2132 localupdate = True
2133 if opts['name']:
2134 q = queue(ui, repo.join(""), repo.join(opts['name']))
2135 ui.warn(_('using patch queue: %s\n') % q.path)
2136 localupdate = False
2137 else:
2138 q = repo.mq
2139 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2140 all=opts['all'])
2141 q.save_dirty()
2142 return ret
2143
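# Illustrative usage sketch (not part of the original file): walking the patch
# stack, assuming a hypothetical series of foo.patch, bar.patch and baz.patch:
#
#   hg qpush              # apply the next unapplied patch (foo.patch)
#   hg qpush bar.patch    # keep pushing until bar.patch is applied
#   hg qpop               # pop the topmost applied patch
#   hg qpop -a            # pop everything
#   hg qgoto baz.patch    # push or pop until baz.patch is on top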
2144 def rename(ui, repo, patch, name=None, **opts):
2145 """rename a patch
2146
2147 With one argument, renames the current patch to PATCH1.
2148 With two arguments, renames PATCH1 to PATCH2."""
2149
2150 q = repo.mq
2151
2152 if not name:
2153 name = patch
2154 patch = None
2155
2156 if patch:
2157 patch = q.lookup(patch)
2158 else:
2159 if not q.applied:
2160 ui.write(_('no patches applied\n'))
2161 return
2162 patch = q.lookup('qtip')
2163 absdest = q.join(name)
2164 if os.path.isdir(absdest):
2165 name = normname(os.path.join(name, os.path.basename(patch)))
2166 absdest = q.join(name)
2167 if os.path.exists(absdest):
2168 raise util.Abort(_('%s already exists') % absdest)
2169
2170 if name in q.series:
2171 raise util.Abort(_('A patch named %s already exists in the series file') % name)
2172
2173 if ui.verbose:
2174 ui.write('renaming %s to %s\n' % (patch, name))
2175 i = q.find_series(patch)
2176 guards = q.guard_re.findall(q.full_series[i])
2177 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2178 q.parse_series()
2179 q.series_dirty = 1
2180
2181 info = q.isapplied(patch)
2182 if info:
2183 q.applied[info[0]] = statusentry(info[1], name)
2184 q.applied_dirty = 1
2185
2186 util.rename(q.join(patch), absdest)
2187 r = q.qrepo()
2188 if r:
2189 wlock = r.wlock()
2190 try:
2191 if r.dirstate[patch] == 'a':
2192 r.dirstate.forget(patch)
2193 r.dirstate.add(name)
2194 else:
2195 if r.dirstate[name] == 'r':
2196 r.undelete([name])
2197 r.copy(patch, name)
2198 r.remove([patch], False)
2199 finally:
2200 wlock.release()
2201
2202 q.save_dirty()
2203
2204 def restore(ui, repo, rev, **opts):
2205 """restore the queue state saved by a revision"""
2206 rev = repo.lookup(rev)
2207 q = repo.mq
2208 q.restore(repo, rev, delete=opts['delete'],
2209 qupdate=opts['update'])
2210 q.save_dirty()
2211 return 0
2212
2213 def save(ui, repo, **opts):
2214 """save current queue state"""
2215 q = repo.mq
2216 message = cmdutil.logmessage(opts)
2217 ret = q.save(repo, msg=message)
2218 if ret:
2219 return ret
2220 q.save_dirty()
2221 if opts['copy']:
2222 path = q.path
2223 if opts['name']:
2224 newpath = os.path.join(q.basepath, opts['name'])
2225 if os.path.exists(newpath):
2226 if not os.path.isdir(newpath):
2227 raise util.Abort(_('destination %s exists and is not '
2228 'a directory') % newpath)
2229 if not opts['force']:
2230 raise util.Abort(_('destination %s exists, '
2231 'use -f to force') % newpath)
2232 else:
2233 newpath = savename(path)
2234 ui.warn(_("copy %s to %s\n") % (path, newpath))
2235 util.copyfiles(path, newpath)
2236 if opts['empty']:
2237 try:
2238 os.unlink(q.join(q.status_path))
2239 except:
2240 pass
2241 return 0
2242
2243 def strip(ui, repo, rev, **opts):
2244 """strip a revision and all its descendants from the repository
2245
2246 If one of the working directory's parent revisions is stripped, the
2247 working directory will be updated to the parent of the stripped
2248 revision.
2249 """
2250 backup = 'all'
2251 if opts['backup']:
2252 backup = 'strip'
2253 elif opts['nobackup']:
2254 backup = 'none'
2255
2256 rev = repo.lookup(rev)
2257 p = repo.dirstate.parents()
2258 cl = repo.changelog
2259 update = True
2260 if p[0] == nullid:
2261 update = False
2262 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2263 update = False
2264 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2265 update = False
2266
2267 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2268 return 0
2269
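# Illustrative usage sketch (not part of the original file): the backup modes
# selected above map onto the command line like this (revision ID hypothetical):
#
#   hg strip 1e7a2b9        # default: back up all stripped changesets as a bundle
#   hg strip -b 1e7a2b9     # -b/--backup: bundle only unrelated changesets
#   hg strip -n 1e7a2b9     # -n/--nobackup: strip without writing any backup bundle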
2270 def select(ui, repo, *args, **opts):
2271 '''set or print guarded patches to push
2272
2273 Use the qguard command to set or print guards on patch, then use
2274 qselect to tell mq which guards to use. A patch will be pushed if
2275 it has no guards or any positive guards match the currently
2276 selected guard, but will not be pushed if any negative guards
2277 match the current guard. For example:
2278
2279 qguard foo.patch -stable (negative guard)
2280 qguard bar.patch +stable (positive guard)
2281 qselect stable
2282
2283 This activates the "stable" guard. mq will skip foo.patch (because
2284 it has a negative match) but push bar.patch (because it has a
2285 positive match).
2286
2287 With no arguments, prints the currently active guards.
2288 With one argument, sets the active guard.
2289
2290 Use -n/--none to deactivate guards (no other arguments needed).
2291 When no guards are active, patches with positive guards are
2292 skipped and patches with negative guards are pushed.
2293
2294 qselect can change the guards on applied patches. It does not pop
2295 guarded patches by default. Use --pop to pop back to the last
2296 applied patch that is not guarded. Use --reapply (which implies
2297 --pop) to push back to the current patch afterwards, but skip
2298 guarded patches.
2299
2300 Use -s/--series to print a list of all guards in the series file
2301 (no other arguments needed). Use -v for more information.'''
2302
2303 q = repo.mq
2304 guards = q.active()
2305 if args or opts['none']:
2306 old_unapplied = q.unapplied(repo)
2307 old_guarded = [i for i in xrange(len(q.applied)) if
2308 not q.pushable(i)[0]]
2309 q.set_active(args)
2310 q.save_dirty()
2311 if not args:
2312 ui.status(_('guards deactivated\n'))
2313 if not opts['pop'] and not opts['reapply']:
2314 unapplied = q.unapplied(repo)
2315 guarded = [i for i in xrange(len(q.applied))
2316 if not q.pushable(i)[0]]
2317 if len(unapplied) != len(old_unapplied):
2318 ui.status(_('number of unguarded, unapplied patches has '
2319 'changed from %d to %d\n') %
2320 (len(old_unapplied), len(unapplied)))
2321 if len(guarded) != len(old_guarded):
2322 ui.status(_('number of guarded, applied patches has changed '
2323 'from %d to %d\n') %
2324 (len(old_guarded), len(guarded)))
2325 elif opts['series']:
2326 guards = {}
2327 noguards = 0
2328 for gs in q.series_guards:
2329 if not gs:
2330 noguards += 1
2331 for g in gs:
2332 guards.setdefault(g, 0)
2333 guards[g] += 1
2334 if ui.verbose:
2335 guards['NONE'] = noguards
2336 guards = guards.items()
2337 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2338 if guards:
2339 ui.note(_('guards in series file:\n'))
2340 for guard, count in guards:
2341 ui.note('%2d ' % count)
2342 ui.write(guard, '\n')
2343 else:
2344 ui.note(_('no guards in series file\n'))
2345 else:
2346 if guards:
2347 ui.note(_('active guards:\n'))
2348 for g in guards:
2349 ui.write(g, '\n')
2350 else:
2351 ui.write(_('no active guards\n'))
2352 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2353 popped = False
2354 if opts['pop'] or opts['reapply']:
2355 for i in xrange(len(q.applied)):
2356 pushable, reason = q.pushable(i)
2357 if not pushable:
2358 ui.status(_('popping guarded patches\n'))
2359 popped = True
2360 if i == 0:
2361 q.pop(repo, all=True)
2362 else:
2363 q.pop(repo, i-1)
2364 break
2365 if popped:
2366 try:
2367 if reapply:
2368 ui.status(_('reapplying unguarded patches\n'))
2369 q.push(repo, reapply)
2370 finally:
2371 q.save_dirty()
2372
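# Illustrative usage sketch (not part of the original file): combining qselect
# with the --pop/--reapply behaviour described in the docstring above:
#
#   hg qselect stable            # activate the "stable" guard
#   hg qselect stable --reapply  # switch guards, pop guarded patches, push the rest back
#   hg qselect -n --pop          # drop all guards and pop to the last unguarded patch
#   hg qselect -s -v             # list every guard appearing in the series file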
2373 def finish(ui, repo, *revrange, **opts):
2374 """move applied patches into repository history
2375
2376 Finishes the specified revisions (corresponding to applied
2377 patches) by moving them out of mq control into regular repository
2378 history.
2379
2380 Accepts a revision range or the -a/--applied option. If --applied
2381 is specified, all applied mq revisions are removed from mq
2382 control. Otherwise, the given revisions must be at the base of the
2383 stack of applied patches.
2384
2385 This can be especially useful if your changes have been applied to
2386 an upstream repository, or if you are about to push your changes
2387 to upstream.
2388 """
2389 if not opts['applied'] and not revrange:
2390 raise util.Abort(_('no revisions specified'))
2391 elif opts['applied']:
2392 revrange = ('qbase:qtip',) + revrange
2393
2394 q = repo.mq
2395 if not q.applied:
2396 ui.status(_('no patches applied\n'))
2397 return 0
2398
2399 revs = cmdutil.revrange(repo, revrange)
2400 q.finish(repo, revs)
2401 q.save_dirty()
2402 return 0
2403
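# Illustrative usage sketch (not part of the original file): typical qfinish
# calls once patches have been accepted upstream (REV is hypothetical):
#
#   hg qfinish -a           # move every applied mq patch into regular history
#   hg qfinish qbase:REV    # finish only the bottom of the applied stack, up to REV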
2404 def reposetup(ui, repo):
2405 class mqrepo(repo.__class__):
2406 @util.propertycache
2407 def mq(self):
2408 return queue(self.ui, self.join(""))
2409
2410 def abort_if_wdir_patched(self, errmsg, force=False):
2411 if self.mq.applied and not force:
2412 parent = hex(self.dirstate.parents()[0])
2413 if parent in [s.rev for s in self.mq.applied]:
2414 raise util.Abort(errmsg)
2415
2416 def commit(self, text="", user=None, date=None, match=None,
2417 force=False, editor=False, extra={}):
2418 self.abort_if_wdir_patched(
2419 _('cannot commit over an applied mq patch'),
2420 force)
2421
2422 return super(mqrepo, self).commit(text, user, date, match, force,
2423 editor, extra)
2424
2425 def push(self, remote, force=False, revs=None):
2426 if self.mq.applied and not force and not revs:
2427 raise util.Abort(_('source has mq patches applied'))
2428 return super(mqrepo, self).push(remote, force, revs)
2429
2430 def tags(self):
2431 if self.tagscache:
2432 return self.tagscache
2433
2434 tagscache = super(mqrepo, self).tags()
2435
2436 q = self.mq
2437 if not q.applied:
2438 return tagscache
2439
2440 mqtags = [(bin(patch.rev), patch.name) for patch in q.applied]
2441
2442 if mqtags[-1][0] not in self.changelog.nodemap:
2443 self.ui.warn(_('mq status file refers to unknown node %s\n')
2444 % short(mqtags[-1][0]))
2445 return tagscache
2446
2447 mqtags.append((mqtags[-1][0], 'qtip'))
2448 mqtags.append((mqtags[0][0], 'qbase'))
2449 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2450 for patch in mqtags:
2451 if patch[1] in tagscache:
2452 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2453 % patch[1])
2454 else:
2455 tagscache[patch[1]] = patch[0]
2456
2457 return tagscache
2458
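# Illustrative note (not part of the original file): the qtip/qbase/qparent
# entries added above act like tags, so they can be used in revision
# arguments elsewhere, for example:
#
#   hg log -r qbase:qtip    # show the changesets of the applied patch stack
#   hg diff -r qparent      # working directory against the last non-mq changeset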
2459 def _branchtags(self, partial, lrev):
2460 q = self.mq
2461 if not q.applied:
2462 return super(mqrepo, self)._branchtags(partial, lrev)
2463
2464 cl = self.changelog
2465 qbasenode = bin(q.applied[0].rev)
2466 if qbasenode not in cl.nodemap:
2467 self.ui.warn(_('mq status file refers to unknown node %s\n')
2468 % short(qbasenode))
2469 return super(mqrepo, self)._branchtags(partial, lrev)
2470
2471 qbase = cl.rev(qbasenode)
2472 start = lrev + 1
2473 if start < qbase:
2474 # update the cache (excluding the patches) and save it
2475 self._updatebranchcache(partial, lrev+1, qbase)
2476 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2477 start = qbase
2478 # if start = qbase, the cache is as updated as it should be.
2479 # if start > qbase, the cache includes (part of) the patches.
2480 # we might as well use it, but we won't save it.
2481
2482 # update the cache up to the tip
2483 self._updatebranchcache(partial, start, len(cl))
2484
2485 return partial
2486
2487 if repo.local():
2488 repo.__class__ = mqrepo
2489
2490 def mqimport(orig, ui, repo, *args, **kwargs):
2491 if hasattr(repo, 'abort_if_wdir_patched'):
2492 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2493 kwargs.get('force'))
2494 return orig(ui, repo, *args, **kwargs)
2495
2496 def uisetup(ui):
2497 extensions.wrapcommand(commands.table, 'import', mqimport)
2498
2499 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2500
2501 cmdtable = {
2502 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2503 "qclone":
2504 (clone,
2505 [('', 'pull', None, _('use pull protocol to copy metadata')),
2506 ('U', 'noupdate', None, _('do not update the new working directories')),
2507 ('', 'uncompressed', None,
2508 _('use uncompressed transfer (fast over LAN)')),
2509 ('p', 'patches', '', _('location of source patch repository')),
2510 ] + commands.remoteopts,
2511 _('hg qclone [OPTION]... SOURCE [DEST]')),
2512 "qcommit|qci":
2513 (commit,
2514 commands.table["^commit|ci"][1],
2515 _('hg qcommit [OPTION]... [FILE]...')),
2516 "^qdiff":
2517 (diff,
2518 commands.diffopts + commands.diffopts2 + commands.walkopts,
2519 _('hg qdiff [OPTION]... [FILE]...')),
2520 "qdelete|qremove|qrm":
2521 (delete,
2522 [('k', 'keep', None, _('keep patch file')),
2523 ('r', 'rev', [], _('stop managing a revision'))],
2524 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2525 'qfold':
2526 (fold,
2527 [('e', 'edit', None, _('edit patch header')),
2528 ('k', 'keep', None, _('keep folded patch files')),
2529 ] + commands.commitopts,
2530 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2531 'qgoto':
2532 (goto,
2533 [('f', 'force', None, _('overwrite any local changes'))],
2534 _('hg qgoto [OPTION]... PATCH')),
2535 'qguard':
2536 (guard,
2537 [('l', 'list', None, _('list all patches and guards')),
2538 ('n', 'none', None, _('drop all guards'))],
2539 _('hg qguard [-l] [-n] -- [PATCH] [+GUARD]... [-GUARD]...')),
2540 'qheader': (header, [], _('hg qheader [PATCH]')),
2541 "^qimport":
2542 (qimport,
2543 [('e', 'existing', None, _('import file in patch directory')),
2544 ('n', 'name', '', _('name of patch file')),
2545 ('f', 'force', None, _('overwrite existing files')),
2546 ('r', 'rev', [], _('place existing revisions under mq control')),
2547 ('g', 'git', None, _('use git extended diff format')),
2548 ('P', 'push', None, _('qpush after importing'))],
2549 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2550 "^qinit":
2551 (init,
2552 [('c', 'create-repo', None, _('create queue repository'))],
2553 _('hg qinit [-c]')),
2554 "qnew":
2555 (new,
2556 [('e', 'edit', None, _('edit commit message')),
2557 ('f', 'force', None, _('import uncommitted changes into patch')),
2558 ('g', 'git', None, _('use git extended diff format')),
2559 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2560 ('u', 'user', '', _('add "From: <given user>" to patch')),
2561 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2562 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2563 ] + commands.walkopts + commands.commitopts,
2564 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2565 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2566 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2567 "^qpop":
2568 (pop,
2569 [('a', 'all', None, _('pop all patches')),
2570 ('n', 'name', '', _('queue name to pop')),
2571 ('f', 'force', None, _('forget any local changes'))],
2572 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2573 "^qpush":
2574 (push,
2575 [('f', 'force', None, _('apply if the patch has rejects')),
2576 ('l', 'list', None, _('list patch name in commit text')),
2577 ('a', 'all', None, _('apply all patches')),
2578 ('m', 'merge', None, _('merge from another queue')),
2579 ('n', 'name', '', _('merge queue name'))],
2580 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2581 "^qrefresh":
2582 (refresh,
2583 [('e', 'edit', None, _('edit commit message')),
2584 ('g', 'git', None, _('use git extended diff format')),
2585 ('s', 'short', None, _('refresh only files already in the patch and specified files')),
2586 ('U', 'currentuser', None, _('add/update "From: <current user>" in patch')),
2587 ('u', 'user', '', _('add/update "From: <given user>" in patch')),
2588 ('D', 'currentdate', None, _('update "Date: <current date>" in patch (if present)')),
2589 ('d', 'date', '', _('update "Date: <given date>" in patch (if present)'))
2590 ] + commands.walkopts + commands.commitopts,
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2592 'qrename|qmv':
2593 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2594 "qrestore":
2595 (restore,
2596 [('d', 'delete', None, _('delete save entry')),
2597 ('u', 'update', None, _('update queue working directory'))],
2598 _('hg qrestore [-d] [-u] REV')),
2599 "qsave":
2600 (save,
2601 [('c', 'copy', None, _('copy patch directory')),
2602 ('n', 'name', '', _('copy directory name')),
2603 ('e', 'empty', None, _('clear queue status file')),
2604 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2605 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2606 "qselect":
2607 (select,
2608 [('n', 'none', None, _('disable all guards')),
2609 ('s', 'series', None, _('list all guards in series file')),
2610 ('', 'pop', None, _('pop to before first guarded applied patch')),
2611 ('', 'reapply', None, _('pop, then reapply patches'))],
2612 _('hg qselect [OPTION]... [GUARD]...')),
2613 "qseries":
2614 (series,
2615 [('m', 'missing', None, _('print patches not in series')),
2616 ] + seriesopts,
2617 _('hg qseries [-ms]')),
2618 "^strip":
2619 (strip,
2620 [('f', 'force', None, _('force removal with local changes')),
2621 ('b', 'backup', None, _('bundle unrelated changesets')),
2622 ('n', 'nobackup', None, _('no backups'))],
2623 _('hg strip [-f] [-b] [-n] REV')),
2624 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2625 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2626 "qfinish":
2627 (finish,
2628 [('a', 'applied', None, _('finish all applied changesets'))],
2629 _('hg qfinish [-a] [REV...]')),
2630 }
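# Illustrative note (not part of the original file): as with the other hgext
# modules in this changeset, the extension is enabled from hgrc, e.g.:
#
#   [extensions]
#   hgext.mq =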
@@ -1,289 +1,289 b''
1 # notify.py - email notifications for mercurial
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
7
8 '''hook extension to email notifications on commits/pushes
8 '''send e-mail notifications for commits/pushes
9
10 Subscriptions can be managed through hgrc. Default mode is to print
11 messages to stdout, for testing and configuring.
12
13 To use, configure notify extension and enable in hgrc like this:
14
15 [extensions]
16 hgext.notify =
17
18 [hooks]
19 # one email for each incoming changeset
20 incoming.notify = python:hgext.notify.hook
21 # batch emails when many changesets incoming at one time
22 changegroup.notify = python:hgext.notify.hook
23
24 [notify]
25 # config items go in here
26
27 config items:
28
29 REQUIRED:
30 config = /path/to/file # file containing subscriptions
31
32 OPTIONAL:
33 test = True # print messages to stdout for testing
34 strip = 3 # number of slashes to strip for url paths
35 domain = example.com # domain to use if committer missing domain
36 style = ... # style file to use when formatting email
37 template = ... # template to use when formatting email
38 incoming = ... # template to use when run as incoming hook
39 changegroup = ... # template when run as changegroup hook
40 maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 maxsubject = 67 # truncate subject line longer than this
42 diffstat = True # add a diffstat before the diff content
43 sources = serve # notify if source of incoming changes in this list
44 # (serve == ssh or http, push, pull, bundle)
45 [email]
46 from = user@host.com # email address to send as if none given
47 [web]
48 baseurl = http://hgserver/... # root of hg web site for browsing commits
49
50 notify config file has same format as regular hgrc. it has two
51 sections so you can express subscriptions in whatever way is handier
52 for you.
53
54 [usersubs]
55 # key is subscriber email, value is ","-separated list of glob patterns
56 user@host = pattern
57
58 [reposubs]
59 # key is glob pattern, value is ","-separated list of subscriber emails
60 pattern = user@host
61
62 glob patterns are matched against path to repository root.
63
64 if you like, you can put notify config file in repository that users
65 can push changes to, they can manage their own subscriptions.'''
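# Illustrative example (not part of the original file): a concrete subscription
# file matching the [usersubs]/[reposubs] skeleton above, with hypothetical
# addresses and repository paths:
#
#   [usersubs]
#   alice@example.com = */widgets/**
#
#   [reposubs]
#   */main = alice@example.com, bob@example.com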
66
67 from mercurial.i18n import _
68 from mercurial import patch, cmdutil, templater, util, mail
69 import email.Parser, fnmatch, socket, time
70
71 # template for single changeset can include email headers.
72 single_template = '''
73 Subject: changeset in {webroot}: {desc|firstline|strip}
74 From: {author}
75
76 changeset {node|short} in {root}
77 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
78 description:
79 \t{desc|tabindent|strip}
80 '''.lstrip()
81
82 # template for multiple changesets should not contain email headers,
83 # because only first set of headers will be used and result will look
84 # strange.
85 multiple_template = '''
86 changeset {node|short} in {root}
87 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
88 summary: {desc|firstline}
89 '''
90
91 deftemplates = {
92 'changegroup': multiple_template,
93 }
94
95 class notifier(object):
96 '''email notification class.'''
97
98 def __init__(self, ui, repo, hooktype):
99 self.ui = ui
100 cfg = self.ui.config('notify', 'config')
101 if cfg:
102 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
103 self.repo = repo
104 self.stripcount = int(self.ui.config('notify', 'strip', 0))
105 self.root = self.strip(self.repo.root)
106 self.domain = self.ui.config('notify', 'domain')
107 self.test = self.ui.configbool('notify', 'test', True)
108 self.charsets = mail._charsets(self.ui)
109 self.subs = self.subscribers()
110
111 mapfile = self.ui.config('notify', 'style')
112 template = (self.ui.config('notify', hooktype) or
113 self.ui.config('notify', 'template'))
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 False, None, mapfile, False)
116 if not mapfile and not template:
117 template = deftemplates.get(hooktype) or single_template
118 if template:
119 template = templater.parsestring(template, quoted=False)
120 self.t.use_template(template)
121
122 def strip(self, path):
123 '''strip leading slashes from local path, turn into web-safe path.'''
124
125 path = util.pconvert(path)
126 count = self.stripcount
127 while count > 0:
128 c = path.find('/')
129 if c == -1:
130 break
131 path = path[c+1:]
131 path = path[c+1:]
132 count -= 1
132 count -= 1
133 return path
133 return path
134
134
135 def fixmail(self, addr):
135 def fixmail(self, addr):
136 '''try to clean up email addresses.'''
136 '''try to clean up email addresses.'''
137
137
138 addr = util.email(addr.strip())
138 addr = util.email(addr.strip())
139 if self.domain:
139 if self.domain:
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149 subs = set()
149 subs = set()
150 for user, pats in self.ui.configitems('usersubs'):
150 for user, pats in self.ui.configitems('usersubs'):
151 for pat in pats.split(','):
151 for pat in pats.split(','):
152 if fnmatch.fnmatch(self.repo.root, pat.strip()):
152 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 subs.add(self.fixmail(user))
153 subs.add(self.fixmail(user))
154 for pat, users in self.ui.configitems('reposubs'):
154 for pat, users in self.ui.configitems('reposubs'):
155 if fnmatch.fnmatch(self.repo.root, pat):
155 if fnmatch.fnmatch(self.repo.root, pat):
156 for user in users.split(','):
156 for user in users.split(','):
157 subs.add(self.fixmail(user))
157 subs.add(self.fixmail(user))
158 return [mail.addressencode(self.ui, s, self.charsets, self.test)
158 return [mail.addressencode(self.ui, s, self.charsets, self.test)
159 for s in sorted(subs)]
159 for s in sorted(subs)]
160
160
161 def url(self, path=None):
161 def url(self, path=None):
162 return self.ui.config('web', 'baseurl') + (path or self.root)
162 return self.ui.config('web', 'baseurl') + (path or self.root)
163
163
164 def node(self, ctx):
164 def node(self, ctx):
165 '''format one changeset.'''
165 '''format one changeset.'''
166 self.t.show(ctx, changes=ctx.changeset(),
166 self.t.show(ctx, changes=ctx.changeset(),
167 baseurl=self.ui.config('web', 'baseurl'),
167 baseurl=self.ui.config('web', 'baseurl'),
168 root=self.repo.root, webroot=self.root)
168 root=self.repo.root, webroot=self.root)
169
169
170 def skipsource(self, source):
170 def skipsource(self, source):
171 '''true if incoming changes from this source should be skipped.'''
171 '''true if incoming changes from this source should be skipped.'''
172 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
172 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
173 return source not in ok_sources
173 return source not in ok_sources
174
174
175 def send(self, ctx, count, data):
175 def send(self, ctx, count, data):
176 '''send message.'''
176 '''send message.'''
177
177
178 p = email.Parser.Parser()
178 p = email.Parser.Parser()
179 msg = p.parsestr(data)
179 msg = p.parsestr(data)
180
180
181 # store sender and subject
181 # store sender and subject
182 sender, subject = msg['From'], msg['Subject']
182 sender, subject = msg['From'], msg['Subject']
183 del msg['From'], msg['Subject']
183 del msg['From'], msg['Subject']
184 # store remaining headers
184 # store remaining headers
185 headers = msg.items()
185 headers = msg.items()
186 # create fresh mime message from msg body
186 # create fresh mime message from msg body
187 text = msg.get_payload()
187 text = msg.get_payload()
188 # for notification prefer readability over data precision
188 # for notification prefer readability over data precision
189 msg = mail.mimeencode(self.ui, text, self.charsets, self.test)
189 msg = mail.mimeencode(self.ui, text, self.charsets, self.test)
190 # reinstate custom headers
190 # reinstate custom headers
191 for k, v in headers:
191 for k, v in headers:
192 msg[k] = v
192 msg[k] = v
193
193
194 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
194 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
195
195
196 # try to make subject line exist and be useful
196 # try to make subject line exist and be useful
197 if not subject:
197 if not subject:
198 if count > 1:
198 if count > 1:
199 subject = _('%s: %d new changesets') % (self.root, count)
199 subject = _('%s: %d new changesets') % (self.root, count)
200 else:
200 else:
201 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
201 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
202 subject = '%s: %s' % (self.root, s)
202 subject = '%s: %s' % (self.root, s)
203 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
203 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
204 if maxsubject and len(subject) > maxsubject:
204 if maxsubject and len(subject) > maxsubject:
205 subject = subject[:maxsubject-3] + '...'
205 subject = subject[:maxsubject-3] + '...'
206 msg['Subject'] = mail.headencode(self.ui, subject,
206 msg['Subject'] = mail.headencode(self.ui, subject,
207 self.charsets, self.test)
207 self.charsets, self.test)
208
208
209 # try to make message have proper sender
209 # try to make message have proper sender
210 if not sender:
210 if not sender:
211 sender = self.ui.config('email', 'from') or self.ui.username()
211 sender = self.ui.config('email', 'from') or self.ui.username()
212 if '@' not in sender or '@localhost' in sender:
212 if '@' not in sender or '@localhost' in sender:
213 sender = self.fixmail(sender)
213 sender = self.fixmail(sender)
214 msg['From'] = mail.addressencode(self.ui, sender,
214 msg['From'] = mail.addressencode(self.ui, sender,
215 self.charsets, self.test)
215 self.charsets, self.test)
216
216
217 msg['X-Hg-Notification'] = 'changeset %s' % ctx
217 msg['X-Hg-Notification'] = 'changeset %s' % ctx
218 if not msg['Message-Id']:
218 if not msg['Message-Id']:
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 (ctx, int(time.time()),
220 (ctx, int(time.time()),
221 hash(self.repo.root), socket.getfqdn()))
221 hash(self.repo.root), socket.getfqdn()))
222 msg['To'] = ', '.join(self.subs)
222 msg['To'] = ', '.join(self.subs)
223
223
224 msgtext = msg.as_string(0)
224 msgtext = msg.as_string(0)
225 if self.test:
225 if self.test:
226 self.ui.write(msgtext)
226 self.ui.write(msgtext)
227 if not msgtext.endswith('\n'):
227 if not msgtext.endswith('\n'):
228 self.ui.write('\n')
228 self.ui.write('\n')
229 else:
229 else:
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 (len(self.subs), count))
231 (len(self.subs), count))
232 mail.sendmail(self.ui, util.email(msg['From']),
232 mail.sendmail(self.ui, util.email(msg['From']),
233 self.subs, msgtext)
233 self.subs, msgtext)
234
234
235 def diff(self, ctx, ref=None):
235 def diff(self, ctx, ref=None):
236
236
237 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
238 prev = ctx.parents()[0].node()
238 prev = ctx.parents()[0].node()
239 ref = ref and ref.node() or ctx.node()
239 ref = ref and ref.node() or ctx.node()
240 chunks = patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
240 chunks = patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
241 difflines = ''.join(chunks).splitlines()
241 difflines = ''.join(chunks).splitlines()
242
242
243 if self.ui.configbool('notify', 'diffstat', True):
243 if self.ui.configbool('notify', 'diffstat', True):
244 s = patch.diffstat(difflines)
244 s = patch.diffstat(difflines)
245 # s may be nil, don't include the header if it is
245 # s may be nil, don't include the header if it is
246 if s:
246 if s:
247 self.ui.write('\ndiffstat:\n\n%s' % s)
247 self.ui.write('\ndiffstat:\n\n%s' % s)
248
248
249 if maxdiff == 0:
249 if maxdiff == 0:
250 return
250 return
251 elif maxdiff > 0 and len(difflines) > maxdiff:
251 elif maxdiff > 0 and len(difflines) > maxdiff:
252 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
252 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
253 self.ui.write(msg % (len(difflines), maxdiff))
253 self.ui.write(msg % (len(difflines), maxdiff))
254 difflines = difflines[:maxdiff]
254 difflines = difflines[:maxdiff]
255 elif difflines:
255 elif difflines:
256 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
256 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
257
257
258 self.ui.write("\n".join(difflines))
258 self.ui.write("\n".join(difflines))
259
259
260 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
260 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
261 '''send email notifications to interested subscribers.
261 '''send email notifications to interested subscribers.
262
262
263 If used as a changegroup hook, send one email for all changesets in
263 If used as a changegroup hook, send one email for all changesets in
264 the changegroup; otherwise send one email per changeset.'''
264 the changegroup; otherwise send one email per changeset.'''
265
265
266 n = notifier(ui, repo, hooktype)
266 n = notifier(ui, repo, hooktype)
267 ctx = repo[node]
267 ctx = repo[node]
268
268
269 if not n.subs:
269 if not n.subs:
270 ui.debug(_('notify: no subscribers to repository %s\n') % n.root)
270 ui.debug(_('notify: no subscribers to repository %s\n') % n.root)
271 return
271 return
272 if n.skipsource(source):
272 if n.skipsource(source):
273 ui.debug(_('notify: changes have source "%s" - skipping\n') % source)
273 ui.debug(_('notify: changes have source "%s" - skipping\n') % source)
274 return
274 return
275
275
276 ui.pushbuffer()
276 ui.pushbuffer()
277 if hooktype == 'changegroup':
277 if hooktype == 'changegroup':
278 start, end = ctx.rev(), len(repo)
278 start, end = ctx.rev(), len(repo)
279 count = end - start
279 count = end - start
280 for rev in xrange(start, end):
280 for rev in xrange(start, end):
281 n.node(repo[rev])
281 n.node(repo[rev])
282 n.diff(ctx, repo['tip'])
282 n.diff(ctx, repo['tip'])
283 else:
283 else:
284 count = 1
284 count = 1
285 n.node(ctx)
285 n.node(ctx)
286 n.diff(ctx)
286 n.diff(ctx)
287
287
288 data = ui.popbuffer()
288 data = ui.popbuffer()
289 n.send(ctx, count, data)
289 n.send(ctx, count, data)
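The [usersubs] and [reposubs] lookup described in the module docstring above reduces to fnmatch globs tested against the repository root, as in subscribers(). A minimal, standalone sketch of that matching, with hypothetical addresses and paths (this is editorial illustration, not part of the extension):

    import fnmatch

    # Hypothetical configuration, mirroring the [usersubs] and [reposubs] sections.
    usersubs = {'alice@example.com': '/srv/hg/proj*, /srv/hg/docs'}
    reposubs = {'/srv/hg/*': 'team@example.com, bob@example.com'}

    def subscribers(repo_root):
        subs = set()
        # [usersubs]: subscriber address -> comma-separated glob patterns
        for user, pats in usersubs.items():
            if any(fnmatch.fnmatch(repo_root, p.strip()) for p in pats.split(',')):
                subs.add(user)
        # [reposubs]: glob pattern -> comma-separated subscriber addresses
        for pat, users in reposubs.items():
            if fnmatch.fnmatch(repo_root, pat):
                subs.update(u.strip() for u in users.split(','))
        return sorted(subs)

    print(subscribers('/srv/hg/project1'))
    # ['alice@example.com', 'bob@example.com', 'team@example.com']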
@@ -1,64 +1,64 b''
1 # pager.py - display output using a pager
1 # pager.py - display output using a pager
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7 #
7 #
8 # To load the extension, add it to your .hgrc file:
8 # To load the extension, add it to your .hgrc file:
9 #
9 #
10 # [extensions]
10 # [extensions]
11 # hgext.pager =
11 # hgext.pager =
12 #
12 #
13 # Run "hg help pager" to get info on configuration.
13 # Run "hg help pager" to get info on configuration.
14
14
15 '''browse command output with external pager
15 '''browse command output with an external pager
16
16
17 To set the pager that should be used, set the pager variable:
17 To set the pager that should be used, set the pager variable:
18
18
19 [pager]
19 [pager]
20 pager = LESS='FSRX' less
20 pager = LESS='FSRX' less
21
21
22 If no pager is set, the pager extension uses the environment variable
22 If no pager is set, the pager extension uses the environment variable
23 $PAGER. If neither pager.pager nor $PAGER is set, no pager is used.
23 $PAGER. If neither pager.pager nor $PAGER is set, no pager is used.
24
24
25 If you notice "BROKEN PIPE" error messages, you can disable them by
25 If you notice "BROKEN PIPE" error messages, you can disable them by
26 setting:
26 setting:
27
27
28 [pager]
28 [pager]
29 quiet = True
29 quiet = True
30
30
31 You can disable the pager for certain commands by adding them to the
31 You can disable the pager for certain commands by adding them to the
32 pager.ignore list:
32 pager.ignore list:
33
33
34 [pager]
34 [pager]
35 ignore = version, help, update
35 ignore = version, help, update
36
36
37 You can also enable the pager only for certain commands using
37 You can also enable the pager only for certain commands using
38 pager.attend:
38 pager.attend:
39
39
40 [pager]
40 [pager]
41 attend = log
41 attend = log
42
42
43 If pager.attend is present, pager.ignore will be ignored.
43 If pager.attend is present, pager.ignore will be ignored.
44
44
45 To ignore global commands like "hg version" or "hg help", you have to
45 To ignore global commands like "hg version" or "hg help", you have to
46 specify them in the global .hgrc file.
46 specify them in the global .hgrc file.
47 '''
47 '''
48
48
49 import sys, os, signal
49 import sys, os, signal
50 from mercurial import dispatch, util, extensions
50 from mercurial import dispatch, util, extensions
51
51
52 def uisetup(ui):
52 def uisetup(ui):
53 def pagecmd(orig, ui, options, cmd, cmdfunc):
53 def pagecmd(orig, ui, options, cmd, cmdfunc):
54 p = ui.config("pager", "pager", os.environ.get("PAGER"))
54 p = ui.config("pager", "pager", os.environ.get("PAGER"))
55 if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
55 if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
56 attend = ui.configlist('pager', 'attend')
56 attend = ui.configlist('pager', 'attend')
57 if (cmd in attend or
57 if (cmd in attend or
58 (cmd not in ui.configlist('pager', 'ignore') and not attend)):
58 (cmd not in ui.configlist('pager', 'ignore') and not attend)):
59 sys.stderr = sys.stdout = util.popen(p, "wb")
59 sys.stderr = sys.stdout = util.popen(p, "wb")
60 if ui.configbool('pager', 'quiet'):
60 if ui.configbool('pager', 'quiet'):
61 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
61 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
62 return orig(ui, options, cmd, cmdfunc)
62 return orig(ui, options, cmd, cmdfunc)
63
63
64 extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
64 extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
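The attend/ignore precedence that pagecmd encodes in a single boolean expression (pager.attend, when present, overrides pager.ignore) may be easier to read as a small predicate. A sketch of the same rule, separate from the extension itself:

    def should_page(cmd, attend, ignore):
        # When an attend list is configured, only those commands are paged;
        # otherwise any command not in the ignore list is paged.
        if attend:
            return cmd in attend
        return cmd not in ignore

    assert should_page('log', attend=['log'], ignore=[])
    assert not should_page('diff', attend=['log'], ignore=[])
    assert should_page('diff', attend=[], ignore=['version', 'help'])
    assert not should_page('help', attend=[], ignore=['version', 'help'])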
@@ -1,96 +1,96 b''
1 # Mercurial extension to make it easy to refer to the parent of a revision
1 # Mercurial extension to make it easy to refer to the parent of a revision
2 #
2 #
3 # Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 # Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''use suffixes to refer to ancestor revisions
8 '''interpret suffixes to refer to ancestor revisions
9
9
10 This extension allows you to use git-style suffixes to refer to the
10 This extension allows you to use git-style suffixes to refer to the
11 ancestors of a specific revision.
11 ancestors of a specific revision.
12
12
13 For example, if you can refer to a revision as "foo", then:
13 For example, if you can refer to a revision as "foo", then:
14
14
15 - foo^N = Nth parent of foo
15 - foo^N = Nth parent of foo
16 foo^0 = foo
16 foo^0 = foo
17 foo^1 = first parent of foo
17 foo^1 = first parent of foo
18 foo^2 = second parent of foo
18 foo^2 = second parent of foo
19 foo^ = foo^1
19 foo^ = foo^1
20
20
21 - foo~N = Nth first grandparent of foo
21 - foo~N = Nth first grandparent of foo
22 foo~0 = foo
22 foo~0 = foo
23 foo~1 = foo^1 = foo^ = first parent of foo
23 foo~1 = foo^1 = foo^ = first parent of foo
24 foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo
24 foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo
25 '''
25 '''
26 from mercurial import error
26 from mercurial import error
27
27
28 def reposetup(ui, repo):
28 def reposetup(ui, repo):
29 if not repo.local():
29 if not repo.local():
30 return
30 return
31
31
32 class parentrevspecrepo(repo.__class__):
32 class parentrevspecrepo(repo.__class__):
33 def lookup(self, key):
33 def lookup(self, key):
34 try:
34 try:
35 _super = super(parentrevspecrepo, self)
35 _super = super(parentrevspecrepo, self)
36 return _super.lookup(key)
36 return _super.lookup(key)
37 except error.RepoError:
37 except error.RepoError:
38 pass
38 pass
39
39
40 circ = key.find('^')
40 circ = key.find('^')
41 tilde = key.find('~')
41 tilde = key.find('~')
42 if circ < 0 and tilde < 0:
42 if circ < 0 and tilde < 0:
43 raise
43 raise
44 elif circ >= 0 and tilde >= 0:
44 elif circ >= 0 and tilde >= 0:
45 end = min(circ, tilde)
45 end = min(circ, tilde)
46 else:
46 else:
47 end = max(circ, tilde)
47 end = max(circ, tilde)
48
48
49 cl = self.changelog
49 cl = self.changelog
50 base = key[:end]
50 base = key[:end]
51 try:
51 try:
52 node = _super.lookup(base)
52 node = _super.lookup(base)
53 except error.RepoError:
53 except error.RepoError:
54 # eek - reraise the first error
54 # eek - reraise the first error
55 return _super.lookup(key)
55 return _super.lookup(key)
56
56
57 rev = cl.rev(node)
57 rev = cl.rev(node)
58 suffix = key[end:]
58 suffix = key[end:]
59 i = 0
59 i = 0
60 while i < len(suffix):
60 while i < len(suffix):
61 # foo^N => Nth parent of foo
61 # foo^N => Nth parent of foo
62 # foo^0 == foo
62 # foo^0 == foo
63 # foo^1 == foo^ == 1st parent of foo
63 # foo^1 == foo^ == 1st parent of foo
64 # foo^2 == 2nd parent of foo
64 # foo^2 == 2nd parent of foo
65 if suffix[i] == '^':
65 if suffix[i] == '^':
66 j = i + 1
66 j = i + 1
67 p = cl.parentrevs(rev)
67 p = cl.parentrevs(rev)
68 if j < len(suffix) and suffix[j].isdigit():
68 if j < len(suffix) and suffix[j].isdigit():
69 j += 1
69 j += 1
70 n = int(suffix[i+1:j])
70 n = int(suffix[i+1:j])
71 if n > 2 or n == 2 and p[1] == -1:
71 if n > 2 or n == 2 and p[1] == -1:
72 raise
72 raise
73 else:
73 else:
74 n = 1
74 n = 1
75 if n:
75 if n:
76 rev = p[n - 1]
76 rev = p[n - 1]
77 i = j
77 i = j
78 # foo~N => Nth first grandparent of foo
78 # foo~N => Nth first grandparent of foo
79 # foo~0 = foo
79 # foo~0 = foo
80 # foo~1 = foo^1 == foo^ == 1st parent of foo
80 # foo~1 = foo^1 == foo^ == 1st parent of foo
81 # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo
81 # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo
82 elif suffix[i] == '~':
82 elif suffix[i] == '~':
83 j = i + 1
83 j = i + 1
84 while j < len(suffix) and suffix[j].isdigit():
84 while j < len(suffix) and suffix[j].isdigit():
85 j += 1
85 j += 1
86 if j == i + 1:
86 if j == i + 1:
87 raise
87 raise
88 n = int(suffix[i+1:j])
88 n = int(suffix[i+1:j])
89 for k in xrange(n):
89 for k in xrange(n):
90 rev = cl.parentrevs(rev)[0]
90 rev = cl.parentrevs(rev)[0]
91 i = j
91 i = j
92 else:
92 else:
93 raise
93 raise
94 return cl.node(rev)
94 return cl.node(rev)
95
95
96 repo.__class__ = parentrevspecrepo
96 repo.__class__ = parentrevspecrepo
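The suffix walk in lookup() above can be exercised in isolation. Below is a toy re-implementation over a hand-built parent table (hypothetical revision numbers; -1 marks a missing parent, as in Mercurial), kept close to the loop above but not identical to it:

    parents = {
        0: (-1, -1),
        1: (0, -1),
        2: (0, -1),
        3: (1, 2),    # revision 3 is a merge of 1 and 2
    }

    def resolve(rev, suffix):
        i = 0
        while i < len(suffix):
            j = i + 1
            while j < len(suffix) and suffix[j].isdigit():
                j += 1
            if suffix[i] == '^':
                # foo^N: Nth parent of foo; bare "^" means "^1", "^0" is foo itself
                n = int(suffix[i + 1:j]) if j > i + 1 else 1
                if n:
                    rev = parents[rev][n - 1]
            elif suffix[i] == '~':
                # foo~N: follow first parents N times
                n = int(suffix[i + 1:j])
                for _ in range(n):
                    rev = parents[rev][0]
            else:
                raise ValueError('unknown suffix character: %r' % suffix[i])
            i = j
        return rev

    assert resolve(3, '^1') == 1    # first parent of the merge
    assert resolve(3, '^2') == 2    # second parent of the merge
    assert resolve(3, '~2') == 0    # first parent of the first parent
    assert resolve(3, '^^') == 0    # git-style spelling of the same walk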
@@ -1,507 +1,507 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''sending Mercurial changesets as a series of patch emails
8 '''send Mercurial changesets as a series of patch e-mails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 The changeset description.
17 The changeset description.
18
18
19 [Optional] The result of running diffstat on the patch.
19 [Optional] The result of running diffstat on the patch.
20
20
21 The patch itself, as generated by "hg export".
21 The patch itself, as generated by "hg export".
22
22
23 Each message refers to the first in the series using the In-Reply-To
23 Each message refers to the first in the series using the In-Reply-To
24 and References headers, so they will show up as a sequence in threaded
24 and References headers, so they will show up as a sequence in threaded
25 mail and news readers, and in mail archives.
25 mail and news readers, and in mail archives.
26
26
27 With the -d/--diffstat option, you will be prompted for each changeset
27 With the -d/--diffstat option, you will be prompted for each changeset
28 with a diffstat summary and the changeset summary, so you can be sure
28 with a diffstat summary and the changeset summary, so you can be sure
29 you are sending the right changes.
29 you are sending the right changes.
30
30
31 To configure other defaults, add a section like this to your hgrc
31 To configure other defaults, add a section like this to your hgrc
32 file:
32 file:
33
33
34 [email]
34 [email]
35 from = My Name <my@email>
35 from = My Name <my@email>
36 to = recipient1, recipient2, ...
36 to = recipient1, recipient2, ...
37 cc = cc1, cc2, ...
37 cc = cc1, cc2, ...
38 bcc = bcc1, bcc2, ...
38 bcc = bcc1, bcc2, ...
39
39
40 Then you can use the "hg email" command to mail a series of changesets
40 Then you can use the "hg email" command to mail a series of changesets
41 as a patchbomb.
41 as a patchbomb.
42
42
43 To avoid sending patches prematurely, it is a good idea to first run
43 To avoid sending patches prematurely, it is a good idea to first run
44 the "email" command with the "-n" option (test only). You will be
44 the "email" command with the "-n" option (test only). You will be
45 prompted for an email recipient address, a subject and an introductory
45 prompted for an email recipient address, a subject and an introductory
46 message describing the patches of your patchbomb. Then when all is
46 message describing the patches of your patchbomb. Then when all is
47 done, patchbomb messages are displayed. If the PAGER environment
47 done, patchbomb messages are displayed. If the PAGER environment
48 variable is set, your pager will be fired up once for each patchbomb
48 variable is set, your pager will be fired up once for each patchbomb
49 message, so you can verify everything is alright.
49 message, so you can verify everything is alright.
50
50
51 The -m/--mbox option is also very useful. Instead of previewing each
51 The -m/--mbox option is also very useful. Instead of previewing each
52 patchbomb message in a pager or sending the messages directly, it will
52 patchbomb message in a pager or sending the messages directly, it will
53 create a UNIX mailbox file with the patch emails. This mailbox file
53 create a UNIX mailbox file with the patch emails. This mailbox file
54 can be previewed with any mail user agent which supports UNIX mbox
54 can be previewed with any mail user agent which supports UNIX mbox
55 files, e.g. with mutt:
55 files, e.g. with mutt:
56
56
57 % mutt -R -f mbox
57 % mutt -R -f mbox
58
58
59 When you are previewing the patchbomb messages, you can use `formail'
59 When you are previewing the patchbomb messages, you can use `formail'
60 (a utility that is commonly installed as part of the procmail
60 (a utility that is commonly installed as part of the procmail
61 package) to send each message out:
61 package) to send each message out:
62
62
63 % formail -s sendmail -bm -t < mbox
63 % formail -s sendmail -bm -t < mbox
64
64
65 That should be all. Now your patchbomb is on its way out.
65 That should be all. Now your patchbomb is on its way out.
66
66
67 You can also either configure the method option in the email section
67 You can also either configure the method option in the email section
68 to be a sendmail-compatible mailer or fill out the [smtp] section so
68 to be a sendmail-compatible mailer or fill out the [smtp] section so
69 that the patchbomb extension can automatically send patchbombs
69 that the patchbomb extension can automatically send patchbombs
70 directly from the command line. See the [email] and [smtp] sections in
70 directly from the command line. See the [email] and [smtp] sections in
71 hgrc(5) for details.'''
71 hgrc(5) for details.'''
72
72
73 import os, errno, socket, tempfile, cStringIO
73 import os, errno, socket, tempfile, cStringIO
74 import email.MIMEMultipart, email.MIMEBase
74 import email.MIMEMultipart, email.MIMEBase
75 import email.Utils, email.Encoders, email.Generator
75 import email.Utils, email.Encoders, email.Generator
76 from mercurial import cmdutil, commands, hg, mail, patch, util
76 from mercurial import cmdutil, commands, hg, mail, patch, util
77 from mercurial.i18n import _
77 from mercurial.i18n import _
78 from mercurial.node import bin
78 from mercurial.node import bin
79
79
80 def prompt(ui, prompt, default=None, rest=': ', empty_ok=False):
80 def prompt(ui, prompt, default=None, rest=': ', empty_ok=False):
81 if not ui.interactive():
81 if not ui.interactive():
82 return default
82 return default
83 if default:
83 if default:
84 prompt += ' [%s]' % default
84 prompt += ' [%s]' % default
85 prompt += rest
85 prompt += rest
86 while True:
86 while True:
87 r = ui.prompt(prompt, default=default)
87 r = ui.prompt(prompt, default=default)
88 if r:
88 if r:
89 return r
89 return r
90 if default is not None:
90 if default is not None:
91 return default
91 return default
92 if empty_ok:
92 if empty_ok:
93 return r
93 return r
94 ui.warn(_('Please enter a valid value.\n'))
94 ui.warn(_('Please enter a valid value.\n'))
95
95
96 def cdiffstat(ui, summary, patchlines):
96 def cdiffstat(ui, summary, patchlines):
97 s = patch.diffstat(patchlines)
97 s = patch.diffstat(patchlines)
98 if summary:
98 if summary:
99 ui.write(summary, '\n')
99 ui.write(summary, '\n')
100 ui.write(s, '\n')
100 ui.write(s, '\n')
101 ans = prompt(ui, _('does the diffstat above look okay? '), 'y')
101 ans = prompt(ui, _('does the diffstat above look okay? '), 'y')
102 if not ans.lower().startswith('y'):
102 if not ans.lower().startswith('y'):
103 raise util.Abort(_('diffstat rejected'))
103 raise util.Abort(_('diffstat rejected'))
104 return s
104 return s
105
105
106 def makepatch(ui, repo, patch, opts, _charsets, idx, total, patchname=None):
106 def makepatch(ui, repo, patch, opts, _charsets, idx, total, patchname=None):
107
107
108 desc = []
108 desc = []
109 node = None
109 node = None
110 body = ''
110 body = ''
111
111
112 for line in patch:
112 for line in patch:
113 if line.startswith('#'):
113 if line.startswith('#'):
114 if line.startswith('# Node ID'):
114 if line.startswith('# Node ID'):
115 node = line.split()[-1]
115 node = line.split()[-1]
116 continue
116 continue
117 if line.startswith('diff -r') or line.startswith('diff --git'):
117 if line.startswith('diff -r') or line.startswith('diff --git'):
118 break
118 break
119 desc.append(line)
119 desc.append(line)
120
120
121 if not patchname and not node:
121 if not patchname and not node:
122 raise ValueError
122 raise ValueError
123
123
124 if opts.get('attach'):
124 if opts.get('attach'):
125 body = ('\n'.join(desc[1:]).strip() or
125 body = ('\n'.join(desc[1:]).strip() or
126 'Patch subject is complete summary.')
126 'Patch subject is complete summary.')
127 body += '\n\n\n'
127 body += '\n\n\n'
128
128
129 if opts.get('plain'):
129 if opts.get('plain'):
130 while patch and patch[0].startswith('# '):
130 while patch and patch[0].startswith('# '):
131 patch.pop(0)
131 patch.pop(0)
132 if patch:
132 if patch:
133 patch.pop(0)
133 patch.pop(0)
134 while patch and not patch[0].strip():
134 while patch and not patch[0].strip():
135 patch.pop(0)
135 patch.pop(0)
136
136
137 if opts.get('diffstat'):
137 if opts.get('diffstat'):
138 body += cdiffstat(ui, '\n'.join(desc), patch) + '\n\n'
138 body += cdiffstat(ui, '\n'.join(desc), patch) + '\n\n'
139
139
140 if opts.get('attach') or opts.get('inline'):
140 if opts.get('attach') or opts.get('inline'):
141 msg = email.MIMEMultipart.MIMEMultipart()
141 msg = email.MIMEMultipart.MIMEMultipart()
142 if body:
142 if body:
143 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
143 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
144 p = mail.mimetextpatch('\n'.join(patch), 'x-patch', opts.get('test'))
144 p = mail.mimetextpatch('\n'.join(patch), 'x-patch', opts.get('test'))
145 binnode = bin(node)
145 binnode = bin(node)
146 # if node is mq patch, it will have the patch file's name as a tag
146 # if node is mq patch, it will have the patch file's name as a tag
147 if not patchname:
147 if not patchname:
148 patchtags = [t for t in repo.nodetags(binnode)
148 patchtags = [t for t in repo.nodetags(binnode)
149 if t.endswith('.patch') or t.endswith('.diff')]
149 if t.endswith('.patch') or t.endswith('.diff')]
150 if patchtags:
150 if patchtags:
151 patchname = patchtags[0]
151 patchname = patchtags[0]
152 elif total > 1:
152 elif total > 1:
153 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
153 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
154 binnode, seqno=idx, total=total)
154 binnode, seqno=idx, total=total)
155 else:
155 else:
156 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
156 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
157 disposition = 'inline'
157 disposition = 'inline'
158 if opts.get('attach'):
158 if opts.get('attach'):
159 disposition = 'attachment'
159 disposition = 'attachment'
160 p['Content-Disposition'] = disposition + '; filename=' + patchname
160 p['Content-Disposition'] = disposition + '; filename=' + patchname
161 msg.attach(p)
161 msg.attach(p)
162 else:
162 else:
163 body += '\n'.join(patch)
163 body += '\n'.join(patch)
164 msg = mail.mimetextpatch(body, display=opts.get('test'))
164 msg = mail.mimetextpatch(body, display=opts.get('test'))
165
165
166 subj = desc[0].strip().rstrip('. ')
166 subj = desc[0].strip().rstrip('. ')
167 if total == 1 and not opts.get('intro'):
167 if total == 1 and not opts.get('intro'):
168 subj = '[PATCH] ' + (opts.get('subject') or subj)
168 subj = '[PATCH] ' + (opts.get('subject') or subj)
169 else:
169 else:
170 tlen = len(str(total))
170 tlen = len(str(total))
171 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
171 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
172 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
172 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
173 msg['X-Mercurial-Node'] = node
173 msg['X-Mercurial-Node'] = node
174 return msg, subj
174 return msg, subj
175
175
176 def patchbomb(ui, repo, *revs, **opts):
176 def patchbomb(ui, repo, *revs, **opts):
177 '''send changesets by email
177 '''send changesets by email
178
178
179 By default, diffs are sent in the format generated by hg export,
179 By default, diffs are sent in the format generated by hg export,
180 one per message. The series starts with a "[PATCH 0 of N]"
180 one per message. The series starts with a "[PATCH 0 of N]"
181 introduction, which describes the series as a whole.
181 introduction, which describes the series as a whole.
182
182
183 Each patch email has a Subject line of "[PATCH M of N] ...", using
183 Each patch email has a Subject line of "[PATCH M of N] ...", using
184 the first line of the changeset description as the subject text.
184 the first line of the changeset description as the subject text.
185 The message contains two or three parts. First, the changeset
185 The message contains two or three parts. First, the changeset
186 description. Next, (optionally) if the diffstat program is
186 description. Next, (optionally) if the diffstat program is
187 installed and -d/--diffstat is used, the result of running
187 installed and -d/--diffstat is used, the result of running
188 diffstat on the patch. Finally, the patch itself, as generated by
188 diffstat on the patch. Finally, the patch itself, as generated by
189 "hg export".
189 "hg export".
190
190
191 By default the patch is included as text in the email body for
191 By default the patch is included as text in the email body for
192 easy reviewing. Using the -a/--attach option will instead create
192 easy reviewing. Using the -a/--attach option will instead create
193 an attachment for the patch. With -i/--inline an inline attachment
193 an attachment for the patch. With -i/--inline an inline attachment
194 will be created.
194 will be created.
195
195
196 With -o/--outgoing, emails will be generated for patches not found
196 With -o/--outgoing, emails will be generated for patches not found
197 in the destination repository (or only those which are ancestors
197 in the destination repository (or only those which are ancestors
198 of the specified revisions, if any are provided).
198 of the specified revisions, if any are provided).
199
199
200 With -b/--bundle, changesets are selected as for --outgoing, but a
200 With -b/--bundle, changesets are selected as for --outgoing, but a
201 single email containing a binary Mercurial bundle as an attachment
201 single email containing a binary Mercurial bundle as an attachment
202 will be sent.
202 will be sent.
203
203
204 Examples:
204 Examples:
205
205
206 hg email -r 3000 # send patch 3000 only
206 hg email -r 3000 # send patch 3000 only
207 hg email -r 3000 -r 3001 # send patches 3000 and 3001
207 hg email -r 3000 -r 3001 # send patches 3000 and 3001
208 hg email -r 3000:3005 # send patches 3000 through 3005
208 hg email -r 3000:3005 # send patches 3000 through 3005
209 hg email 3000 # send patch 3000 (deprecated)
209 hg email 3000 # send patch 3000 (deprecated)
210
210
211 hg email -o # send all patches not in default
211 hg email -o # send all patches not in default
212 hg email -o DEST # send all patches not in DEST
212 hg email -o DEST # send all patches not in DEST
213 hg email -o -r 3000 # send all ancestors of 3000 not in default
213 hg email -o -r 3000 # send all ancestors of 3000 not in default
214 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
214 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
215
215
216 hg email -b # send bundle of all patches not in default
216 hg email -b # send bundle of all patches not in default
217 hg email -b DEST # send bundle of all patches not in DEST
217 hg email -b DEST # send bundle of all patches not in DEST
218 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
218 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
219 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
219 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
220
220
221 Before using this command, you will need to enable email in your
221 Before using this command, you will need to enable email in your
222 hgrc. See the [email] section in hgrc(5) for details.
222 hgrc. See the [email] section in hgrc(5) for details.
223 '''
223 '''
224
224
225 _charsets = mail._charsets(ui)
225 _charsets = mail._charsets(ui)
226
226
227 def outgoing(dest, revs):
227 def outgoing(dest, revs):
228 '''Return the revisions present locally but not in dest'''
228 '''Return the revisions present locally but not in dest'''
229 dest = ui.expandpath(dest or 'default-push', dest or 'default')
229 dest = ui.expandpath(dest or 'default-push', dest or 'default')
230 revs = [repo.lookup(rev) for rev in revs]
230 revs = [repo.lookup(rev) for rev in revs]
231 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
231 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
232 ui.status(_('comparing with %s\n') % dest)
232 ui.status(_('comparing with %s\n') % dest)
233 o = repo.findoutgoing(other)
233 o = repo.findoutgoing(other)
234 if not o:
234 if not o:
235 ui.status(_("no changes found\n"))
235 ui.status(_("no changes found\n"))
236 return []
236 return []
237 o = repo.changelog.nodesbetween(o, revs or None)[0]
237 o = repo.changelog.nodesbetween(o, revs or None)[0]
238 return [str(repo.changelog.rev(r)) for r in o]
238 return [str(repo.changelog.rev(r)) for r in o]
239
239
240 def getpatches(revs):
240 def getpatches(revs):
241 for r in cmdutil.revrange(repo, revs):
241 for r in cmdutil.revrange(repo, revs):
242 output = cStringIO.StringIO()
242 output = cStringIO.StringIO()
243 patch.export(repo, [r], fp=output,
243 patch.export(repo, [r], fp=output,
244 opts=patch.diffopts(ui, opts))
244 opts=patch.diffopts(ui, opts))
245 yield output.getvalue().split('\n')
245 yield output.getvalue().split('\n')
246
246
247 def getbundle(dest):
247 def getbundle(dest):
248 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
248 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
249 tmpfn = os.path.join(tmpdir, 'bundle')
249 tmpfn = os.path.join(tmpdir, 'bundle')
250 try:
250 try:
251 commands.bundle(ui, repo, tmpfn, dest, **opts)
251 commands.bundle(ui, repo, tmpfn, dest, **opts)
252 return open(tmpfn, 'rb').read()
252 return open(tmpfn, 'rb').read()
253 finally:
253 finally:
254 try:
254 try:
255 os.unlink(tmpfn)
255 os.unlink(tmpfn)
256 except:
256 except:
257 pass
257 pass
258 os.rmdir(tmpdir)
258 os.rmdir(tmpdir)
259
259
260 if not (opts.get('test') or opts.get('mbox')):
260 if not (opts.get('test') or opts.get('mbox')):
261 # really sending
261 # really sending
262 mail.validateconfig(ui)
262 mail.validateconfig(ui)
263
263
264 if not (revs or opts.get('rev')
264 if not (revs or opts.get('rev')
265 or opts.get('outgoing') or opts.get('bundle')
265 or opts.get('outgoing') or opts.get('bundle')
266 or opts.get('patches')):
266 or opts.get('patches')):
267 raise util.Abort(_('specify at least one changeset with -r or -o'))
267 raise util.Abort(_('specify at least one changeset with -r or -o'))
268
268
269 if opts.get('outgoing') and opts.get('bundle'):
269 if opts.get('outgoing') and opts.get('bundle'):
270 raise util.Abort(_("--outgoing mode always on with --bundle;"
270 raise util.Abort(_("--outgoing mode always on with --bundle;"
271 " do not re-specify --outgoing"))
271 " do not re-specify --outgoing"))
272
272
273 if opts.get('outgoing') or opts.get('bundle'):
273 if opts.get('outgoing') or opts.get('bundle'):
274 if len(revs) > 1:
274 if len(revs) > 1:
275 raise util.Abort(_("too many destinations"))
275 raise util.Abort(_("too many destinations"))
276 dest = revs and revs[0] or None
276 dest = revs and revs[0] or None
277 revs = []
277 revs = []
278
278
279 if opts.get('rev'):
279 if opts.get('rev'):
280 if revs:
280 if revs:
281 raise util.Abort(_('use only one form to specify the revision'))
281 raise util.Abort(_('use only one form to specify the revision'))
282 revs = opts.get('rev')
282 revs = opts.get('rev')
283
283
284 if opts.get('outgoing'):
284 if opts.get('outgoing'):
285 revs = outgoing(dest, opts.get('rev'))
285 revs = outgoing(dest, opts.get('rev'))
286 if opts.get('bundle'):
286 if opts.get('bundle'):
287 opts['revs'] = revs
287 opts['revs'] = revs
288
288
289 # start
289 # start
290 if opts.get('date'):
290 if opts.get('date'):
291 start_time = util.parsedate(opts.get('date'))
291 start_time = util.parsedate(opts.get('date'))
292 else:
292 else:
293 start_time = util.makedate()
293 start_time = util.makedate()
294
294
295 def genmsgid(id):
295 def genmsgid(id):
296 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
296 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
297
297
298 def getdescription(body, sender):
298 def getdescription(body, sender):
299 if opts.get('desc'):
299 if opts.get('desc'):
300 body = open(opts.get('desc')).read()
300 body = open(opts.get('desc')).read()
301 else:
301 else:
302 ui.write(_('\nWrite the introductory message for the '
302 ui.write(_('\nWrite the introductory message for the '
303 'patch series.\n\n'))
303 'patch series.\n\n'))
304 body = ui.edit(body, sender)
304 body = ui.edit(body, sender)
305 return body
305 return body
306
306
307 def getpatchmsgs(patches, patchnames=None):
307 def getpatchmsgs(patches, patchnames=None):
308 jumbo = []
308 jumbo = []
309 msgs = []
309 msgs = []
310
310
311 ui.write(_('This patch series consists of %d patches.\n\n')
311 ui.write(_('This patch series consists of %d patches.\n\n')
312 % len(patches))
312 % len(patches))
313
313
314 name = None
314 name = None
315 for i, p in enumerate(patches):
315 for i, p in enumerate(patches):
316 jumbo.extend(p)
316 jumbo.extend(p)
317 if patchnames:
317 if patchnames:
318 name = patchnames[i]
318 name = patchnames[i]
319 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
319 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
320 len(patches), name)
320 len(patches), name)
321 msgs.append(msg)
321 msgs.append(msg)
322
322
323 if len(patches) > 1 or opts.get('intro'):
323 if len(patches) > 1 or opts.get('intro'):
324 tlen = len(str(len(patches)))
324 tlen = len(str(len(patches)))
325
325
326 subj = '[PATCH %0*d of %d] %s' % (
326 subj = '[PATCH %0*d of %d] %s' % (
327 tlen, 0, len(patches),
327 tlen, 0, len(patches),
328 opts.get('subject') or
328 opts.get('subject') or
329 prompt(ui, 'Subject:',
329 prompt(ui, 'Subject:',
330 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
330 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
331
331
332 body = ''
332 body = ''
333 if opts.get('diffstat'):
333 if opts.get('diffstat'):
334 d = cdiffstat(ui, _('Final summary:\n'), jumbo)
334 d = cdiffstat(ui, _('Final summary:\n'), jumbo)
335 if d:
335 if d:
336 body = '\n' + d
336 body = '\n' + d
337
337
338 body = getdescription(body, sender)
338 body = getdescription(body, sender)
339 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
339 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
340 msg['Subject'] = mail.headencode(ui, subj, _charsets,
340 msg['Subject'] = mail.headencode(ui, subj, _charsets,
341 opts.get('test'))
341 opts.get('test'))
342
342
343 msgs.insert(0, (msg, subj))
343 msgs.insert(0, (msg, subj))
344 return msgs
344 return msgs
345
345
346 def getbundlemsgs(bundle):
346 def getbundlemsgs(bundle):
347 subj = (opts.get('subject')
347 subj = (opts.get('subject')
348 or prompt(ui, 'Subject:', 'A bundle for your repository'))
348 or prompt(ui, 'Subject:', 'A bundle for your repository'))
349
349
350 body = getdescription('', sender)
350 body = getdescription('', sender)
351 msg = email.MIMEMultipart.MIMEMultipart()
351 msg = email.MIMEMultipart.MIMEMultipart()
352 if body:
352 if body:
353 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
353 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
354 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
354 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
355 datapart.set_payload(bundle)
355 datapart.set_payload(bundle)
356 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
356 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
357 datapart.add_header('Content-Disposition', 'attachment',
357 datapart.add_header('Content-Disposition', 'attachment',
358 filename=bundlename)
358 filename=bundlename)
359 email.Encoders.encode_base64(datapart)
359 email.Encoders.encode_base64(datapart)
360 msg.attach(datapart)
360 msg.attach(datapart)
361 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
361 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
362 return [(msg, subj)]
362 return [(msg, subj)]
363
363
364 sender = (opts.get('from') or ui.config('email', 'from') or
364 sender = (opts.get('from') or ui.config('email', 'from') or
365 ui.config('patchbomb', 'from') or
365 ui.config('patchbomb', 'from') or
366 prompt(ui, 'From', ui.username()))
366 prompt(ui, 'From', ui.username()))
367
367
368 # internal option used by pbranches
368 # internal option used by pbranches
369 patches = opts.get('patches')
369 patches = opts.get('patches')
370 if patches:
370 if patches:
371 msgs = getpatchmsgs(patches, opts.get('patchnames'))
371 msgs = getpatchmsgs(patches, opts.get('patchnames'))
372 elif opts.get('bundle'):
372 elif opts.get('bundle'):
373 msgs = getbundlemsgs(getbundle(dest))
373 msgs = getbundlemsgs(getbundle(dest))
374 else:
374 else:
375 msgs = getpatchmsgs(list(getpatches(revs)))
375 msgs = getpatchmsgs(list(getpatches(revs)))
376
376
377 def getaddrs(opt, prpt, default = None):
377 def getaddrs(opt, prpt, default = None):
378 addrs = opts.get(opt) or (ui.config('email', opt) or
378 addrs = opts.get(opt) or (ui.config('email', opt) or
379 ui.config('patchbomb', opt) or
379 ui.config('patchbomb', opt) or
380 prompt(ui, prpt, default)).split(',')
380 prompt(ui, prpt, default)).split(',')
381 return [mail.addressencode(ui, a.strip(), _charsets, opts.get('test'))
381 return [mail.addressencode(ui, a.strip(), _charsets, opts.get('test'))
382 for a in addrs if a.strip()]
382 for a in addrs if a.strip()]
383
383
384 to = getaddrs('to', 'To')
384 to = getaddrs('to', 'To')
385 cc = getaddrs('cc', 'Cc', '')
385 cc = getaddrs('cc', 'Cc', '')
386
386
387 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
387 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
388 ui.config('patchbomb', 'bcc') or '').split(',')
388 ui.config('patchbomb', 'bcc') or '').split(',')
389 bcc = [mail.addressencode(ui, a.strip(), _charsets, opts.get('test'))
389 bcc = [mail.addressencode(ui, a.strip(), _charsets, opts.get('test'))
390 for a in bcc if a.strip()]
390 for a in bcc if a.strip()]
391
391
392 ui.write('\n')
392 ui.write('\n')
393
393
394 parent = opts.get('in_reply_to') or None
394 parent = opts.get('in_reply_to') or None
395 # angle brackets may be omitted, they're not semantically part of the msg-id
395 # angle brackets may be omitted, they're not semantically part of the msg-id
396 if parent is not None:
396 if parent is not None:
397 if not parent.startswith('<'):
397 if not parent.startswith('<'):
398 parent = '<' + parent
398 parent = '<' + parent
399 if not parent.endswith('>'):
399 if not parent.endswith('>'):
400 parent += '>'
400 parent += '>'
401
401
402 first = True
402 first = True
403
403
404 sender_addr = email.Utils.parseaddr(sender)[1]
404 sender_addr = email.Utils.parseaddr(sender)[1]
405 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
405 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
406 sendmail = None
406 sendmail = None
407 for m, subj in msgs:
407 for m, subj in msgs:
408 try:
408 try:
409 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
409 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
410 except TypeError:
410 except TypeError:
411 m['Message-Id'] = genmsgid('patchbomb')
411 m['Message-Id'] = genmsgid('patchbomb')
412 if parent:
412 if parent:
413 m['In-Reply-To'] = parent
413 m['In-Reply-To'] = parent
414 m['References'] = parent
414 m['References'] = parent
415 if first:
415 if first:
416 parent = m['Message-Id']
416 parent = m['Message-Id']
417 first = False
417 first = False
418
418
419 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
419 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
420 m['Date'] = email.Utils.formatdate(start_time[0])
420 m['Date'] = email.Utils.formatdate(start_time[0])
421
421
422 start_time = (start_time[0] + 1, start_time[1])
422 start_time = (start_time[0] + 1, start_time[1])
423 m['From'] = sender
423 m['From'] = sender
424 m['To'] = ', '.join(to)
424 m['To'] = ', '.join(to)
425 if cc:
425 if cc:
426 m['Cc'] = ', '.join(cc)
426 m['Cc'] = ', '.join(cc)
427 if bcc:
427 if bcc:
428 m['Bcc'] = ', '.join(bcc)
428 m['Bcc'] = ', '.join(bcc)
429 if opts.get('test'):
429 if opts.get('test'):
430 ui.status(_('Displaying '), subj, ' ...\n')
430 ui.status(_('Displaying '), subj, ' ...\n')
431 ui.flush()
431 ui.flush()
432 if 'PAGER' in os.environ:
432 if 'PAGER' in os.environ:
433 fp = util.popen(os.environ['PAGER'], 'w')
433 fp = util.popen(os.environ['PAGER'], 'w')
434 else:
434 else:
435 fp = ui
435 fp = ui
436 generator = email.Generator.Generator(fp, mangle_from_=False)
436 generator = email.Generator.Generator(fp, mangle_from_=False)
437 try:
437 try:
438 generator.flatten(m, 0)
438 generator.flatten(m, 0)
439 fp.write('\n')
439 fp.write('\n')
440 except IOError, inst:
440 except IOError, inst:
441 if inst.errno != errno.EPIPE:
441 if inst.errno != errno.EPIPE:
442 raise
442 raise
443 if fp is not ui:
443 if fp is not ui:
444 fp.close()
444 fp.close()
445 elif opts.get('mbox'):
445 elif opts.get('mbox'):
446 ui.status(_('Writing '), subj, ' ...\n')
446 ui.status(_('Writing '), subj, ' ...\n')
447 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
447 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
448 generator = email.Generator.Generator(fp, mangle_from_=True)
448 generator = email.Generator.Generator(fp, mangle_from_=True)
449 date = util.datestr(start_time, '%a %b %d %H:%M:%S %Y')
449 date = util.datestr(start_time, '%a %b %d %H:%M:%S %Y')
450 fp.write('From %s %s\n' % (sender_addr, date))
450 fp.write('From %s %s\n' % (sender_addr, date))
451 generator.flatten(m, 0)
451 generator.flatten(m, 0)
452 fp.write('\n\n')
452 fp.write('\n\n')
453 fp.close()
453 fp.close()
454 else:
454 else:
455 if not sendmail:
455 if not sendmail:
456 sendmail = mail.connect(ui)
456 sendmail = mail.connect(ui)
457 ui.status(_('Sending '), subj, ' ...\n')
457 ui.status(_('Sending '), subj, ' ...\n')
458 # Exim does not remove the Bcc field
458 # Exim does not remove the Bcc field
459 del m['Bcc']
459 del m['Bcc']
460 fp = cStringIO.StringIO()
460 fp = cStringIO.StringIO()
461 generator = email.Generator.Generator(fp, mangle_from_=False)
461 generator = email.Generator.Generator(fp, mangle_from_=False)
462 generator.flatten(m, 0)
462 generator.flatten(m, 0)
463 sendmail(sender, to + bcc + cc, fp.getvalue())
463 sendmail(sender, to + bcc + cc, fp.getvalue())
464
464
465 emailopts = [
465 emailopts = [
466 ('a', 'attach', None, _('send patches as attachments')),
466 ('a', 'attach', None, _('send patches as attachments')),
467 ('i', 'inline', None, _('send patches as inline attachments')),
467 ('i', 'inline', None, _('send patches as inline attachments')),
468 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
468 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
469 ('c', 'cc', [], _('email addresses of copy recipients')),
469 ('c', 'cc', [], _('email addresses of copy recipients')),
470 ('d', 'diffstat', None, _('add diffstat output to messages')),
470 ('d', 'diffstat', None, _('add diffstat output to messages')),
471 ('', 'date', '', _('use the given date as the sending date')),
471 ('', 'date', '', _('use the given date as the sending date')),
472 ('', 'desc', '', _('use the given file as the series description')),
472 ('', 'desc', '', _('use the given file as the series description')),
473 ('f', 'from', '', _('email address of sender')),
473 ('f', 'from', '', _('email address of sender')),
474 ('n', 'test', None, _('print messages that would be sent')),
474 ('n', 'test', None, _('print messages that would be sent')),
475 ('m', 'mbox', '',
475 ('m', 'mbox', '',
476 _('write messages to mbox file instead of sending them')),
476 _('write messages to mbox file instead of sending them')),
477 ('s', 'subject', '',
477 ('s', 'subject', '',
478 _('subject of first message (intro or single patch)')),
478 _('subject of first message (intro or single patch)')),
479 ('', 'in-reply-to', '',
479 ('', 'in-reply-to', '',
480 _('message identifier to reply to')),
480 _('message identifier to reply to')),
481 ('t', 'to', [], _('email addresses of recipients')),
481 ('t', 'to', [], _('email addresses of recipients')),
482 ]
482 ]
483
483
484
484
485 cmdtable = {
485 cmdtable = {
486 "email":
486 "email":
487 (patchbomb,
487 (patchbomb,
488 [('g', 'git', None, _('use git extended diff format')),
488 [('g', 'git', None, _('use git extended diff format')),
489 ('', 'plain', None, _('omit hg patch header')),
489 ('', 'plain', None, _('omit hg patch header')),
490 ('o', 'outgoing', None,
490 ('o', 'outgoing', None,
491 _('send changes not found in the target repository')),
491 _('send changes not found in the target repository')),
492 ('b', 'bundle', None,
492 ('b', 'bundle', None,
493 _('send changes not in target as a binary bundle')),
493 _('send changes not in target as a binary bundle')),
494 ('', 'bundlename', 'bundle',
494 ('', 'bundlename', 'bundle',
495 _('name of the bundle attachment file')),
495 _('name of the bundle attachment file')),
496 ('r', 'rev', [], _('a revision to send')),
496 ('r', 'rev', [], _('a revision to send')),
497 ('', 'force', None,
497 ('', 'force', None,
498 _('run even when remote repository is unrelated '
498 _('run even when remote repository is unrelated '
499 '(with -b/--bundle)')),
499 '(with -b/--bundle)')),
500 ('', 'base', [],
500 ('', 'base', [],
501 _('a base changeset to specify instead of a destination '
501 _('a base changeset to specify instead of a destination '
502 '(with -b/--bundle)')),
502 '(with -b/--bundle)')),
503 ('', 'intro', None,
503 ('', 'intro', None,
504 _('send an introduction email for a single patch')),
504 _('send an introduction email for a single patch')),
505 ] + emailopts + commands.remoteopts,
505 ] + emailopts + commands.remoteopts,
506 _('hg email [OPTION]... [DEST]...'))
506 _('hg email [OPTION]... [DEST]...'))
507 }
507 }
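The entry above follows Mercurial's usual cmdtable convention: a command name mapped to a (function, options, synopsis) tuple, where each option is (short flag, long name, default, help text) and shared option lists such as emailopts or commands.remoteopts are simply concatenated. A minimal sketch of the same shape, with a purely illustrative command and option that are not part of patchbomb:

from mercurial.i18n import _

def hello(ui, repo, *pats, **opts):
    """print a greeting (illustrative only)"""
    ui.write('hello, %s\n' % (opts.get('name') or 'world'))

cmdtable = {
    "hello":
        (hello,
         [('n', 'name', '', _('name to greet'))],
         _('hg hello [-n NAME]')),
}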
@@ -1,108 +1,108 b''
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 # (http://www.red-bean.com/cvsutils/).
7 # (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # For help on the usage of "hg purge" use:
9 # For help on the usage of "hg purge" use:
10 # hg help purge
10 # hg help purge
11 #
11 #
12 # This program is free software; you can redistribute it and/or modify
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
15 # (at your option) any later version.
16 #
16 #
17 # This program is distributed in the hope that it will be useful,
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
20 # GNU General Public License for more details.
21 #
21 #
22 # You should have received a copy of the GNU General Public License
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, write to the Free Software
23 # along with this program; if not, write to the Free Software
24 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
24 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25
25
26 '''enable removing untracked files only'''
26 '''delete files not tracked from the working directory'''
27
27
28 from mercurial import util, commands, cmdutil
28 from mercurial import util, commands, cmdutil
29 from mercurial.i18n import _
29 from mercurial.i18n import _
30 import os, stat
30 import os, stat
31
31
32 def purge(ui, repo, *dirs, **opts):
32 def purge(ui, repo, *dirs, **opts):
33 '''removes files not tracked by Mercurial
33 '''removes files not tracked by Mercurial
34
34
35 Delete files not known to Mercurial. This is useful to test local
35 Delete files not known to Mercurial. This is useful to test local
36 and uncommitted changes in an otherwise-clean source tree.
36 and uncommitted changes in an otherwise-clean source tree.
37
37
38 This means that purge will delete:
38 This means that purge will delete:
39 - Unknown files: files marked with "?" by "hg status"
39 - Unknown files: files marked with "?" by "hg status"
40 - Empty directories: in fact Mercurial ignores directories unless
40 - Empty directories: in fact Mercurial ignores directories unless
41 they contain files under source control management
41 they contain files under source control management
42 But it will leave untouched:
42 But it will leave untouched:
43 - Modified and unmodified tracked files
43 - Modified and unmodified tracked files
44 - Ignored files (unless --all is specified)
44 - Ignored files (unless --all is specified)
45 - New files added to the repository (with "hg add")
45 - New files added to the repository (with "hg add")
46
46
47 If directories are given on the command line, only files in these
47 If directories are given on the command line, only files in these
48 directories are considered.
48 directories are considered.
49
49
50 Be careful with purge, as you could irreversibly delete some files
50 Be careful with purge, as you could irreversibly delete some files
51 you forgot to add to the repository. If you only want to print the
51 you forgot to add to the repository. If you only want to print the
52 list of files that this program would delete, use the --print
52 list of files that this program would delete, use the --print
53 option.
53 option.
54 '''
54 '''
55 act = not opts['print']
55 act = not opts['print']
56 eol = '\n'
56 eol = '\n'
57 if opts['print0']:
57 if opts['print0']:
58 eol = '\0'
58 eol = '\0'
59 act = False # --print0 implies --print
59 act = False # --print0 implies --print
60
60
61 def remove(remove_func, name):
61 def remove(remove_func, name):
62 if act:
62 if act:
63 try:
63 try:
64 remove_func(repo.wjoin(name))
64 remove_func(repo.wjoin(name))
65 except OSError:
65 except OSError:
66 m = _('%s cannot be removed') % name
66 m = _('%s cannot be removed') % name
67 if opts['abort_on_err']:
67 if opts['abort_on_err']:
68 raise util.Abort(m)
68 raise util.Abort(m)
69 ui.warn(_('warning: %s\n') % m)
69 ui.warn(_('warning: %s\n') % m)
70 else:
70 else:
71 ui.write('%s%s' % (name, eol))
71 ui.write('%s%s' % (name, eol))
72
72
73 def removefile(path):
73 def removefile(path):
74 try:
74 try:
75 os.remove(path)
75 os.remove(path)
76 except OSError:
76 except OSError:
77 # read-only files cannot be unlinked under Windows
77 # read-only files cannot be unlinked under Windows
78 s = os.stat(path)
78 s = os.stat(path)
79 if (s.st_mode & stat.S_IWRITE) != 0:
79 if (s.st_mode & stat.S_IWRITE) != 0:
80 raise
80 raise
81 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
81 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
82 os.remove(path)
82 os.remove(path)
83
83
84 directories = []
84 directories = []
85 match = cmdutil.match(repo, dirs, opts)
85 match = cmdutil.match(repo, dirs, opts)
86 match.dir = directories.append
86 match.dir = directories.append
87 status = repo.status(match=match, ignored=opts['all'], unknown=True)
87 status = repo.status(match=match, ignored=opts['all'], unknown=True)
88
88
89 for f in sorted(status[4] + status[5]):
89 for f in sorted(status[4] + status[5]):
90 ui.note(_('Removing file %s\n') % f)
90 ui.note(_('Removing file %s\n') % f)
91 remove(removefile, f)
91 remove(removefile, f)
92
92
93 for f in sorted(directories, reverse=True):
93 for f in sorted(directories, reverse=True):
94 if match(f) and not os.listdir(repo.wjoin(f)):
94 if match(f) and not os.listdir(repo.wjoin(f)):
95 ui.note(_('Removing directory %s\n') % f)
95 ui.note(_('Removing directory %s\n') % f)
96 remove(os.rmdir, f)
96 remove(os.rmdir, f)
97
97
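For context on the status indices used above: in Mercurial of this era, repo.status() returns a 7-tuple (modified, added, removed, deleted, unknown, ignored, clean), so status[4] + status[5] is exactly the unknown files plus (with --all) the ignored files that purge may delete. A hedged illustration with plain Python values, not tied to a live repository; the file names are invented:

# illustrative only: mimic the 7-tuple layout purge relies on
modified, added, removed, deleted, unknown, ignored, clean = (
    [], ['new.c'], [], [], ['scratch.txt'], ['build.log'], ['kept.c'])
status = (modified, added, removed, deleted, unknown, ignored, clean)
# without --all, status[5] would be empty and only unknown files remain
doomed = sorted(status[4] + status[5])   # -> ['build.log', 'scratch.txt']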
98 cmdtable = {
98 cmdtable = {
99 'purge|clean':
99 'purge|clean':
100 (purge,
100 (purge,
101 [('a', 'abort-on-err', None, _('abort if an error occurs')),
101 [('a', 'abort-on-err', None, _('abort if an error occurs')),
102 ('', 'all', None, _('purge ignored files too')),
102 ('', 'all', None, _('purge ignored files too')),
103 ('p', 'print', None, _('print filenames instead of deleting them')),
103 ('p', 'print', None, _('print filenames instead of deleting them')),
104 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
104 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
105 ' (implies -p/--print)')),
105 ' (implies -p/--print)')),
106 ] + commands.walkopts,
106 ] + commands.walkopts,
107 _('hg purge [OPTION]... [DIR]...'))
107 _('hg purge [OPTION]... [DIR]...'))
108 }
108 }
@@ -1,548 +1,548 b''
1 # record.py
1 # record.py
2 #
2 #
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''interactive change selection during commit or qrefresh'''
8 '''interactively select which sets of changes to commit/qrefresh'''
9
9
10 from mercurial.i18n import gettext, _
10 from mercurial.i18n import gettext, _
11 from mercurial import cmdutil, commands, extensions, hg, mdiff, patch
11 from mercurial import cmdutil, commands, extensions, hg, mdiff, patch
12 from mercurial import util
12 from mercurial import util
13 import copy, cStringIO, errno, operator, os, re, tempfile
13 import copy, cStringIO, errno, operator, os, re, tempfile
14
14
15 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
15 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
16
16
17 def scanpatch(fp):
17 def scanpatch(fp):
18 """like patch.iterhunks, but yield different events
18 """like patch.iterhunks, but yield different events
19
19
20 - ('file', [header_lines + fromfile + tofile])
20 - ('file', [header_lines + fromfile + tofile])
21 - ('context', [context_lines])
21 - ('context', [context_lines])
22 - ('hunk', [hunk_lines])
22 - ('hunk', [hunk_lines])
23 - ('range', (-start,len, +start,len, diffp))
23 - ('range', (-start,len, +start,len, diffp))
24 """
24 """
25 lr = patch.linereader(fp)
25 lr = patch.linereader(fp)
26
26
27 def scanwhile(first, p):
27 def scanwhile(first, p):
28 """scan lr while predicate holds"""
28 """scan lr while predicate holds"""
29 lines = [first]
29 lines = [first]
30 while True:
30 while True:
31 line = lr.readline()
31 line = lr.readline()
32 if not line:
32 if not line:
33 break
33 break
34 if p(line):
34 if p(line):
35 lines.append(line)
35 lines.append(line)
36 else:
36 else:
37 lr.push(line)
37 lr.push(line)
38 break
38 break
39 return lines
39 return lines
40
40
41 while True:
41 while True:
42 line = lr.readline()
42 line = lr.readline()
43 if not line:
43 if not line:
44 break
44 break
45 if line.startswith('diff --git a/'):
45 if line.startswith('diff --git a/'):
46 def notheader(line):
46 def notheader(line):
47 s = line.split(None, 1)
47 s = line.split(None, 1)
48 return not s or s[0] not in ('---', 'diff')
48 return not s or s[0] not in ('---', 'diff')
49 header = scanwhile(line, notheader)
49 header = scanwhile(line, notheader)
50 fromfile = lr.readline()
50 fromfile = lr.readline()
51 if fromfile.startswith('---'):
51 if fromfile.startswith('---'):
52 tofile = lr.readline()
52 tofile = lr.readline()
53 header += [fromfile, tofile]
53 header += [fromfile, tofile]
54 else:
54 else:
55 lr.push(fromfile)
55 lr.push(fromfile)
56 yield 'file', header
56 yield 'file', header
57 elif line[0] == ' ':
57 elif line[0] == ' ':
58 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
58 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
59 elif line[0] in '-+':
59 elif line[0] in '-+':
60 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
60 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
61 else:
61 else:
62 m = lines_re.match(line)
62 m = lines_re.match(line)
63 if m:
63 if m:
64 yield 'range', m.groups()
64 yield 'range', m.groups()
65 else:
65 else:
66 raise patch.PatchError('unknown patch content: %r' % line)
66 raise patch.PatchError('unknown patch content: %r' % line)
67
67
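To make the event stream concrete, here is a hedged example of the kind of input scanpatch expects and, in the comments, the events it should yield for it; the file name and hunk contents are invented:

# running scanpatch over this text (inside this module) should yield roughly:
#   ('file',    [the diff/---/+++ header lines])
#   ('range',   ('1', '2', '1', '2', ''))
#   ('context', [' unchanged\n'])
#   ('hunk',    ['-old\n', '+new\n'])
sample = ('diff --git a/foo.txt b/foo.txt\n'
          '--- a/foo.txt\n'
          '+++ b/foo.txt\n'
          '@@ -1,2 +1,2 @@\n'
          ' unchanged\n'
          '-old\n'
          '+new\n')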
68 class header(object):
68 class header(object):
69 """patch header
69 """patch header
70
70
71 XXX shouldn't we move this to mercurial/patch.py?
71 XXX shouldn't we move this to mercurial/patch.py?
72 """
72 """
73 diff_re = re.compile('diff --git a/(.*) b/(.*)$')
73 diff_re = re.compile('diff --git a/(.*) b/(.*)$')
74 allhunks_re = re.compile('(?:index|new file|deleted file) ')
74 allhunks_re = re.compile('(?:index|new file|deleted file) ')
75 pretty_re = re.compile('(?:new file|deleted file) ')
75 pretty_re = re.compile('(?:new file|deleted file) ')
76 special_re = re.compile('(?:index|new|deleted|copy|rename) ')
76 special_re = re.compile('(?:index|new|deleted|copy|rename) ')
77
77
78 def __init__(self, header):
78 def __init__(self, header):
79 self.header = header
79 self.header = header
80 self.hunks = []
80 self.hunks = []
81
81
82 def binary(self):
82 def binary(self):
83 for h in self.header:
83 for h in self.header:
84 if h.startswith('index '):
84 if h.startswith('index '):
85 return True
85 return True
86
86
87 def pretty(self, fp):
87 def pretty(self, fp):
88 for h in self.header:
88 for h in self.header:
89 if h.startswith('index '):
89 if h.startswith('index '):
90 fp.write(_('this modifies a binary file (all or nothing)\n'))
90 fp.write(_('this modifies a binary file (all or nothing)\n'))
91 break
91 break
92 if self.pretty_re.match(h):
92 if self.pretty_re.match(h):
93 fp.write(h)
93 fp.write(h)
94 if self.binary():
94 if self.binary():
95 fp.write(_('this is a binary file\n'))
95 fp.write(_('this is a binary file\n'))
96 break
96 break
97 if h.startswith('---'):
97 if h.startswith('---'):
98 fp.write(_('%d hunks, %d lines changed\n') %
98 fp.write(_('%d hunks, %d lines changed\n') %
99 (len(self.hunks),
99 (len(self.hunks),
100 sum([h.added + h.removed for h in self.hunks])))
100 sum([h.added + h.removed for h in self.hunks])))
101 break
101 break
102 fp.write(h)
102 fp.write(h)
103
103
104 def write(self, fp):
104 def write(self, fp):
105 fp.write(''.join(self.header))
105 fp.write(''.join(self.header))
106
106
107 def allhunks(self):
107 def allhunks(self):
108 for h in self.header:
108 for h in self.header:
109 if self.allhunks_re.match(h):
109 if self.allhunks_re.match(h):
110 return True
110 return True
111
111
112 def files(self):
112 def files(self):
113 fromfile, tofile = self.diff_re.match(self.header[0]).groups()
113 fromfile, tofile = self.diff_re.match(self.header[0]).groups()
114 if fromfile == tofile:
114 if fromfile == tofile:
115 return [fromfile]
115 return [fromfile]
116 return [fromfile, tofile]
116 return [fromfile, tofile]
117
117
118 def filename(self):
118 def filename(self):
119 return self.files()[-1]
119 return self.files()[-1]
120
120
121 def __repr__(self):
121 def __repr__(self):
122 return '<header %s>' % (' '.join(map(repr, self.files())))
122 return '<header %s>' % (' '.join(map(repr, self.files())))
123
123
124 def special(self):
124 def special(self):
125 for h in self.header:
125 for h in self.header:
126 if self.special_re.match(h):
126 if self.special_re.match(h):
127 return True
127 return True
128
128
129 def countchanges(hunk):
129 def countchanges(hunk):
130 """hunk -> (n+,n-)"""
130 """hunk -> (n+,n-)"""
131 add = len([h for h in hunk if h[0] == '+'])
131 add = len([h for h in hunk if h[0] == '+'])
132 rem = len([h for h in hunk if h[0] == '-'])
132 rem = len([h for h in hunk if h[0] == '-'])
133 return add, rem
133 return add, rem
134
134
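countchanges simply tallies '+' and '-' lines, so its behaviour is easy to pin down with a tiny example; the hunk lines below are invented:

# illustrative hunk body: two additions, one removal, one context line
hunk_lines = ['+first new line\n', '+second new line\n',
              '-dropped line\n', ' untouched context\n']
add = len([h for h in hunk_lines if h[0] == '+'])   # 2
rem = len([h for h in hunk_lines if h[0] == '-'])   # 1
assert (add, rem) == (2, 1)   # the same result countchanges() would return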
135 class hunk(object):
135 class hunk(object):
136 """patch hunk
136 """patch hunk
137
137
138 XXX shouldn't we merge this with patch.hunk ?
138 XXX shouldn't we merge this with patch.hunk ?
139 """
139 """
140 maxcontext = 3
140 maxcontext = 3
141
141
142 def __init__(self, header, fromline, toline, proc, before, hunk, after):
142 def __init__(self, header, fromline, toline, proc, before, hunk, after):
143 def trimcontext(number, lines):
143 def trimcontext(number, lines):
144 delta = len(lines) - self.maxcontext
144 delta = len(lines) - self.maxcontext
145 if False and delta > 0:
145 if False and delta > 0:
146 return number + delta, lines[:self.maxcontext]
146 return number + delta, lines[:self.maxcontext]
147 return number, lines
147 return number, lines
148
148
149 self.header = header
149 self.header = header
150 self.fromline, self.before = trimcontext(fromline, before)
150 self.fromline, self.before = trimcontext(fromline, before)
151 self.toline, self.after = trimcontext(toline, after)
151 self.toline, self.after = trimcontext(toline, after)
152 self.proc = proc
152 self.proc = proc
153 self.hunk = hunk
153 self.hunk = hunk
154 self.added, self.removed = countchanges(self.hunk)
154 self.added, self.removed = countchanges(self.hunk)
155
155
156 def write(self, fp):
156 def write(self, fp):
157 delta = len(self.before) + len(self.after)
157 delta = len(self.before) + len(self.after)
158 if self.after and self.after[-1] == '\\ No newline at end of file\n':
158 if self.after and self.after[-1] == '\\ No newline at end of file\n':
159 delta -= 1
159 delta -= 1
160 fromlen = delta + self.removed
160 fromlen = delta + self.removed
161 tolen = delta + self.added
161 tolen = delta + self.added
162 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
162 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
163 (self.fromline, fromlen, self.toline, tolen,
163 (self.fromline, fromlen, self.toline, tolen,
164 self.proc and (' ' + self.proc)))
164 self.proc and (' ' + self.proc)))
165 fp.write(''.join(self.before + self.hunk + self.after))
165 fp.write(''.join(self.before + self.hunk + self.after))
166
166
167 pretty = write
167 pretty = write
168
168
169 def filename(self):
169 def filename(self):
170 return self.header.filename()
170 return self.header.filename()
171
171
172 def __repr__(self):
172 def __repr__(self):
173 return '<hunk %r@%d>' % (self.filename(), self.fromline)
173 return '<hunk %r@%d>' % (self.filename(), self.fromline)
174
174
175 def parsepatch(fp):
175 def parsepatch(fp):
176 """patch -> [] of hunks """
176 """patch -> [] of hunks """
177 class parser(object):
177 class parser(object):
178 """patch parsing state machine"""
178 """patch parsing state machine"""
179 def __init__(self):
179 def __init__(self):
180 self.fromline = 0
180 self.fromline = 0
181 self.toline = 0
181 self.toline = 0
182 self.proc = ''
182 self.proc = ''
183 self.header = None
183 self.header = None
184 self.context = []
184 self.context = []
185 self.before = []
185 self.before = []
186 self.hunk = []
186 self.hunk = []
187 self.stream = []
187 self.stream = []
188
188
189 def addrange(self, (fromstart, fromend, tostart, toend, proc)):
189 def addrange(self, (fromstart, fromend, tostart, toend, proc)):
190 self.fromline = int(fromstart)
190 self.fromline = int(fromstart)
191 self.toline = int(tostart)
191 self.toline = int(tostart)
192 self.proc = proc
192 self.proc = proc
193
193
194 def addcontext(self, context):
194 def addcontext(self, context):
195 if self.hunk:
195 if self.hunk:
196 h = hunk(self.header, self.fromline, self.toline, self.proc,
196 h = hunk(self.header, self.fromline, self.toline, self.proc,
197 self.before, self.hunk, context)
197 self.before, self.hunk, context)
198 self.header.hunks.append(h)
198 self.header.hunks.append(h)
199 self.stream.append(h)
199 self.stream.append(h)
200 self.fromline += len(self.before) + h.removed
200 self.fromline += len(self.before) + h.removed
201 self.toline += len(self.before) + h.added
201 self.toline += len(self.before) + h.added
202 self.before = []
202 self.before = []
203 self.hunk = []
203 self.hunk = []
204 self.proc = ''
204 self.proc = ''
205 self.context = context
205 self.context = context
206
206
207 def addhunk(self, hunk):
207 def addhunk(self, hunk):
208 if self.context:
208 if self.context:
209 self.before = self.context
209 self.before = self.context
210 self.context = []
210 self.context = []
211 self.hunk = hunk
211 self.hunk = hunk
212
212
213 def newfile(self, hdr):
213 def newfile(self, hdr):
214 self.addcontext([])
214 self.addcontext([])
215 h = header(hdr)
215 h = header(hdr)
216 self.stream.append(h)
216 self.stream.append(h)
217 self.header = h
217 self.header = h
218
218
219 def finished(self):
219 def finished(self):
220 self.addcontext([])
220 self.addcontext([])
221 return self.stream
221 return self.stream
222
222
223 transitions = {
223 transitions = {
224 'file': {'context': addcontext,
224 'file': {'context': addcontext,
225 'file': newfile,
225 'file': newfile,
226 'hunk': addhunk,
226 'hunk': addhunk,
227 'range': addrange},
227 'range': addrange},
228 'context': {'file': newfile,
228 'context': {'file': newfile,
229 'hunk': addhunk,
229 'hunk': addhunk,
230 'range': addrange},
230 'range': addrange},
231 'hunk': {'context': addcontext,
231 'hunk': {'context': addcontext,
232 'file': newfile,
232 'file': newfile,
233 'range': addrange},
233 'range': addrange},
234 'range': {'context': addcontext,
234 'range': {'context': addcontext,
235 'hunk': addhunk},
235 'hunk': addhunk},
236 }
236 }
237
237
238 p = parser()
238 p = parser()
239
239
240 state = 'context'
240 state = 'context'
241 for newstate, data in scanpatch(fp):
241 for newstate, data in scanpatch(fp):
242 try:
242 try:
243 p.transitions[state][newstate](p, data)
243 p.transitions[state][newstate](p, data)
244 except KeyError:
244 except KeyError:
245 raise patch.PatchError('unhandled transition: %s -> %s' %
245 raise patch.PatchError('unhandled transition: %s -> %s' %
246 (state, newstate))
246 (state, newstate))
247 state = newstate
247 state = newstate
248 return p.finished()
248 return p.finished()
249
249
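parsepatch is a small state machine: each scanpatch event must appear in the transitions table for the current state, and any missing pair (two consecutive 'range' events, say) raises a PatchError. A hedged usage sketch, assuming it runs inside this module with Mercurial importable; the diff text is invented:

import cStringIO
text = ('diff --git a/foo.txt b/foo.txt\n'
        '--- a/foo.txt\n'
        '+++ b/foo.txt\n'
        '@@ -1,1 +1,1 @@\n'
        '-old\n'
        '+new\n')
# headers come back with their hunks attached via header.hunks
files = [item.filename() for item in parsepatch(cStringIO.StringIO(text))
         if isinstance(item, header)]      # -> ['foo.txt']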
250 def filterpatch(ui, chunks):
250 def filterpatch(ui, chunks):
251 """Interactively filter patch chunks into applied-only chunks"""
251 """Interactively filter patch chunks into applied-only chunks"""
252 chunks = list(chunks)
252 chunks = list(chunks)
253 chunks.reverse()
253 chunks.reverse()
254 seen = set()
254 seen = set()
255 def consumefile():
255 def consumefile():
256 """fetch next portion from chunks until a 'header' is seen
256 """fetch next portion from chunks until a 'header' is seen
257 NB: header == new-file mark
257 NB: header == new-file mark
258 """
258 """
259 consumed = []
259 consumed = []
260 while chunks:
260 while chunks:
261 if isinstance(chunks[-1], header):
261 if isinstance(chunks[-1], header):
262 break
262 break
263 else:
263 else:
264 consumed.append(chunks.pop())
264 consumed.append(chunks.pop())
265 return consumed
265 return consumed
266
266
267 resp_all = [None] # these two are changed from inside prompt,
267 resp_all = [None] # these two are changed from inside prompt,
268 resp_file = [None] # so they can't be ordinary variables
268 resp_file = [None] # so they can't be ordinary variables
269 applied = {} # 'filename' -> [] of chunks
269 applied = {} # 'filename' -> [] of chunks
270 def prompt(query):
270 def prompt(query):
271 """prompt query, and process base inputs
271 """prompt query, and process base inputs
272
272
273 - y/n for the rest of file
273 - y/n for the rest of file
274 - y/n for the rest
274 - y/n for the rest
275 - ? (help)
275 - ? (help)
276 - q (quit)
276 - q (quit)
277
277
278 else, input is returned to the caller.
278 else, input is returned to the caller.
279 """
279 """
280 if resp_all[0] is not None:
280 if resp_all[0] is not None:
281 return resp_all[0]
281 return resp_all[0]
282 if resp_file[0] is not None:
282 if resp_file[0] is not None:
283 return resp_file[0]
283 return resp_file[0]
284 while True:
284 while True:
285 resps = _('[Ynsfdaq?]')
285 resps = _('[Ynsfdaq?]')
286 choices = (_('&Yes, record this change'),
286 choices = (_('&Yes, record this change'),
287 _('&No, skip this change'),
287 _('&No, skip this change'),
288 _('&Skip remaining changes to this file'),
288 _('&Skip remaining changes to this file'),
289 _('Record remaining changes to this &file'),
289 _('Record remaining changes to this &file'),
290 _('&Done, skip remaining changes and files'),
290 _('&Done, skip remaining changes and files'),
291 _('Record &all changes to all remaining files'),
291 _('Record &all changes to all remaining files'),
292 _('&Quit, recording no changes'),
292 _('&Quit, recording no changes'),
293 _('&?'))
293 _('&?'))
294 r = (ui.prompt("%s %s " % (query, resps), choices)
294 r = (ui.prompt("%s %s " % (query, resps), choices)
295 or _('y')).lower()
295 or _('y')).lower()
296 if r == _('?'):
296 if r == _('?'):
297 doc = gettext(record.__doc__)
297 doc = gettext(record.__doc__)
298 c = doc.find(_('y - record this change'))
298 c = doc.find(_('y - record this change'))
299 for l in doc[c:].splitlines():
299 for l in doc[c:].splitlines():
300 if l: ui.write(l.strip(), '\n')
300 if l: ui.write(l.strip(), '\n')
301 continue
301 continue
302 elif r == _('s'):
302 elif r == _('s'):
303 r = resp_file[0] = 'n'
303 r = resp_file[0] = 'n'
304 elif r == _('f'):
304 elif r == _('f'):
305 r = resp_file[0] = 'y'
305 r = resp_file[0] = 'y'
306 elif r == _('d'):
306 elif r == _('d'):
307 r = resp_all[0] = 'n'
307 r = resp_all[0] = 'n'
308 elif r == _('a'):
308 elif r == _('a'):
309 r = resp_all[0] = 'y'
309 r = resp_all[0] = 'y'
310 elif r == _('q'):
310 elif r == _('q'):
311 raise util.Abort(_('user quit'))
311 raise util.Abort(_('user quit'))
312 return r
312 return r
313 pos, total = 0, len(chunks) - 1
313 pos, total = 0, len(chunks) - 1
314 while chunks:
314 while chunks:
315 chunk = chunks.pop()
315 chunk = chunks.pop()
316 if isinstance(chunk, header):
316 if isinstance(chunk, header):
317 # new-file mark
317 # new-file mark
318 resp_file = [None]
318 resp_file = [None]
319 fixoffset = 0
319 fixoffset = 0
320 hdr = ''.join(chunk.header)
320 hdr = ''.join(chunk.header)
321 if hdr in seen:
321 if hdr in seen:
322 consumefile()
322 consumefile()
323 continue
323 continue
324 seen.add(hdr)
324 seen.add(hdr)
325 if resp_all[0] is None:
325 if resp_all[0] is None:
326 chunk.pretty(ui)
326 chunk.pretty(ui)
327 r = prompt(_('examine changes to %s?') %
327 r = prompt(_('examine changes to %s?') %
328 _(' and ').join(map(repr, chunk.files())))
328 _(' and ').join(map(repr, chunk.files())))
329 if r == _('y'):
329 if r == _('y'):
330 applied[chunk.filename()] = [chunk]
330 applied[chunk.filename()] = [chunk]
331 if chunk.allhunks():
331 if chunk.allhunks():
332 applied[chunk.filename()] += consumefile()
332 applied[chunk.filename()] += consumefile()
333 else:
333 else:
334 consumefile()
334 consumefile()
335 else:
335 else:
336 # new hunk
336 # new hunk
337 if resp_file[0] is None and resp_all[0] is None:
337 if resp_file[0] is None and resp_all[0] is None:
338 chunk.pretty(ui)
338 chunk.pretty(ui)
339 r = total == 1 and prompt(_('record this change to %r?') %
339 r = total == 1 and prompt(_('record this change to %r?') %
340 chunk.filename()) \
340 chunk.filename()) \
341 or prompt(_('record change %d/%d to %r?') %
341 or prompt(_('record change %d/%d to %r?') %
342 (pos, total, chunk.filename()))
342 (pos, total, chunk.filename()))
343 if r == _('y'):
343 if r == _('y'):
344 if fixoffset:
344 if fixoffset:
345 chunk = copy.copy(chunk)
345 chunk = copy.copy(chunk)
346 chunk.toline += fixoffset
346 chunk.toline += fixoffset
347 applied[chunk.filename()].append(chunk)
347 applied[chunk.filename()].append(chunk)
348 else:
348 else:
349 fixoffset += chunk.removed - chunk.added
349 fixoffset += chunk.removed - chunk.added
350 pos = pos + 1
350 pos = pos + 1
351 return reduce(operator.add, [h for h in applied.itervalues()
351 return reduce(operator.add, [h for h in applied.itervalues()
352 if h[0].special() or len(h) > 1], [])
352 if h[0].special() or len(h) > 1], [])
353
353
354 def record(ui, repo, *pats, **opts):
354 def record(ui, repo, *pats, **opts):
355 '''interactively select changes to commit
355 '''interactively select changes to commit
356
356
357 If a list of files is omitted, all changes reported by "hg status"
357 If a list of files is omitted, all changes reported by "hg status"
358 will be candidates for recording.
358 will be candidates for recording.
359
359
360 See 'hg help dates' for a list of formats valid for -d/--date.
360 See 'hg help dates' for a list of formats valid for -d/--date.
361
361
362 You will be prompted for whether to record changes to each
362 You will be prompted for whether to record changes to each
363 modified file, and for files with multiple changes, for each
363 modified file, and for files with multiple changes, for each
364 change to use. For each query, the following responses are
364 change to use. For each query, the following responses are
365 possible:
365 possible:
366
366
367 y - record this change
367 y - record this change
368 n - skip this change
368 n - skip this change
369
369
370 s - skip remaining changes to this file
370 s - skip remaining changes to this file
371 f - record remaining changes to this file
371 f - record remaining changes to this file
372
372
373 d - done, skip remaining changes and files
373 d - done, skip remaining changes and files
374 a - record all changes to all remaining files
374 a - record all changes to all remaining files
375 q - quit, recording no changes
375 q - quit, recording no changes
376
376
377 ? - display help'''
377 ? - display help'''
378
378
379 def record_committer(ui, repo, pats, opts):
379 def record_committer(ui, repo, pats, opts):
380 commands.commit(ui, repo, *pats, **opts)
380 commands.commit(ui, repo, *pats, **opts)
381
381
382 dorecord(ui, repo, record_committer, *pats, **opts)
382 dorecord(ui, repo, record_committer, *pats, **opts)
383
383
384
384
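Both commands funnel into dorecord with a committer callback: record hands the selected files to commands.commit, while qrecord (below) hands them to mq.new. A hedged sketch of the same callback shape, using a purely hypothetical committer that only reports what it would commit:

# hypothetical committer: the signature dorecord expects, but it only
# reports the file list instead of committing (illustrative only)
def reporting_committer(ui, repo, pats, opts):
    ui.status('would commit %d file(s): %s\n' % (len(pats), ' '.join(pats)))

# dorecord(ui, repo, reporting_committer, *pats, **opts) would then run the
# interactive hunk selection and invoke the callback once the working
# directory holds only the chosen changes.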
385 def qrecord(ui, repo, patch, *pats, **opts):
385 def qrecord(ui, repo, patch, *pats, **opts):
386 '''interactively record a new patch
386 '''interactively record a new patch
387
387
388 See 'hg help qnew' & 'hg help record' for more information and
388 See 'hg help qnew' & 'hg help record' for more information and
389 usage.
389 usage.
390 '''
390 '''
391
391
392 try:
392 try:
393 mq = extensions.find('mq')
393 mq = extensions.find('mq')
394 except KeyError:
394 except KeyError:
395 raise util.Abort(_("'mq' extension not loaded"))
395 raise util.Abort(_("'mq' extension not loaded"))
396
396
397 def qrecord_committer(ui, repo, pats, opts):
397 def qrecord_committer(ui, repo, pats, opts):
398 mq.new(ui, repo, patch, *pats, **opts)
398 mq.new(ui, repo, patch, *pats, **opts)
399
399
400 opts = opts.copy()
400 opts = opts.copy()
401 opts['force'] = True # always 'qnew -f'
401 opts['force'] = True # always 'qnew -f'
402 dorecord(ui, repo, qrecord_committer, *pats, **opts)
402 dorecord(ui, repo, qrecord_committer, *pats, **opts)
403
403
404
404
405 def dorecord(ui, repo, committer, *pats, **opts):
405 def dorecord(ui, repo, committer, *pats, **opts):
406 if not ui.interactive():
406 if not ui.interactive():
407 raise util.Abort(_('running non-interactively, use commit instead'))
407 raise util.Abort(_('running non-interactively, use commit instead'))
408
408
409 def recordfunc(ui, repo, message, match, opts):
409 def recordfunc(ui, repo, message, match, opts):
410 """This is generic record driver.
410 """This is generic record driver.
411
411
412 It's job is to interactively filter local changes, and accordingly
412 It's job is to interactively filter local changes, and accordingly
413 prepare working dir into a state, where the job can be delegated to
413 prepare working dir into a state, where the job can be delegated to
414 non-interactive commit command such as 'commit' or 'qrefresh'.
414 non-interactive commit command such as 'commit' or 'qrefresh'.
415
415
416 After the actual job is done by the non-interactive command, the
416 After the actual job is done by the non-interactive command, the
417 working dir state is restored to its original form.
417 working dir state is restored to its original form.
418
418
419 In the end we'll record interesting changes, and everything else will be
419 In the end we'll record interesting changes, and everything else will be
420 left in place, so the user can continue his work.
420 left in place, so the user can continue his work.
421 """
421 """
422
422
423 changes = repo.status(match=match)[:3]
423 changes = repo.status(match=match)[:3]
424 diffopts = mdiff.diffopts(git=True, nodates=True)
424 diffopts = mdiff.diffopts(git=True, nodates=True)
425 chunks = patch.diff(repo, changes=changes, opts=diffopts)
425 chunks = patch.diff(repo, changes=changes, opts=diffopts)
426 fp = cStringIO.StringIO()
426 fp = cStringIO.StringIO()
427 fp.write(''.join(chunks))
427 fp.write(''.join(chunks))
428 fp.seek(0)
428 fp.seek(0)
429
429
430 # 1. filter the patch, so we have the intending-to-apply subset of it
430 # 1. filter the patch, so we have the intending-to-apply subset of it
431 chunks = filterpatch(ui, parsepatch(fp))
431 chunks = filterpatch(ui, parsepatch(fp))
432 del fp
432 del fp
433
433
434 contenders = set()
434 contenders = set()
435 for h in chunks:
435 for h in chunks:
436 try: contenders.update(set(h.files()))
436 try: contenders.update(set(h.files()))
437 except AttributeError: pass
437 except AttributeError: pass
438
438
439 changed = changes[0] + changes[1] + changes[2]
439 changed = changes[0] + changes[1] + changes[2]
440 newfiles = [f for f in changed if f in contenders]
440 newfiles = [f for f in changed if f in contenders]
441 if not newfiles:
441 if not newfiles:
442 ui.status(_('no changes to record\n'))
442 ui.status(_('no changes to record\n'))
443 return 0
443 return 0
444
444
445 modified = set(changes[0])
445 modified = set(changes[0])
446
446
447 # 2. backup changed files, so we can restore them in the end
447 # 2. backup changed files, so we can restore them in the end
448 backups = {}
448 backups = {}
449 backupdir = repo.join('record-backups')
449 backupdir = repo.join('record-backups')
450 try:
450 try:
451 os.mkdir(backupdir)
451 os.mkdir(backupdir)
452 except OSError, err:
452 except OSError, err:
453 if err.errno != errno.EEXIST:
453 if err.errno != errno.EEXIST:
454 raise
454 raise
455 try:
455 try:
456 # backup continues
456 # backup continues
457 for f in newfiles:
457 for f in newfiles:
458 if f not in modified:
458 if f not in modified:
459 continue
459 continue
460 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
460 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
461 dir=backupdir)
461 dir=backupdir)
462 os.close(fd)
462 os.close(fd)
463 ui.debug(_('backup %r as %r\n') % (f, tmpname))
463 ui.debug(_('backup %r as %r\n') % (f, tmpname))
464 util.copyfile(repo.wjoin(f), tmpname)
464 util.copyfile(repo.wjoin(f), tmpname)
465 backups[f] = tmpname
465 backups[f] = tmpname
466
466
467 fp = cStringIO.StringIO()
467 fp = cStringIO.StringIO()
468 for c in chunks:
468 for c in chunks:
469 if c.filename() in backups:
469 if c.filename() in backups:
470 c.write(fp)
470 c.write(fp)
471 dopatch = fp.tell()
471 dopatch = fp.tell()
472 fp.seek(0)
472 fp.seek(0)
473
473
474 # 3a. apply filtered patch to clean repo (clean)
474 # 3a. apply filtered patch to clean repo (clean)
475 if backups:
475 if backups:
476 hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)
476 hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)
477
477
478 # 3b. (apply)
478 # 3b. (apply)
479 if dopatch:
479 if dopatch:
480 try:
480 try:
481 ui.debug(_('applying patch\n'))
481 ui.debug(_('applying patch\n'))
482 ui.debug(fp.getvalue())
482 ui.debug(fp.getvalue())
483 pfiles = {}
483 pfiles = {}
484 patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
484 patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
485 eolmode=None)
485 eolmode=None)
486 patch.updatedir(ui, repo, pfiles)
486 patch.updatedir(ui, repo, pfiles)
487 except patch.PatchError, err:
487 except patch.PatchError, err:
488 s = str(err)
488 s = str(err)
489 if s:
489 if s:
490 raise util.Abort(s)
490 raise util.Abort(s)
491 else:
491 else:
492 raise util.Abort(_('patch failed to apply'))
492 raise util.Abort(_('patch failed to apply'))
493 del fp
493 del fp
494
494
495 # 4. We prepared the working directory according to the filtered patch.
495 # 4. We prepared the working directory according to the filtered patch.
496 # Now is the time to delegate the job to commit/qrefresh or the like!
496 # Now is the time to delegate the job to commit/qrefresh or the like!
497
497
498 # it is important to first chdir to the repo root -- we'll call a
498 # it is important to first chdir to the repo root -- we'll call a
499 # high-level command with a list of pathnames relative to the repo root
499 # high-level command with a list of pathnames relative to the repo root
500 cwd = os.getcwd()
500 cwd = os.getcwd()
501 os.chdir(repo.root)
501 os.chdir(repo.root)
502 try:
502 try:
503 committer(ui, repo, newfiles, opts)
503 committer(ui, repo, newfiles, opts)
504 finally:
504 finally:
505 os.chdir(cwd)
505 os.chdir(cwd)
506
506
507 return 0
507 return 0
508 finally:
508 finally:
509 # 5. finally restore backed-up files
509 # 5. finally restore backed-up files
510 try:
510 try:
511 for realname, tmpname in backups.iteritems():
511 for realname, tmpname in backups.iteritems():
512 ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
512 ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
513 util.copyfile(tmpname, repo.wjoin(realname))
513 util.copyfile(tmpname, repo.wjoin(realname))
514 os.unlink(tmpname)
514 os.unlink(tmpname)
515 os.rmdir(backupdir)
515 os.rmdir(backupdir)
516 except OSError:
516 except OSError:
517 pass
517 pass
518 return cmdutil.commit(ui, repo, recordfunc, pats, opts)
518 return cmdutil.commit(ui, repo, recordfunc, pats, opts)
519
519
520 cmdtable = {
520 cmdtable = {
521 "record":
521 "record":
522 (record,
522 (record,
523
523
524 # add commit options
524 # add commit options
525 commands.table['^commit|ci'][1],
525 commands.table['^commit|ci'][1],
526
526
527 _('hg record [OPTION]... [FILE]...')),
527 _('hg record [OPTION]... [FILE]...')),
528 }
528 }
529
529
530
530
531 def extsetup():
531 def extsetup():
532 try:
532 try:
533 mq = extensions.find('mq')
533 mq = extensions.find('mq')
534 except KeyError:
534 except KeyError:
535 return
535 return
536
536
537 qcmdtable = {
537 qcmdtable = {
538 "qrecord":
538 "qrecord":
539 (qrecord,
539 (qrecord,
540
540
541 # add qnew options, except '--force'
541 # add qnew options, except '--force'
542 [opt for opt in mq.cmdtable['qnew'][1] if opt[1] != 'force'],
542 [opt for opt in mq.cmdtable['qnew'][1] if opt[1] != 'force'],
543
543
544 _('hg qrecord [OPTION]... PATCH [FILE]...')),
544 _('hg qrecord [OPTION]... PATCH [FILE]...')),
545 }
545 }
546
546
547 cmdtable.update(qcmdtable)
547 cmdtable.update(qcmdtable)
548
548
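The heart of recordfunc above is the backup-revert-patch-commit-restore dance: changed files are copied aside with tempfile.mkstemp, the working copy is reverted, only the chosen hunks are re-applied, the committer runs, and the backups are copied back in a finally block. A minimal self-contained sketch of that pattern on plain files, with no Mercurial involved and invented paths:

import os, shutil, tempfile

def with_backups(paths, work):
    """copy paths aside, run work(), then restore the originals"""
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backups = {}
    try:
        for p in paths:
            fd, tmp = tempfile.mkstemp(prefix=os.path.basename(p) + '.',
                                       dir=backupdir)
            os.close(fd)
            shutil.copyfile(p, tmp)
            backups[p] = tmp
        work()                       # e.g. apply the filtered patch + commit
    finally:
        for real, tmp in backups.items():
            shutil.copyfile(tmp, real)   # put the full change back
            os.unlink(tmp)
        os.rmdir(backupdir)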
@@ -1,31 +1,31 b''
1 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
1 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2, incorporated herein by reference.
4 # GNU General Public License version 2, incorporated herein by reference.
5
5
6 '''provides the hg share command'''
6 '''share a common history between several working directories'''
7
7
8 import os
8 import os
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial import hg, commands
10 from mercurial import hg, commands
11
11
12 def share(ui, source, dest=None, noupdate=False):
12 def share(ui, source, dest=None, noupdate=False):
13 """create a new shared repository (experimental)
13 """create a new shared repository (experimental)
14
14
15 Initialize a new repository and working directory that shares its
15 Initialize a new repository and working directory that shares its
16 history with another repository.
16 history with another repository.
17
17
18 NOTE: actions that change history such as rollback or moving the
18 NOTE: actions that change history such as rollback or moving the
19 source may confuse sharers.
19 source may confuse sharers.
20 """
20 """
21
21
22 return hg.share(ui, source, dest, not noupdate)
22 return hg.share(ui, source, dest, not noupdate)
23
23
24 cmdtable = {
24 cmdtable = {
25 "share":
25 "share":
26 (share,
26 (share,
27 [('U', 'noupdate', None, _('do not create a working copy'))],
27 [('U', 'noupdate', None, _('do not create a working copy'))],
28 _('[-U] SOURCE [DEST]')),
28 _('[-U] SOURCE [DEST]')),
29 }
29 }
30
30
31 commands.norepo += " share"
31 commands.norepo += " share"
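hg.share() here creates a new repository whose history is borrowed from the source store, so both working directories see the same changesets. A hedged sketch of driving it from Python, assuming a Mercurial installation of roughly this vintage and an existing repository at the invented paths:

from mercurial import ui as uimod, hg

u = uimod.ui()
# both paths are illustrative; the destination gets its own working copy
# while the changeset history continues to live in the source repository
hg.share(u, '/path/to/existing-repo', '/path/to/new-workdir', True)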
@@ -1,605 +1,605 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''patch transplanting tool
8 '''transplant changesets from another branch
9
9
10 This extension allows you to transplant patches from another branch.
10 This extension allows you to transplant patches from another branch.
11
11
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 map from a changeset hash to its hash in the source repository.
13 map from a changeset hash to its hash in the source repository.
14 '''
14 '''
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 import os, tempfile
17 import os, tempfile
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
19 from mercurial import patch, revlog, util, error
19 from mercurial import patch, revlog, util, error
20
20
21 class transplantentry(object):
21 class transplantentry(object):
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants(object):
26 class transplants(object):
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
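The transplants file written by this class is just one 'localhex:remotehex' pair per line. A hedged round-trip sketch of that format with plain strings; the 40-character node ids are fabricated:

# fabricated 40-char hex node ids, purely illustrative
record_line = ('a' * 40) + ':' + ('b' * 40) + '\n'

# parse: local node first, source (remote) node second
lhex, rhex = record_line.strip().split(':')
assert len(lhex) == len(rhex) == 40

# serialize again -- the same form transplants.write() emits
assert lhex + ':' + rhex + '\n' == record_line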
67 class transplanter(object):
67 class transplanter(object):
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants',
72 self.transplants = transplants(self.path, 'transplants',
73 opener=self.opener)
73 opener=self.opener)
74
74
75 def applied(self, repo, node, parent):
75 def applied(self, repo, node, parent):
76 '''returns True if a node is already an ancestor of parent
76 '''returns True if a node is already an ancestor of parent
77 or has already been transplanted'''
77 or has already been transplanted'''
78 if hasnode(repo, node):
78 if hasnode(repo, node):
79 if node in repo.changelog.reachable(parent, stop=node):
79 if node in repo.changelog.reachable(parent, stop=node):
80 return True
80 return True
81 for t in self.transplants.get(node):
81 for t in self.transplants.get(node):
82 # it might have been stripped
82 # it might have been stripped
83 if not hasnode(repo, t.lnode):
83 if not hasnode(repo, t.lnode):
84 self.transplants.remove(t)
84 self.transplants.remove(t)
85 return False
85 return False
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
87 return True
87 return True
88 return False
88 return False
89
89
90 def apply(self, repo, source, revmap, merges, opts={}):
90 def apply(self, repo, source, revmap, merges, opts={}):
91 '''apply the revisions in revmap one by one in revision order'''
91 '''apply the revisions in revmap one by one in revision order'''
92 revs = sorted(revmap)
92 revs = sorted(revmap)
93 p1, p2 = repo.dirstate.parents()
93 p1, p2 = repo.dirstate.parents()
94 pulls = []
94 pulls = []
95 diffopts = patch.diffopts(self.ui, opts)
95 diffopts = patch.diffopts(self.ui, opts)
96 diffopts.git = True
96 diffopts.git = True
97
97
98 lock = wlock = None
98 lock = wlock = None
99 try:
99 try:
100 wlock = repo.wlock()
100 wlock = repo.wlock()
101 lock = repo.lock()
101 lock = repo.lock()
102 for rev in revs:
102 for rev in revs:
103 node = revmap[rev]
103 node = revmap[rev]
104 revstr = '%s:%s' % (rev, revlog.short(node))
104 revstr = '%s:%s' % (rev, revlog.short(node))
105
105
106 if self.applied(repo, node, p1):
106 if self.applied(repo, node, p1):
107 self.ui.warn(_('skipping already applied revision %s\n') %
107 self.ui.warn(_('skipping already applied revision %s\n') %
108 revstr)
108 revstr)
109 continue
109 continue
110
110
111 parents = source.changelog.parents(node)
111 parents = source.changelog.parents(node)
112 if not opts.get('filter'):
112 if not opts.get('filter'):
113 # If the changeset parent is the same as the
113 # If the changeset parent is the same as the
114 # wdir's parent, just pull it.
114 # wdir's parent, just pull it.
115 if parents[0] == p1:
115 if parents[0] == p1:
116 pulls.append(node)
116 pulls.append(node)
117 p1 = node
117 p1 = node
118 continue
118 continue
119 if pulls:
119 if pulls:
120 if source != repo:
120 if source != repo:
121 repo.pull(source, heads=pulls)
121 repo.pull(source, heads=pulls)
122 merge.update(repo, pulls[-1], False, False, None)
122 merge.update(repo, pulls[-1], False, False, None)
123 p1, p2 = repo.dirstate.parents()
123 p1, p2 = repo.dirstate.parents()
124 pulls = []
124 pulls = []
125
125
126 domerge = False
126 domerge = False
127 if node in merges:
127 if node in merges:
128 # pulling all the merge revs at once would mean we
128 # pulling all the merge revs at once would mean we
129 # couldn't transplant after the latest even if
129 # couldn't transplant after the latest even if
130 # transplants before them fail.
130 # transplants before them fail.
131 domerge = True
131 domerge = True
132 if not hasnode(repo, node):
132 if not hasnode(repo, node):
133 repo.pull(source, heads=[node])
133 repo.pull(source, heads=[node])
134
134
135 if parents[1] != revlog.nullid:
135 if parents[1] != revlog.nullid:
136 self.ui.note(_('skipping merge changeset %s:%s\n')
136 self.ui.note(_('skipping merge changeset %s:%s\n')
137 % (rev, revlog.short(node)))
137 % (rev, revlog.short(node)))
138 patchfile = None
138 patchfile = None
139 else:
139 else:
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
141 fp = os.fdopen(fd, 'w')
141 fp = os.fdopen(fd, 'w')
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
143 for chunk in gen:
143 for chunk in gen:
144 fp.write(chunk)
144 fp.write(chunk)
145 fp.close()
145 fp.close()
146
146
147 del revmap[rev]
147 del revmap[rev]
148 if patchfile or domerge:
148 if patchfile or domerge:
149 try:
149 try:
150 n = self.applyone(repo, node,
150 n = self.applyone(repo, node,
151 source.changelog.read(node),
151 source.changelog.read(node),
152 patchfile, merge=domerge,
152 patchfile, merge=domerge,
153 log=opts.get('log'),
153 log=opts.get('log'),
154 filter=opts.get('filter'))
154 filter=opts.get('filter'))
155 if n and domerge:
155 if n and domerge:
156 self.ui.status(_('%s merged at %s\n') % (revstr,
156 self.ui.status(_('%s merged at %s\n') % (revstr,
157 revlog.short(n)))
157 revlog.short(n)))
158 elif n:
158 elif n:
159 self.ui.status(_('%s transplanted to %s\n')
159 self.ui.status(_('%s transplanted to %s\n')
160 % (revlog.short(node),
160 % (revlog.short(node),
161 revlog.short(n)))
161 revlog.short(n)))
162 finally:
162 finally:
163 if patchfile:
163 if patchfile:
164 os.unlink(patchfile)
164 os.unlink(patchfile)
165 if pulls:
165 if pulls:
166 repo.pull(source, heads=pulls)
166 repo.pull(source, heads=pulls)
167 merge.update(repo, pulls[-1], False, False, None)
167 merge.update(repo, pulls[-1], False, False, None)
168 finally:
168 finally:
169 self.saveseries(revmap, merges)
169 self.saveseries(revmap, merges)
170 self.transplants.write()
170 self.transplants.write()
171 lock.release()
171 lock.release()
172 wlock.release()
172 wlock.release()
173
173
174 def filter(self, filter, changelog, patchfile):
174 def filter(self, filter, changelog, patchfile):
175 '''arbitrarily rewrite changeset before applying it'''
175 '''arbitrarily rewrite changeset before applying it'''
176
176
177 self.ui.status(_('filtering %s\n') % patchfile)
177 self.ui.status(_('filtering %s\n') % patchfile)
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
179
179
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
181 fp = os.fdopen(fd, 'w')
181 fp = os.fdopen(fd, 'w')
182 fp.write("# HG changeset patch\n")
182 fp.write("# HG changeset patch\n")
183 fp.write("# User %s\n" % user)
183 fp.write("# User %s\n" % user)
184 fp.write("# Date %d %d\n" % date)
184 fp.write("# Date %d %d\n" % date)
185 fp.write(changelog[4])
185 fp.write(changelog[4])
186 fp.close()
186 fp.close()
187
187
188 try:
188 try:
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
190 util.shellquote(patchfile)),
190 util.shellquote(patchfile)),
191 environ={'HGUSER': changelog[1]},
191 environ={'HGUSER': changelog[1]},
192 onerr=util.Abort, errprefix=_('filter failed'))
192 onerr=util.Abort, errprefix=_('filter failed'))
193 user, date, msg = self.parselog(file(headerfile))[1:4]
193 user, date, msg = self.parselog(file(headerfile))[1:4]
194 finally:
194 finally:
195 os.unlink(headerfile)
195 os.unlink(headerfile)
196
196
197 return (user, date, msg)
197 return (user, date, msg)
198
198
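The --filter hook above hands an external command two arguments, a header file ('# HG changeset patch' plus user, date and message) and the patch file, runs it with HGUSER set, and then re-reads the header for the possibly rewritten user, date and message. A hedged sketch of a filter script a user might supply, written in Python here; the marker it appends is invented:

#!/usr/bin/env python
# illustrative filter: append a marker line to the commit message
import sys

headerfile = sys.argv[1]   # '# HG changeset patch' header + message
# sys.argv[2] would be the patch file; this sketch leaves it untouched

lines = open(headerfile).readlines()
lines.append('\n(transplanted with local edits)\n')
open(headerfile, 'w').writelines(lines)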
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
200 filter=None):
200 filter=None):
201 '''apply the patch in patchfile to the repository as a transplant'''
201 '''apply the patch in patchfile to the repository as a transplant'''
202 (manifest, user, (time, timezone), files, message) = cl[:5]
202 (manifest, user, (time, timezone), files, message) = cl[:5]
203 date = "%d %d" % (time, timezone)
203 date = "%d %d" % (time, timezone)
204 extra = {'transplant_source': node}
204 extra = {'transplant_source': node}
205 if filter:
205 if filter:
206 (user, date, message) = self.filter(filter, cl, patchfile)
206 (user, date, message) = self.filter(filter, cl, patchfile)
207
207
208 if log:
208 if log:
209 message += '\n(transplanted from %s)' % revlog.hex(node)
209 message += '\n(transplanted from %s)' % revlog.hex(node)
210
210
211 self.ui.status(_('applying %s\n') % revlog.short(node))
211 self.ui.status(_('applying %s\n') % revlog.short(node))
212 self.ui.note('%s %s\n%s\n' % (user, date, message))
212 self.ui.note('%s %s\n%s\n' % (user, date, message))
213
213
214 if not patchfile and not merge:
214 if not patchfile and not merge:
215 raise util.Abort(_('can only omit patchfile if merging'))
215 raise util.Abort(_('can only omit patchfile if merging'))
216 if patchfile:
216 if patchfile:
217 try:
217 try:
218 files = {}
218 files = {}
219 try:
219 try:
220 patch.patch(patchfile, self.ui, cwd=repo.root,
220 patch.patch(patchfile, self.ui, cwd=repo.root,
221 files=files, eolmode=None)
221 files=files, eolmode=None)
222 if not files:
222 if not files:
223 self.ui.warn(_('%s: empty changeset')
223 self.ui.warn(_('%s: empty changeset')
224 % revlog.hex(node))
224 % revlog.hex(node))
225 return None
225 return None
226 finally:
226 finally:
227 files = patch.updatedir(self.ui, repo, files)
227 files = patch.updatedir(self.ui, repo, files)
228 except Exception, inst:
228 except Exception, inst:
229 if filter:
229 if filter:
230 os.unlink(patchfile)
230 os.unlink(patchfile)
231 seriespath = os.path.join(self.path, 'series')
231 seriespath = os.path.join(self.path, 'series')
232 if os.path.exists(seriespath):
232 if os.path.exists(seriespath):
233 os.unlink(seriespath)
233 os.unlink(seriespath)
234 p1 = repo.dirstate.parents()[0]
234 p1 = repo.dirstate.parents()[0]
235 p2 = node
235 p2 = node
236 self.log(user, date, message, p1, p2, merge=merge)
236 self.log(user, date, message, p1, p2, merge=merge)
237 self.ui.write(str(inst) + '\n')
237 self.ui.write(str(inst) + '\n')
238 raise util.Abort(_('Fix up the merge and run '
238 raise util.Abort(_('Fix up the merge and run '
239 'hg transplant --continue'))
239 'hg transplant --continue'))
240 else:
240 else:
241 files = None
241 files = None
242 if merge:
242 if merge:
243 p1, p2 = repo.dirstate.parents()
243 p1, p2 = repo.dirstate.parents()
244 repo.dirstate.setparents(p1, node)
244 repo.dirstate.setparents(p1, node)
245 m = match.always(repo.root, '')
245 m = match.always(repo.root, '')
246 else:
246 else:
247 m = match.exact(repo.root, '', files)
247 m = match.exact(repo.root, '', files)
248
248
249 n = repo.commit(message, user, date, extra=extra, match=m)
249 n = repo.commit(message, user, date, extra=extra, match=m)
250 if not merge:
250 if not merge:
251 self.transplants.set(n, node)
251 self.transplants.set(n, node)
252
252
253 return n
253 return n
254
254
255 def resume(self, repo, source, opts=None):
255 def resume(self, repo, source, opts=None):
256 '''recover last transaction and apply remaining changesets'''
256 '''recover last transaction and apply remaining changesets'''
257 if os.path.exists(os.path.join(self.path, 'journal')):
257 if os.path.exists(os.path.join(self.path, 'journal')):
258 n, node = self.recover(repo)
258 n, node = self.recover(repo)
259 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
259 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
260 revlog.short(n)))
260 revlog.short(n)))
261 seriespath = os.path.join(self.path, 'series')
261 seriespath = os.path.join(self.path, 'series')
262 if not os.path.exists(seriespath):
262 if not os.path.exists(seriespath):
263 self.transplants.write()
263 self.transplants.write()
264 return
264 return
265 nodes, merges = self.readseries()
265 nodes, merges = self.readseries()
266 revmap = {}
266 revmap = {}
267 for n in nodes:
267 for n in nodes:
268 revmap[source.changelog.rev(n)] = n
268 revmap[source.changelog.rev(n)] = n
269 os.unlink(seriespath)
269 os.unlink(seriespath)
270
270
271 self.apply(repo, source, revmap, merges, opts)
271 self.apply(repo, source, revmap, merges, opts)
272
272
273 def recover(self, repo):
273 def recover(self, repo):
274 '''commit working directory using journal metadata'''
274 '''commit working directory using journal metadata'''
275 node, user, date, message, parents = self.readlog()
275 node, user, date, message, parents = self.readlog()
276 merge = len(parents) == 2
276 merge = len(parents) == 2
277
277
278 if not user or not date or not message or not parents[0]:
278 if not user or not date or not message or not parents[0]:
279 raise util.Abort(_('transplant log file is corrupt'))
279 raise util.Abort(_('transplant log file is corrupt'))
280
280
281 extra = {'transplant_source': node}
281 extra = {'transplant_source': node}
282 wlock = repo.wlock()
282 wlock = repo.wlock()
283 try:
283 try:
284 p1, p2 = repo.dirstate.parents()
284 p1, p2 = repo.dirstate.parents()
285 if p1 != parents[0]:
285 if p1 != parents[0]:
286 raise util.Abort(
286 raise util.Abort(
287 _('working dir not at transplant parent %s') %
287 _('working dir not at transplant parent %s') %
288 revlog.hex(parents[0]))
288 revlog.hex(parents[0]))
289 if merge:
289 if merge:
290 repo.dirstate.setparents(p1, parents[1])
290 repo.dirstate.setparents(p1, parents[1])
291 n = repo.commit(message, user, date, extra=extra)
291 n = repo.commit(message, user, date, extra=extra)
292 if not n:
292 if not n:
293 raise util.Abort(_('commit failed'))
293 raise util.Abort(_('commit failed'))
294 if not merge:
294 if not merge:
295 self.transplants.set(n, node)
295 self.transplants.set(n, node)
296 self.unlog()
296 self.unlog()
297
297
298 return n, node
298 return n, node
299 finally:
299 finally:
300 wlock.release()
300 wlock.release()
301
301
302 def readseries(self):
302 def readseries(self):
303 nodes = []
303 nodes = []
304 merges = []
304 merges = []
305 cur = nodes
305 cur = nodes
306 for line in self.opener('series').read().splitlines():
306 for line in self.opener('series').read().splitlines():
307 if line.startswith('# Merges'):
307 if line.startswith('# Merges'):
308 cur = merges
308 cur = merges
309 continue
309 continue
310 cur.append(revlog.bin(line))
310 cur.append(revlog.bin(line))
311
311
312 return (nodes, merges)
312 return (nodes, merges)
313
313
314 def saveseries(self, revmap, merges):
314 def saveseries(self, revmap, merges):
315 if not revmap:
315 if not revmap:
316 return
316 return
317
317
318 if not os.path.isdir(self.path):
318 if not os.path.isdir(self.path):
319 os.mkdir(self.path)
319 os.mkdir(self.path)
320 series = self.opener('series', 'w')
320 series = self.opener('series', 'w')
321 for rev in sorted(revmap):
321 for rev in sorted(revmap):
322 series.write(revlog.hex(revmap[rev]) + '\n')
322 series.write(revlog.hex(revmap[rev]) + '\n')
323 if merges:
323 if merges:
324 series.write('# Merges\n')
324 series.write('# Merges\n')
325 for m in merges:
325 for m in merges:
326 series.write(revlog.hex(m) + '\n')
326 series.write(revlog.hex(m) + '\n')
327 series.close()
327 series.close()
328
328
329 def parselog(self, fp):
329 def parselog(self, fp):
330 parents = []
330 parents = []
331 message = []
331 message = []
332 node = revlog.nullid
332 node = revlog.nullid
333 inmsg = False
333 inmsg = False
334 for line in fp.read().splitlines():
334 for line in fp.read().splitlines():
335 if inmsg:
335 if inmsg:
336 message.append(line)
336 message.append(line)
337 elif line.startswith('# User '):
337 elif line.startswith('# User '):
338 user = line[7:]
338 user = line[7:]
339 elif line.startswith('# Date '):
339 elif line.startswith('# Date '):
340 date = line[7:]
340 date = line[7:]
341 elif line.startswith('# Node ID '):
341 elif line.startswith('# Node ID '):
342 node = revlog.bin(line[10:])
342 node = revlog.bin(line[10:])
343 elif line.startswith('# Parent '):
343 elif line.startswith('# Parent '):
344 parents.append(revlog.bin(line[9:]))
344 parents.append(revlog.bin(line[9:]))
345 elif not line.startswith('#'):
345 elif not line.startswith('#'):
346 inmsg = True
346 inmsg = True
347 message.append(line)
347 message.append(line)
348 return (node, user, date, '\n'.join(message), parents)
348 return (node, user, date, '\n'.join(message), parents)
349
349
350 def log(self, user, date, message, p1, p2, merge=False):
350 def log(self, user, date, message, p1, p2, merge=False):
351 '''journal changelog metadata for later recover'''
351 '''journal changelog metadata for later recover'''
352
352
353 if not os.path.isdir(self.path):
353 if not os.path.isdir(self.path):
354 os.mkdir(self.path)
354 os.mkdir(self.path)
355 fp = self.opener('journal', 'w')
355 fp = self.opener('journal', 'w')
356 fp.write('# User %s\n' % user)
356 fp.write('# User %s\n' % user)
357 fp.write('# Date %s\n' % date)
357 fp.write('# Date %s\n' % date)
358 fp.write('# Node ID %s\n' % revlog.hex(p2))
358 fp.write('# Node ID %s\n' % revlog.hex(p2))
359 fp.write('# Parent ' + revlog.hex(p1) + '\n')
359 fp.write('# Parent ' + revlog.hex(p1) + '\n')
360 if merge:
360 if merge:
361 fp.write('# Parent ' + revlog.hex(p2) + '\n')
361 fp.write('# Parent ' + revlog.hex(p2) + '\n')
362 fp.write(message.rstrip() + '\n')
362 fp.write(message.rstrip() + '\n')
363 fp.close()
363 fp.close()
364
364
365 def readlog(self):
365 def readlog(self):
366 return self.parselog(self.opener('journal'))
366 return self.parselog(self.opener('journal'))
367
367
368 def unlog(self):
368 def unlog(self):
369 '''remove changelog journal'''
369 '''remove changelog journal'''
370 absdst = os.path.join(self.path, 'journal')
370 absdst = os.path.join(self.path, 'journal')
371 if os.path.exists(absdst):
371 if os.path.exists(absdst):
372 os.unlink(absdst)
372 os.unlink(absdst)
373
373
374 def transplantfilter(self, repo, source, root):
374 def transplantfilter(self, repo, source, root):
375 def matchfn(node):
375 def matchfn(node):
376 if self.applied(repo, node, root):
376 if self.applied(repo, node, root):
377 return False
377 return False
378 if source.changelog.parents(node)[1] != revlog.nullid:
378 if source.changelog.parents(node)[1] != revlog.nullid:
379 return False
379 return False
380 extra = source.changelog.read(node)[5]
380 extra = source.changelog.read(node)[5]
381 cnode = extra.get('transplant_source')
381 cnode = extra.get('transplant_source')
382 if cnode and self.applied(repo, cnode, root):
382 if cnode and self.applied(repo, cnode, root):
383 return False
383 return False
384 return True
384 return True
385
385
386 return matchfn
386 return matchfn
387
387
388 def hasnode(repo, node):
388 def hasnode(repo, node):
389 try:
389 try:
390 return repo.changelog.rev(node) is not None
390 return repo.changelog.rev(node) is not None
391 except error.RevlogError:
391 except error.RevlogError:
392 return False
392 return False
393
393
394 def browserevs(ui, repo, nodes, opts):
394 def browserevs(ui, repo, nodes, opts):
395 '''interactively transplant changesets'''
395 '''interactively transplant changesets'''
396 def browsehelp(ui):
396 def browsehelp(ui):
397 ui.write('y: transplant this changeset\n'
397 ui.write('y: transplant this changeset\n'
398 'n: skip this changeset\n'
398 'n: skip this changeset\n'
399 'm: merge at this changeset\n'
399 'm: merge at this changeset\n'
400 'p: show patch\n'
400 'p: show patch\n'
401 'c: commit selected changesets\n'
401 'c: commit selected changesets\n'
402 'q: cancel transplant\n'
402 'q: cancel transplant\n'
403 '?: show this help\n')
403 '?: show this help\n')
404
404
405 displayer = cmdutil.show_changeset(ui, repo, opts)
405 displayer = cmdutil.show_changeset(ui, repo, opts)
406 transplants = []
406 transplants = []
407 merges = []
407 merges = []
408 for node in nodes:
408 for node in nodes:
409 displayer.show(repo[node])
409 displayer.show(repo[node])
410 action = None
410 action = None
411 while not action:
411 while not action:
412 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
412 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
413 if action == '?':
413 if action == '?':
414 browsehelp(ui)
414 browsehelp(ui)
415 action = None
415 action = None
416 elif action == 'p':
416 elif action == 'p':
417 parent = repo.changelog.parents(node)[0]
417 parent = repo.changelog.parents(node)[0]
418 for chunk in patch.diff(repo, parent, node):
418 for chunk in patch.diff(repo, parent, node):
419 ui.write(chunk)
419 ui.write(chunk)
420 action = None
420 action = None
421 elif action not in ('y', 'n', 'm', 'c', 'q'):
421 elif action not in ('y', 'n', 'm', 'c', 'q'):
422 ui.write('no such option\n')
422 ui.write('no such option\n')
423 action = None
423 action = None
424 if action == 'y':
424 if action == 'y':
425 transplants.append(node)
425 transplants.append(node)
426 elif action == 'm':
426 elif action == 'm':
427 merges.append(node)
427 merges.append(node)
428 elif action == 'c':
428 elif action == 'c':
429 break
429 break
430 elif action == 'q':
430 elif action == 'q':
431 transplants = ()
431 transplants = ()
432 merges = ()
432 merges = ()
433 break
433 break
434 return (transplants, merges)
434 return (transplants, merges)
435
435
436 def transplant(ui, repo, *revs, **opts):
436 def transplant(ui, repo, *revs, **opts):
437 '''transplant changesets from another branch
437 '''transplant changesets from another branch
438
438
439 Selected changesets will be applied on top of the current working
439 Selected changesets will be applied on top of the current working
440 directory with the log of the original changeset. If --log is
440 directory with the log of the original changeset. If --log is
441 specified, log messages will have a comment appended of the form:
441 specified, log messages will have a comment appended of the form:
442
442
443 (transplanted from CHANGESETHASH)
443 (transplanted from CHANGESETHASH)
444
444
445 You can rewrite the changelog message with the --filter option.
445 You can rewrite the changelog message with the --filter option.
446 Its argument will be invoked with the current changelog message as
446 Its argument will be invoked with the current changelog message as
447 $1 and the patch as $2.
447 $1 and the patch as $2.
448
448
449 If --source/-s is specified, selects changesets from the named
449 If --source/-s is specified, selects changesets from the named
450 repository. If --branch/-b is specified, selects changesets from
450 repository. If --branch/-b is specified, selects changesets from
451 the branch holding the named revision, up to that revision. If
451 the branch holding the named revision, up to that revision. If
452 --all/-a is specified, all changesets on the branch will be
452 --all/-a is specified, all changesets on the branch will be
453 transplanted, otherwise you will be prompted to select the
453 transplanted, otherwise you will be prompted to select the
454 changesets you want.
454 changesets you want.
455
455
456 hg transplant --branch REVISION --all will rebase the selected
456 hg transplant --branch REVISION --all will rebase the selected
457 branch (up to the named revision) onto your current working
457 branch (up to the named revision) onto your current working
458 directory.
458 directory.
459
459
460 You can optionally mark selected transplanted changesets as merge
460 You can optionally mark selected transplanted changesets as merge
461 changesets. You will not be prompted to transplant any ancestors
461 changesets. You will not be prompted to transplant any ancestors
462 of a merged transplant, and you can merge descendants of them
462 of a merged transplant, and you can merge descendants of them
463 normally instead of transplanting them.
463 normally instead of transplanting them.
464
464
465 If no merges or revisions are provided, hg transplant will start
465 If no merges or revisions are provided, hg transplant will start
466 an interactive changeset browser.
466 an interactive changeset browser.
467
467
468 If a changeset application fails, you can fix the merge by hand
468 If a changeset application fails, you can fix the merge by hand
469 and then resume where you left off by calling hg transplant
469 and then resume where you left off by calling hg transplant
470 --continue/-c.
470 --continue/-c.
471 '''
471 '''
472 def getremotechanges(repo, url):
472 def getremotechanges(repo, url):
473 sourcerepo = ui.expandpath(url)
473 sourcerepo = ui.expandpath(url)
474 source = hg.repository(ui, sourcerepo)
474 source = hg.repository(ui, sourcerepo)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
476 if not incoming:
476 if not incoming:
477 return (source, None, None)
477 return (source, None, None)
478
478
479 bundle = None
479 bundle = None
480 if not source.local():
480 if not source.local():
481 if source.capable('changegroupsubset'):
481 if source.capable('changegroupsubset'):
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
483 else:
483 else:
484 cg = source.changegroup(incoming, 'incoming')
484 cg = source.changegroup(incoming, 'incoming')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
487
487
488 return (source, incoming, bundle)
488 return (source, incoming, bundle)
489
489
490 def incwalk(repo, incoming, branches, match=util.always):
490 def incwalk(repo, incoming, branches, match=util.always):
491 if not branches:
491 if not branches:
492 branches=None
492 branches=None
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
494 if match(node):
494 if match(node):
495 yield node
495 yield node
496
496
497 def transplantwalk(repo, root, branches, match=util.always):
497 def transplantwalk(repo, root, branches, match=util.always):
498 if not branches:
498 if not branches:
499 branches = repo.heads()
499 branches = repo.heads()
500 ancestors = []
500 ancestors = []
501 for branch in branches:
501 for branch in branches:
502 ancestors.append(repo.changelog.ancestor(root, branch))
502 ancestors.append(repo.changelog.ancestor(root, branch))
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
504 if match(node):
504 if match(node):
505 yield node
505 yield node
506
506
507 def checkopts(opts, revs):
507 def checkopts(opts, revs):
508 if opts.get('continue'):
508 if opts.get('continue'):
509 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
509 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
510 raise util.Abort(_('--continue is incompatible with '
510 raise util.Abort(_('--continue is incompatible with '
511 'branch, all or merge'))
511 'branch, all or merge'))
512 return
512 return
513 if not (opts.get('source') or revs or
513 if not (opts.get('source') or revs or
514 opts.get('merge') or opts.get('branch')):
514 opts.get('merge') or opts.get('branch')):
515 raise util.Abort(_('no source URL, branch tag or revision '
515 raise util.Abort(_('no source URL, branch tag or revision '
516 'list provided'))
516 'list provided'))
517 if opts.get('all'):
517 if opts.get('all'):
518 if not opts.get('branch'):
518 if not opts.get('branch'):
519 raise util.Abort(_('--all requires a branch revision'))
519 raise util.Abort(_('--all requires a branch revision'))
520 if revs:
520 if revs:
521 raise util.Abort(_('--all is incompatible with a '
521 raise util.Abort(_('--all is incompatible with a '
522 'revision list'))
522 'revision list'))
523
523
524 checkopts(opts, revs)
524 checkopts(opts, revs)
525
525
526 if not opts.get('log'):
526 if not opts.get('log'):
527 opts['log'] = ui.config('transplant', 'log')
527 opts['log'] = ui.config('transplant', 'log')
528 if not opts.get('filter'):
528 if not opts.get('filter'):
529 opts['filter'] = ui.config('transplant', 'filter')
529 opts['filter'] = ui.config('transplant', 'filter')
530
530
531 tp = transplanter(ui, repo)
531 tp = transplanter(ui, repo)
532
532
533 p1, p2 = repo.dirstate.parents()
533 p1, p2 = repo.dirstate.parents()
534 if len(repo) > 0 and p1 == revlog.nullid:
534 if len(repo) > 0 and p1 == revlog.nullid:
535 raise util.Abort(_('no revision checked out'))
535 raise util.Abort(_('no revision checked out'))
536 if not opts.get('continue'):
536 if not opts.get('continue'):
537 if p2 != revlog.nullid:
537 if p2 != revlog.nullid:
538 raise util.Abort(_('outstanding uncommitted merges'))
538 raise util.Abort(_('outstanding uncommitted merges'))
539 m, a, r, d = repo.status()[:4]
539 m, a, r, d = repo.status()[:4]
540 if m or a or r or d:
540 if m or a or r or d:
541 raise util.Abort(_('outstanding local changes'))
541 raise util.Abort(_('outstanding local changes'))
542
542
543 bundle = None
543 bundle = None
544 source = opts.get('source')
544 source = opts.get('source')
545 if source:
545 if source:
546 (source, incoming, bundle) = getremotechanges(repo, source)
546 (source, incoming, bundle) = getremotechanges(repo, source)
547 else:
547 else:
548 source = repo
548 source = repo
549
549
550 try:
550 try:
551 if opts.get('continue'):
551 if opts.get('continue'):
552 tp.resume(repo, source, opts)
552 tp.resume(repo, source, opts)
553 return
553 return
554
554
555 tf=tp.transplantfilter(repo, source, p1)
555 tf=tp.transplantfilter(repo, source, p1)
556 if opts.get('prune'):
556 if opts.get('prune'):
557 prune = [source.lookup(r)
557 prune = [source.lookup(r)
558 for r in cmdutil.revrange(source, opts.get('prune'))]
558 for r in cmdutil.revrange(source, opts.get('prune'))]
559 matchfn = lambda x: tf(x) and x not in prune
559 matchfn = lambda x: tf(x) and x not in prune
560 else:
560 else:
561 matchfn = tf
561 matchfn = tf
562 branches = map(source.lookup, opts.get('branch', ()))
562 branches = map(source.lookup, opts.get('branch', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
564 revmap = {}
564 revmap = {}
565 if revs:
565 if revs:
566 for r in cmdutil.revrange(source, revs):
566 for r in cmdutil.revrange(source, revs):
567 revmap[int(r)] = source.lookup(r)
567 revmap[int(r)] = source.lookup(r)
568 elif opts.get('all') or not merges:
568 elif opts.get('all') or not merges:
569 if source != repo:
569 if source != repo:
570 alltransplants = incwalk(source, incoming, branches,
570 alltransplants = incwalk(source, incoming, branches,
571 match=matchfn)
571 match=matchfn)
572 else:
572 else:
573 alltransplants = transplantwalk(source, p1, branches,
573 alltransplants = transplantwalk(source, p1, branches,
574 match=matchfn)
574 match=matchfn)
575 if opts.get('all'):
575 if opts.get('all'):
576 revs = alltransplants
576 revs = alltransplants
577 else:
577 else:
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
579 merges.extend(newmerges)
579 merges.extend(newmerges)
580 for r in revs:
580 for r in revs:
581 revmap[source.changelog.rev(r)] = r
581 revmap[source.changelog.rev(r)] = r
582 for r in merges:
582 for r in merges:
583 revmap[source.changelog.rev(r)] = r
583 revmap[source.changelog.rev(r)] = r
584
584
585 tp.apply(repo, source, revmap, merges, opts)
585 tp.apply(repo, source, revmap, merges, opts)
586 finally:
586 finally:
587 if bundle:
587 if bundle:
588 source.close()
588 source.close()
589 os.unlink(bundle)
589 os.unlink(bundle)
590
590
591 cmdtable = {
591 cmdtable = {
592 "transplant":
592 "transplant":
593 (transplant,
593 (transplant,
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
597 ('p', 'prune', [], _('skip over REV')),
597 ('p', 'prune', [], _('skip over REV')),
598 ('m', 'merge', [], _('merge at REV')),
598 ('m', 'merge', [], _('merge at REV')),
599 ('', 'log', None, _('append transplant info to log message')),
599 ('', 'log', None, _('append transplant info to log message')),
600 ('c', 'continue', None, _('continue last transplant session '
600 ('c', 'continue', None, _('continue last transplant session '
601 'after repair')),
601 'after repair')),
602 ('', 'filter', '', _('filter changesets through FILTER'))],
602 ('', 'filter', '', _('filter changesets through FILTER'))],
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
604 '[-m REV] [REV]...'))
604 '[-m REV] [REV]...'))
605 }
605 }
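The journal written by log() and read back by parselog() above is a small header-plus-message text format. The following standalone sketch is not part of the extension; the user, the commit message and the 40-character node IDs are made-up placeholders. It shows the format and a minimal parser that mirrors parselog():

    # Sample journal as transplanter.log() would write it; the hex node IDs
    # below are placeholders, not real changeset hashes.
    journal_text = (
        '# User test <test@example.com>\n'
        '# Date 0 0\n'
        '# Node ID %s\n'
        '# Parent %s\n'
        'transplanted: fix off-by-one in frobnicate()\n'
    ) % ('1' * 40, '2' * 40)

    def parse_journal(text):
        # mirrors the header/message split done by transplanter.parselog()
        user = date = node = None
        parents, message = [], []
        inmsg = False
        for line in text.splitlines():
            if inmsg:
                message.append(line)
            elif line.startswith('# User '):
                user = line[7:]
            elif line.startswith('# Date '):
                date = line[7:]
            elif line.startswith('# Node ID '):
                node = line[10:]
            elif line.startswith('# Parent '):
                parents.append(line[9:])
            elif not line.startswith('#'):
                inmsg = True
                message.append(line)
        return node, user, date, '\n'.join(message), parents

    print(parse_journal(journal_text))

A merge transplant writes a second "# Parent" line, which is why parselog() collects the parents into a list and recover() treats two parents as a merge.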
@@ -1,121 +1,120 b''
1 # win32mbcs.py -- MBCS filename support for Mercurial
1 # win32mbcs.py -- MBCS filename support for Mercurial
2 #
2 #
3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
4 #
4 #
5 # Version: 0.2
5 # Version: 0.2
6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2, incorporated herein by reference.
9 # GNU General Public License version 2, incorporated herein by reference.
10 #
10 #
11
11
12 """allow to use MBCS path with problematic encoding.
12 '''allow the use of MBCS paths with problematic encoding
13
13
14 Some MBCS encodings are not good for some path operations (e.g.
14 Some MBCS encodings are not good for some path operations (e.g.
15 splitting a path, case conversion, etc.) with their encoded bytes. We call
15 splitting a path, case conversion, etc.) with their encoded bytes. We call
16 such an encoding (e.g. shift_jis and big5) a "problematic encoding".
16 such an encoding (e.g. shift_jis and big5) a "problematic encoding".
17 This extension can be used to fix the issue with those encodings by
17 This extension can be used to fix the issue with those encodings by
18 wrapping some functions to convert to Unicode string before path
18 wrapping some functions to convert to Unicode string before path
19 operation.
19 operation.
20
20
21 This extension is useful for:
21 This extension is useful for:
22 * Japanese Windows users using shift_jis encoding.
22 * Japanese Windows users using shift_jis encoding.
23 * Chinese Windows users using big5 encoding.
23 * Chinese Windows users using big5 encoding.
24 * All users who use a repository with one of the problematic encodings on
24 * All users who use a repository with one of the problematic encodings on
25 a case-insensitive file system.
25 a case-insensitive file system.
26
26
27 This extension is not needed for:
27 This extension is not needed for:
28 * Any user who uses only ASCII chars in paths.
28 * Any user who uses only ASCII chars in paths.
29 * Any user who does not use any of the problematic encodings.
29 * Any user who does not use any of the problematic encodings.
30
30
31 Note that there are some limitations on using this extension:
31 Note that there are some limitations on using this extension:
32 * You should use a single encoding in one repository.
32 * You should use a single encoding in one repository.
33 * You should set the same encoding for the repository by locale or
33 * You should set the same encoding for the repository by locale or
34 HGENCODING.
34 HGENCODING.
35
35
36 Path encoding conversions are done between Unicode and
36 Path encoding conversions are done between Unicode and
37 encoding.encoding, which is decided by Mercurial from the current locale
37 encoding.encoding, which is decided by Mercurial from the current locale
38 setting or HGENCODING.
38 setting or HGENCODING.
39
39 '''
40 """
41
40
42 import os
41 import os
43 from mercurial.i18n import _
42 from mercurial.i18n import _
44 from mercurial import util, encoding
43 from mercurial import util, encoding
45
44
46 def decode(arg):
45 def decode(arg):
47 if isinstance(arg, str):
46 if isinstance(arg, str):
48 uarg = arg.decode(encoding.encoding)
47 uarg = arg.decode(encoding.encoding)
49 if arg == uarg.encode(encoding.encoding):
48 if arg == uarg.encode(encoding.encoding):
50 return uarg
49 return uarg
51 raise UnicodeError("Not local encoding")
50 raise UnicodeError("Not local encoding")
52 elif isinstance(arg, tuple):
51 elif isinstance(arg, tuple):
53 return tuple(map(decode, arg))
52 return tuple(map(decode, arg))
54 elif isinstance(arg, list):
53 elif isinstance(arg, list):
55 return map(decode, arg)
54 return map(decode, arg)
56 return arg
55 return arg
57
56
58 def encode(arg):
57 def encode(arg):
59 if isinstance(arg, unicode):
58 if isinstance(arg, unicode):
60 return arg.encode(encoding.encoding)
59 return arg.encode(encoding.encoding)
61 elif isinstance(arg, tuple):
60 elif isinstance(arg, tuple):
62 return tuple(map(encode, arg))
61 return tuple(map(encode, arg))
63 elif isinstance(arg, list):
62 elif isinstance(arg, list):
64 return map(encode, arg)
63 return map(encode, arg)
65 return arg
64 return arg
66
65
67 def wrapper(func, args):
66 def wrapper(func, args):
68 # check argument is unicode, then call original
67 # check argument is unicode, then call original
69 for arg in args:
68 for arg in args:
70 if isinstance(arg, unicode):
69 if isinstance(arg, unicode):
71 return func(*args)
70 return func(*args)
72
71
73 try:
72 try:
74 # convert arguments to unicode, call func, then convert back
73 # convert arguments to unicode, call func, then convert back
75 return encode(func(*decode(args)))
74 return encode(func(*decode(args)))
76 except UnicodeError:
75 except UnicodeError:
77 # If not encoded with encoding.encoding, report it then
76 # If not encoded with encoding.encoding, report it then
78 # continue with calling original function.
77 # continue with calling original function.
79 raise util.Abort(_("[win32mbcs] filename conversion fail with"
78 raise util.Abort(_("[win32mbcs] filename conversion fail with"
80 " %s encoding\n") % (encoding.encoding))
79 " %s encoding\n") % (encoding.encoding))
81
80
82 def wrapname(name):
81 def wrapname(name):
83 idx = name.rfind('.')
82 idx = name.rfind('.')
84 module = name[:idx]
83 module = name[:idx]
85 name = name[idx+1:]
84 name = name[idx+1:]
86 module = globals()[module]
85 module = globals()[module]
87 func = getattr(module, name)
86 func = getattr(module, name)
88 def f(*args):
87 def f(*args):
89 return wrapper(func, args)
88 return wrapper(func, args)
90 try:
89 try:
91 f.__name__ = func.__name__ # fail with python23
90 f.__name__ = func.__name__ # fail with python23
92 except Exception:
91 except Exception:
93 pass
92 pass
94 setattr(module, name, f)
93 setattr(module, name, f)
95
94
96 # List of functions to be wrapped.
95 # List of functions to be wrapped.
97 # NOTE: os.path.dirname() and os.path.basename() are safe because
96 # NOTE: os.path.dirname() and os.path.basename() are safe because
98 # they use result of os.path.split()
97 # they use result of os.path.split()
99 funcs = '''os.path.join os.path.split os.path.splitext
98 funcs = '''os.path.join os.path.split os.path.splitext
100 os.path.splitunc os.path.normpath os.path.normcase os.makedirs
99 os.path.splitunc os.path.normpath os.path.normcase os.makedirs
101 util.endswithsep util.splitpath util.checkcase util.fspath'''
100 util.endswithsep util.splitpath util.checkcase util.fspath'''
102
101
103 # codec and alias names of sjis and big5 to be faked.
102 # codec and alias names of sjis and big5 to be faked.
104 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
103 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
105 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
104 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
106 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
105 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
107 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
106 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
108
107
109 def reposetup(ui, repo):
108 def reposetup(ui, repo):
110 # TODO: decide use of config section for this extension
109 # TODO: decide use of config section for this extension
111 if not os.path.supports_unicode_filenames:
110 if not os.path.supports_unicode_filenames:
112 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
111 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
113 return
112 return
114
113
115 # fake is only for relevant environment.
114 # fake is only for relevant environment.
116 if encoding.encoding.lower() in problematic_encodings.split():
115 if encoding.encoding.lower() in problematic_encodings.split():
117 for f in funcs.split():
116 for f in funcs.split():
118 wrapname(f)
117 wrapname(f)
119 ui.debug(_("[win32mbcs] activated with encoding: %s\n")
118 ui.debug(_("[win32mbcs] activated with encoding: %s\n")
120 % encoding.encoding)
119 % encoding.encoding)
121
120
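For illustration only, here is a minimal standalone sketch of the decode-call-encode round trip that wrapper() performs: byte arguments are decoded to Unicode with the local encoding, the wrapped path function runs on Unicode strings, and the results are encoded back. The sketch uses Python 3 syntax (the extension itself targets the Python 2 of this era) and an invented decorator name; it is not the extension's own API:

    import os.path

    ENC = 'shift_jis'   # one of the "problematic" encodings listed above

    def mbcs_safe(func):
        # decode byte arguments, run the real function on Unicode, re-encode results
        def wrapped(*args):
            uargs = tuple(a.decode(ENC) if isinstance(a, bytes) else a for a in args)
            result = func(*uargs)
            if isinstance(result, tuple):
                return tuple(r.encode(ENC) if isinstance(r, str) else r for r in result)
            return result.encode(ENC) if isinstance(result, str) else result
        return wrapped

    safe_split = mbcs_safe(os.path.split)
    # a Japanese path, encoded to shift_jis bytes before the call
    print(safe_split('\u30c6\u30b9\u30c8/\u4f8b.txt'.encode(ENC)))

The real extension achieves the same effect by monkey-patching the functions listed in funcs via wrapname(), so the rest of Mercurial never sees the Unicode detour.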
@@ -1,158 +1,158 b''
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
2 #
2 #
3 # Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''LF <-> CRLF/CR translation utilities
8 '''perform automatic newline conversion
9
9
10 To perform automatic newline conversion, use:
10 To perform automatic newline conversion, use:
11
11
12 [extensions]
12 [extensions]
13 hgext.win32text =
13 hgext.win32text =
14 [encode]
14 [encode]
15 ** = cleverencode:
15 ** = cleverencode:
16 # or ** = macencode:
16 # or ** = macencode:
17
17
18 [decode]
18 [decode]
19 ** = cleverdecode:
19 ** = cleverdecode:
20 # or ** = macdecode:
20 # or ** = macdecode:
21
21
22 If not doing conversion, to make sure you do not commit CRLF/CR by accident:
22 If not doing conversion, to make sure you do not commit CRLF/CR by accident:
23
23
24 [hooks]
24 [hooks]
25 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
25 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
26 # or pretxncommit.cr = python:hgext.win32text.forbidcr
26 # or pretxncommit.cr = python:hgext.win32text.forbidcr
27
27
28 To do the same check on a server to prevent CRLF/CR from being
28 To do the same check on a server to prevent CRLF/CR from being
29 pushed or pulled:
29 pushed or pulled:
30
30
31 [hooks]
31 [hooks]
32 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
32 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
33 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
33 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
34 '''
34 '''
35
35
36 from mercurial.i18n import _
36 from mercurial.i18n import _
37 from mercurial.node import short
37 from mercurial.node import short
38 from mercurial import util
38 from mercurial import util
39 import re
39 import re
40
40
41 # regexp for single LF without CR preceding.
41 # regexp for single LF without CR preceding.
42 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
42 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
43
43
44 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
44 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
45 filterstr = {'\r\n': 'clever', '\r': 'mac'}
45 filterstr = {'\r\n': 'clever', '\r': 'mac'}
46
46
47 def checknewline(s, newline, ui=None, repo=None, filename=None):
47 def checknewline(s, newline, ui=None, repo=None, filename=None):
48 # warn if already has 'newline' in repository.
48 # warn if already has 'newline' in repository.
49 # it might cause unexpected eol conversion.
49 # it might cause unexpected eol conversion.
50 # see issue 302:
50 # see issue 302:
51 # http://www.selenic.com/mercurial/bts/issue302
51 # http://www.selenic.com/mercurial/bts/issue302
52 if newline in s and ui and filename and repo:
52 if newline in s and ui and filename and repo:
53 ui.warn(_('WARNING: %s already has %s line endings\n'
53 ui.warn(_('WARNING: %s already has %s line endings\n'
54 'and does not need EOL conversion by the win32text plugin.\n'
54 'and does not need EOL conversion by the win32text plugin.\n'
55 'Before your next commit, please reconsider your '
55 'Before your next commit, please reconsider your '
56 'encode/decode settings in \nMercurial.ini or %s.\n') %
56 'encode/decode settings in \nMercurial.ini or %s.\n') %
57 (filename, newlinestr[newline], repo.join('hgrc')))
57 (filename, newlinestr[newline], repo.join('hgrc')))
58
58
59 def dumbdecode(s, cmd, **kwargs):
59 def dumbdecode(s, cmd, **kwargs):
60 checknewline(s, '\r\n', **kwargs)
60 checknewline(s, '\r\n', **kwargs)
61 # replace single LF to CRLF
61 # replace single LF to CRLF
62 return re_single_lf.sub('\\1\r\n', s)
62 return re_single_lf.sub('\\1\r\n', s)
63
63
64 def dumbencode(s, cmd):
64 def dumbencode(s, cmd):
65 return s.replace('\r\n', '\n')
65 return s.replace('\r\n', '\n')
66
66
67 def macdumbdecode(s, cmd, **kwargs):
67 def macdumbdecode(s, cmd, **kwargs):
68 checknewline(s, '\r', **kwargs)
68 checknewline(s, '\r', **kwargs)
69 return s.replace('\n', '\r')
69 return s.replace('\n', '\r')
70
70
71 def macdumbencode(s, cmd):
71 def macdumbencode(s, cmd):
72 return s.replace('\r', '\n')
72 return s.replace('\r', '\n')
73
73
74 def cleverdecode(s, cmd, **kwargs):
74 def cleverdecode(s, cmd, **kwargs):
75 if not util.binary(s):
75 if not util.binary(s):
76 return dumbdecode(s, cmd, **kwargs)
76 return dumbdecode(s, cmd, **kwargs)
77 return s
77 return s
78
78
79 def cleverencode(s, cmd):
79 def cleverencode(s, cmd):
80 if not util.binary(s):
80 if not util.binary(s):
81 return dumbencode(s, cmd)
81 return dumbencode(s, cmd)
82 return s
82 return s
83
83
84 def macdecode(s, cmd, **kwargs):
84 def macdecode(s, cmd, **kwargs):
85 if not util.binary(s):
85 if not util.binary(s):
86 return macdumbdecode(s, cmd, **kwargs)
86 return macdumbdecode(s, cmd, **kwargs)
87 return s
87 return s
88
88
89 def macencode(s, cmd):
89 def macencode(s, cmd):
90 if not util.binary(s):
90 if not util.binary(s):
91 return macdumbencode(s, cmd)
91 return macdumbencode(s, cmd)
92 return s
92 return s
93
93
94 _filters = {
94 _filters = {
95 'dumbdecode:': dumbdecode,
95 'dumbdecode:': dumbdecode,
96 'dumbencode:': dumbencode,
96 'dumbencode:': dumbencode,
97 'cleverdecode:': cleverdecode,
97 'cleverdecode:': cleverdecode,
98 'cleverencode:': cleverencode,
98 'cleverencode:': cleverencode,
99 'macdumbdecode:': macdumbdecode,
99 'macdumbdecode:': macdumbdecode,
100 'macdumbencode:': macdumbencode,
100 'macdumbencode:': macdumbencode,
101 'macdecode:': macdecode,
101 'macdecode:': macdecode,
102 'macencode:': macencode,
102 'macencode:': macencode,
103 }
103 }
104
104
105 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
105 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
106 halt = False
106 halt = False
107 seen = set()
107 seen = set()
108 # we try to walk changesets in reverse order from newest to
108 # we try to walk changesets in reverse order from newest to
109 # oldest, so that if we see a file multiple times, we take the
109 # oldest, so that if we see a file multiple times, we take the
110 # newest version as canonical. this prevents us from blocking a
110 # newest version as canonical. this prevents us from blocking a
111 # changegroup that contains an unacceptable commit followed later
111 # changegroup that contains an unacceptable commit followed later
112 # by a commit that fixes the problem.
112 # by a commit that fixes the problem.
113 tip = repo['tip']
113 tip = repo['tip']
114 for rev in xrange(len(repo)-1, repo[node].rev()-1, -1):
114 for rev in xrange(len(repo)-1, repo[node].rev()-1, -1):
115 c = repo[rev]
115 c = repo[rev]
116 for f in c.files():
116 for f in c.files():
117 if f in seen or f not in tip or f not in c:
117 if f in seen or f not in tip or f not in c:
118 continue
118 continue
119 seen.add(f)
119 seen.add(f)
120 data = c[f].data()
120 data = c[f].data()
121 if not util.binary(data) and newline in data:
121 if not util.binary(data) and newline in data:
122 if not halt:
122 if not halt:
123 ui.warn(_('Attempt to commit or push text file(s) '
123 ui.warn(_('Attempt to commit or push text file(s) '
124 'using %s line endings\n') %
124 'using %s line endings\n') %
125 newlinestr[newline])
125 newlinestr[newline])
126 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
126 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
127 halt = True
127 halt = True
128 if halt and hooktype == 'pretxnchangegroup':
128 if halt and hooktype == 'pretxnchangegroup':
129 crlf = newlinestr[newline].lower()
129 crlf = newlinestr[newline].lower()
130 filter = filterstr[newline]
130 filter = filterstr[newline]
131 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
131 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
132 'add to Mercurial.ini or .hg/hgrc:\n'
132 'add to Mercurial.ini or .hg/hgrc:\n'
133 '\n'
133 '\n'
134 '[hooks]\n'
134 '[hooks]\n'
135 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
135 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
136 '\n'
136 '\n'
137 'and also consider adding:\n'
137 'and also consider adding:\n'
138 '\n'
138 '\n'
139 '[extensions]\n'
139 '[extensions]\n'
140 'hgext.win32text =\n'
140 'hgext.win32text =\n'
141 '[encode]\n'
141 '[encode]\n'
142 '** = %sencode:\n'
142 '** = %sencode:\n'
143 '[decode]\n'
143 '[decode]\n'
144 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
144 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
145 return halt
145 return halt
146
146
147 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
147 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
148 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
148 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
149
149
150 def forbidcr(ui, repo, hooktype, node, **kwargs):
150 def forbidcr(ui, repo, hooktype, node, **kwargs):
151 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
151 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
152
152
153 def reposetup(ui, repo):
153 def reposetup(ui, repo):
154 if not repo.local():
154 if not repo.local():
155 return
155 return
156 for name, fn in _filters.iteritems():
156 for name, fn in _filters.iteritems():
157 repo.adddatafilter(name, fn)
157 repo.adddatafilter(name, fn)
158
158
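As a standalone illustration, not part of the extension, the following sketch reproduces the LF to CRLF decode and the CRLF to LF encode performed by dumbdecode() and dumbencode() above, reusing the same re_single_lf pattern:

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def to_crlf(s):
        # what dumbdecode() does on the way out of the repository
        return re_single_lf.sub('\\1\r\n', s)

    def to_lf(s):
        # what dumbencode() does on the way into the repository
        return s.replace('\r\n', '\n')

    text = 'one\ntwo\r\nthree\n'
    assert to_crlf(text) == 'one\r\ntwo\r\nthree\r\n'
    assert to_lf(to_crlf(text)) == 'one\ntwo\nthree\n'

The cleverencode:/cleverdecode: filters simply skip this substitution for content that util.binary() classifies as binary, and the mac variants do the analogous LF <-> CR translation.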
@@ -1,159 +1,159 b''
1 # zeroconf.py - zeroconf support for Mercurial
1 # zeroconf.py - zeroconf support for Mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''zeroconf support for Mercurial repositories
8 '''discover and advertise repositories on the local network
9
9
10 Zeroconf enabled repositories will be announced in a network without
10 Zeroconf enabled repositories will be announced in a network without
11 the need to configure a server or a service. They can be discovered
11 the need to configure a server or a service. They can be discovered
12 without knowing their actual IP address.
12 without knowing their actual IP address.
13
13
14 To allow other people to discover your repository, run "hg serve"
14 To allow other people to discover your repository, run "hg serve"
15 in it.
15 in it.
16
16
17 $ cd test
17 $ cd test
18 $ hg serve
18 $ hg serve
19
19
20 You can discover zeroconf enabled repositories by running "hg paths".
20 You can discover zeroconf enabled repositories by running "hg paths".
21
21
22 $ hg paths
22 $ hg paths
23 zc-test = http://example.com:8000/test
23 zc-test = http://example.com:8000/test
24 '''
24 '''
25
25
26 import Zeroconf, socket, time, os
26 import Zeroconf, socket, time, os
27 from mercurial import ui
27 from mercurial import ui
28 from mercurial import extensions
28 from mercurial import extensions
29 from mercurial.hgweb import hgweb_mod
29 from mercurial.hgweb import hgweb_mod
30 from mercurial.hgweb import hgwebdir_mod
30 from mercurial.hgweb import hgwebdir_mod
31
31
32 # publish
32 # publish
33
33
34 server = None
34 server = None
35 localip = None
35 localip = None
36
36
37 def getip():
37 def getip():
38 # finds external-facing interface without sending any packets (Linux)
38 # finds external-facing interface without sending any packets (Linux)
39 try:
39 try:
40 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
40 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
41 s.connect(('1.0.0.1', 0))
41 s.connect(('1.0.0.1', 0))
42 ip = s.getsockname()[0]
42 ip = s.getsockname()[0]
43 return ip
43 return ip
44 except:
44 except:
45 pass
45 pass
46
46
47 # Generic method, sometimes gives useless results
47 # Generic method, sometimes gives useless results
48 try:
48 try:
49 dumbip = socket.gethostbyaddr(socket.gethostname())[2][0]
49 dumbip = socket.gethostbyaddr(socket.gethostname())[2][0]
50 if not dumbip.startswith('127.') and ':' not in dumbip:
50 if not dumbip.startswith('127.') and ':' not in dumbip:
51 return dumbip
51 return dumbip
52 except socket.gaierror:
52 except socket.gaierror:
53 dumbip = '127.0.0.1'
53 dumbip = '127.0.0.1'
54
54
55 # works elsewhere, but actually sends a packet
55 # works elsewhere, but actually sends a packet
56 try:
56 try:
57 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
57 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
58 s.connect(('1.0.0.1', 1))
58 s.connect(('1.0.0.1', 1))
59 ip = s.getsockname()[0]
59 ip = s.getsockname()[0]
60 return ip
60 return ip
61 except:
61 except:
62 pass
62 pass
63
63
64 return dumbip
64 return dumbip
65
65
66 def publish(name, desc, path, port):
66 def publish(name, desc, path, port):
67 global server, localip
67 global server, localip
68 if not server:
68 if not server:
69 ip = getip()
69 ip = getip()
70 if ip.startswith('127.'):
70 if ip.startswith('127.'):
71 # if we have no internet connection, this can happen.
71 # if we have no internet connection, this can happen.
72 return
72 return
73 localip = socket.inet_aton(ip)
73 localip = socket.inet_aton(ip)
74 server = Zeroconf.Zeroconf(ip)
74 server = Zeroconf.Zeroconf(ip)
75
75
76 hostname = socket.gethostname().split('.')[0]
76 hostname = socket.gethostname().split('.')[0]
77 host = hostname + ".local"
77 host = hostname + ".local"
78 name = "%s-%s" % (hostname, name)
78 name = "%s-%s" % (hostname, name)
79
79
80 # advertise to browsers
80 # advertise to browsers
81 svc = Zeroconf.ServiceInfo('_http._tcp.local.',
81 svc = Zeroconf.ServiceInfo('_http._tcp.local.',
82 name + '._http._tcp.local.',
82 name + '._http._tcp.local.',
83 server = host,
83 server = host,
84 port = port,
84 port = port,
85 properties = {'description': desc,
85 properties = {'description': desc,
86 'path': "/" + path},
86 'path': "/" + path},
87 address = localip, weight = 0, priority = 0)
87 address = localip, weight = 0, priority = 0)
88 server.registerService(svc)
88 server.registerService(svc)
89
89
90 # advertise to Mercurial clients
90 # advertise to Mercurial clients
91 svc = Zeroconf.ServiceInfo('_hg._tcp.local.',
91 svc = Zeroconf.ServiceInfo('_hg._tcp.local.',
92 name + '._hg._tcp.local.',
92 name + '._hg._tcp.local.',
93 server = host,
93 server = host,
94 port = port,
94 port = port,
95 properties = {'description': desc,
95 properties = {'description': desc,
96 'path': "/" + path},
96 'path': "/" + path},
97 address = localip, weight = 0, priority = 0)
97 address = localip, weight = 0, priority = 0)
98 server.registerService(svc)
98 server.registerService(svc)
99
99
100 class hgwebzc(hgweb_mod.hgweb):
100 class hgwebzc(hgweb_mod.hgweb):
101 def __init__(self, repo, name=None):
101 def __init__(self, repo, name=None):
102 super(hgwebzc, self).__init__(repo, name)
102 super(hgwebzc, self).__init__(repo, name)
103 name = self.reponame or os.path.basename(repo.root)
103 name = self.reponame or os.path.basename(repo.root)
104 desc = self.repo.ui.config("web", "description", name)
104 desc = self.repo.ui.config("web", "description", name)
105 publish(name, desc, name, int(repo.ui.config("web", "port", 8000)))
105 publish(name, desc, name, int(repo.ui.config("web", "port", 8000)))
106
106
107 class hgwebdirzc(hgwebdir_mod.hgwebdir):
107 class hgwebdirzc(hgwebdir_mod.hgwebdir):
108 def run(self):
108 def run(self):
109 for r, p in self.repos:
109 for r, p in self.repos:
110 u = self.ui.copy()
110 u = self.ui.copy()
111 u.readconfig(os.path.join(p, '.hg', 'hgrc'))
111 u.readconfig(os.path.join(p, '.hg', 'hgrc'))
112 n = os.path.basename(r)
112 n = os.path.basename(r)
113 publish(n, "hgweb", p, int(u.config("web", "port", 8000)))
113 publish(n, "hgweb", p, int(u.config("web", "port", 8000)))
114 return super(hgwebdirzc, self).run()
114 return super(hgwebdirzc, self).run()
115
115
116 # listen
116 # listen
117
117
118 class listener(object):
118 class listener(object):
119 def __init__(self):
119 def __init__(self):
120 self.found = {}
120 self.found = {}
121 def removeService(self, server, type, name):
121 def removeService(self, server, type, name):
122 if repr(name) in self.found:
122 if repr(name) in self.found:
123 del self.found[repr(name)]
123 del self.found[repr(name)]
124 def addService(self, server, type, name):
124 def addService(self, server, type, name):
125 self.found[repr(name)] = server.getServiceInfo(type, name)
125 self.found[repr(name)] = server.getServiceInfo(type, name)
126
126
127 def getzcpaths():
127 def getzcpaths():
128 ip = getip()
128 ip = getip()
129 if ip.startswith('127.'):
129 if ip.startswith('127.'):
130 return
130 return
131 server = Zeroconf.Zeroconf(ip)
131 server = Zeroconf.Zeroconf(ip)
132 l = listener()
132 l = listener()
133 Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l)
133 Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l)
134 time.sleep(1)
134 time.sleep(1)
135 server.close()
135 server.close()
136 for v in l.found.values():
136 for v in l.found.values():
137 n = v.name[:v.name.index('.')]
137 n = v.name[:v.name.index('.')]
138 n = n.replace(" ", "-")
138 n = n.replace(" ", "-")
139 u = "http://%s:%s%s" % (socket.inet_ntoa(v.address), v.port,
139 u = "http://%s:%s%s" % (socket.inet_ntoa(v.address), v.port,
140 v.properties.get("path", "/"))
140 v.properties.get("path", "/"))
141 yield "zc-" + n, u
141 yield "zc-" + n, u
142
142
143 def config(orig, self, section, key, default=None, untrusted=False):
143 def config(orig, self, section, key, default=None, untrusted=False):
144 if section == "paths" and key.startswith("zc-"):
144 if section == "paths" and key.startswith("zc-"):
145 for n, p in getzcpaths():
145 for n, p in getzcpaths():
146 if n == key:
146 if n == key:
147 return p
147 return p
148 return orig(self, section, key, default, untrusted)
148 return orig(self, section, key, default, untrusted)
149
149
150 def configitems(orig, self, section, untrusted=False):
150 def configitems(orig, self, section, untrusted=False):
151 r = orig(self, section, untrusted)
151 r = orig(self, section, untrusted)
152 if section == "paths":
152 if section == "paths":
153 r += getzcpaths()
153 r += getzcpaths()
154 return r
154 return r
155
155
156 extensions.wrapfunction(ui.ui, 'config', config)
156 extensions.wrapfunction(ui.ui, 'config', config)
157 extensions.wrapfunction(ui.ui, 'configitems', configitems)
157 extensions.wrapfunction(ui.ui, 'configitems', configitems)
158 hgweb_mod.hgweb = hgwebzc
158 hgweb_mod.hgweb = hgwebzc
159 hgwebdir_mod.hgwebdir = hgwebdirzc
159 hgwebdir_mod.hgwebdir = hgwebdirzc
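To make the effect of the config()/configitems() wrappers concrete, here is a self-contained sketch that does not touch the real Zeroconf module: a stand-in for getzcpaths() feeds extra entries into the paths section, which is how discovered repositories appear in "hg paths". The repository name, URL and helper names are invented for the example:

    def discovered():
        # stand-in for getzcpaths(); the name and address are made up
        yield 'zc-test', 'http://192.0.2.10:8000/test'

    def orig_configitems(section):
        # stand-in for the unwrapped ui.configitems()
        return [('default', 'http://example.com/repo')] if section == 'paths' else []

    def configitems(orig, section):
        # same shape as the wrapper above: call the original, then append
        # the zeroconf-discovered paths
        items = list(orig(section))
        if section == 'paths':
            items += list(discovered())
        return items

    for name, url in configitems(orig_configitems, 'paths'):
        print('%s = %s' % (name, url))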
@@ -1,177 +1,177 b''
1 # extensions.py - extension handling for mercurial
1 # extensions.py - extension handling for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import imp, os, sys
8 import imp, os
9 import util, cmdutil, help
9 import util, cmdutil, help
10 from i18n import _, gettext
10 from i18n import _, gettext
11
11
12 _extensions = {}
12 _extensions = {}
13 _order = []
13 _order = []
14
14
15 def extensions():
15 def extensions():
16 for name in _order:
16 for name in _order:
17 module = _extensions[name]
17 module = _extensions[name]
18 if module:
18 if module:
19 yield name, module
19 yield name, module
20
20
21 def find(name):
21 def find(name):
22 '''return module with given extension name'''
22 '''return module with given extension name'''
23 try:
23 try:
24 return _extensions[name]
24 return _extensions[name]
25 except KeyError:
25 except KeyError:
26 for k, v in _extensions.iteritems():
26 for k, v in _extensions.iteritems():
27 if k.endswith('.' + name) or k.endswith('/' + name):
27 if k.endswith('.' + name) or k.endswith('/' + name):
28 return v
28 return v
29 raise KeyError(name)
29 raise KeyError(name)
30
30
31 def loadpath(path, module_name):
31 def loadpath(path, module_name):
32 module_name = module_name.replace('.', '_')
32 module_name = module_name.replace('.', '_')
33 path = os.path.expanduser(path)
33 path = os.path.expanduser(path)
34 if os.path.isdir(path):
34 if os.path.isdir(path):
35 # module/__init__.py style
35 # module/__init__.py style
36 d, f = os.path.split(path.rstrip('/'))
36 d, f = os.path.split(path.rstrip('/'))
37 fd, fpath, desc = imp.find_module(f, [d])
37 fd, fpath, desc = imp.find_module(f, [d])
38 return imp.load_module(module_name, fd, fpath, desc)
38 return imp.load_module(module_name, fd, fpath, desc)
39 else:
39 else:
40 return imp.load_source(module_name, path)
40 return imp.load_source(module_name, path)
41
41
42 def load(ui, name, path):
42 def load(ui, name, path):
43 if name.startswith('hgext.') or name.startswith('hgext/'):
43 if name.startswith('hgext.') or name.startswith('hgext/'):
44 shortname = name[6:]
44 shortname = name[6:]
45 else:
45 else:
46 shortname = name
46 shortname = name
47 if shortname in _extensions:
47 if shortname in _extensions:
48 return
48 return
49 _extensions[shortname] = None
49 _extensions[shortname] = None
50 if path:
50 if path:
51 # the module will be loaded in sys.modules
51 # the module will be loaded in sys.modules
52 # choose a unique name so that it doesn't
52 # choose a unique name so that it doesn't
53 # conflict with other modules
53 # conflict with other modules
54 mod = loadpath(path, 'hgext.%s' % name)
54 mod = loadpath(path, 'hgext.%s' % name)
55 else:
55 else:
56 def importh(name):
56 def importh(name):
57 mod = __import__(name)
57 mod = __import__(name)
58 components = name.split('.')
58 components = name.split('.')
59 for comp in components[1:]:
59 for comp in components[1:]:
60 mod = getattr(mod, comp)
60 mod = getattr(mod, comp)
61 return mod
61 return mod
62 try:
62 try:
63 mod = importh("hgext.%s" % name)
63 mod = importh("hgext.%s" % name)
64 except ImportError:
64 except ImportError:
65 mod = importh(name)
65 mod = importh(name)
66 _extensions[shortname] = mod
66 _extensions[shortname] = mod
67 _order.append(shortname)
67 _order.append(shortname)
68
68
69 uisetup = getattr(mod, 'uisetup', None)
69 uisetup = getattr(mod, 'uisetup', None)
70 if uisetup:
70 if uisetup:
71 uisetup(ui)
71 uisetup(ui)
72
72
73 def loadall(ui):
73 def loadall(ui):
74 result = ui.configitems("extensions")
74 result = ui.configitems("extensions")
75 for (name, path) in result:
75 for (name, path) in result:
76 if path:
76 if path:
77 if path[0] == '!':
77 if path[0] == '!':
78 continue
78 continue
79 try:
79 try:
80 load(ui, name, path)
80 load(ui, name, path)
81 except KeyboardInterrupt:
81 except KeyboardInterrupt:
82 raise
82 raise
83 except Exception, inst:
83 except Exception, inst:
84 if path:
84 if path:
85 ui.warn(_("*** failed to import extension %s from %s: %s\n")
85 ui.warn(_("*** failed to import extension %s from %s: %s\n")
86 % (name, path, inst))
86 % (name, path, inst))
87 else:
87 else:
88 ui.warn(_("*** failed to import extension %s: %s\n")
88 ui.warn(_("*** failed to import extension %s: %s\n")
89 % (name, inst))
89 % (name, inst))
90 if ui.traceback():
90 if ui.traceback():
91 return 1
91 return 1
92
92
93 def wrapcommand(table, command, wrapper):
93 def wrapcommand(table, command, wrapper):
94 aliases, entry = cmdutil.findcmd(command, table)
94 aliases, entry = cmdutil.findcmd(command, table)
95 for alias, e in table.iteritems():
95 for alias, e in table.iteritems():
96 if e is entry:
96 if e is entry:
97 key = alias
97 key = alias
98 break
98 break
99
99
100 origfn = entry[0]
100 origfn = entry[0]
101 def wrap(*args, **kwargs):
101 def wrap(*args, **kwargs):
102 return util.checksignature(wrapper)(
102 return util.checksignature(wrapper)(
103 util.checksignature(origfn), *args, **kwargs)
103 util.checksignature(origfn), *args, **kwargs)
104
104
105 wrap.__doc__ = getattr(origfn, '__doc__')
105 wrap.__doc__ = getattr(origfn, '__doc__')
106 wrap.__module__ = getattr(origfn, '__module__')
106 wrap.__module__ = getattr(origfn, '__module__')
107
107
108 newentry = list(entry)
108 newentry = list(entry)
109 newentry[0] = wrap
109 newentry[0] = wrap
110 table[key] = tuple(newentry)
110 table[key] = tuple(newentry)
111 return entry
111 return entry
112
112
113 def wrapfunction(container, funcname, wrapper):
113 def wrapfunction(container, funcname, wrapper):
114 def wrap(*args, **kwargs):
114 def wrap(*args, **kwargs):
115 return wrapper(origfn, *args, **kwargs)
115 return wrapper(origfn, *args, **kwargs)
116
116
117 origfn = getattr(container, funcname)
117 origfn = getattr(container, funcname)
118 setattr(container, funcname, wrap)
118 setattr(container, funcname, wrap)
119 return origfn
119 return origfn
120
120
121 def disabled():
121 def disabled():
122 '''find disabled extensions from hgext
122 '''find disabled extensions from hgext
123 returns a dict of {name: desc}, and the max name length'''
123 returns a dict of {name: desc}, and the max name length'''
124
124
125 import hgext
125 import hgext
126 extpath = os.path.dirname(os.path.abspath(hgext.__file__))
126 extpath = os.path.dirname(os.path.abspath(hgext.__file__))
127
127
128 exts = {}
128 exts = {}
129 maxlength = 0
129 maxlength = 0
130 for e in os.listdir(extpath):
130 for e in os.listdir(extpath):
131
131
132 if e.endswith('.py'):
132 if e.endswith('.py'):
133 name = e.rsplit('.', 1)[0]
133 name = e.rsplit('.', 1)[0]
134 path = os.path.join(extpath, e)
134 path = os.path.join(extpath, e)
135 else:
135 else:
136 name = e
136 name = e
137 path = os.path.join(extpath, e, '__init__.py')
137 path = os.path.join(extpath, e, '__init__.py')
138 if not os.path.exists(path):
138 if not os.path.exists(path):
139 continue
139 continue
140
140
141 if name in exts or name in _order or name == '__init__':
141 if name in exts or name in _order or name == '__init__':
142 continue
142 continue
143
143
144 try:
144 try:
145 file = open(path)
145 file = open(path)
146 except IOError:
146 except IOError:
147 continue
147 continue
148 else:
148 else:
149 doc = help.moduledoc(file)
149 doc = help.moduledoc(file)
150 file.close()
150 file.close()
151
151
152 if doc: # extracting localized synopsis
152 if doc: # extracting localized synopsis
153 exts[name] = gettext(doc).splitlines()[0]
153 exts[name] = gettext(doc).splitlines()[0]
154 else:
154 else:
155 exts[name] = _('(no help text available)')
155 exts[name] = _('(no help text available)')
156
156
157 if len(name) > maxlength:
157 if len(name) > maxlength:
158 maxlength = len(name)
158 maxlength = len(name)
159
159
160 return exts, maxlength
160 return exts, maxlength
161
161
162 def enabled():
162 def enabled():
163 '''return a dict of {name: desc} of extensions, and the max name length'''
163 '''return a dict of {name: desc} of extensions, and the max name length'''
164
164
165 if not enabled:
165 if not enabled:
166 return {}, 0
166 return {}, 0
167
167
168 exts = {}
168 exts = {}
169 maxlength = 0
169 maxlength = 0
170 exthelps = []
170 exthelps = []
171 for ename, ext in extensions():
171 for ename, ext in extensions():
172 doc = (gettext(ext.__doc__) or _('(no help text available)'))
172 doc = (gettext(ext.__doc__) or _('(no help text available)'))
173 ename = ename.split('.')[-1]
173 ename = ename.split('.')[-1]
174 maxlength = max(len(ename), maxlength)
174 maxlength = max(len(ename), maxlength)
175 exts[ename] = doc.splitlines(0)[0].strip()
175 exts[ename] = doc.splitlines(0)[0].strip()
176
176
177 return exts, maxlength
177 return exts, maxlength
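For context, a small sketch of how these two helpers might be consumed to build a padded listing; this mirrors what help.extshelp() does in the hunk below, and the formatting here is only illustrative:

    exts, width = enabled()
    for name in sorted(exts):
        print ' %s  %s' % (name.ljust(width), exts[name])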
@@ -1,506 +1,513 b''
1 # help.py - help data for mercurial
1 # help.py - help data for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import textwrap
8 from i18n import _
9 from i18n import _
9 import extensions
10 import extensions
10
11
11
12
12 def moduledoc(file):
13 def moduledoc(file):
13 '''return the top-level python documentation for the given file
14 '''return the top-level python documentation for the given file
14
15
15 Loosely inspired by pydoc.source_synopsis(), but rewritten to handle \'''
16 Loosely inspired by pydoc.source_synopsis(), but rewritten to handle \'''
16 as well as """ and to return the whole text instead of just the synopsis'''
17 as well as """ and to return the whole text instead of just the synopsis'''
17 result = []
18 result = []
18
19
19 line = file.readline()
20 line = file.readline()
20 while line[:1] == '#' or not line.strip():
21 while line[:1] == '#' or not line.strip():
21 line = file.readline()
22 line = file.readline()
22 if not line: break
23 if not line: break
23
24
24 start = line[:3]
25 start = line[:3]
25 if start == '"""' or start == "'''":
26 if start == '"""' or start == "'''":
26 line = line[3:]
27 line = line[3:]
27 while line:
28 while line:
28 if line.rstrip().endswith(start):
29 if line.rstrip().endswith(start):
29 line = line.split(start)[0]
30 line = line.split(start)[0]
30 if line:
31 if line:
31 result.append(line)
32 result.append(line)
32 break
33 break
33 elif not line:
34 elif not line:
34 return None # unmatched delimiter
35 return None # unmatched delimiter
35 result.append(line)
36 result.append(line)
36 line = file.readline()
37 line = file.readline()
37 else:
38 else:
38 return None
39 return None
39
40
40 return ''.join(result)
41 return ''.join(result)
41
42
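A quick usage sketch for moduledoc(): given an open file object for an extension module, it returns the module-level docstring text, or None. The path below is purely illustrative:

    f = open('/path/to/hgext/churn.py')    # hypothetical path
    doc = moduledoc(f)
    f.close()
    if doc:
        print doc.splitlines()[0]          # first line is the synopsis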
42 def listexts(header, exts, maxlength):
43 def listexts(header, exts, maxlength):
43 '''return a text listing of the given extensions'''
44 '''return a text listing of the given extensions'''
44 if not exts:
45 if not exts:
45 return ''
46 return ''
46 result = '\n%s\n\n' % header
47 result = '\n%s\n\n' % header
47 for name, desc in sorted(exts.iteritems()):
48 for name, desc in sorted(exts.iteritems()):
48 result += ' %s %s\n' % (name.ljust(maxlength), desc)
49 # wrap desc at 70 characters, just like the main help texts
50 desc = textwrap.wrap(desc, width=70 - maxlength - 4)
51 pad = '\n' + ' ' * (maxlength + 4)
52 result += ' %s %s\n' % (name.ljust(maxlength),
53 pad.join(desc))
49 return result
54 return result
50
55
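To see the effect of the wrapping change above, here is a small illustrative call; the extension name and description are made up, and with a long description the continuation lines wrap at 70 columns and line up under the description column:

    desc = 'command to show statistics about repository history, per author'
    print listexts('enabled extensions:', {'churn': desc}, len('churn'))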
51 def extshelp():
56 def extshelp():
52 doc = _(r'''
57 doc = _(r'''
53 Mercurial has a mechanism for adding new features through the
58 Mercurial has the ability to add new features through the use of
54 use of extensions. Extensions may bring new commands, or new
59 extensions. Extensions may add new commands, add options to
55 hooks, or change Mercurial's behavior.
60 existing commands, change the default behavior of commands, or
61 implement hooks.
56
62
57 Extensions are not loaded by default for a variety of reasons,
63 Extensions are not loaded by default for a variety of reasons:
58 they may be meant for advanced users or provide potentially
64 they can increase startup overhead; they may be meant for
59 dangerous commands (e.g. mq and rebase allow history to be
65 advanced usage only; they may provide potentially dangerous
60 rewritten), they might not be ready for prime-time yet, or
66 abilities (such as letting you destroy or modify history); they
61 they may alter Mercurial's behavior. It is thus up to the user
67 might not be ready for prime time; or they may alter some
62 to activate extensions as desired.
68 usual behaviors of stock Mercurial. It is thus up to the user to
69 activate extensions as needed.
63
70
64 To enable the "foo" extension, either shipped with Mercurial
71 To enable the "foo" extension, either shipped with Mercurial
65 or in the Python search path, create an entry for it in your
72 or in the Python search path, create an entry for it in your
66 hgrc, like this:
73 hgrc, like this:
67
74
68 [extensions]
75 [extensions]
69 foo =
76 foo =
70
77
71 You may also specify the full path to an extension:
78 You may also specify the full path to an extension:
72
79
73 [extensions]
80 [extensions]
74 myfeature = ~/.hgext/myfeature.py
81 myfeature = ~/.hgext/myfeature.py
75
82
76 To explicitly disable an extension enabled in an hgrc of broader
83 To explicitly disable an extension enabled in an hgrc of broader
77 scope, prepend its path with !:
84 scope, prepend its path with !:
78
85
79 [extensions]
86 [extensions]
80 # disabling extension bar residing in /ext/path
87 # disabling extension bar residing in /path/to/extension/bar.py
81 hgext.bar = !/path/to/extension/bar.py
88 hgext.bar = !/path/to/extension/bar.py
82 # ditto, but no path was supplied for extension baz
89 # ditto, but no path was supplied for extension baz
83 hgext.baz = !
90 hgext.baz = !
84 ''')
91 ''')
85
92
86 exts, maxlength = extensions.enabled()
93 exts, maxlength = extensions.enabled()
87 doc += listexts(_('enabled extensions:'), exts, maxlength)
94 doc += listexts(_('enabled extensions:'), exts, maxlength)
88
95
89 exts, maxlength = extensions.disabled()
96 exts, maxlength = extensions.disabled()
90 doc += listexts(_('disabled extensions:'), exts, maxlength)
97 doc += listexts(_('disabled extensions:'), exts, maxlength)
91
98
92 return doc
99 return doc
93
100
94 helptable = (
101 helptable = (
95 (["dates"], _("Date Formats"),
102 (["dates"], _("Date Formats"),
96 _(r'''
103 _(r'''
97 Some commands allow the user to specify a date, e.g.:
104 Some commands allow the user to specify a date, e.g.:
98 * backout, commit, import, tag: Specify the commit date.
105 * backout, commit, import, tag: Specify the commit date.
99 * log, revert, update: Select revision(s) by date.
106 * log, revert, update: Select revision(s) by date.
100
107
101 Many date formats are valid. Here are some examples:
108 Many date formats are valid. Here are some examples:
102
109
103 "Wed Dec 6 13:18:29 2006" (local timezone assumed)
110 "Wed Dec 6 13:18:29 2006" (local timezone assumed)
104 "Dec 6 13:18 -0600" (year assumed, time offset provided)
111 "Dec 6 13:18 -0600" (year assumed, time offset provided)
105 "Dec 6 13:18 UTC" (UTC and GMT are aliases for +0000)
112 "Dec 6 13:18 UTC" (UTC and GMT are aliases for +0000)
106 "Dec 6" (midnight)
113 "Dec 6" (midnight)
107 "13:18" (today assumed)
114 "13:18" (today assumed)
108 "3:39" (3:39AM assumed)
115 "3:39" (3:39AM assumed)
109 "3:39pm" (15:39)
116 "3:39pm" (15:39)
110 "2006-12-06 13:18:29" (ISO 8601 format)
117 "2006-12-06 13:18:29" (ISO 8601 format)
111 "2006-12-6 13:18"
118 "2006-12-6 13:18"
112 "2006-12-6"
119 "2006-12-6"
113 "12-6"
120 "12-6"
114 "12/6"
121 "12/6"
115 "12/6/6" (Dec 6 2006)
122 "12/6/6" (Dec 6 2006)
116
123
117 Lastly, there is Mercurial's internal format:
124 Lastly, there is Mercurial's internal format:
118
125
119 "1165432709 0" (Wed Dec 6 13:18:29 2006 UTC)
126 "1165432709 0" (Wed Dec 6 13:18:29 2006 UTC)
120
127
121 This is the internal representation format for dates. unixtime is
128 This is the internal representation format for dates. unixtime is
122 the number of seconds since the epoch (1970-01-01 00:00 UTC).
129 the number of seconds since the epoch (1970-01-01 00:00 UTC).
123 offset is the offset of the local timezone, in seconds west of UTC
130 offset is the offset of the local timezone, in seconds west of UTC
124 (negative if the timezone is east of UTC).
131 (negative if the timezone is east of UTC).
125
132
126 The log command also accepts date ranges:
133 The log command also accepts date ranges:
127
134
128 "<{datetime}" - at or before a given date/time
135 "<{datetime}" - at or before a given date/time
129 ">{datetime}" - on or after a given date/time
136 ">{datetime}" - on or after a given date/time
130 "{datetime} to {datetime}" - a date range, inclusive
137 "{datetime} to {datetime}" - a date range, inclusive
131 "-{days}" - within a given number of days of today
138 "-{days}" - within a given number of days of today
132 ''')),
139 ''')),
133
140
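As a rough illustration of the internal "unixtime offset" format described above (plain Python, not a Mercurial API; time.timezone stands in for the local offset and ignores daylight saving):

    import time
    unixtime = int(time.time())
    offset = time.timezone     # seconds west of UTC, matching the convention above
    print '%d %d' % (unixtime, offset)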
134 (["patterns"], _("File Name Patterns"),
141 (["patterns"], _("File Name Patterns"),
135 _(r'''
142 _(r'''
136 Mercurial accepts several notations for identifying one or more
143 Mercurial accepts several notations for identifying one or more
137 files at a time.
144 files at a time.
138
145
139 By default, Mercurial treats filenames as shell-style extended
146 By default, Mercurial treats filenames as shell-style extended
140 glob patterns.
147 glob patterns.
141
148
142 Alternate pattern notations must be specified explicitly.
149 Alternate pattern notations must be specified explicitly.
143
150
144 To use a plain path name without any pattern matching, start it
151 To use a plain path name without any pattern matching, start it
145 with "path:". These path names must completely match starting at
152 with "path:". These path names must completely match starting at
146 the current repository root.
153 the current repository root.
147
154
148 To use an extended glob, start a name with "glob:". Globs are
155 To use an extended glob, start a name with "glob:". Globs are
149 rooted at the current directory; a glob such as "*.c" will only
156 rooted at the current directory; a glob such as "*.c" will only
150 match files in the current directory ending with ".c".
157 match files in the current directory ending with ".c".
151
158
152 The supported glob syntax extensions are "**" to match any string
159 The supported glob syntax extensions are "**" to match any string
153 across path separators and "{a,b}" to mean "a or b".
160 across path separators and "{a,b}" to mean "a or b".
154
161
155 To use a Perl/Python regular expression, start a name with "re:".
162 To use a Perl/Python regular expression, start a name with "re:".
156 Regexp pattern matching is anchored at the root of the repository.
163 Regexp pattern matching is anchored at the root of the repository.
157
164
158 Plain examples:
165 Plain examples:
159
166
160 path:foo/bar a name bar in a directory named foo in the root of
167 path:foo/bar a name bar in a directory named foo in the root of
161 the repository
168 the repository
162 path:path:name a file or directory named "path:name"
169 path:path:name a file or directory named "path:name"
163
170
164 Glob examples:
171 Glob examples:
165
172
166 glob:*.c any name ending in ".c" in the current directory
173 glob:*.c any name ending in ".c" in the current directory
167 *.c any name ending in ".c" in the current directory
174 *.c any name ending in ".c" in the current directory
168 **.c any name ending in ".c" in any subdirectory of the
175 **.c any name ending in ".c" in any subdirectory of the
169 current directory including itself.
176 current directory including itself.
170 foo/*.c any name ending in ".c" in the directory foo
177 foo/*.c any name ending in ".c" in the directory foo
171 foo/**.c any name ending in ".c" in any subdirectory of foo
178 foo/**.c any name ending in ".c" in any subdirectory of foo
172 including itself.
179 including itself.
173
180
174 Regexp examples:
181 Regexp examples:
175
182
176 re:.*\.c$ any name ending in ".c", anywhere in the repository
183 re:.*\.c$ any name ending in ".c", anywhere in the repository
177
184
178 ''')),
185 ''')),
179
186
180 (['environment', 'env'], _('Environment Variables'),
187 (['environment', 'env'], _('Environment Variables'),
181 _(r'''
188 _(r'''
182 HG::
189 HG::
183 Path to the 'hg' executable, automatically passed when running
190 Path to the 'hg' executable, automatically passed when running
184 hooks, extensions or external tools. If unset or empty, this is
191 hooks, extensions or external tools. If unset or empty, this is
185 the hg executable's name if it's frozen, or an executable named
192 the hg executable's name if it's frozen, or an executable named
186 'hg' (with %PATHEXT% [defaulting to COM/EXE/BAT/CMD] extensions on
193 'hg' (with %PATHEXT% [defaulting to COM/EXE/BAT/CMD] extensions on
187 Windows) is searched for.
194 Windows) is searched for.
188
195
189 HGEDITOR::
196 HGEDITOR::
190 This is the name of the editor to run when committing. See EDITOR.
197 This is the name of the editor to run when committing. See EDITOR.
191
198
192 (deprecated, use .hgrc)
199 (deprecated, use .hgrc)
193
200
194 HGENCODING::
201 HGENCODING::
195 This overrides the default locale setting detected by Mercurial.
202 This overrides the default locale setting detected by Mercurial.
196 This setting is used to convert data including usernames,
203 This setting is used to convert data including usernames,
197 changeset descriptions, tag names, and branches. This setting can
204 changeset descriptions, tag names, and branches. This setting can
198 be overridden with the --encoding command-line option.
205 be overridden with the --encoding command-line option.
199
206
200 HGENCODINGMODE::
207 HGENCODINGMODE::
201 This sets Mercurial's behavior for handling unknown characters
208 This sets Mercurial's behavior for handling unknown characters
202 while transcoding user input. The default is "strict", which
209 while transcoding user input. The default is "strict", which
203 causes Mercurial to abort if it can't map a character. Other
210 causes Mercurial to abort if it can't map a character. Other
204 settings include "replace", which replaces unknown characters, and
211 settings include "replace", which replaces unknown characters, and
205 "ignore", which drops them. This setting can be overridden with
212 "ignore", which drops them. This setting can be overridden with
206 the --encodingmode command-line option.
213 the --encodingmode command-line option.
207
214
208 HGMERGE::
215 HGMERGE::
209 An executable to use for resolving merge conflicts. The program
216 An executable to use for resolving merge conflicts. The program
210 will be executed with three arguments: local file, remote file,
217 will be executed with three arguments: local file, remote file,
211 ancestor file.
218 ancestor file.
212
219
213 (deprecated, use .hgrc)
220 (deprecated, use .hgrc)
214
221
215 HGRCPATH::
222 HGRCPATH::
216 A list of files or directories to search for hgrc files. Item
223 A list of files or directories to search for hgrc files. Item
217 separator is ":" on Unix, ";" on Windows. If HGRCPATH is not set,
224 separator is ":" on Unix, ";" on Windows. If HGRCPATH is not set,
218 platform default search path is used. If empty, only the .hg/hgrc
225 platform default search path is used. If empty, only the .hg/hgrc
219 from the current repository is read.
226 from the current repository is read.
220
227
221 For each element in HGRCPATH:
228 For each element in HGRCPATH:
222 * if it's a directory, all files ending with .rc are added
229 * if it's a directory, all files ending with .rc are added
223 * otherwise, the file itself will be added
230 * otherwise, the file itself will be added
224
231
225 HGUSER::
232 HGUSER::
226 This is the string used as the author of a commit. If not set,
233 This is the string used as the author of a commit. If not set,
227 available values will be considered in this order:
234 available values will be considered in this order:
228
235
229 * HGUSER (deprecated)
236 * HGUSER (deprecated)
230 * hgrc files from the HGRCPATH
237 * hgrc files from the HGRCPATH
231 * EMAIL
238 * EMAIL
232 * interactive prompt
239 * interactive prompt
233 * LOGNAME (with '@hostname' appended)
240 * LOGNAME (with '@hostname' appended)
234
241
235 (deprecated, use .hgrc)
242 (deprecated, use .hgrc)
236
243
237 EMAIL::
244 EMAIL::
238 May be used as the author of a commit; see HGUSER.
245 May be used as the author of a commit; see HGUSER.
239
246
240 LOGNAME::
247 LOGNAME::
241 May be used as the author of a commit; see HGUSER.
248 May be used as the author of a commit; see HGUSER.
242
249
243 VISUAL::
250 VISUAL::
244 This is the name of the editor to use when committing. See EDITOR.
251 This is the name of the editor to use when committing. See EDITOR.
245
252
246 EDITOR::
253 EDITOR::
247 Sometimes Mercurial needs to open a text file in an editor for a
254 Sometimes Mercurial needs to open a text file in an editor for a
248 user to modify, for example when writing commit messages. The
255 user to modify, for example when writing commit messages. The
249 editor it uses is determined by looking at the environment
256 editor it uses is determined by looking at the environment
250 variables HGEDITOR, VISUAL and EDITOR, in that order. The first
257 variables HGEDITOR, VISUAL and EDITOR, in that order. The first
251 non-empty one is chosen. If all of them are empty, the editor
258 non-empty one is chosen. If all of them are empty, the editor
252 defaults to 'vi'.
259 defaults to 'vi'.
253
260
254 PYTHONPATH::
261 PYTHONPATH::
255 This is used by Python to find imported modules and may need to be
262 This is used by Python to find imported modules and may need to be
256 set appropriately if this Mercurial is not installed system-wide.
263 set appropriately if this Mercurial is not installed system-wide.
257 ''')),
264 ''')),
258
265
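The editor selection order described under HGEDITOR, VISUAL and EDITOR can be modelled in a few lines of plain Python; this is a sketch of the documented behaviour, not Mercurial's own implementation:

    import os
    def find_editor():
        for var in ('HGEDITOR', 'VISUAL', 'EDITOR'):
            value = os.environ.get(var)
            if value:
                return value
        return 'vi'    # documented fallback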
259 (['revs', 'revisions'], _('Specifying Single Revisions'),
266 (['revs', 'revisions'], _('Specifying Single Revisions'),
260 _(r'''
267 _(r'''
261 Mercurial supports several ways to specify individual revisions.
268 Mercurial supports several ways to specify individual revisions.
262
269
263 A plain integer is treated as a revision number. Negative integers
270 A plain integer is treated as a revision number. Negative integers
264 are treated as topological offsets from the tip, with -1 denoting
271 are treated as topological offsets from the tip, with -1 denoting
265 the tip. As such, negative numbers are only useful if you've
272 the tip. As such, negative numbers are only useful if you've
266 memorized your local tree numbers and want to save typing a single
273 memorized your local tree numbers and want to save typing a single
267 digit. This editor suggests copy and paste.
274 digit. This editor suggests copy and paste.
268
275
269 A 40-digit hexadecimal string is treated as a unique revision
276 A 40-digit hexadecimal string is treated as a unique revision
270 identifier.
277 identifier.
271
278
272 A hexadecimal string less than 40 characters long is treated as a
279 A hexadecimal string less than 40 characters long is treated as a
273 unique revision identifier, and referred to as a short-form
280 unique revision identifier, and referred to as a short-form
274 identifier. A short-form identifier is only valid if it is the
281 identifier. A short-form identifier is only valid if it is the
275 prefix of exactly one full-length identifier.
282 prefix of exactly one full-length identifier.
276
283
277 Any other string is treated as a tag name, which is a symbolic
284 Any other string is treated as a tag name, which is a symbolic
278 name associated with a revision identifier. Tag names may not
285 name associated with a revision identifier. Tag names may not
279 contain the ":" character.
286 contain the ":" character.
280
287
281 The reserved name "tip" is a special tag that always identifies
288 The reserved name "tip" is a special tag that always identifies
282 the most recent revision.
289 the most recent revision.
283
290
284 The reserved name "null" indicates the null revision. This is the
291 The reserved name "null" indicates the null revision. This is the
285 revision of an empty repository, and the parent of revision 0.
292 revision of an empty repository, and the parent of revision 0.
286
293
287 The reserved name "." indicates the working directory parent. If
294 The reserved name "." indicates the working directory parent. If
288 no working directory is checked out, it is equivalent to null. If
295 no working directory is checked out, it is equivalent to null. If
289 an uncommitted merge is in progress, "." is the revision of the
296 an uncommitted merge is in progress, "." is the revision of the
290 first parent.
297 first parent.
291 ''')),
298 ''')),
292
299
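A toy model of the short-form identifier rule above: a prefix is valid only if exactly one full identifier starts with it. The node values are abbreviated, made-up stand-ins, and this is not how Mercurial's revlog lookup is implemented:

    nodes = ['b56ce7b07c52', 'b5f0de4b3d23']    # abbreviated, illustrative hashes
    def lookup(prefix):
        matches = [n for n in nodes if n.startswith(prefix)]
        if len(matches) == 1:
            return matches[0]
        raise LookupError('ambiguous or unknown identifier: %r' % prefix)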
293 (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
300 (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
294 _(r'''
301 _(r'''
295 When Mercurial accepts more than one revision, they may be
302 When Mercurial accepts more than one revision, they may be
296 specified individually, or provided as a topologically continuous
303 specified individually, or provided as a topologically continuous
297 range, separated by the ":" character.
304 range, separated by the ":" character.
298
305
299 The syntax of range notation is [BEGIN]:[END], where BEGIN and END
306 The syntax of range notation is [BEGIN]:[END], where BEGIN and END
300 are revision identifiers. Both BEGIN and END are optional. If
307 are revision identifiers. Both BEGIN and END are optional. If
301 BEGIN is not specified, it defaults to revision number 0. If END
308 BEGIN is not specified, it defaults to revision number 0. If END
302 is not specified, it defaults to the tip. The range ":" thus means
309 is not specified, it defaults to the tip. The range ":" thus means
303 "all revisions".
310 "all revisions".
304
311
305 If BEGIN is greater than END, revisions are treated in reverse
312 If BEGIN is greater than END, revisions are treated in reverse
306 order.
313 order.
307
314
308 A range acts as a closed interval. This means that a range of 3:5
315 A range acts as a closed interval. This means that a range of 3:5
309 gives 3, 4 and 5. Similarly, a range of 9:6 gives 9, 8, 7, and 6.
316 gives 3, 4 and 5. Similarly, a range of 9:6 gives 9, 8, 7, and 6.
310 ''')),
317 ''')),
311
318
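The closed-interval and reverse-order behaviour described above can be mimicked in a couple of lines of plain Python; this is an illustration of the semantics, not Mercurial's range parser:

    def revrange(begin, end):
        if begin <= end:
            return range(begin, end + 1)
        return range(begin, end - 1, -1)
    print revrange(3, 5)   # [3, 4, 5]
    print revrange(9, 6)   # [9, 8, 7, 6]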
312 (['diffs'], _('Diff Formats'),
319 (['diffs'], _('Diff Formats'),
313 _(r'''
320 _(r'''
314 Mercurial's default format for showing changes between two
321 Mercurial's default format for showing changes between two
315 versions of a file is compatible with the unified format of GNU
322 versions of a file is compatible with the unified format of GNU
316 diff, which can be used by GNU patch and many other standard
323 diff, which can be used by GNU patch and many other standard
317 tools.
324 tools.
318
325
319 While this standard format is often enough, it does not encode the
326 While this standard format is often enough, it does not encode the
320 following information:
327 following information:
321
328
322 - executable status and other permission bits
329 - executable status and other permission bits
323 - copy or rename information
330 - copy or rename information
324 - changes in binary files
331 - changes in binary files
325 - creation or deletion of empty files
332 - creation or deletion of empty files
326
333
327 Mercurial also supports the extended diff format from the git VCS
334 Mercurial also supports the extended diff format from the git VCS
328 which addresses these limitations. The git diff format is not
335 which addresses these limitations. The git diff format is not
329 produced by default because a few widespread tools still do not
336 produced by default because a few widespread tools still do not
330 understand this format.
337 understand this format.
331
338
332 This means that when generating diffs from a Mercurial repository
339 This means that when generating diffs from a Mercurial repository
333 (e.g. with "hg export"), you should be careful about things like
340 (e.g. with "hg export"), you should be careful about things like
334 file copies and renames or other things mentioned above, because
341 file copies and renames or other things mentioned above, because
335 when applying a standard diff to a different repository, this
342 when applying a standard diff to a different repository, this
336 extra information is lost. Mercurial's internal operations (like
343 extra information is lost. Mercurial's internal operations (like
337 push and pull) are not affected by this, because they use an
344 push and pull) are not affected by this, because they use an
338 internal binary format for communicating changes.
345 internal binary format for communicating changes.
339
346
340 To make Mercurial produce the git extended diff format, use the
347 To make Mercurial produce the git extended diff format, use the
341 --git option available for many commands, or set 'git = True' in
348 --git option available for many commands, or set 'git = True' in
342 the [diff] section of your hgrc. You do not need to set this
349 the [diff] section of your hgrc. You do not need to set this
343 option when importing diffs in this format or using them in the mq
350 option when importing diffs in this format or using them in the mq
344 extension.
351 extension.
345 ''')),
352 ''')),
346 (['templating'], _('Template Usage'),
353 (['templating'], _('Template Usage'),
347 _(r'''
354 _(r'''
348 Mercurial allows you to customize output of commands through
355 Mercurial allows you to customize output of commands through
349 templates. You can either pass in a template from the command
356 templates. You can either pass in a template from the command
350 line, via the --template option, or select an existing
357 line, via the --template option, or select an existing
351 template-style (--style).
358 template-style (--style).
352
359
353 You can customize output for any "log-like" command: log,
360 You can customize output for any "log-like" command: log,
354 outgoing, incoming, tip, parents, heads and glog.
361 outgoing, incoming, tip, parents, heads and glog.
355
362
356 Three styles are packaged with Mercurial: default (the style used
363 Three styles are packaged with Mercurial: default (the style used
357 when no explicit preference is passed), compact and changelog.
364 when no explicit preference is passed), compact and changelog.
358 Usage:
365 Usage:
359
366
360 $ hg log -r1 --style changelog
367 $ hg log -r1 --style changelog
361
368
362 A template is a piece of text, with markup to invoke variable
369 A template is a piece of text, with markup to invoke variable
363 expansion:
370 expansion:
364
371
365 $ hg log -r1 --template "{node}\n"
372 $ hg log -r1 --template "{node}\n"
366 b56ce7b07c52de7d5fd79fb89701ea538af65746
373 b56ce7b07c52de7d5fd79fb89701ea538af65746
367
374
368 Strings in curly braces are called keywords. The availability of
375 Strings in curly braces are called keywords. The availability of
369 keywords depends on the exact context of the templater. These
376 keywords depends on the exact context of the templater. These
370 keywords are usually available for templating a log-like command:
377 keywords are usually available for templating a log-like command:
371
378
372 - author: String. The unmodified author of the changeset.
379 - author: String. The unmodified author of the changeset.
373 - branches: String. The name of the branch on which the changeset
380 - branches: String. The name of the branch on which the changeset
374 was committed. Will be empty if the branch name was default.
381 was committed. Will be empty if the branch name was default.
375 - date: Date information. The date when the changeset was committed.
382 - date: Date information. The date when the changeset was committed.
376 - desc: String. The text of the changeset description.
383 - desc: String. The text of the changeset description.
377 - diffstat: String. Statistics of changes with the following
384 - diffstat: String. Statistics of changes with the following
378 format: "modified files: +added/-removed lines"
385 format: "modified files: +added/-removed lines"
379 - files: List of strings. All files modified, added, or removed by
386 - files: List of strings. All files modified, added, or removed by
380 this changeset.
387 this changeset.
381 - file_adds: List of strings. Files added by this changeset.
388 - file_adds: List of strings. Files added by this changeset.
382 - file_mods: List of strings. Files modified by this changeset.
389 - file_mods: List of strings. Files modified by this changeset.
383 - file_dels: List of strings. Files removed by this changeset.
390 - file_dels: List of strings. Files removed by this changeset.
384 - node: String. The changeset identification hash, as a
391 - node: String. The changeset identification hash, as a
385 40-character hexadecimal string.
392 40-character hexadecimal string.
386 - parents: List of strings. The parents of the changeset.
393 - parents: List of strings. The parents of the changeset.
387 - rev: Integer. The repository-local changeset revision number.
394 - rev: Integer. The repository-local changeset revision number.
388 - tags: List of strings. Any tags associated with the changeset.
395 - tags: List of strings. Any tags associated with the changeset.
389
396
390 The "date" keyword does not produce human-readable output. If you
397 The "date" keyword does not produce human-readable output. If you
391 want to use a date in your output, you can use a filter to process
398 want to use a date in your output, you can use a filter to process
392 it. Filters are functions which return a string based on the input
399 it. Filters are functions which return a string based on the input
393 variable. You can also use a chain of filters to get the desired
400 variable. You can also use a chain of filters to get the desired
394 output:
401 output:
395
402
396 $ hg tip --template "{date|isodate}\n"
403 $ hg tip --template "{date|isodate}\n"
397 2008-08-21 18:22 +0000
404 2008-08-21 18:22 +0000
398
405
399 List of filters:
406 List of filters:
400
407
401 - addbreaks: Any text. Add an XHTML "<br />" tag before the end of
408 - addbreaks: Any text. Add an XHTML "<br />" tag before the end of
402 every line except the last.
409 every line except the last.
403 - age: Date. Returns a human-readable date/time difference between
410 - age: Date. Returns a human-readable date/time difference between
404 the given date/time and the current date/time.
411 the given date/time and the current date/time.
405 - basename: Any text. Treats the text as a path, and returns the
412 - basename: Any text. Treats the text as a path, and returns the
406 last component of the path after splitting by the path
413 last component of the path after splitting by the path
407 separator (ignoring trailing separators). For example,
414 separator (ignoring trailing separators). For example,
408 "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "bar".
415 "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "bar".
409 - stripdir: Treat the text as a path and strip a directory level, if
416 - stripdir: Treat the text as a path and strip a directory level, if
410 possible. For example, "foo" and "foo/bar" become "foo".
417 possible. For example, "foo" and "foo/bar" become "foo".
411 - date: Date. Returns a date in a Unix date format, including
418 - date: Date. Returns a date in a Unix date format, including
412 the timezone: "Mon Sep 04 15:13:13 2006 0700".
419 the timezone: "Mon Sep 04 15:13:13 2006 0700".
413 - domain: Any text. Finds the first string that looks like an
420 - domain: Any text. Finds the first string that looks like an
414 email address, and extracts just the domain component.
421 email address, and extracts just the domain component.
415 Example: 'User <user@example.com>' becomes 'example.com'.
422 Example: 'User <user@example.com>' becomes 'example.com'.
416 - email: Any text. Extracts the first string that looks like an
423 - email: Any text. Extracts the first string that looks like an
417 email address. Example: 'User <user@example.com>' becomes
424 email address. Example: 'User <user@example.com>' becomes
418 'user@example.com'.
425 'user@example.com'.
419 - escape: Any text. Replaces the special XML/XHTML characters "&",
426 - escape: Any text. Replaces the special XML/XHTML characters "&",
420 "<" and ">" with XML entities.
427 "<" and ">" with XML entities.
421 - fill68: Any text. Wraps the text to fit in 68 columns.
428 - fill68: Any text. Wraps the text to fit in 68 columns.
422 - fill76: Any text. Wraps the text to fit in 76 columns.
429 - fill76: Any text. Wraps the text to fit in 76 columns.
423 - firstline: Any text. Returns the first line of text.
430 - firstline: Any text. Returns the first line of text.
424 - nonempty: Any text. Returns '(none)' if the string is empty.
431 - nonempty: Any text. Returns '(none)' if the string is empty.
425 - hgdate: Date. Returns the date as a pair of numbers:
432 - hgdate: Date. Returns the date as a pair of numbers:
426 "1157407993 25200" (Unix timestamp, timezone offset).
433 "1157407993 25200" (Unix timestamp, timezone offset).
427 - isodate: Date. Returns the date in ISO 8601 format.
434 - isodate: Date. Returns the date in ISO 8601 format.
428 - localdate: Date. Converts a date to local date.
435 - localdate: Date. Converts a date to local date.
429 - obfuscate: Any text. Returns the input text rendered as a
436 - obfuscate: Any text. Returns the input text rendered as a
430 sequence of XML entities.
437 sequence of XML entities.
431 - person: Any text. Returns the text before an email address.
438 - person: Any text. Returns the text before an email address.
432 - rfc822date: Date. Returns a date using the same format used
439 - rfc822date: Date. Returns a date using the same format used
433 in email headers.
440 in email headers.
434 - short: Changeset hash. Returns the short form of a changeset
441 - short: Changeset hash. Returns the short form of a changeset
435 hash, i.e. a 12-character hexadecimal string.
442 hash, i.e. a 12-character hexadecimal string.
436 - shortdate: Date. Returns a date like "2006-09-18".
443 - shortdate: Date. Returns a date like "2006-09-18".
437 - strip: Any text. Strips all leading and trailing whitespace.
444 - strip: Any text. Strips all leading and trailing whitespace.
438 - tabindent: Any text. Returns the text, with every line except
445 - tabindent: Any text. Returns the text, with every line except
439 the first starting with a tab character.
446 the first starting with a tab character.
440 - urlescape: Any text. Escapes all "special" characters. For
447 - urlescape: Any text. Escapes all "special" characters. For
441 example, "foo bar" becomes "foo%20bar".
448 example, "foo bar" becomes "foo%20bar".
442 - user: Any text. Returns the user portion of an email address.
449 - user: Any text. Returns the user portion of an email address.
443 ''')),
450 ''')),
444
451
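As a toy model of how filter chains such as "{date|isodate}" behave, here are ordinary Python functions standing in for template filters; none of this is Mercurial's templater API:

    filters = {
        'strip': lambda s: s.strip(),
        'firstline': lambda s: s.splitlines()[0] if s else '',
    }
    def apply_filters(value, chain):
        for name in chain:
            value = filters[name](value)
        return value
    print apply_filters('  hello\nworld  ', ['strip', 'firstline'])   # -> 'hello'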
445 (['urls'], _('URL Paths'),
452 (['urls'], _('URL Paths'),
446 _(r'''
453 _(r'''
447 Valid URLs are of the form:
454 Valid URLs are of the form:
448
455
449 local/filesystem/path (or file://local/filesystem/path)
456 local/filesystem/path (or file://local/filesystem/path)
450 http://[user[:pass]@]host[:port]/[path]
457 http://[user[:pass]@]host[:port]/[path]
451 https://[user[:pass]@]host[:port]/[path]
458 https://[user[:pass]@]host[:port]/[path]
452 ssh://[user[:pass]@]host[:port]/[path]
459 ssh://[user[:pass]@]host[:port]/[path]
453
460
454 Paths in the local filesystem can either point to Mercurial
461 Paths in the local filesystem can either point to Mercurial
455 repositories or to bundle files (as created by 'hg bundle' or
462 repositories or to bundle files (as created by 'hg bundle' or
456 'hg incoming --bundle').
463 'hg incoming --bundle').
457
464
458 An optional identifier after # indicates a particular branch, tag,
465 An optional identifier after # indicates a particular branch, tag,
459 or changeset to use from the remote repository.
466 or changeset to use from the remote repository.
460
467
461 Some features, such as pushing to http:// and https:// URLs, are
468 Some features, such as pushing to http:// and https:// URLs, are
462 only possible if the feature is explicitly enabled on the remote
469 only possible if the feature is explicitly enabled on the remote
463 Mercurial server.
470 Mercurial server.
464
471
465 Some notes about using SSH with Mercurial:
472 Some notes about using SSH with Mercurial:
466 - SSH requires an accessible shell account on the destination
473 - SSH requires an accessible shell account on the destination
467 machine and a copy of hg in the remote path, or one specified via the
474 machine and a copy of hg in the remote path, or one specified via the
468 remotecmd option.
475 remotecmd option.
469 - path is relative to the remote user's home directory by default.
476 - path is relative to the remote user's home directory by default.
470 Use an extra slash at the start of a path to specify an absolute path:
477 Use an extra slash at the start of a path to specify an absolute path:
471 ssh://example.com//tmp/repository
478 ssh://example.com//tmp/repository
472 - Mercurial doesn't use its own compression via SSH; the right
479 - Mercurial doesn't use its own compression via SSH; the right
473 thing to do is to configure it in your ~/.ssh/config, e.g.:
480 thing to do is to configure it in your ~/.ssh/config, e.g.:
474 Host *.mylocalnetwork.example.com
481 Host *.mylocalnetwork.example.com
475 Compression no
482 Compression no
476 Host *
483 Host *
477 Compression yes
484 Compression yes
478 Alternatively specify "ssh -C" as your ssh command in your hgrc
485 Alternatively specify "ssh -C" as your ssh command in your hgrc
479 or with the --ssh command line option.
486 or with the --ssh command line option.
480
487
481 These URLs can all be stored in your hgrc with path aliases under
488 These URLs can all be stored in your hgrc with path aliases under
482 the [paths] section like so:
489 the [paths] section like so:
483 [paths]
490 [paths]
484 alias1 = URL1
491 alias1 = URL1
485 alias2 = URL2
492 alias2 = URL2
486 ...
493 ...
487
494
488 You can then use the alias for any command that uses a URL (for
495 You can then use the alias for any command that uses a URL (for
489 example 'hg pull alias1' would pull from the 'alias1' path).
496 example 'hg pull alias1' would pull from the 'alias1' path).
490
497
491 Two path aliases are special because they are used as defaults
498 Two path aliases are special because they are used as defaults
492 when you do not provide the URL to a command:
499 when you do not provide the URL to a command:
493
500
494 default:
501 default:
495 When you create a repository with hg clone, the clone command
502 When you create a repository with hg clone, the clone command
496 saves the location of the source repository as the new
503 saves the location of the source repository as the new
497 repository's 'default' path. This is then used when you omit
504 repository's 'default' path. This is then used when you omit
498 the path from push- and pull-like commands (including incoming and
505 the path from push- and pull-like commands (including incoming and
499 outgoing).
506 outgoing).
500
507
501 default-push:
508 default-push:
502 The push command will look for a path named 'default-push', and
509 The push command will look for a path named 'default-push', and
503 prefer it over 'default' if both are defined.
510 prefer it over 'default' if both are defined.
504 ''')),
511 ''')),
505 (["extensions"], _("Using additional features"), extshelp),
512 (["extensions"], _("Using additional features"), extshelp),
506 )
513 )