extensions: document that `testedwith = 'internal'` is special...
Augie Fackler
r25186:80c5b266 default

The requested changes are too big and content was truncated.

@@ -1,491 +1,495 @@
# synthrepo.py - repo synthesis
#
# Copyright 2012 Facebook
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''synthesize structurally interesting change history

This extension is useful for creating a repository with properties
that are statistically similar to an existing repository. During
analysis, a simple probability table is constructed from the history
of an existing repository. During synthesis, these properties are
reconstructed.

Properties that are analyzed and synthesized include the following:

- Lines added or removed when an existing file is modified
- Number and sizes of files added
- Number of files removed
- Line lengths
- Topological distance to parent changeset(s)
- Probability of a commit being a merge
- Probability of a newly added file being added to a new directory
- Interarrival time, and time zone, of commits
- Number of files in each directory

A few obvious properties that are not currently handled realistically:

- Merges are treated as regular commits with two parents, which is not
  realistic
- Modifications are not treated as operations on hunks of lines, but
  as insertions and deletions of randomly chosen single lines
- Committer ID (always random)
- Executability of files
- Symlinks and binary files are ignored
'''

import bisect, collections, itertools, json, os, random, time, sys
from mercurial import cmdutil, context, patch, scmutil, util, hg
from mercurial.i18n import _
from mercurial.node import nullrev, nullid, short

# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'

cmdtable = {}
command = cmdutil.command(cmdtable)

newfile = set(('new fi', 'rename', 'copy f', 'copy t'))

def zerodict():
    return collections.defaultdict(lambda: 0)

def roundto(x, k):
    if x > k * 2:
        return int(round(x / float(k)) * k)
    return int(round(x))

def parsegitdiff(lines):
    filename, mar, lineadd, lineremove = None, None, zerodict(), 0
    binary = False
    for line in lines:
        start = line[:6]
        if start == 'diff -':
            if filename:
                yield filename, mar, lineadd, lineremove, binary
            mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
            filename = patch.gitre.match(line).group(1)
        elif start in newfile:
            mar = 'a'
        elif start == 'GIT bi':
            binary = True
        elif start == 'delete':
            mar = 'r'
        elif start:
            s = start[0]
            if s == '-' and not line.startswith('--- '):
                lineremove += 1
            elif s == '+' and not line.startswith('+++ '):
                lineadd[roundto(len(line) - 1, 5)] += 1
    if filename:
        yield filename, mar, lineadd, lineremove, binary

@command('analyze',
         [('o', 'output', '', _('write output to given file'), _('FILE')),
          ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
         _('hg analyze'), optionalrepo=True)
def analyze(ui, repo, *revs, **opts):
    '''create a simple model of a repository to use for later synthesis

    This command examines every changeset in the given range (or all
    of history if none are specified) and creates a simple statistical
    model of the history of the repository. It also measures the directory
    structure of the repository as checked out.

    The model is written out to a JSON file, and can be used by
    :hg:`synthesize` to create or augment a repository with synthetic
    commits that have a structure that is statistically similar to the
    analyzed repository.
    '''
    root = repo.root
    if not root.endswith(os.path.sep):
        root += os.path.sep

    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        revs = [':']

    output = opts['output']
    if not output:
        output = os.path.basename(root) + '.json'

    if output == '-':
        fp = sys.stdout
    else:
        fp = open(output, 'w')

    # Always obtain file counts of each directory in the given root directory.
    def onerror(e):
        ui.warn(_('error walking directory structure: %s\n') % e)

    dirs = {}
    rootprefixlen = len(root)
    for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
        dirpathfromroot = dirpath[rootprefixlen:]
        dirs[dirpathfromroot] = len(filenames)
        if '.hg' in dirnames:
            dirnames.remove('.hg')

    lineschanged = zerodict()
    children = zerodict()
    p1distance = zerodict()
    p2distance = zerodict()
    linesinfilesadded = zerodict()
    fileschanged = zerodict()
    filesadded = zerodict()
    filesremoved = zerodict()
    linelengths = zerodict()
    interarrival = zerodict()
    parents = zerodict()
    dirsadded = zerodict()
    tzoffset = zerodict()

    # If a mercurial repo is available, also model the commit history.
    if repo:
        revs = scmutil.revrange(repo, revs)
        revs.sort()

        progress = ui.progress
        _analyzing = _('analyzing')
        _changesets = _('changesets')
        _total = len(revs)

        for i, rev in enumerate(revs):
            progress(_analyzing, i, unit=_changesets, total=_total)
            ctx = repo[rev]
            pl = ctx.parents()
            pctx = pl[0]
            prev = pctx.rev()
            children[prev] += 1
            p1distance[rev - prev] += 1
            parents[len(pl)] += 1
            tzoffset[ctx.date()[1]] += 1
            if len(pl) > 1:
                p2distance[rev - pl[1].rev()] += 1
            if prev == rev - 1:
                lastctx = pctx
            else:
                lastctx = repo[rev - 1]
            if lastctx.rev() != nullrev:
                timedelta = ctx.date()[0] - lastctx.date()[0]
                interarrival[roundto(timedelta, 300)] += 1
            diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
            fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
            for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
                if isbin:
                    continue
                added = sum(lineadd.itervalues(), 0)
                if mar == 'm':
                    if added and lineremove:
                        lineschanged[roundto(added, 5),
                                     roundto(lineremove, 5)] += 1
                        filechanges += 1
                elif mar == 'a':
                    fileadds += 1
                    if '/' in filename:
                        filedir = filename.rsplit('/', 1)[0]
                        if filedir not in pctx.dirs():
                            diradds += 1
                    linesinfilesadded[roundto(added, 5)] += 1
                elif mar == 'r':
                    fileremoves += 1
                for length, count in lineadd.iteritems():
                    linelengths[length] += count
            fileschanged[filechanges] += 1
            filesadded[fileadds] += 1
            dirsadded[diradds] += 1
            filesremoved[fileremoves] += 1

    invchildren = zerodict()

    for rev, count in children.iteritems():
        invchildren[count] += 1

    if output != '-':
        ui.status(_('writing output to %s\n') % output)

    def pronk(d):
        return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)

    json.dump({'revs': len(revs),
               'initdirs': pronk(dirs),
               'lineschanged': pronk(lineschanged),
               'children': pronk(invchildren),
               'fileschanged': pronk(fileschanged),
               'filesadded': pronk(filesadded),
               'linesinfilesadded': pronk(linesinfilesadded),
               'dirsadded': pronk(dirsadded),
               'filesremoved': pronk(filesremoved),
               'linelengths': pronk(linelengths),
               'parents': pronk(parents),
               'p1distance': pronk(p1distance),
               'p2distance': pronk(p2distance),
               'interarrival': pronk(interarrival),
               'tzoffset': pronk(tzoffset),
               },
              fp)
    fp.close()

@command('synthesize',
         [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
          ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
          ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
         _('hg synthesize [OPTION].. DESCFILE'))
def synthesize(ui, repo, descpath, **opts):
    '''synthesize commits based on a model of an existing repository

    The model must have been generated by :hg:`analyze`. Commits will
    be generated randomly according to the probabilities described in
    the model. If --initfiles is set, the repository will be seeded with
    the given number files following the modeled repository's directory
    structure.

    When synthesizing new content, commit descriptions, and user
    names, words will be chosen randomly from a dictionary that is
    presumed to contain one word per line. Use --dict to specify the
    path to an alternate dictionary to use.
    '''
    try:
        fp = hg.openpath(ui, descpath)
    except Exception, err:
        raise util.Abort('%s: %s' % (descpath, err[0].strerror))
    desc = json.load(fp)
    fp.close()

    def cdf(l):
        if not l:
            return [], []
        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
        t = float(sum(probs, 0))
        s, cdfs = 0, []
        for v in probs:
            s += v
            cdfs.append(s / t)
        return vals, cdfs

    lineschanged = cdf(desc['lineschanged'])
    fileschanged = cdf(desc['fileschanged'])
    filesadded = cdf(desc['filesadded'])
    dirsadded = cdf(desc['dirsadded'])
    filesremoved = cdf(desc['filesremoved'])
    linelengths = cdf(desc['linelengths'])
    parents = cdf(desc['parents'])
    p1distance = cdf(desc['p1distance'])
    p2distance = cdf(desc['p2distance'])
    interarrival = cdf(desc['interarrival'])
    linesinfilesadded = cdf(desc['linesinfilesadded'])
    tzoffset = cdf(desc['tzoffset'])

    dictfile = opts.get('dict') or '/usr/share/dict/words'
    try:
        fp = open(dictfile, 'rU')
    except IOError, err:
        raise util.Abort('%s: %s' % (dictfile, err.strerror))
    words = fp.read().splitlines()
    fp.close()

    initdirs = {}
    if desc['initdirs']:
        for k, v in desc['initdirs']:
            initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
        initdirs = renamedirs(initdirs, words)
    initdirscdf = cdf(initdirs)

    def pick(cdf):
        return cdf[0][bisect.bisect_left(cdf[1], random.random())]

    def pickpath():
        return os.path.join(pick(initdirscdf), random.choice(words))

    def makeline(minimum=0):
        total = max(minimum, pick(linelengths))
        c, l = 0, []
        while c < total:
            w = random.choice(words)
            c += len(w) + 1
            l.append(w)
        return ' '.join(l)

    wlock = repo.wlock()
    lock = repo.lock()

    nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))

    progress = ui.progress
    _synthesizing = _('synthesizing')
    _files = _('initial files')
    _changesets = _('changesets')

    # Synthesize a single initial revision adding files to the repo according
    # to the modeled directory structure.
    initcount = int(opts['initfiles'])
    if initcount and initdirs:
        pctx = repo[None].parents()[0]
        dirs = set(pctx.dirs())
        files = {}

        def validpath(path):
            # Don't pick filenames which are already directory names.
            if path in dirs:
                return False
            # Don't pick directories which were used as file names.
            while path:
                if path in files:
                    return False
                path = os.path.dirname(path)
            return True

        for i in xrange(0, initcount):
            ui.progress(_synthesizing, i, unit=_files, total=initcount)

            path = pickpath()
            while not validpath(path):
                path = pickpath()
            data = '%s contents\n' % path
            files[path] = context.memfilectx(repo, path, data)
            dir = os.path.dirname(path)
            while dir and dir not in dirs:
                dirs.add(dir)
                dir = os.path.dirname(dir)

        def filectxfn(repo, memctx, path):
            return files[path]

        ui.progress(_synthesizing, None)
        message = 'synthesized wide repo with %d files' % (len(files),)
        mc = context.memctx(repo, [pctx.node(), nullid], message,
                            files.iterkeys(), filectxfn, ui.username(),
                            '%d %d' % util.makedate())
        initnode = mc.commit()
        if ui.debugflag:
            hexfn = hex
        else:
            hexfn = short
        ui.status(_('added commit %s with %d files\n')
                  % (hexfn(initnode), len(files)))

    # Synthesize incremental revisions to the repository, adding repo depth.
    count = int(opts['count'])
    heads = set(map(repo.changelog.rev, repo.heads()))
    for i in xrange(count):
        progress(_synthesizing, i, unit=_changesets, total=count)

        node = repo.changelog.node
        revs = len(repo)

        def pickhead(heads, distance):
            if heads:
                lheads = sorted(heads)
                rev = revs - min(pick(distance), revs)
                if rev < lheads[-1]:
                    rev = lheads[bisect.bisect_left(lheads, rev)]
                else:
                    rev = lheads[-1]
                return rev, node(rev)
            return nullrev, nullid

        r1 = revs - min(pick(p1distance), revs)
        p1 = node(r1)

        # the number of heads will grow without bound if we use a pure
        # model, so artificially constrain their proliferation
        toomanyheads = len(heads) > random.randint(1, 20)
        if p2distance[0] and (pick(parents) == 2 or toomanyheads):
            r2, p2 = pickhead(heads.difference([r1]), p2distance)
        else:
            r2, p2 = nullrev, nullid

        pl = [p1, p2]
        pctx = repo[r1]
        mf = pctx.manifest()
        mfk = mf.keys()
        changes = {}
        if mfk:
            for __ in xrange(pick(fileschanged)):
                for __ in xrange(10):
                    fctx = pctx.filectx(random.choice(mfk))
                    path = fctx.path()
                    if not (path in nevertouch or fctx.isbinary() or
                            'l' in fctx.flags()):
                        break
                lines = fctx.data().splitlines()
                add, remove = pick(lineschanged)
                for __ in xrange(remove):
                    if not lines:
                        break
                    del lines[random.randrange(0, len(lines))]
                for __ in xrange(add):
                    lines.insert(random.randint(0, len(lines)), makeline())
                path = fctx.path()
                changes[path] = context.memfilectx(repo, path,
                                                   '\n'.join(lines) + '\n')
            for __ in xrange(pick(filesremoved)):
                path = random.choice(mfk)
                for __ in xrange(10):
                    path = random.choice(mfk)
                    if path not in changes:
                        changes[path] = None
                        break
        if filesadded:
            dirs = list(pctx.dirs())
            dirs.insert(0, '')
        for __ in xrange(pick(filesadded)):
            pathstr = ''
            while pathstr in dirs:
                path = [random.choice(dirs)]
                if pick(dirsadded):
                    path.append(random.choice(words))
                path.append(random.choice(words))
                pathstr = '/'.join(filter(None, path))
            data = '\n'.join(makeline()
                             for __ in xrange(pick(linesinfilesadded))) + '\n'
            changes[pathstr] = context.memfilectx(repo, pathstr, data)
        def filectxfn(repo, memctx, path):
            return changes[path]
        if not changes:
            continue
        if revs:
            date = repo['tip'].date()[0] + pick(interarrival)
        else:
            date = time.time() - (86400 * count)
        # dates in mercurial must be positive, fit in 32-bit signed integers.
        date = min(0x7fffffff, max(0, date))
        user = random.choice(words) + '@' + random.choice(words)
        mc = context.memctx(repo, pl, makeline(minimum=2),
                            sorted(changes.iterkeys()),
                            filectxfn, user, '%d %d' % (date, pick(tzoffset)))
        newnode = mc.commit()
        heads.add(repo.changelog.rev(newnode))
        heads.discard(r1)
        heads.discard(r2)

    lock.release()
    wlock.release()

def renamedirs(dirs, words):
    '''Randomly rename the directory names in the per-dir file count dict.'''
    wordgen = itertools.cycle(words)
    replacements = {'': ''}
    def rename(dirpath):
        '''Recursively rename the directory and all path prefixes.

        The mapping from path to renamed path is stored for all path prefixes
        as in dynamic programming, ensuring linear runtime and consistent
        renaming regardless of iteration order through the model.
        '''
        if dirpath in replacements:
            return replacements[dirpath]
        head, _ = os.path.split(dirpath)
        if head:
            head = rename(head)
        else:
            head = ''
        renamed = os.path.join(head, wordgen.next())
        replacements[dirpath] = renamed
        return renamed
    result = []
    for dirpath, count in dirs.iteritems():
        result.append([rename(dirpath.lstrip(os.sep)), count])
    return result
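
The comment block added in each of these files documents a convention rather than an API: only extensions that ship with Mercurial itself should declare testedwith = 'internal'. As a rough illustration (the extension name, command, and version string below are hypothetical and not part of this changeset), a third-party extension would instead list the Mercurial releases it was actually tested against, or leave the attribute unset:

# myextension.py - hypothetical third-party extension (illustration only)
#
# Bundled extensions, like the ones touched by this changeset, use
# testedwith = 'internal'; an out-of-tree extension should instead name
# the Mercurial versions it was tested with, or omit the attribute.
from mercurial import cmdutil

cmdtable = {}
command = cmdutil.command(cmdtable)

# Space-separated version list; '3.3 3.4' is purely illustrative.
testedwith = '3.3 3.4'

@command('hello', [], 'hg hello')
def hello(ui, repo, **opts):
    '''print a greeting (toy command for this example)'''
    ui.write('hello from a third-party extension\n')
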
@@ -1,316 +1,320 @@
# acl.py - changeset access control for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''hooks for controlling repository access

This hook makes it possible to allow or deny write access to given
branches and paths of a repository when receiving incoming changesets
via pretxnchangegroup and pretxncommit.

The authorization is matched based on the local user name on the
system where the hook runs, and not the committer of the original
changeset (since the latter is merely informative).

The acl hook is best used along with a restricted shell like hgsh,
preventing authenticating users from doing anything other than pushing
or pulling. The hook is not safe to use if users have interactive
shell access, as they can then disable the hook. Nor is it safe if
remote users share an account, because then there is no way to
distinguish them.

The order in which access checks are performed is:

1) Deny list for branches (section ``acl.deny.branches``)
2) Allow list for branches (section ``acl.allow.branches``)
3) Deny list for paths (section ``acl.deny``)
4) Allow list for paths (section ``acl.allow``)

The allow and deny sections take key-value pairs.

Branch-based Access Control
---------------------------

Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
have branch-based access control. Keys in these sections can be
either:

- a branch name, or
- an asterisk, to match any branch;

The corresponding values can be either:

- a comma-separated list containing users and groups, or
- an asterisk, to match anyone;

You can add the "!" prefix to a user or group name to invert the sense
of the match.

Path-based Access Control
-------------------------

Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
access control. Keys in these sections accept a subtree pattern (with
a glob syntax by default). The corresponding values follow the same
syntax as the other sections above.

Groups
------

Group names must be prefixed with an ``@`` symbol. Specifying a group
name has the same effect as specifying all the users in that group.

You can define group members in the ``acl.groups`` section.
If a group name is not defined there, and Mercurial is running under
a Unix-like system, the list of users will be taken from the OS.
Otherwise, an exception will be raised.

Example Configuration
---------------------

::

  [hooks]

  # Use this if you want to check access restrictions at commit time
  pretxncommit.acl = python:hgext.acl.hook

  # Use this if you want to check access restrictions for pull, push,
  # bundle and serve.
  pretxnchangegroup.acl = python:hgext.acl.hook

  [acl]
  # Allow or deny access for incoming changes only if their source is
  # listed here, let them pass otherwise. Source is "serve" for all
  # remote access (http or ssh), "push", "pull" or "bundle" when the
  # related commands are run locally.
  # Default: serve
  sources = serve

  [acl.deny.branches]

  # Everyone is denied to the frozen branch:
  frozen-branch = *

  # A bad user is denied on all branches:
  * = bad-user

  [acl.allow.branches]

  # A few users are allowed on branch-a:
  branch-a = user-1, user-2, user-3

  # Only one user is allowed on branch-b:
  branch-b = user-1

  # The super user is allowed on any branch:
  * = super-user

  # Everyone is allowed on branch-for-tests:
  branch-for-tests = *

  [acl.deny]
  # This list is checked first. If a match is found, acl.allow is not
  # checked. All users are granted access if acl.deny is not present.
  # Format for both lists: glob pattern = user, ..., @group, ...

  # To match everyone, use an asterisk for the user:
  # my/glob/pattern = *

  # user6 will not have write access to any file:
  ** = user6

  # Group "hg-denied" will not have write access to any file:
  ** = @hg-denied

  # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
  # everyone being able to change all other files. See below.
  src/main/resources/DONT-TOUCH-THIS.txt = *

  [acl.allow]
  # if acl.allow is not present, all users are allowed by default
  # empty acl.allow = no users allowed

  # User "doc_writer" has write access to any file under the "docs"
  # folder:
  docs/** = doc_writer

  # User "jack" and group "designers" have write access to any file
  # under the "images" folder:
  images/** = jack, @designers

  # Everyone (except for "user6" and "@hg-denied" - see acl.deny above)
  # will have write access to any file under the "resources" folder
  # (except for 1 file. See acl.deny):
  src/main/resources/** = *

  .hgtags = release_engineer

Examples using the "!" prefix
.............................

Suppose there's a branch that only a given user (or group) should be able to
push to, and you don't want to restrict access to any other branch that may
be created.

The "!" prefix allows you to prevent anyone except a given user or group to
push changesets in a given branch or path.

In the examples below, we will:
1) Deny access to branch "ring" to anyone but user "gollum"
2) Deny access to branch "lake" to anyone but members of the group "hobbit"
3) Deny access to a file to anyone but user "gollum"

::

  [acl.allow.branches]
  # Empty

  [acl.deny.branches]

  # 1) only 'gollum' can commit to branch 'ring';
  # 'gollum' and anyone else can still commit to any other branch.
  ring = !gollum

  # 2) only members of the group 'hobbit' can commit to branch 'lake';
  # 'hobbit' members and anyone else can still commit to any other branch.
  lake = !@hobbit

  # You can also deny access based on file paths:

  [acl.allow]
  # Empty

  [acl.deny]
  # 3) only 'gollum' can change the file below;
  # 'gollum' and anyone else can still change any other file.
  /misty/mountains/cave/ring = !gollum

'''

from mercurial.i18n import _
from mercurial import util, match
import getpass, urllib

# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'

def _getusers(ui, group):

    # First, try to use group definition from section [acl.groups]
    hgrcusers = ui.configlist('acl.groups', group)
    if hgrcusers:
        return hgrcusers

    ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
    # If no users found in group definition, get users from OS-level group
    try:
        return util.groupmembers(group)
    except KeyError:
        raise util.Abort(_("group '%s' is undefined") % group)

def _usermatch(ui, user, usersorgroups):

    if usersorgroups == '*':
        return True

    for ug in usersorgroups.replace(',', ' ').split():

        if ug.startswith('!'):
            # Test for excluded user or group. Format:
            # if ug is a user name: !username
            # if ug is a group name: !@groupname
            ug = ug[1:]
            if not ug.startswith('@') and user != ug \
                or ug.startswith('@') and user not in _getusers(ui, ug[1:]):
                return True

        # Test for user or group. Format:
        # if ug is a user name: username
        # if ug is a group name: @groupname
        elif user == ug \
             or ug.startswith('@') and user in _getusers(ui, ug[1:]):
            return True

    return False

def buildmatch(ui, repo, user, key):
    '''return tuple of (match function, list enabled).'''
    if not ui.has_section(key):
        ui.debug('acl: %s not enabled\n' % key)
        return None

    pats = [pat for pat, users in ui.configitems(key)
            if _usermatch(ui, user, users)]
    ui.debug('acl: %s enabled, %d entries for user %s\n' %
             (key, len(pats), user))

    # Branch-based ACL
    if not repo:
        if pats:
            # If there's an asterisk (meaning "any branch"), always return True;
            # Otherwise, test if b is in pats
            if '*' in pats:
                return util.always
            return lambda b: b in pats
        return util.never

    # Path-based ACL
    if pats:
        return match.match(repo.root, '', pats)
    return util.never

def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
        raise util.Abort(_('config error - hook type "%s" cannot stop '
                           'incoming changesets nor commits') % hooktype)
    if (hooktype == 'pretxnchangegroup' and
        source not in ui.config('acl', 'sources', 'serve').split()):
        ui.debug('acl: changes have source "%s" - skipping\n' % source)
        return

    user = None
    if source == 'serve' and 'url' in kwargs:
        url = kwargs['url'].split(':')
        if url[0] == 'remote' and url[1].startswith('http'):
            user = urllib.unquote(url[3])

    if user is None:
        user = getpass.getuser()

    ui.debug('acl: checking access for user "%s"\n' % user)

    cfg = ui.config('acl', 'config')
    if cfg:
        ui.readconfig(cfg, sections=['acl.groups', 'acl.allow.branches',
                                     'acl.deny.branches', 'acl.allow',
                                     'acl.deny'])

    allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
    denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
    allow = buildmatch(ui, repo, user, 'acl.allow')
    deny = buildmatch(ui, repo, user, 'acl.deny')

    for rev in xrange(repo[node], len(repo)):
        ctx = repo[rev]
        branch = ctx.branch()
        if denybranches and denybranches(branch):
            raise util.Abort(_('acl: user "%s" denied on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        if allowbranches and not allowbranches(branch):
            raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
                 % (ctx, branch))

        for f in ctx.files():
            if deny and deny(f):
                raise util.Abort(_('acl: user "%s" denied on "%s"'
                                   ' (changeset "%s")') % (user, f, ctx))
            if allow and not allow(f):
                raise util.Abort(_('acl: user "%s" not allowed on "%s"'
                                   ' (changeset "%s")') % (user, f, ctx))
        ui.debug('acl: path access granted: "%s"\n' % ctx)
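
One way to see why 'internal' is treated as special: tooling that compares an extension's testedwith value against the running Mercurial version can skip bundled extensions entirely, since they always match the release they ship with. The helper below is only a hypothetical sketch of that idea (the function name is invented; this is not the actual Mercurial dispatch code):

# Hypothetical sketch: decide whether an extension looks untested with the
# running Mercurial. Extensions declaring testedwith = 'internal' ship with
# Mercurial itself and are never flagged.
def seemsuntested(testedwith, currentversion):
    if testedwith == 'internal':
        return False  # bundled with Mercurial, assumed to match
    if not testedwith:
        return True   # nothing declared by the author
    tested = testedwith.split()
    major = '.'.join(currentversion.split('.')[:2])
    # treat a '3.4.x' release as covered if '3.4' or '3.4.x' is listed
    return not any(v == currentversion or v.startswith(major) for v in tested)

# seemsuntested('internal', '3.4.1') -> False
# seemsuntested('3.3 3.4', '3.4.1')  -> False
# seemsuntested('2.9', '3.4.1')      -> True
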
@@ -1,158 +1,162 @@
# blackbox.py - log repository events to a file for post-mortem debugging
#
# Copyright 2010 Nicolas Dumazet
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""log repository events to a blackbox for debugging

Logs event information to .hg/blackbox.log to help debug and diagnose problems.
The events that get logged can be configured via the blackbox.track config key.
Examples::

  [blackbox]
  track = *

  [blackbox]
  track = command, commandfinish, commandexception, exthook, pythonhook

  [blackbox]
  track = incoming

  [blackbox]
  # limit the size of a log file
  maxsize = 1.5 MB
  # rotate up to N log files when the current one gets too big
  maxfiles = 3

"""

from mercurial import util, cmdutil
from mercurial.i18n import _
import errno, os, re

cmdtable = {}
command = cmdutil.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'
lastblackbox = None

def wrapui(ui):
    class blackboxui(ui.__class__):
        @util.propertycache
        def track(self):
            return self.configlist('blackbox', 'track', ['*'])

        def _openlogfile(self):
            def rotate(oldpath, newpath):
                try:
                    os.unlink(newpath)
                except OSError, err:
                    if err.errno != errno.ENOENT:
                        self.debug("warning: cannot remove '%s': %s\n" %
                                   (newpath, err.strerror))
                try:
                    if newpath:
                        os.rename(oldpath, newpath)
                except OSError, err:
                    if err.errno != errno.ENOENT:
                        self.debug("warning: cannot rename '%s' to '%s': %s\n" %
                                   (newpath, oldpath, err.strerror))

            fp = self._bbopener('blackbox.log', 'a')
            maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
            if maxsize > 0:
                st = os.fstat(fp.fileno())
                if st.st_size >= maxsize:
                    path = fp.name
                    fp.close()
                    maxfiles = self.configint('blackbox', 'maxfiles', 7)
                    for i in xrange(maxfiles - 1, 1, -1):
                        rotate(oldpath='%s.%d' % (path, i - 1),
                               newpath='%s.%d' % (path, i))
                    rotate(oldpath=path,
                           newpath=maxfiles > 0 and path + '.1')
                    fp = self._bbopener('blackbox.log', 'a')
            return fp

        def log(self, event, *msg, **opts):
            global lastblackbox
            super(blackboxui, self).log(event, *msg, **opts)

            if not '*' in self.track and not event in self.track:
                return

            if util.safehasattr(self, '_blackbox'):
                blackbox = self._blackbox
            elif util.safehasattr(self, '_bbopener'):
                try:
                    self._blackbox = self._openlogfile()
                except (IOError, OSError), err:
                    self.debug('warning: cannot write to blackbox.log: %s\n' %
                               err.strerror)
                    del self._bbopener
                    self._blackbox = None
                blackbox = self._blackbox
            else:
                # certain ui instances exist outside the context of
                # a repo, so just default to the last blackbox that
                # was seen.
                blackbox = lastblackbox

            if blackbox:
                date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
                user = util.getuser()
                formattedmsg = msg[0] % msg[1:]
                try:
                    blackbox.write('%s %s> %s' % (date, user, formattedmsg))
                except IOError, err:
                    self.debug('warning: cannot write to blackbox.log: %s\n' %
                               err.strerror)
                lastblackbox = blackbox

        def setrepo(self, repo):
            self._bbopener = repo.vfs

    ui.__class__ = blackboxui

def uisetup(ui):
    wrapui(ui)

def reposetup(ui, repo):
    # During 'hg pull' a httppeer repo is created to represent the remote repo.
    # It doesn't have a .hg directory to put a blackbox in, so we don't do
    # the blackbox setup for it.
    if not repo.local():
127 return
131 return
128
132
129 if util.safehasattr(ui, 'setrepo'):
133 if util.safehasattr(ui, 'setrepo'):
130 ui.setrepo(repo)
134 ui.setrepo(repo)
131
135
132 @command('^blackbox',
136 @command('^blackbox',
133 [('l', 'limit', 10, _('the number of events to show')),
137 [('l', 'limit', 10, _('the number of events to show')),
134 ],
138 ],
135 _('hg blackbox [OPTION]...'))
139 _('hg blackbox [OPTION]...'))
136 def blackbox(ui, repo, *revs, **opts):
140 def blackbox(ui, repo, *revs, **opts):
137 '''view the recent repository events
141 '''view the recent repository events
138 '''
142 '''
139
143
140 if not os.path.exists(repo.join('blackbox.log')):
144 if not os.path.exists(repo.join('blackbox.log')):
141 return
145 return
142
146
143 limit = opts.get('limit')
147 limit = opts.get('limit')
144 blackbox = repo.vfs('blackbox.log', 'r')
148 blackbox = repo.vfs('blackbox.log', 'r')
145 lines = blackbox.read().split('\n')
149 lines = blackbox.read().split('\n')
146
150
147 count = 0
151 count = 0
148 output = []
152 output = []
149 for line in reversed(lines):
153 for line in reversed(lines):
150 if count >= limit:
154 if count >= limit:
151 break
155 break
152
156
153 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
157 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
154 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
158 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
155 count += 1
159 count += 1
156 output.append(line)
160 output.append(line)
157
161
158 ui.status('\n'.join(reversed(output)))
162 ui.status('\n'.join(reversed(output)))
@@ -1,910 +1,914 b''
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2011-4 Jim Hague <jim.hague@acm.org>
4 # Copyright 2011-4 Jim Hague <jim.hague@acm.org>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''hooks for integrating with the Bugzilla bug tracker
9 '''hooks for integrating with the Bugzilla bug tracker
10
10
11 This hook extension adds comments on bugs in Bugzilla when changesets
11 This hook extension adds comments on bugs in Bugzilla when changesets
12 that refer to bugs by Bugzilla ID are seen. The comment is formatted using
12 that refer to bugs by Bugzilla ID are seen. The comment is formatted using
13 the Mercurial template mechanism.
13 the Mercurial template mechanism.
14
14
15 The bug references can optionally include an update for Bugzilla of the
15 The bug references can optionally include an update for Bugzilla of the
16 hours spent working on the bug. Bugs can also be marked fixed.
16 hours spent working on the bug. Bugs can also be marked fixed.
17
17
18 Three basic modes of access to Bugzilla are provided:
18 Three basic modes of access to Bugzilla are provided:
19
19
20 1. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.
20 1. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.
21
21
22 2. Check data via the Bugzilla XMLRPC interface and submit bug change
22 2. Check data via the Bugzilla XMLRPC interface and submit bug change
23 via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.
23 via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.
24
24
25 3. Writing directly to the Bugzilla database. Only Bugzilla installations
25 3. Writing directly to the Bugzilla database. Only Bugzilla installations
26 using MySQL are supported. Requires Python MySQLdb.
26 using MySQL are supported. Requires Python MySQLdb.
27
27
28 Writing directly to the database is susceptible to schema changes, and
28 Writing directly to the database is susceptible to schema changes, and
29 relies on a Bugzilla contrib script to send out bug change
29 relies on a Bugzilla contrib script to send out bug change
30 notification emails. This script runs as the user running Mercurial,
30 notification emails. This script runs as the user running Mercurial,
31 must be run on the host with the Bugzilla install, and requires
31 must be run on the host with the Bugzilla install, and requires
32 permission to read Bugzilla configuration details and the necessary
32 permission to read Bugzilla configuration details and the necessary
33 MySQL user and password to have full access rights to the Bugzilla
33 MySQL user and password to have full access rights to the Bugzilla
34 database. For these reasons this access mode is now considered
34 database. For these reasons this access mode is now considered
35 deprecated, and will not be updated for new Bugzilla versions going
35 deprecated, and will not be updated for new Bugzilla versions going
36 forward. Only adding comments is supported in this access mode.
36 forward. Only adding comments is supported in this access mode.
37
37
38 Access via XMLRPC needs a Bugzilla username and password to be specified
38 Access via XMLRPC needs a Bugzilla username and password to be specified
39 in the configuration. Comments are added under that username. Since the
39 in the configuration. Comments are added under that username. Since the
40 configuration must be readable by all Mercurial users, it is recommended
40 configuration must be readable by all Mercurial users, it is recommended
41 that the rights of that user are restricted in Bugzilla to the minimum
41 that the rights of that user are restricted in Bugzilla to the minimum
42 necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
42 necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
43
43
44 Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
44 Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
45 email to the Bugzilla email interface to submit comments to bugs.
45 email to the Bugzilla email interface to submit comments to bugs.
46 The From: address in the email is set to the email address of the Mercurial
46 The From: address in the email is set to the email address of the Mercurial
47 user, so the comment appears to come from the Mercurial user. In the event
47 user, so the comment appears to come from the Mercurial user. In the event
48 that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
48 that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
49 user, the email associated with the Bugzilla username used to log into
49 user, the email associated with the Bugzilla username used to log into
50 Bugzilla is used instead as the source of the comment. Marking bugs fixed
50 Bugzilla is used instead as the source of the comment. Marking bugs fixed
51 works on all supported Bugzilla versions.
51 works on all supported Bugzilla versions.
52
52
53 Configuration items common to all access modes:
53 Configuration items common to all access modes:
54
54
55 bugzilla.version
55 bugzilla.version
56 The access type to use. Values recognized are:
56 The access type to use. Values recognized are:
57
57
58 :``xmlrpc``: Bugzilla XMLRPC interface.
58 :``xmlrpc``: Bugzilla XMLRPC interface.
59 :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
59 :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
60 :``3.0``: MySQL access, Bugzilla 3.0 and later.
60 :``3.0``: MySQL access, Bugzilla 3.0 and later.
61 :``2.18``: MySQL access, Bugzilla 2.18 and up to but not
61 :``2.18``: MySQL access, Bugzilla 2.18 and up to but not
62 including 3.0.
62 including 3.0.
63 :``2.16``: MySQL access, Bugzilla 2.16 and up to but not
63 :``2.16``: MySQL access, Bugzilla 2.16 and up to but not
64 including 2.18.
64 including 2.18.
65
65
66 bugzilla.regexp
66 bugzilla.regexp
67 Regular expression to match bug IDs for update in changeset commit message.
67 Regular expression to match bug IDs for update in changeset commit message.
68 It must contain one "()" named group ``<ids>`` containing the bug
68 It must contain one "()" named group ``<ids>`` containing the bug
69 IDs separated by non-digit characters. It may also contain
69 IDs separated by non-digit characters. It may also contain
70 a named group ``<hours>`` with a floating-point number giving the
70 a named group ``<hours>`` with a floating-point number giving the
71 hours worked on the bug. If no named groups are present, the first
71 hours worked on the bug. If no named groups are present, the first
72 "()" group is assumed to contain the bug IDs, and work time is not
72 "()" group is assumed to contain the bug IDs, and work time is not
73 updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
73 updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
74 ``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
74 ``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
75 variations thereof, followed by an hours number prefixed by ``h`` or
75 variations thereof, followed by an hours number prefixed by ``h`` or
76 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
76 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
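As an illustrative, non-default example, a project that only ever writes
``issue 1234`` style references could narrow the expression to::

  [bugzilla]
  regexp = issue\s*(?P<ids>\d+)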
77
77
78 bugzilla.fixregexp
78 bugzilla.fixregexp
79 Regular expression to match bug IDs for marking fixed in changeset
79 Regular expression to match bug IDs for marking fixed in changeset
80 commit message. This must contain a "()" named group ``<ids>`` containing
80 commit message. This must contain a "()" named group ``<ids>`` containing
81 the bug IDs separated by non-digit characters. It may also contain
81 the bug IDs separated by non-digit characters. It may also contain
82 a named group ``<hours>`` with a floating-point number giving the
82 a named group ``<hours>`` with a floating-point number giving the
83 hours worked on the bug. If no named groups are present, the first
83 hours worked on the bug. If no named groups are present, the first
84 "()" group is assumed to contain the bug IDs, and work time is not
84 "()" group is assumed to contain the bug IDs, and work time is not
85 updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
85 updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
86 ``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
86 ``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
87 variations thereof, followed by an hours number prefixed by ``h`` or
87 variations thereof, followed by an hours number prefixed by ``h`` or
88 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
88 ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
89
89
90 bugzilla.fixstatus
90 bugzilla.fixstatus
91 The status to set a bug to when marking fixed. Default ``RESOLVED``.
91 The status to set a bug to when marking fixed. Default ``RESOLVED``.
92
92
93 bugzilla.fixresolution
93 bugzilla.fixresolution
94 The resolution to set a bug to when marking fixed. Default ``FIXED``.
94 The resolution to set a bug to when marking fixed. Default ``FIXED``.
95
95
96 bugzilla.style
96 bugzilla.style
97 The style file to use when formatting comments.
97 The style file to use when formatting comments.
98
98
99 bugzilla.template
99 bugzilla.template
100 Template to use when formatting comments. Overrides style if
100 Template to use when formatting comments. Overrides style if
101 specified. In addition to the usual Mercurial keywords, the
101 specified. In addition to the usual Mercurial keywords, the
102 extension specifies:
102 extension specifies:
103
103
104 :``{bug}``: The Bugzilla bug ID.
104 :``{bug}``: The Bugzilla bug ID.
105 :``{root}``: The full pathname of the Mercurial repository.
105 :``{root}``: The full pathname of the Mercurial repository.
106 :``{webroot}``: Stripped pathname of the Mercurial repository.
106 :``{webroot}``: Stripped pathname of the Mercurial repository.
107 :``{hgweb}``: Base URL for browsing Mercurial repositories.
107 :``{hgweb}``: Base URL for browsing Mercurial repositories.
108
108
109 Default ``changeset {node|short} in repo {root} refers to bug
109 Default ``changeset {node|short} in repo {root} refers to bug
110 {bug}.\\ndetails:\\n\\t{desc|tabindent}``
110 {bug}.\\ndetails:\\n\\t{desc|tabindent}``
111
111
112 bugzilla.strip
112 bugzilla.strip
113 The number of path separator characters to strip from the front of
113 The number of path separator characters to strip from the front of
114 the Mercurial repository path (``{root}`` in templates) to produce
114 the Mercurial repository path (``{root}`` in templates) to produce
115 ``{webroot}``. For example, a repository with ``{root}``
115 ``{webroot}``. For example, a repository with ``{root}``
116 ``/var/local/my-project`` with a strip of 2 gives a value for
116 ``/var/local/my-project`` with a strip of 2 gives a value for
117 ``{webroot}`` of ``my-project``. Default 0.
117 ``{webroot}`` of ``my-project``. Default 0.
118
118
119 web.baseurl
119 web.baseurl
120 Base URL for browsing Mercurial repositories. Referenced from
120 Base URL for browsing Mercurial repositories. Referenced from
121 templates as ``{hgweb}``.
121 templates as ``{hgweb}``.
122
122
123 Configuration items common to XMLRPC+email and MySQL access modes:
123 Configuration items common to XMLRPC+email and MySQL access modes:
124
124
125 bugzilla.usermap
125 bugzilla.usermap
126 Path of file containing Mercurial committer email to Bugzilla user email
126 Path of file containing Mercurial committer email to Bugzilla user email
127 mappings. If specified, the file should contain one mapping per
127 mappings. If specified, the file should contain one mapping per
128 line::
128 line::
129
129
130 committer = Bugzilla user
130 committer = Bugzilla user
131
131
132 See also the ``[usermap]`` section.
132 See also the ``[usermap]`` section.
133
133
134 The ``[usermap]`` section is used to specify mappings of Mercurial
134 The ``[usermap]`` section is used to specify mappings of Mercurial
135 committer email to Bugzilla user email. See also ``bugzilla.usermap``.
135 committer email to Bugzilla user email. See also ``bugzilla.usermap``.
136 Contains entries of the form ``committer = Bugzilla user``.
136 Contains entries of the form ``committer = Bugzilla user``.
137
137
138 XMLRPC access mode configuration:
138 XMLRPC access mode configuration:
139
139
140 bugzilla.bzurl
140 bugzilla.bzurl
141 The base URL for the Bugzilla installation.
141 The base URL for the Bugzilla installation.
142 Default ``http://localhost/bugzilla``.
142 Default ``http://localhost/bugzilla``.
143
143
144 bugzilla.user
144 bugzilla.user
145 The username to use to log into Bugzilla via XMLRPC. Default
145 The username to use to log into Bugzilla via XMLRPC. Default
146 ``bugs``.
146 ``bugs``.
147
147
148 bugzilla.password
148 bugzilla.password
149 The password for Bugzilla login.
149 The password for Bugzilla login.
150
150
151 XMLRPC+email access mode uses the XMLRPC access mode configuration items,
151 XMLRPC+email access mode uses the XMLRPC access mode configuration items,
152 and also:
152 and also:
153
153
154 bugzilla.bzemail
154 bugzilla.bzemail
155 The Bugzilla email address.
155 The Bugzilla email address.
156
156
157 In addition, the Mercurial email settings must be configured. See the
157 In addition, the Mercurial email settings must be configured. See the
158 documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.
158 documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.
159
159
160 MySQL access mode configuration:
160 MySQL access mode configuration:
161
161
162 bugzilla.host
162 bugzilla.host
163 Hostname of the MySQL server holding the Bugzilla database.
163 Hostname of the MySQL server holding the Bugzilla database.
164 Default ``localhost``.
164 Default ``localhost``.
165
165
166 bugzilla.db
166 bugzilla.db
167 Name of the Bugzilla database in MySQL. Default ``bugs``.
167 Name of the Bugzilla database in MySQL. Default ``bugs``.
168
168
169 bugzilla.user
169 bugzilla.user
170 Username to use to access MySQL server. Default ``bugs``.
170 Username to use to access MySQL server. Default ``bugs``.
171
171
172 bugzilla.password
172 bugzilla.password
173 Password to use to access MySQL server.
173 Password to use to access MySQL server.
174
174
175 bugzilla.timeout
175 bugzilla.timeout
176 Database connection timeout (seconds). Default 5.
176 Database connection timeout (seconds). Default 5.
177
177
178 bugzilla.bzuser
178 bugzilla.bzuser
179 Fallback Bugzilla user name to record comments with, if changeset
179 Fallback Bugzilla user name to record comments with, if changeset
180 committer cannot be found as a Bugzilla user.
180 committer cannot be found as a Bugzilla user.
181
181
182 bugzilla.bzdir
182 bugzilla.bzdir
183 Bugzilla install directory. Used by default notify. Default
183 Bugzilla install directory. Used by default notify. Default
184 ``/var/www/html/bugzilla``.
184 ``/var/www/html/bugzilla``.
185
185
186 bugzilla.notify
186 bugzilla.notify
187 The command to run to get Bugzilla to send bug change notification
187 The command to run to get Bugzilla to send bug change notification
188 emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
188 emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
189 id) and ``user`` (committer bugzilla email). Default depends on
189 id) and ``user`` (committer bugzilla email). Default depends on
190 version; from 2.18 it is "cd %(bzdir)s && perl -T
190 version; from 2.18 it is "cd %(bzdir)s && perl -T
191 contrib/sendbugmail.pl %(id)s %(user)s".
191 contrib/sendbugmail.pl %(id)s %(user)s".
192
192
193 Activating the extension::
193 Activating the extension::
194
194
195 [extensions]
195 [extensions]
196 bugzilla =
196 bugzilla =
197
197
198 [hooks]
198 [hooks]
199 # run bugzilla hook on every change pulled or pushed in here
199 # run bugzilla hook on every change pulled or pushed in here
200 incoming.bugzilla = python:hgext.bugzilla.hook
200 incoming.bugzilla = python:hgext.bugzilla.hook
201
201
202 Example configurations:
202 Example configurations:
203
203
204 XMLRPC example configuration. This uses the Bugzilla at
204 XMLRPC example configuration. This uses the Bugzilla at
205 ``http://my-project.org/bugzilla``, logging in as user
205 ``http://my-project.org/bugzilla``, logging in as user
206 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
206 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
207 collection of Mercurial repositories in ``/var/local/hg/repos/``,
207 collection of Mercurial repositories in ``/var/local/hg/repos/``,
208 with a web interface at ``http://my-project.org/hg``. ::
208 with a web interface at ``http://my-project.org/hg``. ::
209
209
210 [bugzilla]
210 [bugzilla]
211 bzurl=http://my-project.org/bugzilla
211 bzurl=http://my-project.org/bugzilla
212 user=bugmail@my-project.org
212 user=bugmail@my-project.org
213 password=plugh
213 password=plugh
214 version=xmlrpc
214 version=xmlrpc
215 template=Changeset {node|short} in {root|basename}.
215 template=Changeset {node|short} in {root|basename}.
216 {hgweb}/{webroot}/rev/{node|short}\\n
216 {hgweb}/{webroot}/rev/{node|short}\\n
217 {desc}\\n
217 {desc}\\n
218 strip=5
218 strip=5
219
219
220 [web]
220 [web]
221 baseurl=http://my-project.org/hg
221 baseurl=http://my-project.org/hg
222
222
223 XMLRPC+email example configuration. This uses the Bugzilla at
223 XMLRPC+email example configuration. This uses the Bugzilla at
224 ``http://my-project.org/bugzilla``, logging in as user
224 ``http://my-project.org/bugzilla``, logging in as user
225 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
225 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
226 collection of Mercurial repositories in ``/var/local/hg/repos/``,
226 collection of Mercurial repositories in ``/var/local/hg/repos/``,
227 with a web interface at ``http://my-project.org/hg``. Bug comments
227 with a web interface at ``http://my-project.org/hg``. Bug comments
228 are sent to the Bugzilla email address
228 are sent to the Bugzilla email address
229 ``bugzilla@my-project.org``. ::
229 ``bugzilla@my-project.org``. ::
230
230
231 [bugzilla]
231 [bugzilla]
232 bzurl=http://my-project.org/bugzilla
232 bzurl=http://my-project.org/bugzilla
233 user=bugmail@my-project.org
233 user=bugmail@my-project.org
234 password=plugh
234 password=plugh
235 version=xmlrpc+email
235 version=xmlrpc+email
236 bzemail=bugzilla@my-project.org
236 bzemail=bugzilla@my-project.org
237 template=Changeset {node|short} in {root|basename}.
237 template=Changeset {node|short} in {root|basename}.
238 {hgweb}/{webroot}/rev/{node|short}\\n
238 {hgweb}/{webroot}/rev/{node|short}\\n
239 {desc}\\n
239 {desc}\\n
240 strip=5
240 strip=5
241
241
242 [web]
242 [web]
243 baseurl=http://my-project.org/hg
243 baseurl=http://my-project.org/hg
244
244
245 [usermap]
245 [usermap]
246 user@emaildomain.com=user.name@bugzilladomain.com
246 user@emaildomain.com=user.name@bugzilladomain.com
247
247
248 MySQL example configuration. This has a local Bugzilla 3.2 installation
248 MySQL example configuration. This has a local Bugzilla 3.2 installation
249 in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
249 in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
250 the Bugzilla database name is ``bugs`` and MySQL is
250 the Bugzilla database name is ``bugs`` and MySQL is
251 accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
251 accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
252 with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
252 with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
253 with a web interface at ``http://my-project.org/hg``. ::
253 with a web interface at ``http://my-project.org/hg``. ::
254
254
255 [bugzilla]
255 [bugzilla]
256 host=localhost
256 host=localhost
257 password=XYZZY
257 password=XYZZY
258 version=3.0
258 version=3.0
259 bzuser=unknown@domain.com
259 bzuser=unknown@domain.com
260 bzdir=/opt/bugzilla-3.2
260 bzdir=/opt/bugzilla-3.2
261 template=Changeset {node|short} in {root|basename}.
261 template=Changeset {node|short} in {root|basename}.
262 {hgweb}/{webroot}/rev/{node|short}\\n
262 {hgweb}/{webroot}/rev/{node|short}\\n
263 {desc}\\n
263 {desc}\\n
264 strip=5
264 strip=5
265
265
266 [web]
266 [web]
267 baseurl=http://my-project.org/hg
267 baseurl=http://my-project.org/hg
268
268
269 [usermap]
269 [usermap]
270 user@emaildomain.com=user.name@bugzilladomain.com
270 user@emaildomain.com=user.name@bugzilladomain.com
271
271
272 All the above add a comment to the Bugzilla bug record of the form::
272 All the above add a comment to the Bugzilla bug record of the form::
273
273
274 Changeset 3b16791d6642 in repository-name.
274 Changeset 3b16791d6642 in repository-name.
275 http://my-project.org/hg/repository-name/rev/3b16791d6642
275 http://my-project.org/hg/repository-name/rev/3b16791d6642
276
276
277 Changeset commit comment. Bug 1234.
277 Changeset commit comment. Bug 1234.
278 '''
278 '''
279
279
280 from mercurial.i18n import _
280 from mercurial.i18n import _
281 from mercurial.node import short
281 from mercurial.node import short
282 from mercurial import cmdutil, mail, util
282 from mercurial import cmdutil, mail, util
283 import re, time, urlparse, xmlrpclib
283 import re, time, urlparse, xmlrpclib
284
284
285 # Note for extension authors: ONLY specify testedwith = 'internal' for
286 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
287 # specify the version(s) of Mercurial they are tested with, or
288 # leave the attribute unspecified.
285 testedwith = 'internal'
289 testedwith = 'internal'
286
290
287 class bzaccess(object):
291 class bzaccess(object):
288 '''Base class for access to Bugzilla.'''
292 '''Base class for access to Bugzilla.'''
289
293
290 def __init__(self, ui):
294 def __init__(self, ui):
291 self.ui = ui
295 self.ui = ui
292 usermap = self.ui.config('bugzilla', 'usermap')
296 usermap = self.ui.config('bugzilla', 'usermap')
293 if usermap:
297 if usermap:
294 self.ui.readconfig(usermap, sections=['usermap'])
298 self.ui.readconfig(usermap, sections=['usermap'])
295
299
296 def map_committer(self, user):
300 def map_committer(self, user):
297 '''map name of committer to Bugzilla user name.'''
301 '''map name of committer to Bugzilla user name.'''
298 for committer, bzuser in self.ui.configitems('usermap'):
302 for committer, bzuser in self.ui.configitems('usermap'):
299 if committer.lower() == user.lower():
303 if committer.lower() == user.lower():
300 return bzuser
304 return bzuser
301 return user
305 return user
302
306
303 # Methods to be implemented by access classes.
307 # Methods to be implemented by access classes.
304 #
308 #
305 # 'bugs' is a dict keyed on bug id, where values are a dict holding
309 # 'bugs' is a dict keyed on bug id, where values are a dict holding
306 # updates to bug state. Recognized dict keys are:
310 # updates to bug state. Recognized dict keys are:
307 #
311 #
308 # 'hours': Value, float containing work hours to be updated.
312 # 'hours': Value, float containing work hours to be updated.
309 # 'fix': If key present, bug is to be marked fixed. Value ignored.
313 # 'fix': If key present, bug is to be marked fixed. Value ignored.
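#
# For example (illustrative values only), a push touching two bugs might be
# described as:
#   bugs = {1234: {'hours': 1.5}, 5678: {'fix': None}}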
310
314
311 def filter_real_bug_ids(self, bugs):
315 def filter_real_bug_ids(self, bugs):
312 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
316 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
313 pass
317 pass
314
318
315 def filter_cset_known_bug_ids(self, node, bugs):
319 def filter_cset_known_bug_ids(self, node, bugs):
316 '''remove bug IDs where node occurs in comment text from bugs.'''
320 '''remove bug IDs where node occurs in comment text from bugs.'''
317 pass
321 pass
318
322
319 def updatebug(self, bugid, newstate, text, committer):
323 def updatebug(self, bugid, newstate, text, committer):
320 '''update the specified bug. Add comment text and set new states.
324 '''update the specified bug. Add comment text and set new states.
321
325
322 If possible add the comment as being from the committer of
326 If possible add the comment as being from the committer of
323 the changeset. Otherwise use the default Bugzilla user.
327 the changeset. Otherwise use the default Bugzilla user.
324 '''
328 '''
325 pass
329 pass
326
330
327 def notify(self, bugs, committer):
331 def notify(self, bugs, committer):
328 '''Force sending of Bugzilla notification emails.
332 '''Force sending of Bugzilla notification emails.
329
333
330 Only required if the access method does not trigger notification
334 Only required if the access method does not trigger notification
331 emails automatically.
335 emails automatically.
332 '''
336 '''
333 pass
337 pass
334
338
335 # Bugzilla via direct access to MySQL database.
339 # Bugzilla via direct access to MySQL database.
336 class bzmysql(bzaccess):
340 class bzmysql(bzaccess):
337 '''Support for direct MySQL access to Bugzilla.
341 '''Support for direct MySQL access to Bugzilla.
338
342
339 The earliest Bugzilla version this is tested with is version 2.16.
343 The earliest Bugzilla version this is tested with is version 2.16.
340
344
341 If your Bugzilla is version 3.4 or above, you are strongly
345 If your Bugzilla is version 3.4 or above, you are strongly
342 recommended to use the XMLRPC access method instead.
346 recommended to use the XMLRPC access method instead.
343 '''
347 '''
344
348
345 @staticmethod
349 @staticmethod
346 def sql_buglist(ids):
350 def sql_buglist(ids):
347 '''return SQL-friendly list of bug ids'''
351 '''return SQL-friendly list of bug ids'''
348 return '(' + ','.join(map(str, ids)) + ')'
352 return '(' + ','.join(map(str, ids)) + ')'
349
353
350 _MySQLdb = None
354 _MySQLdb = None
351
355
352 def __init__(self, ui):
356 def __init__(self, ui):
353 try:
357 try:
354 import MySQLdb as mysql
358 import MySQLdb as mysql
355 bzmysql._MySQLdb = mysql
359 bzmysql._MySQLdb = mysql
356 except ImportError, err:
360 except ImportError, err:
357 raise util.Abort(_('python mysql support not available: %s') % err)
361 raise util.Abort(_('python mysql support not available: %s') % err)
358
362
359 bzaccess.__init__(self, ui)
363 bzaccess.__init__(self, ui)
360
364
361 host = self.ui.config('bugzilla', 'host', 'localhost')
365 host = self.ui.config('bugzilla', 'host', 'localhost')
362 user = self.ui.config('bugzilla', 'user', 'bugs')
366 user = self.ui.config('bugzilla', 'user', 'bugs')
363 passwd = self.ui.config('bugzilla', 'password')
367 passwd = self.ui.config('bugzilla', 'password')
364 db = self.ui.config('bugzilla', 'db', 'bugs')
368 db = self.ui.config('bugzilla', 'db', 'bugs')
365 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
369 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
366 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
370 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
367 (host, db, user, '*' * len(passwd)))
371 (host, db, user, '*' * len(passwd)))
368 self.conn = bzmysql._MySQLdb.connect(host=host,
372 self.conn = bzmysql._MySQLdb.connect(host=host,
369 user=user, passwd=passwd,
373 user=user, passwd=passwd,
370 db=db,
374 db=db,
371 connect_timeout=timeout)
375 connect_timeout=timeout)
372 self.cursor = self.conn.cursor()
376 self.cursor = self.conn.cursor()
373 self.longdesc_id = self.get_longdesc_id()
377 self.longdesc_id = self.get_longdesc_id()
374 self.user_ids = {}
378 self.user_ids = {}
375 self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
379 self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
376
380
377 def run(self, *args, **kwargs):
381 def run(self, *args, **kwargs):
378 '''run a query.'''
382 '''run a query.'''
379 self.ui.note(_('query: %s %s\n') % (args, kwargs))
383 self.ui.note(_('query: %s %s\n') % (args, kwargs))
380 try:
384 try:
381 self.cursor.execute(*args, **kwargs)
385 self.cursor.execute(*args, **kwargs)
382 except bzmysql._MySQLdb.MySQLError:
386 except bzmysql._MySQLdb.MySQLError:
383 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
387 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
384 raise
388 raise
385
389
386 def get_longdesc_id(self):
390 def get_longdesc_id(self):
387 '''get identity of longdesc field'''
391 '''get identity of longdesc field'''
388 self.run('select fieldid from fielddefs where name = "longdesc"')
392 self.run('select fieldid from fielddefs where name = "longdesc"')
389 ids = self.cursor.fetchall()
393 ids = self.cursor.fetchall()
390 if len(ids) != 1:
394 if len(ids) != 1:
391 raise util.Abort(_('unknown database schema'))
395 raise util.Abort(_('unknown database schema'))
392 return ids[0][0]
396 return ids[0][0]
393
397
394 def filter_real_bug_ids(self, bugs):
398 def filter_real_bug_ids(self, bugs):
395 '''filter nonexistent bugs from set.'''
399 '''filter nonexistent bugs from set.'''
396 self.run('select bug_id from bugs where bug_id in %s' %
400 self.run('select bug_id from bugs where bug_id in %s' %
397 bzmysql.sql_buglist(bugs.keys()))
401 bzmysql.sql_buglist(bugs.keys()))
398 existing = [id for (id,) in self.cursor.fetchall()]
402 existing = [id for (id,) in self.cursor.fetchall()]
399 for id in bugs.keys():
403 for id in bugs.keys():
400 if id not in existing:
404 if id not in existing:
401 self.ui.status(_('bug %d does not exist\n') % id)
405 self.ui.status(_('bug %d does not exist\n') % id)
402 del bugs[id]
406 del bugs[id]
403
407
404 def filter_cset_known_bug_ids(self, node, bugs):
408 def filter_cset_known_bug_ids(self, node, bugs):
405 '''filter bug ids that already refer to this changeset from set.'''
409 '''filter bug ids that already refer to this changeset from set.'''
406 self.run('''select bug_id from longdescs where
410 self.run('''select bug_id from longdescs where
407 bug_id in %s and thetext like "%%%s%%"''' %
411 bug_id in %s and thetext like "%%%s%%"''' %
408 (bzmysql.sql_buglist(bugs.keys()), short(node)))
412 (bzmysql.sql_buglist(bugs.keys()), short(node)))
409 for (id,) in self.cursor.fetchall():
413 for (id,) in self.cursor.fetchall():
410 self.ui.status(_('bug %d already knows about changeset %s\n') %
414 self.ui.status(_('bug %d already knows about changeset %s\n') %
411 (id, short(node)))
415 (id, short(node)))
412 del bugs[id]
416 del bugs[id]
413
417
414 def notify(self, bugs, committer):
418 def notify(self, bugs, committer):
415 '''tell bugzilla to send mail.'''
419 '''tell bugzilla to send mail.'''
416 self.ui.status(_('telling bugzilla to send mail:\n'))
420 self.ui.status(_('telling bugzilla to send mail:\n'))
417 (user, userid) = self.get_bugzilla_user(committer)
421 (user, userid) = self.get_bugzilla_user(committer)
418 for id in bugs.keys():
422 for id in bugs.keys():
419 self.ui.status(_(' bug %s\n') % id)
423 self.ui.status(_(' bug %s\n') % id)
420 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
424 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
421 bzdir = self.ui.config('bugzilla', 'bzdir',
425 bzdir = self.ui.config('bugzilla', 'bzdir',
422 '/var/www/html/bugzilla')
426 '/var/www/html/bugzilla')
423 try:
427 try:
424 # Backwards-compatible with old notify string, which
428 # Backwards-compatible with old notify string, which
425 # took one string. This will throw with a new format
429 # took one string. This will throw with a new format
426 # string.
430 # string.
427 cmd = cmdfmt % id
431 cmd = cmdfmt % id
428 except TypeError:
432 except TypeError:
429 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
433 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
430 self.ui.note(_('running notify command %s\n') % cmd)
434 self.ui.note(_('running notify command %s\n') % cmd)
431 fp = util.popen('(%s) 2>&1' % cmd)
435 fp = util.popen('(%s) 2>&1' % cmd)
432 out = fp.read()
436 out = fp.read()
433 ret = fp.close()
437 ret = fp.close()
434 if ret:
438 if ret:
435 self.ui.warn(out)
439 self.ui.warn(out)
436 raise util.Abort(_('bugzilla notify command %s') %
440 raise util.Abort(_('bugzilla notify command %s') %
437 util.explainexit(ret)[0])
441 util.explainexit(ret)[0])
438 self.ui.status(_('done\n'))
442 self.ui.status(_('done\n'))
439
443
440 def get_user_id(self, user):
444 def get_user_id(self, user):
441 '''look up numeric bugzilla user id.'''
445 '''look up numeric bugzilla user id.'''
442 try:
446 try:
443 return self.user_ids[user]
447 return self.user_ids[user]
444 except KeyError:
448 except KeyError:
445 try:
449 try:
446 userid = int(user)
450 userid = int(user)
447 except ValueError:
451 except ValueError:
448 self.ui.note(_('looking up user %s\n') % user)
452 self.ui.note(_('looking up user %s\n') % user)
449 self.run('''select userid from profiles
453 self.run('''select userid from profiles
450 where login_name like %s''', user)
454 where login_name like %s''', user)
451 all = self.cursor.fetchall()
455 all = self.cursor.fetchall()
452 if len(all) != 1:
456 if len(all) != 1:
453 raise KeyError(user)
457 raise KeyError(user)
454 userid = int(all[0][0])
458 userid = int(all[0][0])
455 self.user_ids[user] = userid
459 self.user_ids[user] = userid
456 return userid
460 return userid
457
461
458 def get_bugzilla_user(self, committer):
462 def get_bugzilla_user(self, committer):
459 '''See if committer is a registered bugzilla user. Return
463 '''See if committer is a registered bugzilla user. Return
460 bugzilla username and userid if so. If not, return default
464 bugzilla username and userid if so. If not, return default
461 bugzilla username and userid.'''
465 bugzilla username and userid.'''
462 user = self.map_committer(committer)
466 user = self.map_committer(committer)
463 try:
467 try:
464 userid = self.get_user_id(user)
468 userid = self.get_user_id(user)
465 except KeyError:
469 except KeyError:
466 try:
470 try:
467 defaultuser = self.ui.config('bugzilla', 'bzuser')
471 defaultuser = self.ui.config('bugzilla', 'bzuser')
468 if not defaultuser:
472 if not defaultuser:
469 raise util.Abort(_('cannot find bugzilla user id for %s') %
473 raise util.Abort(_('cannot find bugzilla user id for %s') %
470 user)
474 user)
471 userid = self.get_user_id(defaultuser)
475 userid = self.get_user_id(defaultuser)
472 user = defaultuser
476 user = defaultuser
473 except KeyError:
477 except KeyError:
474 raise util.Abort(_('cannot find bugzilla user id for %s or %s')
478 raise util.Abort(_('cannot find bugzilla user id for %s or %s')
475 % (user, defaultuser))
479 % (user, defaultuser))
476 return (user, userid)
480 return (user, userid)
477
481
478 def updatebug(self, bugid, newstate, text, committer):
482 def updatebug(self, bugid, newstate, text, committer):
479 '''update bug state with comment text.
483 '''update bug state with comment text.
480
484
481 Try adding comment as committer of changeset, otherwise as
485 Try adding comment as committer of changeset, otherwise as
482 default bugzilla user.'''
486 default bugzilla user.'''
483 if len(newstate) > 0:
487 if len(newstate) > 0:
484 self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
488 self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
485
489
486 (user, userid) = self.get_bugzilla_user(committer)
490 (user, userid) = self.get_bugzilla_user(committer)
487 now = time.strftime('%Y-%m-%d %H:%M:%S')
491 now = time.strftime('%Y-%m-%d %H:%M:%S')
488 self.run('''insert into longdescs
492 self.run('''insert into longdescs
489 (bug_id, who, bug_when, thetext)
493 (bug_id, who, bug_when, thetext)
490 values (%s, %s, %s, %s)''',
494 values (%s, %s, %s, %s)''',
491 (bugid, userid, now, text))
495 (bugid, userid, now, text))
492 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
496 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
493 values (%s, %s, %s, %s)''',
497 values (%s, %s, %s, %s)''',
494 (bugid, userid, now, self.longdesc_id))
498 (bugid, userid, now, self.longdesc_id))
495 self.conn.commit()
499 self.conn.commit()
496
500
497 class bzmysql_2_18(bzmysql):
501 class bzmysql_2_18(bzmysql):
498 '''support for bugzilla 2.18 series.'''
502 '''support for bugzilla 2.18 series.'''
499
503
500 def __init__(self, ui):
504 def __init__(self, ui):
501 bzmysql.__init__(self, ui)
505 bzmysql.__init__(self, ui)
502 self.default_notify = \
506 self.default_notify = \
503 "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
507 "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
504
508
505 class bzmysql_3_0(bzmysql_2_18):
509 class bzmysql_3_0(bzmysql_2_18):
506 '''support for bugzilla 3.0 series.'''
510 '''support for bugzilla 3.0 series.'''
507
511
508 def __init__(self, ui):
512 def __init__(self, ui):
509 bzmysql_2_18.__init__(self, ui)
513 bzmysql_2_18.__init__(self, ui)
510
514
511 def get_longdesc_id(self):
515 def get_longdesc_id(self):
512 '''get identity of longdesc field'''
516 '''get identity of longdesc field'''
513 self.run('select id from fielddefs where name = "longdesc"')
517 self.run('select id from fielddefs where name = "longdesc"')
514 ids = self.cursor.fetchall()
518 ids = self.cursor.fetchall()
515 if len(ids) != 1:
519 if len(ids) != 1:
516 raise util.Abort(_('unknown database schema'))
520 raise util.Abort(_('unknown database schema'))
517 return ids[0][0]
521 return ids[0][0]
518
522
519 # Bugzilla via XMLRPC interface.
523 # Bugzilla via XMLRPC interface.
520
524
521 class cookietransportrequest(object):
525 class cookietransportrequest(object):
522 """A Transport request method that retains cookies over its lifetime.
526 """A Transport request method that retains cookies over its lifetime.
523
527
524 The regular xmlrpclib transports ignore cookies, which causes
528 The regular xmlrpclib transports ignore cookies, which causes
525 a bit of a problem when you need a cookie-based login, as with
529 a bit of a problem when you need a cookie-based login, as with
526 the Bugzilla XMLRPC interface prior to 4.4.3.
530 the Bugzilla XMLRPC interface prior to 4.4.3.
527
531
528 So this is a helper for defining a Transport which looks for
532 So this is a helper for defining a Transport which looks for
529 cookies being set in responses and saves them to add to all future
533 cookies being set in responses and saves them to add to all future
530 requests.
534 requests.
531 """
535 """
532
536
533 # Inspiration drawn from
537 # Inspiration drawn from
534 # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
538 # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
535 # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
539 # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
536
540
537 cookies = []
541 cookies = []
538 def send_cookies(self, connection):
542 def send_cookies(self, connection):
539 if self.cookies:
543 if self.cookies:
540 for cookie in self.cookies:
544 for cookie in self.cookies:
541 connection.putheader("Cookie", cookie)
545 connection.putheader("Cookie", cookie)
542
546
543 def request(self, host, handler, request_body, verbose=0):
547 def request(self, host, handler, request_body, verbose=0):
544 self.verbose = verbose
548 self.verbose = verbose
545 self.accept_gzip_encoding = False
549 self.accept_gzip_encoding = False
546
550
547 # issue XML-RPC request
551 # issue XML-RPC request
548 h = self.make_connection(host)
552 h = self.make_connection(host)
549 if verbose:
553 if verbose:
550 h.set_debuglevel(1)
554 h.set_debuglevel(1)
551
555
552 self.send_request(h, handler, request_body)
556 self.send_request(h, handler, request_body)
553 self.send_host(h, host)
557 self.send_host(h, host)
554 self.send_cookies(h)
558 self.send_cookies(h)
555 self.send_user_agent(h)
559 self.send_user_agent(h)
556 self.send_content(h, request_body)
560 self.send_content(h, request_body)
557
561
558 # Deal with differences between Python 2.4-2.6 and 2.7.
562 # Deal with differences between Python 2.4-2.6 and 2.7.
559 # In the former h is a HTTP(S). In the latter it's a
563 # In the former h is a HTTP(S). In the latter it's a
560 # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of
564 # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of
561 # HTTP(S) has an underlying HTTP(S)Connection, so extract
565 # HTTP(S) has an underlying HTTP(S)Connection, so extract
562 # that and use it.
566 # that and use it.
563 try:
567 try:
564 response = h.getresponse()
568 response = h.getresponse()
565 except AttributeError:
569 except AttributeError:
566 response = h._conn.getresponse()
570 response = h._conn.getresponse()
567
571
568 # Add any cookie definitions to our list.
572 # Add any cookie definitions to our list.
569 for header in response.msg.getallmatchingheaders("Set-Cookie"):
573 for header in response.msg.getallmatchingheaders("Set-Cookie"):
570 val = header.split(": ", 1)[1]
574 val = header.split(": ", 1)[1]
571 cookie = val.split(";", 1)[0]
575 cookie = val.split(";", 1)[0]
572 self.cookies.append(cookie)
576 self.cookies.append(cookie)
573
577
574 if response.status != 200:
578 if response.status != 200:
575 raise xmlrpclib.ProtocolError(host + handler, response.status,
579 raise xmlrpclib.ProtocolError(host + handler, response.status,
576 response.reason, response.msg.headers)
580 response.reason, response.msg.headers)
577
581
578 payload = response.read()
582 payload = response.read()
579 parser, unmarshaller = self.getparser()
583 parser, unmarshaller = self.getparser()
580 parser.feed(payload)
584 parser.feed(payload)
581 parser.close()
585 parser.close()
582
586
583 return unmarshaller.close()
587 return unmarshaller.close()
584
588
585 # The explicit calls to the underlying xmlrpclib __init__() methods are
589 # The explicit calls to the underlying xmlrpclib __init__() methods are
586 # necessary. The xmlrpclib.Transport classes are old-style classes, and
590 # necessary. The xmlrpclib.Transport classes are old-style classes, and
587 # it turns out their __init__() doesn't get called when doing multiple
591 # it turns out their __init__() doesn't get called when doing multiple
588 # inheritance with a new-style class.
592 # inheritance with a new-style class.
589 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
593 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
590 def __init__(self, use_datetime=0):
594 def __init__(self, use_datetime=0):
591 if util.safehasattr(xmlrpclib.Transport, "__init__"):
595 if util.safehasattr(xmlrpclib.Transport, "__init__"):
592 xmlrpclib.Transport.__init__(self, use_datetime)
596 xmlrpclib.Transport.__init__(self, use_datetime)
593
597
594 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
598 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
595 def __init__(self, use_datetime=0):
599 def __init__(self, use_datetime=0):
596 if util.safehasattr(xmlrpclib.Transport, "__init__"):
600 if util.safehasattr(xmlrpclib.Transport, "__init__"):
597 xmlrpclib.SafeTransport.__init__(self, use_datetime)
601 xmlrpclib.SafeTransport.__init__(self, use_datetime)
598
602
599 class bzxmlrpc(bzaccess):
603 class bzxmlrpc(bzaccess):
600 """Support for access to Bugzilla via the Bugzilla XMLRPC API.
604 """Support for access to Bugzilla via the Bugzilla XMLRPC API.
601
605
602 Requires a minimum Bugzilla version 3.4.
606 Requires a minimum Bugzilla version 3.4.
603 """
607 """
604
608
605 def __init__(self, ui):
609 def __init__(self, ui):
606 bzaccess.__init__(self, ui)
610 bzaccess.__init__(self, ui)
607
611
608 bzweb = self.ui.config('bugzilla', 'bzurl',
612 bzweb = self.ui.config('bugzilla', 'bzurl',
609 'http://localhost/bugzilla/')
613 'http://localhost/bugzilla/')
610 bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
614 bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
611
615
612 user = self.ui.config('bugzilla', 'user', 'bugs')
616 user = self.ui.config('bugzilla', 'user', 'bugs')
613 passwd = self.ui.config('bugzilla', 'password')
617 passwd = self.ui.config('bugzilla', 'password')
614
618
615 self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
619 self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
616 self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
620 self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
617 'FIXED')
621 'FIXED')
618
622
619 self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
623 self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
620 ver = self.bzproxy.Bugzilla.version()['version'].split('.')
624 ver = self.bzproxy.Bugzilla.version()['version'].split('.')
621 self.bzvermajor = int(ver[0])
625 self.bzvermajor = int(ver[0])
622 self.bzverminor = int(ver[1])
626 self.bzverminor = int(ver[1])
623 login = self.bzproxy.User.login({'login': user, 'password': passwd,
627 login = self.bzproxy.User.login({'login': user, 'password': passwd,
624 'restrict_login': True})
628 'restrict_login': True})
625 self.bztoken = login.get('token', '')
629 self.bztoken = login.get('token', '')
626
630
627 def transport(self, uri):
631 def transport(self, uri):
628 if urlparse.urlparse(uri, "http")[0] == "https":
632 if urlparse.urlparse(uri, "http")[0] == "https":
629 return cookiesafetransport()
633 return cookiesafetransport()
630 else:
634 else:
631 return cookietransport()
635 return cookietransport()
632
636
633 def get_bug_comments(self, id):
637 def get_bug_comments(self, id):
634 """Return a string with all comment text for a bug."""
638 """Return a string with all comment text for a bug."""
635 c = self.bzproxy.Bug.comments({'ids': [id],
639 c = self.bzproxy.Bug.comments({'ids': [id],
636 'include_fields': ['text'],
640 'include_fields': ['text'],
637 'token': self.bztoken})
641 'token': self.bztoken})
638 return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
642 return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
639
643
640 def filter_real_bug_ids(self, bugs):
644 def filter_real_bug_ids(self, bugs):
641 probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
645 probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
642 'include_fields': [],
646 'include_fields': [],
643 'permissive': True,
647 'permissive': True,
644 'token': self.bztoken,
648 'token': self.bztoken,
645 })
649 })
646 for badbug in probe['faults']:
650 for badbug in probe['faults']:
647 id = badbug['id']
651 id = badbug['id']
648 self.ui.status(_('bug %d does not exist\n') % id)
652 self.ui.status(_('bug %d does not exist\n') % id)
649 del bugs[id]
653 del bugs[id]
650
654
651 def filter_cset_known_bug_ids(self, node, bugs):
655 def filter_cset_known_bug_ids(self, node, bugs):
652 for id in sorted(bugs.keys()):
656 for id in sorted(bugs.keys()):
653 if self.get_bug_comments(id).find(short(node)) != -1:
657 if self.get_bug_comments(id).find(short(node)) != -1:
654 self.ui.status(_('bug %d already knows about changeset %s\n') %
658 self.ui.status(_('bug %d already knows about changeset %s\n') %
655 (id, short(node)))
659 (id, short(node)))
656 del bugs[id]
660 del bugs[id]
657
661
658 def updatebug(self, bugid, newstate, text, committer):
662 def updatebug(self, bugid, newstate, text, committer):
659 args = {}
663 args = {}
660 if 'hours' in newstate:
664 if 'hours' in newstate:
661 args['work_time'] = newstate['hours']
665 args['work_time'] = newstate['hours']
662
666
663 if self.bzvermajor >= 4:
667 if self.bzvermajor >= 4:
664 args['ids'] = [bugid]
668 args['ids'] = [bugid]
665 args['comment'] = {'body' : text}
669 args['comment'] = {'body' : text}
666 if 'fix' in newstate:
670 if 'fix' in newstate:
667 args['status'] = self.fixstatus
671 args['status'] = self.fixstatus
668 args['resolution'] = self.fixresolution
672 args['resolution'] = self.fixresolution
669 args['token'] = self.bztoken
673 args['token'] = self.bztoken
670 self.bzproxy.Bug.update(args)
674 self.bzproxy.Bug.update(args)
671 else:
675 else:
672 if 'fix' in newstate:
676 if 'fix' in newstate:
673 self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
677 self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
674 "to mark bugs fixed\n"))
678 "to mark bugs fixed\n"))
675 args['id'] = bugid
679 args['id'] = bugid
676 args['comment'] = text
680 args['comment'] = text
677 self.bzproxy.Bug.add_comment(args)
681 self.bzproxy.Bug.add_comment(args)
678
682
679 class bzxmlrpcemail(bzxmlrpc):
683 class bzxmlrpcemail(bzxmlrpc):
680 """Read data from Bugzilla via XMLRPC, send updates via email.
684 """Read data from Bugzilla via XMLRPC, send updates via email.
681
685
682 Advantages of sending updates via email:
686 Advantages of sending updates via email:
683 1. Comments can be added as any user, not just logged in user.
687 1. Comments can be added as any user, not just logged in user.
684 2. Bug statuses or other fields not accessible via XMLRPC can
688 2. Bug statuses or other fields not accessible via XMLRPC can
685 potentially be updated.
689 potentially be updated.
686
690
687 There is no XMLRPC function to change bug status before Bugzilla
691 There is no XMLRPC function to change bug status before Bugzilla
688 4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
692 4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
689 But bugs can be marked fixed via email from 3.4 onwards.
693 But bugs can be marked fixed via email from 3.4 onwards.
690 """
694 """
691
695
692 # The email interface changes subtly between 3.4 and 3.6. In 3.4,
696 # The email interface changes subtly between 3.4 and 3.6. In 3.4,
693 # in-email fields are specified as '@<fieldname> = <value>'. In
697 # in-email fields are specified as '@<fieldname> = <value>'. In
694 # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
698 # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
695 # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
699 # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
696 # compatibility, but rather than rely on this use the new format for
700 # compatibility, but rather than rely on this use the new format for
697 # 4.0 onwards.
701 # 4.0 onwards.
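# For example (see makecommandline below), the bug id command is emitted as
# "@bug_id = 1234" for pre-4.0 Bugzilla and as "@id 1234" from 4.0 onwards.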
698
702
699 def __init__(self, ui):
703 def __init__(self, ui):
700 bzxmlrpc.__init__(self, ui)
704 bzxmlrpc.__init__(self, ui)
701
705
702 self.bzemail = self.ui.config('bugzilla', 'bzemail')
706 self.bzemail = self.ui.config('bugzilla', 'bzemail')
703 if not self.bzemail:
707 if not self.bzemail:
704 raise util.Abort(_("configuration 'bzemail' missing"))
708 raise util.Abort(_("configuration 'bzemail' missing"))
705 mail.validateconfig(self.ui)
709 mail.validateconfig(self.ui)
706
710
707 def makecommandline(self, fieldname, value):
711 def makecommandline(self, fieldname, value):
708 if self.bzvermajor >= 4:
712 if self.bzvermajor >= 4:
709 return "@%s %s" % (fieldname, str(value))
713 return "@%s %s" % (fieldname, str(value))
710 else:
714 else:
711 if fieldname == "id":
715 if fieldname == "id":
712 fieldname = "bug_id"
716 fieldname = "bug_id"
713 return "@%s = %s" % (fieldname, str(value))
717 return "@%s = %s" % (fieldname, str(value))
714
718
715 def send_bug_modify_email(self, bugid, commands, comment, committer):
719 def send_bug_modify_email(self, bugid, commands, comment, committer):
716 '''send modification message to Bugzilla bug via email.
720 '''send modification message to Bugzilla bug via email.
717
721
718 The message format is documented in the Bugzilla email_in.pl
722 The message format is documented in the Bugzilla email_in.pl
719 specification. commands is a list of command lines, comment is the
723 specification. commands is a list of command lines, comment is the
720 comment text.
724 comment text.
721
725
722 To stop users from crafting commit comments with
726 To stop users from crafting commit comments with
723 Bugzilla commands, specify the bug ID via the message body, rather
727 Bugzilla commands, specify the bug ID via the message body, rather
724 than the subject line, and leave a blank line after it.
728 than the subject line, and leave a blank line after it.
725 '''
729 '''
726 user = self.map_committer(committer)
730 user = self.map_committer(committer)
727 matches = self.bzproxy.User.get({'match': [user],
731 matches = self.bzproxy.User.get({'match': [user],
728 'token': self.bztoken})
732 'token': self.bztoken})
729 if not matches['users']:
733 if not matches['users']:
730 user = self.ui.config('bugzilla', 'user', 'bugs')
734 user = self.ui.config('bugzilla', 'user', 'bugs')
731 matches = self.bzproxy.User.get({'match': [user],
735 matches = self.bzproxy.User.get({'match': [user],
732 'token': self.bztoken})
736 'token': self.bztoken})
733 if not matches['users']:
737 if not matches['users']:
734 raise util.Abort(_("default bugzilla user %s email not found") %
738 raise util.Abort(_("default bugzilla user %s email not found") %
735 user)
739 user)
736 user = matches['users'][0]['email']
740 user = matches['users'][0]['email']
737 commands.append(self.makecommandline("id", bugid))
741 commands.append(self.makecommandline("id", bugid))
738
742
739 text = "\n".join(commands) + "\n\n" + comment
743 text = "\n".join(commands) + "\n\n" + comment
740
744
741 _charsets = mail._charsets(self.ui)
745 _charsets = mail._charsets(self.ui)
742 user = mail.addressencode(self.ui, user, _charsets)
746 user = mail.addressencode(self.ui, user, _charsets)
743 bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
747 bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
744 msg = mail.mimeencode(self.ui, text, _charsets)
748 msg = mail.mimeencode(self.ui, text, _charsets)
745 msg['From'] = user
749 msg['From'] = user
746 msg['To'] = bzemail
750 msg['To'] = bzemail
747 msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
751 msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
748 sendmail = mail.connect(self.ui)
752 sendmail = mail.connect(self.ui)
749 sendmail(user, bzemail, msg.as_string())
753 sendmail(user, bzemail, msg.as_string())
750
754
751 def updatebug(self, bugid, newstate, text, committer):
755 def updatebug(self, bugid, newstate, text, committer):
752 cmds = []
756 cmds = []
753 if 'hours' in newstate:
757 if 'hours' in newstate:
754 cmds.append(self.makecommandline("work_time", newstate['hours']))
758 cmds.append(self.makecommandline("work_time", newstate['hours']))
755 if 'fix' in newstate:
759 if 'fix' in newstate:
756 cmds.append(self.makecommandline("bug_status", self.fixstatus))
760 cmds.append(self.makecommandline("bug_status", self.fixstatus))
757 cmds.append(self.makecommandline("resolution", self.fixresolution))
761 cmds.append(self.makecommandline("resolution", self.fixresolution))
758 self.send_bug_modify_email(bugid, cmds, text, committer)
762 self.send_bug_modify_email(bugid, cmds, text, committer)
759
763
760 class bugzilla(object):
764 class bugzilla(object):
761 # supported versions of bugzilla. different versions have
765 # supported versions of bugzilla. different versions have
762 # different schemas.
766 # different schemas.
763 _versions = {
767 _versions = {
764 '2.16': bzmysql,
768 '2.16': bzmysql,
765 '2.18': bzmysql_2_18,
769 '2.18': bzmysql_2_18,
766 '3.0': bzmysql_3_0,
770 '3.0': bzmysql_3_0,
767 'xmlrpc': bzxmlrpc,
771 'xmlrpc': bzxmlrpc,
768 'xmlrpc+email': bzxmlrpcemail
772 'xmlrpc+email': bzxmlrpcemail
769 }
773 }
770
774
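# Illustrative hgrc snippet (host name and values are examples only): the
# 'version' key selects one of the access classes mapped above; 'bzemail'
# is only needed for the xmlrpc+email method.
#
#     [bugzilla]
#     version = xmlrpc+email
#     bzemail = bugzilla-daemon@example.com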
771 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
775 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
772 r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
776 r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
773 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
777 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
774
778
775 _default_fix_re = (r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
779 _default_fix_re = (r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
776 r'(?:nos?\.?|num(?:ber)?s?)?\s*'
780 r'(?:nos?\.?|num(?:ber)?s?)?\s*'
777 r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
781 r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
778 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
782 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
779
783
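# Sketch (not part of the extension; the commit text is invented) of how the
# default patterns above pull bug ids and optional hours out of a commit
# message, when run after this module has been loaded:
#
#     import re
#     bug_re = re.compile(bugzilla._default_bug_re, re.IGNORECASE)
#     m = bug_re.search('Frob the widget. Bug 1234 and 5678, h 1.5')
#     m.group('ids')    # '1234 and 5678, ' -> split on r'\D+' gives 1234, 5678
#     m.group('hours')  # '1.5'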
780 def __init__(self, ui, repo):
784 def __init__(self, ui, repo):
781 self.ui = ui
785 self.ui = ui
782 self.repo = repo
786 self.repo = repo
783
787
784 bzversion = self.ui.config('bugzilla', 'version')
788 bzversion = self.ui.config('bugzilla', 'version')
785 try:
789 try:
786 bzclass = bugzilla._versions[bzversion]
790 bzclass = bugzilla._versions[bzversion]
787 except KeyError:
791 except KeyError:
788 raise util.Abort(_('bugzilla version %s not supported') %
792 raise util.Abort(_('bugzilla version %s not supported') %
789 bzversion)
793 bzversion)
790 self.bzdriver = bzclass(self.ui)
794 self.bzdriver = bzclass(self.ui)
791
795
792 self.bug_re = re.compile(
796 self.bug_re = re.compile(
793 self.ui.config('bugzilla', 'regexp',
797 self.ui.config('bugzilla', 'regexp',
794 bugzilla._default_bug_re), re.IGNORECASE)
798 bugzilla._default_bug_re), re.IGNORECASE)
795 self.fix_re = re.compile(
799 self.fix_re = re.compile(
796 self.ui.config('bugzilla', 'fixregexp',
800 self.ui.config('bugzilla', 'fixregexp',
797 bugzilla._default_fix_re), re.IGNORECASE)
801 bugzilla._default_fix_re), re.IGNORECASE)
798 self.split_re = re.compile(r'\D+')
802 self.split_re = re.compile(r'\D+')
799
803
800 def find_bugs(self, ctx):
804 def find_bugs(self, ctx):
801 '''return bugs dictionary created from commit comment.
805 '''return bugs dictionary created from commit comment.
802
806
803 Extract bug info from changeset comments. Filter out any that are
807 Extract bug info from changeset comments. Filter out any that are
804 not known to Bugzilla, and any that already have a reference to
808 not known to Bugzilla, and any that already have a reference to
805 the given changeset in their comments.
809 the given changeset in their comments.
806 '''
810 '''
807 start = 0
811 start = 0
808 hours = 0.0
812 hours = 0.0
809 bugs = {}
813 bugs = {}
810 bugmatch = self.bug_re.search(ctx.description(), start)
814 bugmatch = self.bug_re.search(ctx.description(), start)
811 fixmatch = self.fix_re.search(ctx.description(), start)
815 fixmatch = self.fix_re.search(ctx.description(), start)
812 while True:
816 while True:
813 bugattribs = {}
817 bugattribs = {}
814 if not bugmatch and not fixmatch:
818 if not bugmatch and not fixmatch:
815 break
819 break
816 if not bugmatch:
820 if not bugmatch:
817 m = fixmatch
821 m = fixmatch
818 elif not fixmatch:
822 elif not fixmatch:
819 m = bugmatch
823 m = bugmatch
820 else:
824 else:
821 if bugmatch.start() < fixmatch.start():
825 if bugmatch.start() < fixmatch.start():
822 m = bugmatch
826 m = bugmatch
823 else:
827 else:
824 m = fixmatch
828 m = fixmatch
825 start = m.end()
829 start = m.end()
826 if m is bugmatch:
830 if m is bugmatch:
827 bugmatch = self.bug_re.search(ctx.description(), start)
831 bugmatch = self.bug_re.search(ctx.description(), start)
828 if 'fix' in bugattribs:
832 if 'fix' in bugattribs:
829 del bugattribs['fix']
833 del bugattribs['fix']
830 else:
834 else:
831 fixmatch = self.fix_re.search(ctx.description(), start)
835 fixmatch = self.fix_re.search(ctx.description(), start)
832 bugattribs['fix'] = None
836 bugattribs['fix'] = None
833
837
834 try:
838 try:
835 ids = m.group('ids')
839 ids = m.group('ids')
836 except IndexError:
840 except IndexError:
837 ids = m.group(1)
841 ids = m.group(1)
838 try:
842 try:
839 hours = float(m.group('hours'))
843 hours = float(m.group('hours'))
840 bugattribs['hours'] = hours
844 bugattribs['hours'] = hours
841 except IndexError:
845 except IndexError:
842 pass
846 pass
843 except TypeError:
847 except TypeError:
844 pass
848 pass
845 except ValueError:
849 except ValueError:
846 self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
850 self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
847
851
848 for id in self.split_re.split(ids):
852 for id in self.split_re.split(ids):
849 if not id:
853 if not id:
850 continue
854 continue
851 bugs[int(id)] = bugattribs
855 bugs[int(id)] = bugattribs
852 if bugs:
856 if bugs:
853 self.bzdriver.filter_real_bug_ids(bugs)
857 self.bzdriver.filter_real_bug_ids(bugs)
854 if bugs:
858 if bugs:
855 self.bzdriver.filter_cset_known_bug_ids(ctx.node(), bugs)
859 self.bzdriver.filter_cset_known_bug_ids(ctx.node(), bugs)
856 return bugs
860 return bugs
857
861
858 def update(self, bugid, newstate, ctx):
862 def update(self, bugid, newstate, ctx):
859 '''update bugzilla bug with reference to changeset.'''
863 '''update bugzilla bug with reference to changeset.'''
860
864
861 def webroot(root):
865 def webroot(root):
862 '''strip leading prefix of repo root and turn into
866 '''strip leading prefix of repo root and turn into
863 url-safe path.'''
867 url-safe path.'''
864 count = int(self.ui.config('bugzilla', 'strip', 0))
868 count = int(self.ui.config('bugzilla', 'strip', 0))
865 root = util.pconvert(root)
869 root = util.pconvert(root)
866 while count > 0:
870 while count > 0:
867 c = root.find('/')
871 c = root.find('/')
868 if c == -1:
872 if c == -1:
869 break
873 break
870 root = root[c + 1:]
874 root = root[c + 1:]
871 count -= 1
875 count -= 1
872 return root
876 return root
873
877
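# Example (illustrative, made-up path): with [bugzilla] strip=2, a repo root
# of '/data/repos/myrepo' becomes 'repos/myrepo' -- note that the leading '/'
# counts as the first stripped component.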
874 mapfile = self.ui.config('bugzilla', 'style')
878 mapfile = self.ui.config('bugzilla', 'style')
875 tmpl = self.ui.config('bugzilla', 'template')
879 tmpl = self.ui.config('bugzilla', 'template')
876 if not mapfile and not tmpl:
880 if not mapfile and not tmpl:
877 tmpl = _('changeset {node|short} in repo {root} refers '
881 tmpl = _('changeset {node|short} in repo {root} refers '
878 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
882 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
879 t = cmdutil.changeset_templater(self.ui, self.repo,
883 t = cmdutil.changeset_templater(self.ui, self.repo,
880 False, None, tmpl, mapfile, False)
884 False, None, tmpl, mapfile, False)
881 self.ui.pushbuffer()
885 self.ui.pushbuffer()
882 t.show(ctx, changes=ctx.changeset(),
886 t.show(ctx, changes=ctx.changeset(),
883 bug=str(bugid),
887 bug=str(bugid),
884 hgweb=self.ui.config('web', 'baseurl'),
888 hgweb=self.ui.config('web', 'baseurl'),
885 root=self.repo.root,
889 root=self.repo.root,
886 webroot=webroot(self.repo.root))
890 webroot=webroot(self.repo.root))
887 data = self.ui.popbuffer()
891 data = self.ui.popbuffer()
888 self.bzdriver.updatebug(bugid, newstate, data, util.email(ctx.user()))
892 self.bzdriver.updatebug(bugid, newstate, data, util.email(ctx.user()))
889
893
890 def notify(self, bugs, committer):
894 def notify(self, bugs, committer):
891 '''ensure Bugzilla users are notified of bug change.'''
895 '''ensure Bugzilla users are notified of bug change.'''
892 self.bzdriver.notify(bugs, committer)
896 self.bzdriver.notify(bugs, committer)
893
897
894 def hook(ui, repo, hooktype, node=None, **kwargs):
898 def hook(ui, repo, hooktype, node=None, **kwargs):
895 '''add comment to bugzilla for each changeset that refers to a
899 '''add comment to bugzilla for each changeset that refers to a
896 bugzilla bug id. Only add a comment once per bug, so the same change
900 bugzilla bug id. Only add a comment once per bug, so the same change
897 seen multiple times does not fill the bug with duplicate data.'''
901 seen multiple times does not fill the bug with duplicate data.'''
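# Typical activation (mirrors the extension's documented setup; adjust the
# hook name to taste):
#
#     [hooks]
#     incoming.bugzilla = python:hgext.bugzilla.hook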
898 if node is None:
902 if node is None:
899 raise util.Abort(_('hook type %s does not pass a changeset id') %
903 raise util.Abort(_('hook type %s does not pass a changeset id') %
900 hooktype)
904 hooktype)
901 try:
905 try:
902 bz = bugzilla(ui, repo)
906 bz = bugzilla(ui, repo)
903 ctx = repo[node]
907 ctx = repo[node]
904 bugs = bz.find_bugs(ctx)
908 bugs = bz.find_bugs(ctx)
905 if bugs:
909 if bugs:
906 for bug in bugs:
910 for bug in bugs:
907 bz.update(bug, bugs[bug], ctx)
911 bz.update(bug, bugs[bug], ctx)
908 bz.notify(bugs, util.email(ctx.user()))
912 bz.notify(bugs, util.email(ctx.user()))
909 except Exception, e:
913 except Exception, e:
910 raise util.Abort(_('Bugzilla error: %s') % e)
914 raise util.Abort(_('Bugzilla error: %s') % e)
@@ -1,161 +1,165 @@
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
2 #
2 #
3 # This extension enables removal of file content at a given revision,
3 # This extension enables removal of file content at a given revision,
4 # rewriting the data/metadata of successive revisions to preserve revision log
4 # rewriting the data/metadata of successive revisions to preserve revision log
5 # integrity.
5 # integrity.
6
6
7 """erase file content at a given revision
7 """erase file content at a given revision
8
8
9 The censor command instructs Mercurial to erase all content of a file at a given
9 The censor command instructs Mercurial to erase all content of a file at a given
10 revision *without updating the changeset hash.* This allows existing history to
10 revision *without updating the changeset hash.* This allows existing history to
11 remain valid while preventing future clones/pulls from receiving the erased
11 remain valid while preventing future clones/pulls from receiving the erased
12 data.
12 data.
13
13
14 Typical uses of censor arise from security or legal requirements, including::
14 Typical uses of censor arise from security or legal requirements, including::
15
15
16 * Passwords, private keys, cryptographic material
16 * Passwords, private keys, cryptographic material
17 * Licensed data/code/libraries for which the license has expired
17 * Licensed data/code/libraries for which the license has expired
18 * Personally Identifiable Information or other private data
18 * Personally Identifiable Information or other private data
19
19
20 Censored nodes can interrupt Mercurial's typical operation whenever the excised
20 Censored nodes can interrupt Mercurial's typical operation whenever the excised
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
23 ``hg update``, must be capable of tolerating censored data to continue to
23 ``hg update``, must be capable of tolerating censored data to continue to
24 function in a meaningful way. Such commands only tolerate censored file
24 function in a meaningful way. Such commands only tolerate censored file
25 revisions if they are allowed by the "censor.policy=ignore" config option.
25 revisions if they are allowed by the "censor.policy=ignore" config option.
26 """
26 """
27
27
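# Example usage (revision, file name and tombstone text are invented):
#
#     hg censor -r 2a3f5c1 -t "removed per legal request" secrets.txt
#
# and, to let read commands tolerate the censored revision instead of
# failing:
#
#     [censor]
#     policy = ignore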
28 from mercurial.node import short
28 from mercurial.node import short
29 from mercurial import cmdutil, error, filelog, revlog, scmutil, util
29 from mercurial import cmdutil, error, filelog, revlog, scmutil, util
30 from mercurial.i18n import _
30 from mercurial.i18n import _
31
31
32 cmdtable = {}
32 cmdtable = {}
33 command = cmdutil.command(cmdtable)
33 command = cmdutil.command(cmdtable)
34 # Note for extension authors: ONLY specify testedwith = 'internal' for
35 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
36 # be specifying the version(s) of Mercurial they are tested with, or
37 # leave the attribute unspecified.
34 testedwith = 'internal'
38 testedwith = 'internal'
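# (Illustration: a non-mainline extension would instead declare the Mercurial
# releases it was tested against, e.g. testedwith = '3.3 3.4'; the version
# numbers here are only an example.)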
35
39
36 @command('censor',
40 @command('censor',
37 [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
41 [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
38 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
42 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
39 _('-r REV [-t TEXT] [FILE]'))
43 _('-r REV [-t TEXT] [FILE]'))
40 def censor(ui, repo, path, rev='', tombstone='', **opts):
44 def censor(ui, repo, path, rev='', tombstone='', **opts):
41 if not path:
45 if not path:
42 raise util.Abort(_('must specify file path to censor'))
46 raise util.Abort(_('must specify file path to censor'))
43 if not rev:
47 if not rev:
44 raise util.Abort(_('must specify revision to censor'))
48 raise util.Abort(_('must specify revision to censor'))
45
49
46 flog = repo.file(path)
50 flog = repo.file(path)
47 if not len(flog):
51 if not len(flog):
48 raise util.Abort(_('cannot censor file with no history'))
52 raise util.Abort(_('cannot censor file with no history'))
49
53
50 rev = scmutil.revsingle(repo, rev, rev).rev()
54 rev = scmutil.revsingle(repo, rev, rev).rev()
51 try:
55 try:
52 ctx = repo[rev]
56 ctx = repo[rev]
53 except KeyError:
57 except KeyError:
54 raise util.Abort(_('invalid revision identifier %s') % rev)
58 raise util.Abort(_('invalid revision identifier %s') % rev)
55
59
56 try:
60 try:
57 fctx = ctx.filectx(path)
61 fctx = ctx.filectx(path)
58 except error.LookupError:
62 except error.LookupError:
59 raise util.Abort(_('file does not exist at revision %s') % rev)
63 raise util.Abort(_('file does not exist at revision %s') % rev)
60
64
61 fnode = fctx.filenode()
65 fnode = fctx.filenode()
62 headctxs = [repo[c] for c in repo.heads()]
66 headctxs = [repo[c] for c in repo.heads()]
63 heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
67 heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
64 if heads:
68 if heads:
65 headlist = ', '.join([short(c.node()) for c in heads])
69 headlist = ', '.join([short(c.node()) for c in heads])
66 raise util.Abort(_('cannot censor file in heads (%s)') % headlist,
70 raise util.Abort(_('cannot censor file in heads (%s)') % headlist,
67 hint=_('clean/delete and commit first'))
71 hint=_('clean/delete and commit first'))
68
72
69 wctx = repo[None]
73 wctx = repo[None]
70 wp = wctx.parents()
74 wp = wctx.parents()
71 if ctx.node() in [p.node() for p in wp]:
75 if ctx.node() in [p.node() for p in wp]:
72 raise util.Abort(_('cannot censor working directory'),
76 raise util.Abort(_('cannot censor working directory'),
73 hint=_('clean/delete/update first'))
77 hint=_('clean/delete/update first'))
74
78
75 flogv = flog.version & 0xFFFF
79 flogv = flog.version & 0xFFFF
76 if flogv != revlog.REVLOGNG:
80 if flogv != revlog.REVLOGNG:
77 raise util.Abort(
81 raise util.Abort(
78 _('censor does not support revlog version %d') % (flogv,))
82 _('censor does not support revlog version %d') % (flogv,))
79
83
80 tombstone = filelog.packmeta({"censored": tombstone}, "")
84 tombstone = filelog.packmeta({"censored": tombstone}, "")
81
85
82 crev = fctx.filerev()
86 crev = fctx.filerev()
83
87
84 if len(tombstone) > flog.rawsize(crev):
88 if len(tombstone) > flog.rawsize(crev):
85 raise util.Abort(_(
89 raise util.Abort(_(
86 'censor tombstone must be no longer than censored data'))
90 'censor tombstone must be no longer than censored data'))
87
91
88 # Using two files instead of one makes it easy to rewrite entry-by-entry
92 # Using two files instead of one makes it easy to rewrite entry-by-entry
89 idxread = repo.svfs(flog.indexfile, 'r')
93 idxread = repo.svfs(flog.indexfile, 'r')
90 idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
94 idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
91 if flog.version & revlog.REVLOGNGINLINEDATA:
95 if flog.version & revlog.REVLOGNGINLINEDATA:
92 dataread, datawrite = idxread, idxwrite
96 dataread, datawrite = idxread, idxwrite
93 else:
97 else:
94 dataread = repo.svfs(flog.datafile, 'r')
98 dataread = repo.svfs(flog.datafile, 'r')
95 datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)
99 datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)
96
100
97 # Copy all revlog data up to the entry to be censored.
101 # Copy all revlog data up to the entry to be censored.
98 rio = revlog.revlogio()
102 rio = revlog.revlogio()
99 offset = flog.start(crev)
103 offset = flog.start(crev)
100
104
101 for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
105 for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
102 idxwrite.write(chunk)
106 idxwrite.write(chunk)
103 for chunk in util.filechunkiter(dataread, limit=offset):
107 for chunk in util.filechunkiter(dataread, limit=offset):
104 datawrite.write(chunk)
108 datawrite.write(chunk)
105
109
106 def rewriteindex(r, newoffs, newdata=None):
110 def rewriteindex(r, newoffs, newdata=None):
107 """Rewrite the index entry with a new data offset and optional new data.
111 """Rewrite the index entry with a new data offset and optional new data.
108
112
109 The newdata argument, if given, is a tuple of three positive integers:
113 The newdata argument, if given, is a tuple of three positive integers:
110 (new compressed, new uncompressed, added flag bits).
114 (new compressed, new uncompressed, added flag bits).
111 """
115 """
112 offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
116 offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
113 flags = revlog.gettype(offlags)
117 flags = revlog.gettype(offlags)
114 if newdata:
118 if newdata:
115 comp, uncomp, nflags = newdata
119 comp, uncomp, nflags = newdata
116 flags |= nflags
120 flags |= nflags
117 offlags = revlog.offset_type(newoffs, flags)
121 offlags = revlog.offset_type(newoffs, flags)
118 e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
122 e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
119 idxwrite.write(rio.packentry(e, None, flog.version, r))
123 idxwrite.write(rio.packentry(e, None, flog.version, r))
120 idxread.seek(rio.size, 1)
124 idxread.seek(rio.size, 1)
121
125
122 def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
126 def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
123 """Write the given full text to the filelog with the given data offset.
127 """Write the given full text to the filelog with the given data offset.
124
128
125 Returns:
129 Returns:
126 The integer number of data bytes written, for tracking data offsets.
130 The integer number of data bytes written, for tracking data offsets.
127 """
131 """
128 flag, compdata = flog.compress(data)
132 flag, compdata = flog.compress(data)
129 newcomp = len(flag) + len(compdata)
133 newcomp = len(flag) + len(compdata)
130 rewriteindex(r, offs, (newcomp, len(data), nflags))
134 rewriteindex(r, offs, (newcomp, len(data), nflags))
131 datawrite.write(flag)
135 datawrite.write(flag)
132 datawrite.write(compdata)
136 datawrite.write(compdata)
133 dataread.seek(flog.length(r), 1)
137 dataread.seek(flog.length(r), 1)
134 return newcomp
138 return newcomp
135
139
136 # Rewrite censored revlog entry with (padded) tombstone data.
140 # Rewrite censored revlog entry with (padded) tombstone data.
137 pad = ' ' * (flog.rawsize(crev) - len(tombstone))
141 pad = ' ' * (flog.rawsize(crev) - len(tombstone))
138 offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)
142 offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)
139
143
140 # Rewrite all following filelog revisions fixing up offsets and deltas.
144 # Rewrite all following filelog revisions fixing up offsets and deltas.
141 for srev in xrange(crev + 1, len(flog)):
145 for srev in xrange(crev + 1, len(flog)):
142 if crev in flog.parentrevs(srev):
146 if crev in flog.parentrevs(srev):
143 # Immediate children of censored node must be re-added as fulltext.
147 # Immediate children of censored node must be re-added as fulltext.
144 try:
148 try:
145 revdata = flog.revision(srev)
149 revdata = flog.revision(srev)
146 except error.CensoredNodeError, e:
150 except error.CensoredNodeError, e:
147 revdata = e.tombstone
151 revdata = e.tombstone
148 dlen = rewrite(srev, offset, revdata)
152 dlen = rewrite(srev, offset, revdata)
149 else:
153 else:
150 # Copy any other revision data verbatim after fixing up the offset.
154 # Copy any other revision data verbatim after fixing up the offset.
151 rewriteindex(srev, offset)
155 rewriteindex(srev, offset)
152 dlen = flog.length(srev)
156 dlen = flog.length(srev)
153 for chunk in util.filechunkiter(dataread, limit=dlen):
157 for chunk in util.filechunkiter(dataread, limit=dlen):
154 datawrite.write(chunk)
158 datawrite.write(chunk)
155 offset += dlen
159 offset += dlen
156
160
157 idxread.close()
161 idxread.close()
158 idxwrite.close()
162 idxwrite.close()
159 if dataread is not idxread:
163 if dataread is not idxread:
160 dataread.close()
164 dataread.close()
161 datawrite.close()
165 datawrite.close()
@@ -1,51 +1,55 @@
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 #
4 #
5 # Author(s):
5 # Author(s):
6 # Thomas Arendsen Hein <thomas@intevation.de>
6 # Thomas Arendsen Hein <thomas@intevation.de>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2 or any later version.
9 # GNU General Public License version 2 or any later version.
10
10
11 '''command to display child changesets (DEPRECATED)
11 '''command to display child changesets (DEPRECATED)
12
12
13 This extension is deprecated. You should use :hg:`log -r
13 This extension is deprecated. You should use :hg:`log -r
14 "children(REV)"` instead.
14 "children(REV)"` instead.
15 '''
15 '''
16
16
17 from mercurial import cmdutil
17 from mercurial import cmdutil
18 from mercurial.commands import templateopts
18 from mercurial.commands import templateopts
19 from mercurial.i18n import _
19 from mercurial.i18n import _
20
20
21 cmdtable = {}
21 cmdtable = {}
22 command = cmdutil.command(cmdtable)
22 command = cmdutil.command(cmdtable)
23 # Note for extension authors: ONLY specify testedwith = 'internal' for
24 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
25 # be specifying the version(s) of Mercurial they are tested with, or
26 # leave the attribute unspecified.
23 testedwith = 'internal'
27 testedwith = 'internal'
24
28
25 @command('children',
29 @command('children',
26 [('r', 'rev', '',
30 [('r', 'rev', '',
27 _('show children of the specified revision'), _('REV')),
31 _('show children of the specified revision'), _('REV')),
28 ] + templateopts,
32 ] + templateopts,
29 _('hg children [-r REV] [FILE]'),
33 _('hg children [-r REV] [FILE]'),
30 inferrepo=True)
34 inferrepo=True)
31 def children(ui, repo, file_=None, **opts):
35 def children(ui, repo, file_=None, **opts):
32 """show the children of the given or working directory revision
36 """show the children of the given or working directory revision
33
37
34 Print the children of the working directory's revisions. If a
38 Print the children of the working directory's revisions. If a
35 revision is given via -r/--rev, the children of that revision will
39 revision is given via -r/--rev, the children of that revision will
36 be printed. If a file argument is given, the revision in which the
40 be printed. If a file argument is given, the revision in which the
37 file was last changed (after the working directory revision or the
41 file was last changed (after the working directory revision or the
38 argument to --rev, if given) is printed.
42 argument to --rev, if given) is printed.
39 """
43 """
40 rev = opts.get('rev')
44 rev = opts.get('rev')
41 if file_:
45 if file_:
42 fctx = repo.filectx(file_, changeid=rev)
46 fctx = repo.filectx(file_, changeid=rev)
43 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
47 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
44 else:
48 else:
45 ctx = repo[rev]
49 ctx = repo[rev]
46 childctxs = ctx.children()
50 childctxs = ctx.children()
47
51
48 displayer = cmdutil.show_changeset(ui, repo, opts)
52 displayer = cmdutil.show_changeset(ui, repo, opts)
49 for cctx in childctxs:
53 for cctx in childctxs:
50 displayer.show(cctx)
54 displayer.show(cctx)
51 displayer.close()
55 displayer.close()
@@ -1,201 +1,205 @@
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import patch, cmdutil, scmutil, util, commands
12 from mercurial import patch, cmdutil, scmutil, util, commands
13 from mercurial import encoding
13 from mercurial import encoding
14 import os
14 import os
15 import time, datetime
15 import time, datetime
16
16
17 cmdtable = {}
17 cmdtable = {}
18 command = cmdutil.command(cmdtable)
18 command = cmdutil.command(cmdtable)
19 # Note for extension authors: ONLY specify testedwith = 'internal' for
20 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
21 # be specifying the version(s) of Mercurial they are tested with, or
22 # leave the attribute unspecified.
19 testedwith = 'internal'
23 testedwith = 'internal'
20
24
21 def maketemplater(ui, repo, tmpl):
25 def maketemplater(ui, repo, tmpl):
22 try:
26 try:
23 t = cmdutil.changeset_templater(ui, repo, False, None, tmpl,
27 t = cmdutil.changeset_templater(ui, repo, False, None, tmpl,
24 None, False)
28 None, False)
25 except SyntaxError, inst:
29 except SyntaxError, inst:
26 raise util.Abort(inst.args[0])
30 raise util.Abort(inst.args[0])
27 return t
31 return t
28
32
29 def changedlines(ui, repo, ctx1, ctx2, fns):
33 def changedlines(ui, repo, ctx1, ctx2, fns):
30 added, removed = 0, 0
34 added, removed = 0, 0
31 fmatch = scmutil.matchfiles(repo, fns)
35 fmatch = scmutil.matchfiles(repo, fns)
32 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
36 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
33 for l in diff.split('\n'):
37 for l in diff.split('\n'):
34 if l.startswith("+") and not l.startswith("+++ "):
38 if l.startswith("+") and not l.startswith("+++ "):
35 added += 1
39 added += 1
36 elif l.startswith("-") and not l.startswith("--- "):
40 elif l.startswith("-") and not l.startswith("--- "):
37 removed += 1
41 removed += 1
38 return (added, removed)
42 return (added, removed)
39
43
40 def countrate(ui, repo, amap, *pats, **opts):
44 def countrate(ui, repo, amap, *pats, **opts):
41 """Calculate stats"""
45 """Calculate stats"""
42 if opts.get('dateformat'):
46 if opts.get('dateformat'):
43 def getkey(ctx):
47 def getkey(ctx):
44 t, tz = ctx.date()
48 t, tz = ctx.date()
45 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
49 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
46 return date.strftime(opts['dateformat'])
50 return date.strftime(opts['dateformat'])
47 else:
51 else:
48 tmpl = opts.get('oldtemplate') or opts.get('template')
52 tmpl = opts.get('oldtemplate') or opts.get('template')
49 tmpl = maketemplater(ui, repo, tmpl)
53 tmpl = maketemplater(ui, repo, tmpl)
50 def getkey(ctx):
54 def getkey(ctx):
51 ui.pushbuffer()
55 ui.pushbuffer()
52 tmpl.show(ctx)
56 tmpl.show(ctx)
53 return ui.popbuffer()
57 return ui.popbuffer()
54
58
55 state = {'count': 0}
59 state = {'count': 0}
56 rate = {}
60 rate = {}
57 df = False
61 df = False
58 if opts.get('date'):
62 if opts.get('date'):
59 df = util.matchdate(opts['date'])
63 df = util.matchdate(opts['date'])
60
64
61 m = scmutil.match(repo[None], pats, opts)
65 m = scmutil.match(repo[None], pats, opts)
62 def prep(ctx, fns):
66 def prep(ctx, fns):
63 rev = ctx.rev()
67 rev = ctx.rev()
64 if df and not df(ctx.date()[0]): # doesn't match date format
68 if df and not df(ctx.date()[0]): # doesn't match date format
65 return
69 return
66
70
67 key = getkey(ctx).strip()
71 key = getkey(ctx).strip()
68 key = amap.get(key, key) # alias remap
72 key = amap.get(key, key) # alias remap
69 if opts.get('changesets'):
73 if opts.get('changesets'):
70 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
74 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
71 else:
75 else:
72 parents = ctx.parents()
76 parents = ctx.parents()
73 if len(parents) > 1:
77 if len(parents) > 1:
74 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
78 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
75 return
79 return
76
80
77 ctx1 = parents[0]
81 ctx1 = parents[0]
78 lines = changedlines(ui, repo, ctx1, ctx, fns)
82 lines = changedlines(ui, repo, ctx1, ctx, fns)
79 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
83 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
80
84
81 state['count'] += 1
85 state['count'] += 1
82 ui.progress(_('analyzing'), state['count'], total=len(repo))
86 ui.progress(_('analyzing'), state['count'], total=len(repo))
83
87
84 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
88 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
85 continue
89 continue
86
90
87 ui.progress(_('analyzing'), None)
91 ui.progress(_('analyzing'), None)
88
92
89 return rate
93 return rate
90
94
91
95
92 @command('churn',
96 @command('churn',
93 [('r', 'rev', [],
97 [('r', 'rev', [],
94 _('count rate for the specified revision or revset'), _('REV')),
98 _('count rate for the specified revision or revset'), _('REV')),
95 ('d', 'date', '',
99 ('d', 'date', '',
96 _('count rate for revisions matching date spec'), _('DATE')),
100 _('count rate for revisions matching date spec'), _('DATE')),
97 ('t', 'oldtemplate', '',
101 ('t', 'oldtemplate', '',
98 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
102 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
99 ('T', 'template', '{author|email}',
103 ('T', 'template', '{author|email}',
100 _('template to group changesets'), _('TEMPLATE')),
104 _('template to group changesets'), _('TEMPLATE')),
101 ('f', 'dateformat', '',
105 ('f', 'dateformat', '',
102 _('strftime-compatible format for grouping by date'), _('FORMAT')),
106 _('strftime-compatible format for grouping by date'), _('FORMAT')),
103 ('c', 'changesets', False, _('count rate by number of changesets')),
107 ('c', 'changesets', False, _('count rate by number of changesets')),
104 ('s', 'sort', False, _('sort by key (default: sort by count)')),
108 ('s', 'sort', False, _('sort by key (default: sort by count)')),
105 ('', 'diffstat', False, _('display added/removed lines separately')),
109 ('', 'diffstat', False, _('display added/removed lines separately')),
106 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
110 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
107 ] + commands.walkopts,
111 ] + commands.walkopts,
108 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
112 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
109 inferrepo=True)
113 inferrepo=True)
110 def churn(ui, repo, *pats, **opts):
114 def churn(ui, repo, *pats, **opts):
111 '''histogram of changes to the repository
115 '''histogram of changes to the repository
112
116
113 This command will display a histogram representing the number
117 This command will display a histogram representing the number
114 of changed lines or revisions, grouped according to the given
118 of changed lines or revisions, grouped according to the given
115 template. The default template will group changes by author.
119 template. The default template will group changes by author.
116 The --dateformat option may be used to group the results by
120 The --dateformat option may be used to group the results by
117 date instead.
121 date instead.
118
122
119 Statistics are based on the number of changed lines, or
123 Statistics are based on the number of changed lines, or
120 alternatively the number of matching revisions if the
124 alternatively the number of matching revisions if the
121 --changesets option is specified.
125 --changesets option is specified.
122
126
123 Examples::
127 Examples::
124
128
125 # display count of changed lines for every committer
129 # display count of changed lines for every committer
126 hg churn -T "{author|email}"
130 hg churn -T "{author|email}"
127
131
128 # display daily activity graph
132 # display daily activity graph
129 hg churn -f "%H" -s -c
133 hg churn -f "%H" -s -c
130
134
131 # display activity of developers by month
135 # display activity of developers by month
132 hg churn -f "%Y-%m" -s -c
136 hg churn -f "%Y-%m" -s -c
133
137
134 # display count of lines changed in every year
138 # display count of lines changed in every year
135 hg churn -f "%Y" -s
139 hg churn -f "%Y" -s
136
140
137 It is possible to map alternate email addresses to a main address
141 It is possible to map alternate email addresses to a main address
138 by providing a file using the following format::
142 by providing a file using the following format::
139
143
140 <alias email> = <actual email>
144 <alias email> = <actual email>
141
145
142 Such a file may be specified with the --aliases option; otherwise
146 Such a file may be specified with the --aliases option; otherwise
143 a .hgchurn file will be looked for in the working directory root.
147 a .hgchurn file will be looked for in the working directory root.
144 Aliases will be split from the rightmost "=".
148 Aliases will be split from the rightmost "=".
145 '''
149 '''
146 def pad(s, l):
150 def pad(s, l):
147 return s + " " * (l - encoding.colwidth(s))
151 return s + " " * (l - encoding.colwidth(s))
148
152
149 amap = {}
153 amap = {}
150 aliases = opts.get('aliases')
154 aliases = opts.get('aliases')
151 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
155 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
152 aliases = repo.wjoin('.hgchurn')
156 aliases = repo.wjoin('.hgchurn')
153 if aliases:
157 if aliases:
154 for l in open(aliases, "r"):
158 for l in open(aliases, "r"):
155 try:
159 try:
156 alias, actual = l.rsplit('=' in l and '=' or None, 1)
160 alias, actual = l.rsplit('=' in l and '=' or None, 1)
157 amap[alias.strip()] = actual.strip()
161 amap[alias.strip()] = actual.strip()
158 except ValueError:
162 except ValueError:
159 l = l.strip()
163 l = l.strip()
160 if l:
164 if l:
161 ui.warn(_("skipping malformed alias: %s\n") % l)
165 ui.warn(_("skipping malformed alias: %s\n") % l)
162 continue
166 continue
163
167
164 rate = countrate(ui, repo, amap, *pats, **opts).items()
168 rate = countrate(ui, repo, amap, *pats, **opts).items()
165 if not rate:
169 if not rate:
166 return
170 return
167
171
168 if opts.get('sort'):
172 if opts.get('sort'):
169 rate.sort()
173 rate.sort()
170 else:
174 else:
171 rate.sort(key=lambda x: (-sum(x[1]), x))
175 rate.sort(key=lambda x: (-sum(x[1]), x))
172
176
173 # Be careful not to have a zero maxcount (issue833)
177 # Be careful not to have a zero maxcount (issue833)
174 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
178 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
175 maxname = max(len(k) for k, v in rate)
179 maxname = max(len(k) for k, v in rate)
176
180
177 ttywidth = ui.termwidth()
181 ttywidth = ui.termwidth()
178 ui.debug("assuming %i character terminal\n" % ttywidth)
182 ui.debug("assuming %i character terminal\n" % ttywidth)
179 width = ttywidth - maxname - 2 - 2 - 2
183 width = ttywidth - maxname - 2 - 2 - 2
180
184
181 if opts.get('diffstat'):
185 if opts.get('diffstat'):
182 width -= 15
186 width -= 15
183 def format(name, diffstat):
187 def format(name, diffstat):
184 added, removed = diffstat
188 added, removed = diffstat
185 return "%s %15s %s%s\n" % (pad(name, maxname),
189 return "%s %15s %s%s\n" % (pad(name, maxname),
186 '+%d/-%d' % (added, removed),
190 '+%d/-%d' % (added, removed),
187 ui.label('+' * charnum(added),
191 ui.label('+' * charnum(added),
188 'diffstat.inserted'),
192 'diffstat.inserted'),
189 ui.label('-' * charnum(removed),
193 ui.label('-' * charnum(removed),
190 'diffstat.deleted'))
194 'diffstat.deleted'))
191 else:
195 else:
192 width -= 6
196 width -= 6
193 def format(name, count):
197 def format(name, count):
194 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
198 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
195 '*' * charnum(sum(count)))
199 '*' * charnum(sum(count)))
196
200
197 def charnum(count):
201 def charnum(count):
198 return int(round(count * width / maxcount))
202 return int(round(count * width / maxcount))
199
203
200 for name, count in rate:
204 for name, count in rate:
201 ui.write(format(name, count))
205 ui.write(format(name, count))
@@ -1,683 +1,687 @@
1 # color.py color output for Mercurial commands
1 # color.py color output for Mercurial commands
2 #
2 #
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''colorize output from some commands
8 '''colorize output from some commands
9
9
10 The color extension colorizes output from several Mercurial commands.
10 The color extension colorizes output from several Mercurial commands.
11 For example, the diff command shows additions in green and deletions
11 For example, the diff command shows additions in green and deletions
12 in red, while the status command shows modified files in magenta. Many
12 in red, while the status command shows modified files in magenta. Many
13 other commands have analogous colors. It is possible to customize
13 other commands have analogous colors. It is possible to customize
14 these colors.
14 these colors.
15
15
16 Effects
16 Effects
17 -------
17 -------
18
18
19 Other effects in addition to color, like bold and underlined text, are
19 Other effects in addition to color, like bold and underlined text, are
20 also available. By default, the terminfo database is used to find the
20 also available. By default, the terminfo database is used to find the
21 terminal codes used to change color and effect. If terminfo is not
21 terminal codes used to change color and effect. If terminfo is not
22 available, then effects are rendered with the ECMA-48 SGR control
22 available, then effects are rendered with the ECMA-48 SGR control
23 function (aka ANSI escape codes).
23 function (aka ANSI escape codes).
24
24
25 The available effects in terminfo mode are 'blink', 'bold', 'dim',
25 The available effects in terminfo mode are 'blink', 'bold', 'dim',
26 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
26 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
27 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
27 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
28 'underline'. How each is rendered depends on the terminal emulator.
28 'underline'. How each is rendered depends on the terminal emulator.
29 Some may not be available for a given terminal type, and will be
29 Some may not be available for a given terminal type, and will be
30 silently ignored.
30 silently ignored.
31
31
32 Labels
32 Labels
33 ------
33 ------
34
34
35 Text receives color effects depending on the labels that it has. Many
35 Text receives color effects depending on the labels that it has. Many
36 default Mercurial commands emit labelled text. You can also define
36 default Mercurial commands emit labelled text. You can also define
37 your own labels in templates using the label function, see :hg:`help
37 your own labels in templates using the label function, see :hg:`help
38 templates`. A single portion of text may have more than one label. In
38 templates`. A single portion of text may have more than one label. In
39 that case, effects given to the last label will override any other
39 that case, effects given to the last label will override any other
40 effects. This includes the special "none" effect, which nullifies
40 effects. This includes the special "none" effect, which nullifies
41 other effects.
41 other effects.
42
42
43 Labels are normally invisible. In order to see these labels and their
43 Labels are normally invisible. In order to see these labels and their
44 position in the text, use the global --color=debug option. The same
44 position in the text, use the global --color=debug option. The same
45 anchor text may be associated to multiple labels, e.g.
45 anchor text may be associated to multiple labels, e.g.
46
46
47 [log.changeset changeset.secret|changeset: 22611:6f0a53c8f587]
47 [log.changeset changeset.secret|changeset: 22611:6f0a53c8f587]
48
48
49 The following are the default effects for some default labels. Default
49 The following are the default effects for some default labels. Default
50 effects may be overridden from your configuration file::
50 effects may be overridden from your configuration file::
51
51
52 [color]
52 [color]
53 status.modified = blue bold underline red_background
53 status.modified = blue bold underline red_background
54 status.added = green bold
54 status.added = green bold
55 status.removed = red bold blue_background
55 status.removed = red bold blue_background
56 status.deleted = cyan bold underline
56 status.deleted = cyan bold underline
57 status.unknown = magenta bold underline
57 status.unknown = magenta bold underline
58 status.ignored = black bold
58 status.ignored = black bold
59
59
60 # 'none' turns off all effects
60 # 'none' turns off all effects
61 status.clean = none
61 status.clean = none
62 status.copied = none
62 status.copied = none
63
63
64 qseries.applied = blue bold underline
64 qseries.applied = blue bold underline
65 qseries.unapplied = black bold
65 qseries.unapplied = black bold
66 qseries.missing = red bold
66 qseries.missing = red bold
67
67
68 diff.diffline = bold
68 diff.diffline = bold
69 diff.extended = cyan bold
69 diff.extended = cyan bold
70 diff.file_a = red bold
70 diff.file_a = red bold
71 diff.file_b = green bold
71 diff.file_b = green bold
72 diff.hunk = magenta
72 diff.hunk = magenta
73 diff.deleted = red
73 diff.deleted = red
74 diff.inserted = green
74 diff.inserted = green
75 diff.changed = white
75 diff.changed = white
76 diff.tab =
76 diff.tab =
77 diff.trailingwhitespace = bold red_background
77 diff.trailingwhitespace = bold red_background
78
78
79 # Blank so it inherits the style of the surrounding label
79 # Blank so it inherits the style of the surrounding label
80 changeset.public =
80 changeset.public =
81 changeset.draft =
81 changeset.draft =
82 changeset.secret =
82 changeset.secret =
83
83
84 resolve.unresolved = red bold
84 resolve.unresolved = red bold
85 resolve.resolved = green bold
85 resolve.resolved = green bold
86
86
87 bookmarks.current = green
87 bookmarks.current = green
88
88
89 branches.active = none
89 branches.active = none
90 branches.closed = black bold
90 branches.closed = black bold
91 branches.current = green
91 branches.current = green
92 branches.inactive = none
92 branches.inactive = none
93
93
94 tags.normal = green
94 tags.normal = green
95 tags.local = black bold
95 tags.local = black bold
96
96
97 rebase.rebased = blue
97 rebase.rebased = blue
98 rebase.remaining = red bold
98 rebase.remaining = red bold
99
99
100 shelve.age = cyan
100 shelve.age = cyan
101 shelve.newest = green bold
101 shelve.newest = green bold
102 shelve.name = blue bold
102 shelve.name = blue bold
103
103
104 histedit.remaining = red bold
104 histedit.remaining = red bold
105
105
106 Custom colors
106 Custom colors
107 -------------
107 -------------
108
108
109 Because there are only eight standard colors, this module allows you
109 Because there are only eight standard colors, this module allows you
110 to define color names for other color slots which might be available
110 to define color names for other color slots which might be available
111 for your terminal type, assuming terminfo mode. For instance::
111 for your terminal type, assuming terminfo mode. For instance::
112
112
113 color.brightblue = 12
113 color.brightblue = 12
114 color.pink = 207
114 color.pink = 207
115 color.orange = 202
115 color.orange = 202
116
116
117 to set 'brightblue' to color slot 12 (useful for 16 color terminals
117 to set 'brightblue' to color slot 12 (useful for 16 color terminals
118 that have brighter colors defined in the upper eight), and 'pink' and
118 that have brighter colors defined in the upper eight), and 'pink' and
119 'orange' to colors in 256-color xterm's default color cube. These
119 'orange' to colors in 256-color xterm's default color cube. These
120 defined colors may then be used as any of the pre-defined eight,
120 defined colors may then be used as any of the pre-defined eight,
121 including appending '_background' to set the background to that color.
121 including appending '_background' to set the background to that color.
122
122
123 Modes
123 Modes
124 -----
124 -----
125
125
126 By default, the color extension will use ANSI mode (or win32 mode on
126 By default, the color extension will use ANSI mode (or win32 mode on
127 Windows) if it detects a terminal. To override auto mode (to enable
127 Windows) if it detects a terminal. To override auto mode (to enable
128 terminfo mode, for example), set the following configuration option::
128 terminfo mode, for example), set the following configuration option::
129
129
130 [color]
130 [color]
131 mode = terminfo
131 mode = terminfo
132
132
133 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
133 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
134 disable color.
134 disable color.
135
135
136 Note that on some systems, terminfo mode may cause problems when using
136 Note that on some systems, terminfo mode may cause problems when using
137 color with the pager extension and less -R. less with the -R option
137 color with the pager extension and less -R. less with the -R option
138 will only display ECMA-48 color codes, and terminfo mode may sometimes
138 will only display ECMA-48 color codes, and terminfo mode may sometimes
139 emit codes that less doesn't understand. You can work around this by
139 emit codes that less doesn't understand. You can work around this by
140 either using ansi mode (or auto mode), or by using less -r (which will
140 either using ansi mode (or auto mode), or by using less -r (which will
141 pass through all terminal control codes, not just color control
141 pass through all terminal control codes, not just color control
142 codes).
142 codes).
143
143
144 On some systems (such as MSYS in Windows), the terminal may support
144 On some systems (such as MSYS in Windows), the terminal may support
145 a different color mode than the pager (activated via the "pager"
145 a different color mode than the pager (activated via the "pager"
146 extension). It is possible to define separate modes depending on whether
146 extension). It is possible to define separate modes depending on whether
147 the pager is active::
147 the pager is active::
148
148
149 [color]
149 [color]
150 mode = auto
150 mode = auto
151 pagermode = ansi
151 pagermode = ansi
152
152
153 If ``pagermode`` is not defined, the ``mode`` will be used.
153 If ``pagermode`` is not defined, the ``mode`` will be used.
154 '''
154 '''
155
155
156 import os
156 import os
157
157
158 from mercurial import cmdutil, commands, dispatch, extensions, subrepo, util
158 from mercurial import cmdutil, commands, dispatch, extensions, subrepo, util
159 from mercurial import ui as uimod
159 from mercurial import ui as uimod
160 from mercurial import templater, error
160 from mercurial import templater, error
161 from mercurial.i18n import _
161 from mercurial.i18n import _
162
162
163 cmdtable = {}
163 cmdtable = {}
164 command = cmdutil.command(cmdtable)
164 command = cmdutil.command(cmdtable)
165 # Note for extension authors: ONLY specify testedwith = 'internal' for
166 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
167 # be specifying the version(s) of Mercurial they are tested with, or
168 # leave the attribute unspecified.
165 testedwith = 'internal'
169 testedwith = 'internal'
166
170
167 # start and stop parameters for effects
171 # start and stop parameters for effects
168 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
172 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
169 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
173 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
170 'italic': 3, 'underline': 4, 'inverse': 7, 'dim': 2,
174 'italic': 3, 'underline': 4, 'inverse': 7, 'dim': 2,
171 'black_background': 40, 'red_background': 41,
175 'black_background': 40, 'red_background': 41,
172 'green_background': 42, 'yellow_background': 43,
176 'green_background': 42, 'yellow_background': 43,
173 'blue_background': 44, 'purple_background': 45,
177 'blue_background': 44, 'purple_background': 45,
174 'cyan_background': 46, 'white_background': 47}
178 'cyan_background': 46, 'white_background': 47}
175
179
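# Sketch (not part of the extension): how an effect string such as 'red bold'
# maps onto an ECMA-48 SGR escape sequence using the table above; the ANSI
# renderer in this extension does essentially this when terminfo is not used.
def _sgr_example(text, effects):
    codes = ';'.join(str(_effects[e]) for e in effects.split())
    return '\033[%sm%s\033[0m' % (codes, text)
# _sgr_example('modified file', 'red bold') -> '\x1b[31;1mmodified file\x1b[0m'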
176 def _terminfosetup(ui, mode):
180 def _terminfosetup(ui, mode):
177 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
181 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
178
182
179 global _terminfo_params
183 global _terminfo_params
180 # If we failed to load curses, we go ahead and return.
184 # If we failed to load curses, we go ahead and return.
181 if not _terminfo_params:
185 if not _terminfo_params:
182 return
186 return
183 # Otherwise, see what the config file says.
187 # Otherwise, see what the config file says.
184 if mode not in ('auto', 'terminfo'):
188 if mode not in ('auto', 'terminfo'):
185 return
189 return
186
190
187 _terminfo_params.update((key[6:], (False, int(val)))
191 _terminfo_params.update((key[6:], (False, int(val)))
188 for key, val in ui.configitems('color')
192 for key, val in ui.configitems('color')
189 if key.startswith('color.'))
193 if key.startswith('color.'))
190
194
191 try:
195 try:
192 curses.setupterm()
196 curses.setupterm()
193 except curses.error, e:
197 except curses.error, e:
194 _terminfo_params = {}
198 _terminfo_params = {}
195 return
199 return
196
200
197 for key, (b, e) in _terminfo_params.items():
201 for key, (b, e) in _terminfo_params.items():
198 if not b:
202 if not b:
199 continue
203 continue
200 if not curses.tigetstr(e):
204 if not curses.tigetstr(e):
201 # Most terminals don't support dim, invis, etc, so don't be
205 # Most terminals don't support dim, invis, etc, so don't be
202 # noisy and use ui.debug().
206 # noisy and use ui.debug().
203 ui.debug("no terminfo entry for %s\n" % e)
207 ui.debug("no terminfo entry for %s\n" % e)
204 del _terminfo_params[key]
208 del _terminfo_params[key]
205 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
209 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
206 # Only warn about missing terminfo entries if we explicitly asked for
210 # Only warn about missing terminfo entries if we explicitly asked for
207 # terminfo mode.
211 # terminfo mode.
208 if mode == "terminfo":
212 if mode == "terminfo":
209 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
213 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
210 "ECMA-48 color\n"))
214 "ECMA-48 color\n"))
211 _terminfo_params = {}
215 _terminfo_params = {}
212
216
213 def _modesetup(ui, coloropt):
217 def _modesetup(ui, coloropt):
214 global _terminfo_params
218 global _terminfo_params
215
219
216 if coloropt == 'debug':
220 if coloropt == 'debug':
217 return 'debug'
221 return 'debug'
218
222
219 auto = (coloropt == 'auto')
223 auto = (coloropt == 'auto')
220 always = not auto and util.parsebool(coloropt)
224 always = not auto and util.parsebool(coloropt)
221 if not always and not auto:
225 if not always and not auto:
222 return None
226 return None
223
227
224 formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
228 formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
225
229
226 mode = ui.config('color', 'mode', 'auto')
230 mode = ui.config('color', 'mode', 'auto')
227
231
228 # If pager is active, color.pagermode overrides color.mode.
232 # If pager is active, color.pagermode overrides color.mode.
229 if getattr(ui, 'pageractive', False):
233 if getattr(ui, 'pageractive', False):
230 mode = ui.config('color', 'pagermode', mode)
234 mode = ui.config('color', 'pagermode', mode)
231
235
232 realmode = mode
236 realmode = mode
233 if mode == 'auto':
237 if mode == 'auto':
234 if os.name == 'nt':
238 if os.name == 'nt':
235 term = os.environ.get('TERM')
239 term = os.environ.get('TERM')
236 # TERM won't be defined in a vanilla cmd.exe environment.
240 # TERM won't be defined in a vanilla cmd.exe environment.
237
241
238 # UNIX-like environments on Windows such as Cygwin and MSYS will
242 # UNIX-like environments on Windows such as Cygwin and MSYS will
239 # set TERM. They appear to make a best effort attempt at setting it
243 # set TERM. They appear to make a best effort attempt at setting it
240 # to something appropriate. However, not all environments with TERM
244 # to something appropriate. However, not all environments with TERM
241 # defined support ANSI. Since "ansi" could result in terminal
245 # defined support ANSI. Since "ansi" could result in terminal
242 # gibberish, we err on the side of selecting "win32". However, if
246 # gibberish, we err on the side of selecting "win32". However, if
243 # w32effects is not defined, we almost certainly don't support
247 # w32effects is not defined, we almost certainly don't support
244 # "win32", so don't even try.
248 # "win32", so don't even try.
245 if (term and 'xterm' in term) or not w32effects:
249 if (term and 'xterm' in term) or not w32effects:
246 realmode = 'ansi'
250 realmode = 'ansi'
247 else:
251 else:
248 realmode = 'win32'
252 realmode = 'win32'
249 else:
253 else:
250 realmode = 'ansi'
254 realmode = 'ansi'
251
255
252 def modewarn():
256 def modewarn():
253 # only warn if color.mode was explicitly set and we're in
257 # only warn if color.mode was explicitly set and we're in
254 # an interactive terminal
258 # an interactive terminal
255 if mode == realmode and ui.interactive():
259 if mode == realmode and ui.interactive():
256 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
260 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
257
261
258 if realmode == 'win32':
262 if realmode == 'win32':
259 _terminfo_params = {}
263 _terminfo_params = {}
260 if not w32effects:
264 if not w32effects:
261 modewarn()
265 modewarn()
262 return None
266 return None
263 _effects.update(w32effects)
267 _effects.update(w32effects)
264 elif realmode == 'ansi':
268 elif realmode == 'ansi':
265 _terminfo_params = {}
269 _terminfo_params = {}
266 elif realmode == 'terminfo':
270 elif realmode == 'terminfo':
267 _terminfosetup(ui, mode)
271 _terminfosetup(ui, mode)
268 if not _terminfo_params:
272 if not _terminfo_params:
269 ## FIXME Shouldn't we return None in this case too?
273 ## FIXME Shouldn't we return None in this case too?
270 modewarn()
274 modewarn()
271 realmode = 'ansi'
275 realmode = 'ansi'
272 else:
276 else:
273 return None
277 return None
274
278
275 if always or (auto and formatted):
279 if always or (auto and formatted):
276 return realmode
280 return realmode
277 return None
281 return None
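For reference, the settings consulted above live in the ``[color]`` section of an hgrc; a minimal sketch (the chosen modes are just one possible configuration):

[color]
mode = terminfo
# consulted instead of color.mode while the pager is active
pagermode = ansi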
278
282
279 try:
283 try:
280 import curses
284 import curses
281 # Mapping from effect name to terminfo attribute name or color number.
285 # Mapping from effect name to terminfo attribute name or color number.
282 # This will also force-load the curses module.
286 # This will also force-load the curses module.
283 _terminfo_params = {'none': (True, 'sgr0'),
287 _terminfo_params = {'none': (True, 'sgr0'),
284 'standout': (True, 'smso'),
288 'standout': (True, 'smso'),
285 'underline': (True, 'smul'),
289 'underline': (True, 'smul'),
286 'reverse': (True, 'rev'),
290 'reverse': (True, 'rev'),
287 'inverse': (True, 'rev'),
291 'inverse': (True, 'rev'),
288 'blink': (True, 'blink'),
292 'blink': (True, 'blink'),
289 'dim': (True, 'dim'),
293 'dim': (True, 'dim'),
290 'bold': (True, 'bold'),
294 'bold': (True, 'bold'),
291 'invisible': (True, 'invis'),
295 'invisible': (True, 'invis'),
292 'italic': (True, 'sitm'),
296 'italic': (True, 'sitm'),
293 'black': (False, curses.COLOR_BLACK),
297 'black': (False, curses.COLOR_BLACK),
294 'red': (False, curses.COLOR_RED),
298 'red': (False, curses.COLOR_RED),
295 'green': (False, curses.COLOR_GREEN),
299 'green': (False, curses.COLOR_GREEN),
296 'yellow': (False, curses.COLOR_YELLOW),
300 'yellow': (False, curses.COLOR_YELLOW),
297 'blue': (False, curses.COLOR_BLUE),
301 'blue': (False, curses.COLOR_BLUE),
298 'magenta': (False, curses.COLOR_MAGENTA),
302 'magenta': (False, curses.COLOR_MAGENTA),
299 'cyan': (False, curses.COLOR_CYAN),
303 'cyan': (False, curses.COLOR_CYAN),
300 'white': (False, curses.COLOR_WHITE)}
304 'white': (False, curses.COLOR_WHITE)}
301 except ImportError:
305 except ImportError:
302 _terminfo_params = {}
306 _terminfo_params = {}
303
307
304 _styles = {'grep.match': 'red bold',
308 _styles = {'grep.match': 'red bold',
305 'grep.linenumber': 'green',
309 'grep.linenumber': 'green',
306 'grep.rev': 'green',
310 'grep.rev': 'green',
307 'grep.change': 'green',
311 'grep.change': 'green',
308 'grep.sep': 'cyan',
312 'grep.sep': 'cyan',
309 'grep.filename': 'magenta',
313 'grep.filename': 'magenta',
310 'grep.user': 'magenta',
314 'grep.user': 'magenta',
311 'grep.date': 'magenta',
315 'grep.date': 'magenta',
312 'bookmarks.current': 'green',
316 'bookmarks.current': 'green',
313 'branches.active': 'none',
317 'branches.active': 'none',
314 'branches.closed': 'black bold',
318 'branches.closed': 'black bold',
315 'branches.current': 'green',
319 'branches.current': 'green',
316 'branches.inactive': 'none',
320 'branches.inactive': 'none',
317 'diff.changed': 'white',
321 'diff.changed': 'white',
318 'diff.deleted': 'red',
322 'diff.deleted': 'red',
319 'diff.diffline': 'bold',
323 'diff.diffline': 'bold',
320 'diff.extended': 'cyan bold',
324 'diff.extended': 'cyan bold',
321 'diff.file_a': 'red bold',
325 'diff.file_a': 'red bold',
322 'diff.file_b': 'green bold',
326 'diff.file_b': 'green bold',
323 'diff.hunk': 'magenta',
327 'diff.hunk': 'magenta',
324 'diff.inserted': 'green',
328 'diff.inserted': 'green',
325 'diff.tab': '',
329 'diff.tab': '',
326 'diff.trailingwhitespace': 'bold red_background',
330 'diff.trailingwhitespace': 'bold red_background',
327 'changeset.public' : '',
331 'changeset.public' : '',
328 'changeset.draft' : '',
332 'changeset.draft' : '',
329 'changeset.secret' : '',
333 'changeset.secret' : '',
330 'diffstat.deleted': 'red',
334 'diffstat.deleted': 'red',
331 'diffstat.inserted': 'green',
335 'diffstat.inserted': 'green',
332 'histedit.remaining': 'red bold',
336 'histedit.remaining': 'red bold',
333 'ui.prompt': 'yellow',
337 'ui.prompt': 'yellow',
334 'log.changeset': 'yellow',
338 'log.changeset': 'yellow',
335 'patchbomb.finalsummary': '',
339 'patchbomb.finalsummary': '',
336 'patchbomb.from': 'magenta',
340 'patchbomb.from': 'magenta',
337 'patchbomb.to': 'cyan',
341 'patchbomb.to': 'cyan',
338 'patchbomb.subject': 'green',
342 'patchbomb.subject': 'green',
339 'patchbomb.diffstats': '',
343 'patchbomb.diffstats': '',
340 'rebase.rebased': 'blue',
344 'rebase.rebased': 'blue',
341 'rebase.remaining': 'red bold',
345 'rebase.remaining': 'red bold',
342 'resolve.resolved': 'green bold',
346 'resolve.resolved': 'green bold',
343 'resolve.unresolved': 'red bold',
347 'resolve.unresolved': 'red bold',
344 'shelve.age': 'cyan',
348 'shelve.age': 'cyan',
345 'shelve.newest': 'green bold',
349 'shelve.newest': 'green bold',
346 'shelve.name': 'blue bold',
350 'shelve.name': 'blue bold',
347 'status.added': 'green bold',
351 'status.added': 'green bold',
348 'status.clean': 'none',
352 'status.clean': 'none',
349 'status.copied': 'none',
353 'status.copied': 'none',
350 'status.deleted': 'cyan bold underline',
354 'status.deleted': 'cyan bold underline',
351 'status.ignored': 'black bold',
355 'status.ignored': 'black bold',
352 'status.modified': 'blue bold',
356 'status.modified': 'blue bold',
353 'status.removed': 'red bold',
357 'status.removed': 'red bold',
354 'status.unknown': 'magenta bold underline',
358 'status.unknown': 'magenta bold underline',
355 'tags.normal': 'green',
359 'tags.normal': 'green',
356 'tags.local': 'black bold'}
360 'tags.local': 'black bold'}
357
361
358
362
359 def _effect_str(effect):
363 def _effect_str(effect):
360 '''Helper function for render_effects().'''
364 '''Helper function for render_effects().'''
361
365
362 bg = False
366 bg = False
363 if effect.endswith('_background'):
367 if effect.endswith('_background'):
364 bg = True
368 bg = True
365 effect = effect[:-11]
369 effect = effect[:-11]
366 attr, val = _terminfo_params[effect]
370 attr, val = _terminfo_params[effect]
367 if attr:
371 if attr:
368 return curses.tigetstr(val)
372 return curses.tigetstr(val)
369 elif bg:
373 elif bg:
370 return curses.tparm(curses.tigetstr('setab'), val)
374 return curses.tparm(curses.tigetstr('setab'), val)
371 else:
375 else:
372 return curses.tparm(curses.tigetstr('setaf'), val)
376 return curses.tparm(curses.tigetstr('setaf'), val)
373
377
374 def render_effects(text, effects):
378 def render_effects(text, effects):
375 'Wrap text in commands to turn on each effect.'
379 'Wrap text in commands to turn on each effect.'
376 if not text:
380 if not text:
377 return text
381 return text
378 if not _terminfo_params:
382 if not _terminfo_params:
379 start = [str(_effects[e]) for e in ['none'] + effects.split()]
383 start = [str(_effects[e]) for e in ['none'] + effects.split()]
380 start = '\033[' + ';'.join(start) + 'm'
384 start = '\033[' + ';'.join(start) + 'm'
381 stop = '\033[' + str(_effects['none']) + 'm'
385 stop = '\033[' + str(_effects['none']) + 'm'
382 else:
386 else:
383 start = ''.join(_effect_str(effect)
387 start = ''.join(_effect_str(effect)
384 for effect in ['none'] + effects.split())
388 for effect in ['none'] + effects.split())
385 stop = _effect_str('none')
389 stop = _effect_str('none')
386 return ''.join([start, text, stop])
390 return ''.join([start, text, stop])
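As a self-contained illustration of the ANSI branch above, a standalone sketch (only the three effect codes needed for the example are included):

# simplified re-creation of the ANSI path of render_effects()
_ansi = {'none': 0, 'red': 31, 'bold': 1}

def ansi_render(text, effects):
    # 'none' is always emitted first to reset any previous attributes
    start = '\033[' + ';'.join(str(_ansi[e]) for e in ['none'] + effects.split()) + 'm'
    stop = '\033[' + str(_ansi['none']) + 'm'
    return start + text + stop

assert ansi_render('conflict', 'red bold') == '\x1b[0;31;1mconflict\x1b[0m'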
387
391
388 def extstyles():
392 def extstyles():
389 for name, ext in extensions.extensions():
393 for name, ext in extensions.extensions():
390 _styles.update(getattr(ext, 'colortable', {}))
394 _styles.update(getattr(ext, 'colortable', {}))
391
395
392 def valideffect(effect):
396 def valideffect(effect):
393 'Determine if the effect is valid or not.'
397 'Determine if the effect is valid or not.'
394 good = False
398 good = False
395 if not _terminfo_params and effect in _effects:
399 if not _terminfo_params and effect in _effects:
396 good = True
400 good = True
397 elif effect in _terminfo_params or effect[:-11] in _terminfo_params:
401 elif effect in _terminfo_params or effect[:-11] in _terminfo_params:
398 good = True
402 good = True
399 return good
403 return good
400
404
401 def configstyles(ui):
405 def configstyles(ui):
402 for status, cfgeffects in ui.configitems('color'):
406 for status, cfgeffects in ui.configitems('color'):
403 if '.' not in status or status.startswith('color.'):
407 if '.' not in status or status.startswith('color.'):
404 continue
408 continue
405 cfgeffects = ui.configlist('color', status)
409 cfgeffects = ui.configlist('color', status)
406 if cfgeffects:
410 if cfgeffects:
407 good = []
411 good = []
408 for e in cfgeffects:
412 for e in cfgeffects:
409 if valideffect(e):
413 if valideffect(e):
410 good.append(e)
414 good.append(e)
411 else:
415 else:
412 ui.warn(_("ignoring unknown color/effect %r "
416 ui.warn(_("ignoring unknown color/effect %r "
413 "(configured in color.%s)\n")
417 "(configured in color.%s)\n")
414 % (e, status))
418 % (e, status))
415 _styles[status] = ' '.join(good)
419 _styles[status] = ' '.join(good)
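The entries consumed here come from the ``[color]`` section of the user's hgrc; a small sketch (the effect choices are arbitrary, but each one is valid per _effects above):

[color]
status.modified = cyan
diff.inserted = green bold
grep.match = red bold underline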
416
420
417 class colorui(uimod.ui):
421 class colorui(uimod.ui):
418 def popbuffer(self, labeled=False):
422 def popbuffer(self, labeled=False):
419 if self._colormode is None:
423 if self._colormode is None:
420 return super(colorui, self).popbuffer(labeled)
424 return super(colorui, self).popbuffer(labeled)
421
425
422 self._bufferstates.pop()
426 self._bufferstates.pop()
423 if labeled:
427 if labeled:
424 return ''.join(self.label(a, label) for a, label
428 return ''.join(self.label(a, label) for a, label
425 in self._buffers.pop())
429 in self._buffers.pop())
426 return ''.join(a for a, label in self._buffers.pop())
430 return ''.join(a for a, label in self._buffers.pop())
427
431
428 _colormode = 'ansi'
432 _colormode = 'ansi'
429 def write(self, *args, **opts):
433 def write(self, *args, **opts):
430 if self._colormode is None:
434 if self._colormode is None:
431 return super(colorui, self).write(*args, **opts)
435 return super(colorui, self).write(*args, **opts)
432
436
433 label = opts.get('label', '')
437 label = opts.get('label', '')
434 if self._buffers:
438 if self._buffers:
435 self._buffers[-1].extend([(str(a), label) for a in args])
439 self._buffers[-1].extend([(str(a), label) for a in args])
436 elif self._colormode == 'win32':
440 elif self._colormode == 'win32':
437 for a in args:
441 for a in args:
438 win32print(a, super(colorui, self).write, **opts)
442 win32print(a, super(colorui, self).write, **opts)
439 else:
443 else:
440 return super(colorui, self).write(
444 return super(colorui, self).write(
441 *[self.label(str(a), label) for a in args], **opts)
445 *[self.label(str(a), label) for a in args], **opts)
442
446
443 def write_err(self, *args, **opts):
447 def write_err(self, *args, **opts):
444 if self._colormode is None:
448 if self._colormode is None:
445 return super(colorui, self).write_err(*args, **opts)
449 return super(colorui, self).write_err(*args, **opts)
446
450
447 label = opts.get('label', '')
451 label = opts.get('label', '')
448 if self._bufferstates and self._bufferstates[-1][0]:
452 if self._bufferstates and self._bufferstates[-1][0]:
449 return self.write(*args, **opts)
453 return self.write(*args, **opts)
450 if self._colormode == 'win32':
454 if self._colormode == 'win32':
451 for a in args:
455 for a in args:
452 win32print(a, super(colorui, self).write_err, **opts)
456 win32print(a, super(colorui, self).write_err, **opts)
453 else:
457 else:
454 return super(colorui, self).write_err(
458 return super(colorui, self).write_err(
455 *[self.label(str(a), label) for a in args], **opts)
459 *[self.label(str(a), label) for a in args], **opts)
456
460
457 def showlabel(self, msg, label):
461 def showlabel(self, msg, label):
458 if label and msg:
462 if label and msg:
459 if msg[-1] == '\n':
463 if msg[-1] == '\n':
460 return "[%s|%s]\n" % (label, msg[:-1])
464 return "[%s|%s]\n" % (label, msg[:-1])
461 else:
465 else:
462 return "[%s|%s]" % (label, msg)
466 return "[%s|%s]" % (label, msg)
463 else:
467 else:
464 return msg
468 return msg
465
469
466 def label(self, msg, label):
470 def label(self, msg, label):
467 if self._colormode is None:
471 if self._colormode is None:
468 return super(colorui, self).label(msg, label)
472 return super(colorui, self).label(msg, label)
469
473
470 if self._colormode == 'debug':
474 if self._colormode == 'debug':
471 return self.showlabel(msg, label)
475 return self.showlabel(msg, label)
472
476
473 effects = []
477 effects = []
474 for l in label.split():
478 for l in label.split():
475 s = _styles.get(l, '')
479 s = _styles.get(l, '')
476 if s:
480 if s:
477 effects.append(s)
481 effects.append(s)
478 elif valideffect(l):
482 elif valideffect(l):
479 effects.append(l)
483 effects.append(l)
480 effects = ' '.join(effects)
484 effects = ' '.join(effects)
481 if effects:
485 if effects:
482 return '\n'.join([render_effects(s, effects)
486 return '\n'.join([render_effects(s, effects)
483 for s in msg.split('\n')])
487 for s in msg.split('\n')])
484 return msg
488 return msg
485
489
486 def templatelabel(context, mapping, args):
490 def templatelabel(context, mapping, args):
487 if len(args) != 2:
491 if len(args) != 2:
488 # i18n: "label" is a keyword
492 # i18n: "label" is a keyword
489 raise error.ParseError(_("label expects two arguments"))
493 raise error.ParseError(_("label expects two arguments"))
490
494
491 # add known effects to the mapping so symbols like 'red', 'bold',
495 # add known effects to the mapping so symbols like 'red', 'bold',
492 # etc. don't need to be quoted
496 # etc. don't need to be quoted
493 mapping.update(dict([(k, k) for k in _effects]))
497 mapping.update(dict([(k, k) for k in _effects]))
494
498
495 thing = templater._evalifliteral(args[1], context, mapping)
499 thing = templater._evalifliteral(args[1], context, mapping)
496
500
497 # apparently, repo could be a string that is the favicon?
501 # apparently, repo could be a string that is the favicon?
498 repo = mapping.get('repo', '')
502 repo = mapping.get('repo', '')
499 if isinstance(repo, str):
503 if isinstance(repo, str):
500 return thing
504 return thing
501
505
502 label = templater._evalifliteral(args[0], context, mapping)
506 label = templater._evalifliteral(args[0], context, mapping)
503
507
504 thing = templater.stringify(thing)
508 thing = templater.stringify(thing)
505 label = templater.stringify(label)
509 label = templater.stringify(label)
506
510
507 return repo.ui.label(thing, label)
511 return repo.ui.label(thing, label)
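For context, the ``label()`` template function registered in uisetup() below is typically exercised from a template; a hypothetical invocation:

hg log -r . --color=debug --template '{label("log.changeset", rev)}\n'

With ``--color=debug``, the labelled span is rendered by showlabel() above as ``[log.changeset|...]``, which makes it easy to see which labels a template emits.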
508
512
509 def uisetup(ui):
513 def uisetup(ui):
510 if ui.plain():
514 if ui.plain():
511 return
515 return
512 if not isinstance(ui, colorui):
516 if not isinstance(ui, colorui):
513 colorui.__bases__ = (ui.__class__,)
517 colorui.__bases__ = (ui.__class__,)
514 ui.__class__ = colorui
518 ui.__class__ = colorui
515 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
519 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
516 mode = _modesetup(ui_, opts['color'])
520 mode = _modesetup(ui_, opts['color'])
517 colorui._colormode = mode
521 colorui._colormode = mode
518 if mode and mode != 'debug':
522 if mode and mode != 'debug':
519 extstyles()
523 extstyles()
520 configstyles(ui_)
524 configstyles(ui_)
521 return orig(ui_, opts, cmd, cmdfunc)
525 return orig(ui_, opts, cmd, cmdfunc)
522 def colorgit(orig, gitsub, commands, env=None, stream=False, cwd=None):
526 def colorgit(orig, gitsub, commands, env=None, stream=False, cwd=None):
523 if gitsub.ui._colormode and len(commands) and commands[0] == "diff":
527 if gitsub.ui._colormode and len(commands) and commands[0] == "diff":
524 # insert the argument in the front,
528 # insert the argument in the front,
525 # the end of git diff arguments is used for paths
529 # the end of git diff arguments is used for paths
526 commands.insert(1, '--color')
530 commands.insert(1, '--color')
527 return orig(gitsub, commands, env, stream, cwd)
531 return orig(gitsub, commands, env, stream, cwd)
528 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
532 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
529 extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit)
533 extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit)
530 templater.funcs['label'] = templatelabel
534 templater.funcs['label'] = templatelabel
531
535
532 def extsetup(ui):
536 def extsetup(ui):
533 commands.globalopts.append(
537 commands.globalopts.append(
534 ('', 'color', 'auto',
538 ('', 'color', 'auto',
535 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
539 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
536 # and should not be translated
540 # and should not be translated
537 _("when to colorize (boolean, always, auto, never, or debug)"),
541 _("when to colorize (boolean, always, auto, never, or debug)"),
538 _('TYPE')))
542 _('TYPE')))
539
543
540 @command('debugcolor', [], 'hg debugcolor')
544 @command('debugcolor', [], 'hg debugcolor')
541 def debugcolor(ui, repo, **opts):
545 def debugcolor(ui, repo, **opts):
542 global _styles
546 global _styles
543 _styles = {}
547 _styles = {}
544 for effect in _effects.keys():
548 for effect in _effects.keys():
545 _styles[effect] = effect
549 _styles[effect] = effect
546 ui.write(('color mode: %s\n') % ui._colormode)
550 ui.write(('color mode: %s\n') % ui._colormode)
547 ui.write(_('available colors:\n'))
551 ui.write(_('available colors:\n'))
548 for label, colors in _styles.items():
552 for label, colors in _styles.items():
549 ui.write(('%s\n') % colors, label=label)
553 ui.write(('%s\n') % colors, label=label)
550
554
551 if os.name != 'nt':
555 if os.name != 'nt':
552 w32effects = None
556 w32effects = None
553 else:
557 else:
554 import re, ctypes
558 import re, ctypes
555
559
556 _kernel32 = ctypes.windll.kernel32
560 _kernel32 = ctypes.windll.kernel32
557
561
558 _WORD = ctypes.c_ushort
562 _WORD = ctypes.c_ushort
559
563
560 _INVALID_HANDLE_VALUE = -1
564 _INVALID_HANDLE_VALUE = -1
561
565
562 class _COORD(ctypes.Structure):
566 class _COORD(ctypes.Structure):
563 _fields_ = [('X', ctypes.c_short),
567 _fields_ = [('X', ctypes.c_short),
564 ('Y', ctypes.c_short)]
568 ('Y', ctypes.c_short)]
565
569
566 class _SMALL_RECT(ctypes.Structure):
570 class _SMALL_RECT(ctypes.Structure):
567 _fields_ = [('Left', ctypes.c_short),
571 _fields_ = [('Left', ctypes.c_short),
568 ('Top', ctypes.c_short),
572 ('Top', ctypes.c_short),
569 ('Right', ctypes.c_short),
573 ('Right', ctypes.c_short),
570 ('Bottom', ctypes.c_short)]
574 ('Bottom', ctypes.c_short)]
571
575
572 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
576 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
573 _fields_ = [('dwSize', _COORD),
577 _fields_ = [('dwSize', _COORD),
574 ('dwCursorPosition', _COORD),
578 ('dwCursorPosition', _COORD),
575 ('wAttributes', _WORD),
579 ('wAttributes', _WORD),
576 ('srWindow', _SMALL_RECT),
580 ('srWindow', _SMALL_RECT),
577 ('dwMaximumWindowSize', _COORD)]
581 ('dwMaximumWindowSize', _COORD)]
578
582
579 _STD_OUTPUT_HANDLE = 0xfffffff5L # (DWORD)-11
583 _STD_OUTPUT_HANDLE = 0xfffffff5L # (DWORD)-11
580 _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12
584 _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12
581
585
582 _FOREGROUND_BLUE = 0x0001
586 _FOREGROUND_BLUE = 0x0001
583 _FOREGROUND_GREEN = 0x0002
587 _FOREGROUND_GREEN = 0x0002
584 _FOREGROUND_RED = 0x0004
588 _FOREGROUND_RED = 0x0004
585 _FOREGROUND_INTENSITY = 0x0008
589 _FOREGROUND_INTENSITY = 0x0008
586
590
587 _BACKGROUND_BLUE = 0x0010
591 _BACKGROUND_BLUE = 0x0010
588 _BACKGROUND_GREEN = 0x0020
592 _BACKGROUND_GREEN = 0x0020
589 _BACKGROUND_RED = 0x0040
593 _BACKGROUND_RED = 0x0040
590 _BACKGROUND_INTENSITY = 0x0080
594 _BACKGROUND_INTENSITY = 0x0080
591
595
592 _COMMON_LVB_REVERSE_VIDEO = 0x4000
596 _COMMON_LVB_REVERSE_VIDEO = 0x4000
593 _COMMON_LVB_UNDERSCORE = 0x8000
597 _COMMON_LVB_UNDERSCORE = 0x8000
594
598
595 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
599 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
596 w32effects = {
600 w32effects = {
597 'none': -1,
601 'none': -1,
598 'black': 0,
602 'black': 0,
599 'red': _FOREGROUND_RED,
603 'red': _FOREGROUND_RED,
600 'green': _FOREGROUND_GREEN,
604 'green': _FOREGROUND_GREEN,
601 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
605 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
602 'blue': _FOREGROUND_BLUE,
606 'blue': _FOREGROUND_BLUE,
603 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
607 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
604 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
608 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
605 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
609 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
606 'bold': _FOREGROUND_INTENSITY,
610 'bold': _FOREGROUND_INTENSITY,
607 'black_background': 0x100, # unused value > 0x0f
611 'black_background': 0x100, # unused value > 0x0f
608 'red_background': _BACKGROUND_RED,
612 'red_background': _BACKGROUND_RED,
609 'green_background': _BACKGROUND_GREEN,
613 'green_background': _BACKGROUND_GREEN,
610 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
614 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
611 'blue_background': _BACKGROUND_BLUE,
615 'blue_background': _BACKGROUND_BLUE,
612 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
616 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
613 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
617 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
614 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
618 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
615 _BACKGROUND_BLUE),
619 _BACKGROUND_BLUE),
616 'bold_background': _BACKGROUND_INTENSITY,
620 'bold_background': _BACKGROUND_INTENSITY,
617 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
621 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
618 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
622 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
619 }
623 }
620
624
621 passthrough = set([_FOREGROUND_INTENSITY,
625 passthrough = set([_FOREGROUND_INTENSITY,
622 _BACKGROUND_INTENSITY,
626 _BACKGROUND_INTENSITY,
623 _COMMON_LVB_UNDERSCORE,
627 _COMMON_LVB_UNDERSCORE,
624 _COMMON_LVB_REVERSE_VIDEO])
628 _COMMON_LVB_REVERSE_VIDEO])
625
629
626 stdout = _kernel32.GetStdHandle(
630 stdout = _kernel32.GetStdHandle(
627 _STD_OUTPUT_HANDLE) # don't close the handle returned
631 _STD_OUTPUT_HANDLE) # don't close the handle returned
628 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
632 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
629 w32effects = None
633 w32effects = None
630 else:
634 else:
631 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
635 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
632 if not _kernel32.GetConsoleScreenBufferInfo(
636 if not _kernel32.GetConsoleScreenBufferInfo(
633 stdout, ctypes.byref(csbi)):
637 stdout, ctypes.byref(csbi)):
634 # stdout may not support GetConsoleScreenBufferInfo()
638 # stdout may not support GetConsoleScreenBufferInfo()
635 # when called from subprocess or redirected
639 # when called from subprocess or redirected
636 w32effects = None
640 w32effects = None
637 else:
641 else:
638 origattr = csbi.wAttributes
642 origattr = csbi.wAttributes
639 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
643 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
640 re.MULTILINE | re.DOTALL)
644 re.MULTILINE | re.DOTALL)
641
645
642 def win32print(text, orig, **opts):
646 def win32print(text, orig, **opts):
643 label = opts.get('label', '')
647 label = opts.get('label', '')
644 attr = origattr
648 attr = origattr
645
649
646 def mapcolor(val, attr):
650 def mapcolor(val, attr):
647 if val == -1:
651 if val == -1:
648 return origattr
652 return origattr
649 elif val in passthrough:
653 elif val in passthrough:
650 return attr | val
654 return attr | val
651 elif val > 0x0f:
655 elif val > 0x0f:
652 return (val & 0x70) | (attr & 0x8f)
656 return (val & 0x70) | (attr & 0x8f)
653 else:
657 else:
654 return (val & 0x07) | (attr & 0xf8)
658 return (val & 0x07) | (attr & 0xf8)
655
659
656 # determine console attributes based on labels
660 # determine console attributes based on labels
657 for l in label.split():
661 for l in label.split():
658 style = _styles.get(l, '')
662 style = _styles.get(l, '')
659 for effect in style.split():
663 for effect in style.split():
660 try:
664 try:
661 attr = mapcolor(w32effects[effect], attr)
665 attr = mapcolor(w32effects[effect], attr)
662 except KeyError:
666 except KeyError:
663 # w32effects may not have certain attributes, so skip
667 # w32effects may not have certain attributes, so skip
664 # them if not found
668 # them if not found
665 pass
669 pass
666 # hack to ensure regexp finds data
670 # hack to ensure regexp finds data
667 if not text.startswith('\033['):
671 if not text.startswith('\033['):
668 text = '\033[m' + text
672 text = '\033[m' + text
669
673
670 # Look for ANSI-like codes embedded in text
674 # Look for ANSI-like codes embedded in text
671 m = re.match(ansire, text)
675 m = re.match(ansire, text)
672
676
673 try:
677 try:
674 while m:
678 while m:
675 for sattr in m.group(1).split(';'):
679 for sattr in m.group(1).split(';'):
676 if sattr:
680 if sattr:
677 attr = mapcolor(int(sattr), attr)
681 attr = mapcolor(int(sattr), attr)
678 _kernel32.SetConsoleTextAttribute(stdout, attr)
682 _kernel32.SetConsoleTextAttribute(stdout, attr)
679 orig(m.group(2), **opts)
683 orig(m.group(2), **opts)
680 m = re.match(ansire, m.group(3))
684 m = re.match(ansire, m.group(3))
681 finally:
685 finally:
682 # Explicitly reset original attributes
686 # Explicitly reset original attributes
683 _kernel32.SetConsoleTextAttribute(stdout, origattr)
687 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,405 +1,409 @@
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''import revisions from foreign VCS repositories into Mercurial'''
8 '''import revisions from foreign VCS repositories into Mercurial'''
9
9
10 import convcmd
10 import convcmd
11 import cvsps
11 import cvsps
12 import subversion
12 import subversion
13 from mercurial import cmdutil, templatekw
13 from mercurial import cmdutil, templatekw
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 cmdtable = {}
16 cmdtable = {}
17 command = cmdutil.command(cmdtable)
17 command = cmdutil.command(cmdtable)
18 # Note for extension authors: ONLY specify testedwith = 'internal' for
19 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
20 # be specifying the version(s) of Mercurial they are tested with, or
21 # leave the attribute unspecified.
18 testedwith = 'internal'
22 testedwith = 'internal'
19
23
20 # Commands definition was moved elsewhere to ease demandload job.
24 # Commands definition was moved elsewhere to ease demandload job.
21
25
22 @command('convert',
26 @command('convert',
23 [('', 'authors', '',
27 [('', 'authors', '',
24 _('username mapping filename (DEPRECATED, use --authormap instead)'),
28 _('username mapping filename (DEPRECATED, use --authormap instead)'),
25 _('FILE')),
29 _('FILE')),
26 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
30 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
27 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
31 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
28 ('r', 'rev', '', _('import up to source revision REV'), _('REV')),
32 ('r', 'rev', '', _('import up to source revision REV'), _('REV')),
29 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
33 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
30 ('', 'filemap', '', _('remap file names using contents of file'),
34 ('', 'filemap', '', _('remap file names using contents of file'),
31 _('FILE')),
35 _('FILE')),
32 ('', 'full', None,
36 ('', 'full', None,
33 _('apply filemap changes by converting all files again')),
37 _('apply filemap changes by converting all files again')),
34 ('', 'splicemap', '', _('splice synthesized history into place'),
38 ('', 'splicemap', '', _('splice synthesized history into place'),
35 _('FILE')),
39 _('FILE')),
36 ('', 'branchmap', '', _('change branch names while converting'),
40 ('', 'branchmap', '', _('change branch names while converting'),
37 _('FILE')),
41 _('FILE')),
38 ('', 'branchsort', None, _('try to sort changesets by branches')),
42 ('', 'branchsort', None, _('try to sort changesets by branches')),
39 ('', 'datesort', None, _('try to sort changesets by date')),
43 ('', 'datesort', None, _('try to sort changesets by date')),
40 ('', 'sourcesort', None, _('preserve source changesets order')),
44 ('', 'sourcesort', None, _('preserve source changesets order')),
41 ('', 'closesort', None, _('try to reorder closed revisions'))],
45 ('', 'closesort', None, _('try to reorder closed revisions'))],
42 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
46 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
43 norepo=True)
47 norepo=True)
44 def convert(ui, src, dest=None, revmapfile=None, **opts):
48 def convert(ui, src, dest=None, revmapfile=None, **opts):
45 """convert a foreign SCM repository to a Mercurial one.
49 """convert a foreign SCM repository to a Mercurial one.
46
50
47 Accepted source formats [identifiers]:
51 Accepted source formats [identifiers]:
48
52
49 - Mercurial [hg]
53 - Mercurial [hg]
50 - CVS [cvs]
54 - CVS [cvs]
51 - Darcs [darcs]
55 - Darcs [darcs]
52 - git [git]
56 - git [git]
53 - Subversion [svn]
57 - Subversion [svn]
54 - Monotone [mtn]
58 - Monotone [mtn]
55 - GNU Arch [gnuarch]
59 - GNU Arch [gnuarch]
56 - Bazaar [bzr]
60 - Bazaar [bzr]
57 - Perforce [p4]
61 - Perforce [p4]
58
62
59 Accepted destination formats [identifiers]:
63 Accepted destination formats [identifiers]:
60
64
61 - Mercurial [hg]
65 - Mercurial [hg]
62 - Subversion [svn] (history on branches is not preserved)
66 - Subversion [svn] (history on branches is not preserved)
63
67
64 If no revision is given, all revisions will be converted.
68 If no revision is given, all revisions will be converted.
65 Otherwise, convert will only import up to the named revision
69 Otherwise, convert will only import up to the named revision
66 (given in a format understood by the source).
70 (given in a format understood by the source).
67
71
68 If no destination directory name is specified, it defaults to the
72 If no destination directory name is specified, it defaults to the
69 basename of the source with ``-hg`` appended. If the destination
73 basename of the source with ``-hg`` appended. If the destination
70 repository doesn't exist, it will be created.
74 repository doesn't exist, it will be created.
71
75
72 By default, all sources except Mercurial will use --branchsort.
76 By default, all sources except Mercurial will use --branchsort.
73 Mercurial uses --sourcesort to preserve the original revision number
77 Mercurial uses --sourcesort to preserve the original revision number
74 order. Sort modes have the following effects:
78 order. Sort modes have the following effects:
75
79
76 --branchsort convert from parent to child revision when possible,
80 --branchsort convert from parent to child revision when possible,
77 which means branches are usually converted one after
81 which means branches are usually converted one after
78 the other. It generates more compact repositories.
82 the other. It generates more compact repositories.
79
83
80 --datesort sort revisions by date. Converted repositories have
84 --datesort sort revisions by date. Converted repositories have
81 good-looking changelogs but are often an order of
85 good-looking changelogs but are often an order of
82 magnitude larger than the same ones generated by
86 magnitude larger than the same ones generated by
83 --branchsort.
87 --branchsort.
84
88
85 --sourcesort try to preserve source revisions order, only
89 --sourcesort try to preserve source revisions order, only
86 supported by Mercurial sources.
90 supported by Mercurial sources.
87
91
88 --closesort try to move closed revisions as close as possible
92 --closesort try to move closed revisions as close as possible
89 to parent branches, only supported by Mercurial
93 to parent branches, only supported by Mercurial
90 sources.
94 sources.
91
95
92 If ``REVMAP`` isn't given, it will be put in a default location
96 If ``REVMAP`` isn't given, it will be put in a default location
93 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
97 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
94 text file that maps each source commit ID to the destination ID
98 text file that maps each source commit ID to the destination ID
95 for that revision, like so::
99 for that revision, like so::
96
100
97 <source ID> <destination ID>
101 <source ID> <destination ID>
98
102
99 If the file doesn't exist, it's automatically created. It's
103 If the file doesn't exist, it's automatically created. It's
100 updated on each commit copied, so :hg:`convert` can be interrupted
104 updated on each commit copied, so :hg:`convert` can be interrupted
101 and can be run repeatedly to copy new commits.
105 and can be run repeatedly to copy new commits.
102
106
103 The authormap is a simple text file that maps each source commit
107 The authormap is a simple text file that maps each source commit
104 author to a destination commit author. It is handy for source SCMs
108 author to a destination commit author. It is handy for source SCMs
105 that use unix logins to identify authors (e.g. CVS). Use one line per
109 that use unix logins to identify authors (e.g. CVS). Use one line per
106 author mapping; the line format is::
110 author mapping; the line format is::
107
111
108 source author = destination author
112 source author = destination author
109
113
110 Empty lines and lines starting with a ``#`` are ignored.
114 Empty lines and lines starting with a ``#`` are ignored.
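A concrete authormap might therefore read (logins and identities are made up):

# map CVS logins to full committer identities
jdoe = John Doe <john.doe@example.com>
rroe = Richard Roe <rroe@example.org>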
111
115
112 The filemap is a file that allows filtering and remapping of files
116 The filemap is a file that allows filtering and remapping of files
113 and directories. Each line can contain one of the following
117 and directories. Each line can contain one of the following
114 directives::
118 directives::
115
119
116 include path/to/file-or-dir
120 include path/to/file-or-dir
117
121
118 exclude path/to/file-or-dir
122 exclude path/to/file-or-dir
119
123
120 rename path/to/source path/to/destination
124 rename path/to/source path/to/destination
121
125
122 Comment lines start with ``#``. A specified path matches if it
126 Comment lines start with ``#``. A specified path matches if it
123 equals the full relative name of a file or one of its parent
127 equals the full relative name of a file or one of its parent
124 directories. The ``include`` or ``exclude`` directive with the
128 directories. The ``include`` or ``exclude`` directive with the
125 longest matching path applies, so line order does not matter.
129 longest matching path applies, so line order does not matter.
126
130
127 The ``include`` directive causes a file, or all files under a
131 The ``include`` directive causes a file, or all files under a
128 directory, to be included in the destination repository. The default
132 directory, to be included in the destination repository. The default
129 if there are no ``include`` statements is to include everything.
133 if there are no ``include`` statements is to include everything.
130 If there are any ``include`` statements, nothing else is included.
134 If there are any ``include`` statements, nothing else is included.
131 The ``exclude`` directive causes files or directories to
135 The ``exclude`` directive causes files or directories to
132 be omitted. The ``rename`` directive renames a file or directory if
136 be omitted. The ``rename`` directive renames a file or directory if
133 it is converted. To rename from a subdirectory into the root of
137 it is converted. To rename from a subdirectory into the root of
134 the repository, use ``.`` as the path to rename to.
138 the repository, use ``.`` as the path to rename to.
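Putting the three directives together, a small hypothetical filemap could look like:

# convert only the library, drop its tests, and hoist it to the repository root
include lib
exclude lib/tests
rename lib .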
135
139
136 ``--full`` will make sure the converted changesets contain exactly
140 ``--full`` will make sure the converted changesets contain exactly
137 the right files with the right content. It will make a full
141 the right files with the right content. It will make a full
138 conversion of all files, not just the ones that have
142 conversion of all files, not just the ones that have
139 changed. Files that already are correct will not be changed. This
143 changed. Files that already are correct will not be changed. This
140 can be used to apply filemap changes when converting
144 can be used to apply filemap changes when converting
141 incrementally. This is currently only supported for Mercurial and
145 incrementally. This is currently only supported for Mercurial and
142 Subversion.
146 Subversion.
143
147
144 The splicemap is a file that allows insertion of synthetic
148 The splicemap is a file that allows insertion of synthetic
145 history, letting you specify the parents of a revision. This is
149 history, letting you specify the parents of a revision. This is
146 useful if you want to e.g. give a Subversion merge two parents, or
150 useful if you want to e.g. give a Subversion merge two parents, or
147 graft two disconnected series of history together. Each entry
151 graft two disconnected series of history together. Each entry
148 contains a key, followed by a space, followed by one or two
152 contains a key, followed by a space, followed by one or two
149 comma-separated values::
153 comma-separated values::
150
154
151 key parent1, parent2
155 key parent1, parent2
152
156
153 The key is the revision ID in the source
157 The key is the revision ID in the source
154 revision control system whose parents should be modified (same
158 revision control system whose parents should be modified (same
155 format as a key in .hg/shamap). The values are the revision IDs
159 format as a key in .hg/shamap). The values are the revision IDs
156 (in either the source or destination revision control system) that
160 (in either the source or destination revision control system) that
157 should be used as the new parents for that node. For example, if
161 should be used as the new parents for that node. For example, if
158 you have merged "release-1.0" into "trunk", then you should
162 you have merged "release-1.0" into "trunk", then you should
159 specify the revision on "trunk" as the first parent and the one on
163 specify the revision on "trunk" as the first parent and the one on
160 the "release-1.0" branch as the second.
164 the "release-1.0" branch as the second.
161
165
162 The branchmap is a file that allows you to rename a branch when it is
166 The branchmap is a file that allows you to rename a branch when it is
163 being brought in from whatever external repository. When used in
167 being brought in from whatever external repository. When used in
164 conjunction with a splicemap, it allows for a powerful combination
168 conjunction with a splicemap, it allows for a powerful combination
165 to help fix even the most badly mismanaged repositories and turn them
169 to help fix even the most badly mismanaged repositories and turn them
166 into nicely structured Mercurial repositories. The branchmap contains
170 into nicely structured Mercurial repositories. The branchmap contains
167 lines of the form::
171 lines of the form::
168
172
169 original_branch_name new_branch_name
173 original_branch_name new_branch_name
170
174
171 where "original_branch_name" is the name of the branch in the
175 where "original_branch_name" is the name of the branch in the
172 source repository, and "new_branch_name" is the name of the branch
176 source repository, and "new_branch_name" is the name of the branch
173 in the destination repository. No whitespace is allowed in the
177 in the destination repository. No whitespace is allowed in the
174 branch names. This can be used to (for instance) move code in one
178 branch names. This can be used to (for instance) move code in one
175 repository from "default" to a named branch.
179 repository from "default" to a named branch.
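For example, a one-line hypothetical branchmap implementing the rename just described:

default imported-project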
176
180
177 Mercurial Source
181 Mercurial Source
178 ################
182 ################
179
183
180 The Mercurial source recognizes the following configuration
184 The Mercurial source recognizes the following configuration
181 options, which you can set on the command line with ``--config``:
185 options, which you can set on the command line with ``--config``:
182
186
183 :convert.hg.ignoreerrors: ignore integrity errors when reading.
187 :convert.hg.ignoreerrors: ignore integrity errors when reading.
184 Use it to fix Mercurial repositories with missing revlogs, by
188 Use it to fix Mercurial repositories with missing revlogs, by
185 converting from and to Mercurial. Default is False.
189 converting from and to Mercurial. Default is False.
186
190
187 :convert.hg.saverev: store original revision ID in changeset
191 :convert.hg.saverev: store original revision ID in changeset
188 (forces target IDs to change). It takes a boolean argument and
192 (forces target IDs to change). It takes a boolean argument and
189 defaults to False.
193 defaults to False.
190
194
191 :convert.hg.revs: revset specifying the source revisions to convert.
195 :convert.hg.revs: revset specifying the source revisions to convert.
192
196
193 CVS Source
197 CVS Source
194 ##########
198 ##########
195
199
196 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
200 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
197 to indicate the starting point of what will be converted. Direct
201 to indicate the starting point of what will be converted. Direct
198 access to the repository files is not needed, unless of course the
202 access to the repository files is not needed, unless of course the
199 repository is ``:local:``. The conversion uses the top level
203 repository is ``:local:``. The conversion uses the top level
200 directory in the sandbox to find the CVS repository, and then uses
204 directory in the sandbox to find the CVS repository, and then uses
201 CVS rlog commands to find files to convert. This means that unless
205 CVS rlog commands to find files to convert. This means that unless
202 a filemap is given, all files under the starting directory will be
206 a filemap is given, all files under the starting directory will be
203 converted, and that any directory reorganization in the CVS
207 converted, and that any directory reorganization in the CVS
204 sandbox is ignored.
208 sandbox is ignored.
205
209
206 The following options can be used with ``--config``:
210 The following options can be used with ``--config``:
207
211
208 :convert.cvsps.cache: Set to False to disable remote log caching,
212 :convert.cvsps.cache: Set to False to disable remote log caching,
209 for testing and debugging purposes. Default is True.
213 for testing and debugging purposes. Default is True.
210
214
211 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
215 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
212 allowed between commits with identical user and log message in
216 allowed between commits with identical user and log message in
213 a single changeset. If very large files were checked in as
217 a single changeset. If very large files were checked in as
214 part of a changeset, the default may not be long enough.
218 part of a changeset, the default may not be long enough.
215 The default is 60.
219 The default is 60.
216
220
217 :convert.cvsps.mergeto: Specify a regular expression against which
221 :convert.cvsps.mergeto: Specify a regular expression against which
218 commit log messages are matched. If a match occurs, then the
222 commit log messages are matched. If a match occurs, then the
219 conversion process will insert a dummy revision merging the
223 conversion process will insert a dummy revision merging the
220 branch on which this log message occurs to the branch
224 branch on which this log message occurs to the branch
221 indicated in the regex. Default is ``{{mergetobranch
225 indicated in the regex. Default is ``{{mergetobranch
222 ([-\\w]+)}}``
226 ([-\\w]+)}}``
223
227
224 :convert.cvsps.mergefrom: Specify a regular expression against which
228 :convert.cvsps.mergefrom: Specify a regular expression against which
225 commit log messages are matched. If a match occurs, then the
229 commit log messages are matched. If a match occurs, then the
226 conversion process will add the most recent revision on the
230 conversion process will add the most recent revision on the
227 branch indicated in the regex as the second parent of the
231 branch indicated in the regex as the second parent of the
228 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
232 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
229
233
230 :convert.localtimezone: use local time (as determined by the TZ
234 :convert.localtimezone: use local time (as determined by the TZ
231 environment variable) for changeset date/times. The default
235 environment variable) for changeset date/times. The default
232 is False (use UTC).
236 is False (use UTC).
233
237
234 :hooks.cvslog: Specify a Python function to be called at the end of
238 :hooks.cvslog: Specify a Python function to be called at the end of
235 gathering the CVS log. The function is passed a list with the
239 gathering the CVS log. The function is passed a list with the
236 log entries, and can modify the entries in-place, or add or
240 log entries, and can modify the entries in-place, or add or
237 delete them.
241 delete them.
238
242
239 :hooks.cvschangesets: Specify a Python function to be called after
243 :hooks.cvschangesets: Specify a Python function to be called after
240 the changesets are calculated from the CVS log. The
244 the changesets are calculated from the CVS log. The
241 function is passed a list with the changeset entries, and can
245 function is passed a list with the changeset entries, and can
242 modify the changesets in-place, or add or delete them.
246 modify the changesets in-place, or add or delete them.
243
247
244 An additional "debugcvsps" Mercurial command allows the builtin
248 An additional "debugcvsps" Mercurial command allows the builtin
245 changeset merging code to be run without doing a conversion. Its
249 changeset merging code to be run without doing a conversion. Its
246 parameters and output are similar to that of cvsps 2.1. Please see
250 parameters and output are similar to that of cvsps 2.1. Please see
247 the command help for more details.
251 the command help for more details.
248
252
249 Subversion Source
253 Subversion Source
250 #################
254 #################
251
255
252 Subversion source detects classical trunk/branches/tags layouts.
256 Subversion source detects classical trunk/branches/tags layouts.
253 By default, the supplied ``svn://repo/path/`` source URL is
257 By default, the supplied ``svn://repo/path/`` source URL is
254 converted as a single branch. If ``svn://repo/path/trunk`` exists
258 converted as a single branch. If ``svn://repo/path/trunk`` exists
255 it replaces the default branch. If ``svn://repo/path/branches``
259 it replaces the default branch. If ``svn://repo/path/branches``
256 exists, its subdirectories are listed as possible branches. If
260 exists, its subdirectories are listed as possible branches. If
257 ``svn://repo/path/tags`` exists, it is searched for tags referencing
261 ``svn://repo/path/tags`` exists, it is searched for tags referencing
258 converted branches. Default ``trunk``, ``branches`` and ``tags``
262 converted branches. Default ``trunk``, ``branches`` and ``tags``
259 values can be overridden with the following options. Set them to paths
263 values can be overridden with the following options. Set them to paths
260 relative to the source URL, or leave them blank to disable auto
264 relative to the source URL, or leave them blank to disable auto
261 detection.
265 detection.
262
266
263 The following options can be set with ``--config``:
267 The following options can be set with ``--config``:
264
268
265 :convert.svn.branches: specify the directory containing branches.
269 :convert.svn.branches: specify the directory containing branches.
266 The default is ``branches``.
270 The default is ``branches``.
267
271
268 :convert.svn.tags: specify the directory containing tags. The
272 :convert.svn.tags: specify the directory containing tags. The
269 default is ``tags``.
273 default is ``tags``.
270
274
271 :convert.svn.trunk: specify the name of the trunk branch. The
275 :convert.svn.trunk: specify the name of the trunk branch. The
272 default is ``trunk``.
276 default is ``trunk``.
273
277
274 :convert.localtimezone: use local time (as determined by the TZ
278 :convert.localtimezone: use local time (as determined by the TZ
275 environment variable) for changeset date/times. The default
279 environment variable) for changeset date/times. The default
276 is False (use UTC).
280 is False (use UTC).
277
281
278 Source history can be retrieved starting at a specific revision,
282 Source history can be retrieved starting at a specific revision,
279 instead of being converted in its entirety. Only single-branch
283 instead of being converted in its entirety. Only single-branch
280 conversions are supported.
284 conversions are supported.
281
285
282 :convert.svn.startrev: specify start Subversion revision number.
286 :convert.svn.startrev: specify start Subversion revision number.
283 The default is 0.
287 The default is 0.
284
288
285 Git Source
289 Git Source
286 ##########
290 ##########
287
291
288 The Git importer converts commits from all reachable branches (refs
292 The Git importer converts commits from all reachable branches (refs
289 in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
293 in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
290 Branches are converted to bookmarks with the same name, with the
294 Branches are converted to bookmarks with the same name, with the
291 leading 'refs/heads' stripped. Git submodules are converted to Git
295 leading 'refs/heads' stripped. Git submodules are converted to Git
292 subrepos in Mercurial.
296 subrepos in Mercurial.
293
297
294 The following options can be set with ``--config``:
298 The following options can be set with ``--config``:
295
299
296 :convert.git.similarity: specify how similar files modified in a
300 :convert.git.similarity: specify how similar files modified in a
297 commit must be to be imported as renames or copies, as a
301 commit must be to be imported as renames or copies, as a
298 percentage between ``0`` (disabled) and ``100`` (files must be
302 percentage between ``0`` (disabled) and ``100`` (files must be
299 identical). For example, ``90`` means that a delete/add pair will
303 identical). For example, ``90`` means that a delete/add pair will
300 be imported as a rename if more than 90% of the file hasn't
304 be imported as a rename if more than 90% of the file hasn't
301 changed. The default is ``50``.
305 changed. The default is ``50``.
302
306
303 :convert.git.findcopiesharder: while detecting copies, look at all
307 :convert.git.findcopiesharder: while detecting copies, look at all
304 files in the working copy instead of just changed ones. This
308 files in the working copy instead of just changed ones. This
305 is very expensive for large projects, and is only effective when
309 is very expensive for large projects, and is only effective when
306 ``convert.git.similarity`` is greater than 0. The default is False.
310 ``convert.git.similarity`` is greater than 0. The default is False.
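A hypothetical invocation that turns on rename/copy detection for a Git source (repository names are placeholders):

hg convert --config convert.git.similarity=90 \
           --config convert.git.findcopiesharder=True project-git project-hg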
307
311
308 Perforce Source
312 Perforce Source
309 ###############
313 ###############
310
314
311 The Perforce (P4) importer can be given a p4 depot path or a
315 The Perforce (P4) importer can be given a p4 depot path or a
312 client specification as source. It will convert all files in the
316 client specification as source. It will convert all files in the
313 source to a flat Mercurial repository, ignoring labels, branches
317 source to a flat Mercurial repository, ignoring labels, branches
314 and integrations. Note that when a depot path is given, you should
318 and integrations. Note that when a depot path is given, you should
315 usually specify a target directory, because otherwise the
319 usually specify a target directory, because otherwise the
316 target may be named ``...-hg``.
320 target may be named ``...-hg``.
317
321
318 It is possible to limit the amount of source history to be
322 It is possible to limit the amount of source history to be
319 converted by specifying an initial Perforce revision:
323 converted by specifying an initial Perforce revision:
320
324
321 :convert.p4.startrev: specify initial Perforce revision (a
325 :convert.p4.startrev: specify initial Perforce revision (a
322 Perforce changelist number).
326 Perforce changelist number).
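For example, with a hypothetical depot path, a conversion starting at
changelist 1000 might look like::

    hg convert --config convert.p4.startrev=1000 \
        //depot/project/main project-main-hg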
323
327
324 Mercurial Destination
328 Mercurial Destination
325 #####################
329 #####################
326
330
327 The following options are supported:
331 The following options are supported:
328
332
329 :convert.hg.clonebranches: dispatch source branches in separate
333 :convert.hg.clonebranches: dispatch source branches in separate
330 clones. The default is False.
334 clones. The default is False.
331
335
332 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
336 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
333 ``default``.
337 ``default``.
334
338
335 :convert.hg.usebranchnames: preserve branch names. The default is
339 :convert.hg.usebranchnames: preserve branch names. The default is
336 True.
340 True.
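As a sketch with hypothetical repository names, these destination
options are likewise passed as ``--config`` overrides::

    hg convert --config convert.hg.tagsbranch=tags \
        --config convert.hg.usebranchnames=False \
        src-repo dst-repo-hg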
337 """
341 """
338 return convcmd.convert(ui, src, dest, revmapfile, **opts)
342 return convcmd.convert(ui, src, dest, revmapfile, **opts)
339
343
340 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
344 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
341 def debugsvnlog(ui, **opts):
345 def debugsvnlog(ui, **opts):
342 return subversion.debugsvnlog(ui, **opts)
346 return subversion.debugsvnlog(ui, **opts)
343
347
344 @command('debugcvsps',
348 @command('debugcvsps',
345 [
349 [
346 # Main options shared with cvsps-2.1
350 # Main options shared with cvsps-2.1
347 ('b', 'branches', [], _('only return changes on specified branches')),
351 ('b', 'branches', [], _('only return changes on specified branches')),
348 ('p', 'prefix', '', _('prefix to remove from file names')),
352 ('p', 'prefix', '', _('prefix to remove from file names')),
349 ('r', 'revisions', [],
353 ('r', 'revisions', [],
350 _('only return changes after or between specified tags')),
354 _('only return changes after or between specified tags')),
351 ('u', 'update-cache', None, _("update cvs log cache")),
355 ('u', 'update-cache', None, _("update cvs log cache")),
352 ('x', 'new-cache', None, _("create new cvs log cache")),
356 ('x', 'new-cache', None, _("create new cvs log cache")),
353 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
357 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
354 ('', 'root', '', _('specify cvsroot')),
358 ('', 'root', '', _('specify cvsroot')),
355 # Options specific to builtin cvsps
359 # Options specific to builtin cvsps
356 ('', 'parents', '', _('show parent changesets')),
360 ('', 'parents', '', _('show parent changesets')),
357 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
361 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
358 # Options that are ignored for compatibility with cvsps-2.1
362 # Options that are ignored for compatibility with cvsps-2.1
359 ('A', 'cvs-direct', None, _('ignored for compatibility')),
363 ('A', 'cvs-direct', None, _('ignored for compatibility')),
360 ],
364 ],
361 _('hg debugcvsps [OPTION]... [PATH]...'),
365 _('hg debugcvsps [OPTION]... [PATH]...'),
362 norepo=True)
366 norepo=True)
363 def debugcvsps(ui, *args, **opts):
367 def debugcvsps(ui, *args, **opts):
364 '''create changeset information from CVS
368 '''create changeset information from CVS
365
369
366 This command is intended as a debugging tool for the CVS to
370 This command is intended as a debugging tool for the CVS to
367 Mercurial converter, and can be used as a direct replacement for
371 Mercurial converter, and can be used as a direct replacement for
368 cvsps.
372 cvsps.
369
373
370 Hg debugcvsps reads the CVS rlog for the current directory (or any
374 Hg debugcvsps reads the CVS rlog for the current directory (or any
371 named directory) in the CVS repository, and converts the log to a
375 named directory) in the CVS repository, and converts the log to a
372 series of changesets based on matching commit log entries and
376 series of changesets based on matching commit log entries and
373 dates.'''
377 dates.'''
374 return cvsps.debugcvsps(ui, *args, **opts)
378 return cvsps.debugcvsps(ui, *args, **opts)
375
379
376 def kwconverted(ctx, name):
380 def kwconverted(ctx, name):
377 rev = ctx.extra().get('convert_revision', '')
381 rev = ctx.extra().get('convert_revision', '')
378 if rev.startswith('svn:'):
382 if rev.startswith('svn:'):
379 if name == 'svnrev':
383 if name == 'svnrev':
380 return str(subversion.revsplit(rev)[2])
384 return str(subversion.revsplit(rev)[2])
381 elif name == 'svnpath':
385 elif name == 'svnpath':
382 return subversion.revsplit(rev)[1]
386 return subversion.revsplit(rev)[1]
383 elif name == 'svnuuid':
387 elif name == 'svnuuid':
384 return subversion.revsplit(rev)[0]
388 return subversion.revsplit(rev)[0]
385 return rev
389 return rev
386
390
387 def kwsvnrev(repo, ctx, **args):
391 def kwsvnrev(repo, ctx, **args):
388 """:svnrev: String. Converted subversion revision number."""
392 """:svnrev: String. Converted subversion revision number."""
389 return kwconverted(ctx, 'svnrev')
393 return kwconverted(ctx, 'svnrev')
390
394
391 def kwsvnpath(repo, ctx, **args):
395 def kwsvnpath(repo, ctx, **args):
392 """:svnpath: String. Converted subversion revision project path."""
396 """:svnpath: String. Converted subversion revision project path."""
393 return kwconverted(ctx, 'svnpath')
397 return kwconverted(ctx, 'svnpath')
394
398
395 def kwsvnuuid(repo, ctx, **args):
399 def kwsvnuuid(repo, ctx, **args):
396 """:svnuuid: String. Converted subversion revision repository identifier."""
400 """:svnuuid: String. Converted subversion revision repository identifier."""
397 return kwconverted(ctx, 'svnuuid')
401 return kwconverted(ctx, 'svnuuid')
398
402
399 def extsetup(ui):
403 def extsetup(ui):
400 templatekw.keywords['svnrev'] = kwsvnrev
404 templatekw.keywords['svnrev'] = kwsvnrev
401 templatekw.keywords['svnpath'] = kwsvnpath
405 templatekw.keywords['svnpath'] = kwsvnpath
402 templatekw.keywords['svnuuid'] = kwsvnuuid
406 templatekw.keywords['svnuuid'] = kwsvnuuid
403
407
404 # tell hggettext to extract docstrings from these functions:
408 # tell hggettext to extract docstrings from these functions:
405 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
409 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
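The template keywords registered above only yield Subversion-specific
values for changesets whose ``convert_revision`` extra starts with
``svn:``; otherwise they fall back to the raw ``convert_revision``
value, which is empty for changesets not produced by convert. A
hypothetical query against a converted repository might look like::

    hg log -r . --template "{svnrev} {svnpath} {svnuuid}\n"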
@@ -1,350 +1,354 b''
1 """automatically manage newlines in repository files
1 """automatically manage newlines in repository files
2
2
3 This extension allows you to manage the type of line endings (CRLF or
3 This extension allows you to manage the type of line endings (CRLF or
4 LF) that are used in the repository and in the local working
4 LF) that are used in the repository and in the local working
5 directory. That way you can get CRLF line endings on Windows and LF on
5 directory. That way you can get CRLF line endings on Windows and LF on
6 Unix/Mac, thereby letting everybody use their OS native line endings.
6 Unix/Mac, thereby letting everybody use their OS native line endings.
7
7
8 The extension reads its configuration from a versioned ``.hgeol``
8 The extension reads its configuration from a versioned ``.hgeol``
9 configuration file found in the root of the working directory. The
9 configuration file found in the root of the working directory. The
10 ``.hgeol`` file uses the same syntax as all other Mercurial
10 ``.hgeol`` file uses the same syntax as all other Mercurial
11 configuration files. It uses two sections, ``[patterns]`` and
11 configuration files. It uses two sections, ``[patterns]`` and
12 ``[repository]``.
12 ``[repository]``.
13
13
14 The ``[patterns]`` section specifies how line endings should be
14 The ``[patterns]`` section specifies how line endings should be
15 converted between the working directory and the repository. The format is
15 converted between the working directory and the repository. The format is
16 specified by a file pattern. The first match is used, so put more
16 specified by a file pattern. The first match is used, so put more
17 specific patterns first. The available line endings are ``LF``,
17 specific patterns first. The available line endings are ``LF``,
18 ``CRLF``, and ``BIN``.
18 ``CRLF``, and ``BIN``.
19
19
20 Files with the declared format of ``CRLF`` or ``LF`` are always
20 Files with the declared format of ``CRLF`` or ``LF`` are always
21 checked out and stored in the repository in that format and files
21 checked out and stored in the repository in that format and files
22 declared to be binary (``BIN``) are left unchanged. Additionally,
22 declared to be binary (``BIN``) are left unchanged. Additionally,
23 ``native`` is an alias for checking out in the platform's default line
23 ``native`` is an alias for checking out in the platform's default line
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
26 default behaviour; it is only needed if you need to override a later,
26 default behaviour; it is only needed if you need to override a later,
27 more general pattern.
27 more general pattern.
28
28
29 The optional ``[repository]`` section specifies the line endings to
29 The optional ``[repository]`` section specifies the line endings to
30 use for files stored in the repository. It has a single setting,
30 use for files stored in the repository. It has a single setting,
31 ``native``, which determines the storage line endings for files
31 ``native``, which determines the storage line endings for files
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
35 will be converted to ``LF`` when stored in the repository. Files
35 will be converted to ``LF`` when stored in the repository. Files
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
37 are always stored as-is in the repository.
37 are always stored as-is in the repository.
38
38
39 Example versioned ``.hgeol`` file::
39 Example versioned ``.hgeol`` file::
40
40
41 [patterns]
41 [patterns]
42 **.py = native
42 **.py = native
43 **.vcproj = CRLF
43 **.vcproj = CRLF
44 **.txt = native
44 **.txt = native
45 Makefile = LF
45 Makefile = LF
46 **.jpg = BIN
46 **.jpg = BIN
47
47
48 [repository]
48 [repository]
49 native = LF
49 native = LF
50
50
51 .. note::
51 .. note::
52
52
53 The rules will first apply when files are touched in the working
53 The rules will first apply when files are touched in the working
54 directory, e.g. by updating to null and back to tip to touch all files.
54 directory, e.g. by updating to null and back to tip to touch all files.
55
55
56 The extension uses an optional ``[eol]`` section read from both the
56 The extension uses an optional ``[eol]`` section read from both the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
58 latter overriding the former. You can use that section to control the
58 latter overriding the former. You can use that section to control the
59 overall behavior. There are three settings:
59 overall behavior. There are three settings:
60
60
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
62 ``CRLF`` to override the default interpretation of ``native`` for
62 ``CRLF`` to override the default interpretation of ``native`` for
63 checkout. This can be used with :hg:`archive` on Unix, say, to
63 checkout. This can be used with :hg:`archive` on Unix, say, to
64 generate an archive where files have line endings for Windows.
64 generate an archive where files have line endings for Windows.
65
65
66 - ``eol.only-consistent`` (default True) can be set to False to make
66 - ``eol.only-consistent`` (default True) can be set to False to make
67 the extension convert files with inconsistent EOLs. Inconsistent
67 the extension convert files with inconsistent EOLs. Inconsistent
68 means that there is both ``CRLF`` and ``LF`` present in the file.
68 means that there is both ``CRLF`` and ``LF`` present in the file.
69 Such files are normally not touched under the assumption that they
69 Such files are normally not touched under the assumption that they
70 have mixed EOLs on purpose.
70 have mixed EOLs on purpose.
71
71
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
73 ensure that converted files end with an EOL character (either ``\\n``
73 ensure that converted files end with an EOL character (either ``\\n``
74 or ``\\r\\n`` as per the configured patterns).
74 or ``\\r\\n`` as per the configured patterns).
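Taken together, the three settings above might appear in a
configuration file as follows (the values shown are purely
illustrative)::

  [eol]
  native = CRLF
  only-consistent = False
  fix-trailing-newline = True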
75
75
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
77 like the deprecated win32text extension does. This means that you can
77 like the deprecated win32text extension does. This means that you can
78 disable win32text and enable eol and your filters will still work. You
78 disable win32text and enable eol and your filters will still work. You
79 only need to use these filters until you have prepared a ``.hgeol`` file.
79 only need to use these filters until you have prepared a ``.hgeol`` file.
80
80
81 The ``win32text.forbid*`` hooks provided by the win32text extension
81 The ``win32text.forbid*`` hooks provided by the win32text extension
82 have been unified into a single hook named ``eol.checkheadshook``. The
82 have been unified into a single hook named ``eol.checkheadshook``. The
83 hook will look up the expected line endings from the ``.hgeol`` file,
83 hook will look up the expected line endings from the ``.hgeol`` file,
84 which means you must migrate to a ``.hgeol`` file before using
84 which means you must migrate to a ``.hgeol`` file before using
85 the hook. ``eol.checkheadshook`` only checks heads; intermediate
85 the hook. ``eol.checkheadshook`` only checks heads; intermediate
86 invalid revisions will still be pushed. To forbid them completely, use the
86 invalid revisions will still be pushed. To forbid them completely, use the
87 ``eol.checkallhook`` hook. These hooks are best used as
87 ``eol.checkallhook`` hook. These hooks are best used as
88 ``pretxnchangegroup`` hooks.
88 ``pretxnchangegroup`` hooks.
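A minimal hook configuration, assuming the server can import the
extension as ``hgext.eol``, could look like::

  [hooks]
  pretxnchangegroup.checkeol = python:hgext.eol.checkallhook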
89
89
90 See :hg:`help patterns` for more information about the glob patterns
90 See :hg:`help patterns` for more information about the glob patterns
91 used.
91 used.
92 """
92 """
93
93
94 from mercurial.i18n import _
94 from mercurial.i18n import _
95 from mercurial import util, config, extensions, match, error
95 from mercurial import util, config, extensions, match, error
96 import re, os
96 import re, os
97
97
98 # Note for extension authors: ONLY specify testedwith = 'internal' for
99 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
100 # specify the version(s) of Mercurial they are tested with, or
101 # leave the attribute unspecified.
98 testedwith = 'internal'
102 testedwith = 'internal'
99
103
100 # Matches a lone LF, i.e., one that is not part of CRLF.
104 # Matches a lone LF, i.e., one that is not part of CRLF.
101 singlelf = re.compile('(^|[^\r])\n')
105 singlelf = re.compile('(^|[^\r])\n')
102 # Matches a single EOL which can either be a CRLF where repeated CR
106 # Matches a single EOL which can either be a CRLF where repeated CR
103 # are removed or a LF. We do not care about old Macintosh files, so a
107 # are removed or a LF. We do not care about old Macintosh files, so a
104 # stray CR is an error.
108 # stray CR is an error.
105 eolre = re.compile('\r*\n')
109 eolre = re.compile('\r*\n')
106
110
107
111
108 def inconsistenteol(data):
112 def inconsistenteol(data):
109 return '\r\n' in data and singlelf.search(data)
113 return '\r\n' in data and singlelf.search(data)
110
114
111 def tolf(s, params, ui, **kwargs):
115 def tolf(s, params, ui, **kwargs):
112 """Filter to convert to LF EOLs."""
116 """Filter to convert to LF EOLs."""
113 if util.binary(s):
117 if util.binary(s):
114 return s
118 return s
115 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
119 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
116 return s
120 return s
117 if (ui.configbool('eol', 'fix-trailing-newline', False)
121 if (ui.configbool('eol', 'fix-trailing-newline', False)
118 and s and s[-1] != '\n'):
122 and s and s[-1] != '\n'):
119 s = s + '\n'
123 s = s + '\n'
120 return eolre.sub('\n', s)
124 return eolre.sub('\n', s)
121
125
122 def tocrlf(s, params, ui, **kwargs):
126 def tocrlf(s, params, ui, **kwargs):
123 """Filter to convert to CRLF EOLs."""
127 """Filter to convert to CRLF EOLs."""
124 if util.binary(s):
128 if util.binary(s):
125 return s
129 return s
126 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
130 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
127 return s
131 return s
128 if (ui.configbool('eol', 'fix-trailing-newline', False)
132 if (ui.configbool('eol', 'fix-trailing-newline', False)
129 and s and s[-1] != '\n'):
133 and s and s[-1] != '\n'):
130 s = s + '\n'
134 s = s + '\n'
131 return eolre.sub('\r\n', s)
135 return eolre.sub('\r\n', s)
132
136
133 def isbinary(s, params):
137 def isbinary(s, params):
134 """Filter to do nothing with the file."""
138 """Filter to do nothing with the file."""
135 return s
139 return s
136
140
137 filters = {
141 filters = {
138 'to-lf': tolf,
142 'to-lf': tolf,
139 'to-crlf': tocrlf,
143 'to-crlf': tocrlf,
140 'is-binary': isbinary,
144 'is-binary': isbinary,
141 # The following provide backwards compatibility with win32text
145 # The following provide backwards compatibility with win32text
142 'cleverencode:': tolf,
146 'cleverencode:': tolf,
143 'cleverdecode:': tocrlf
147 'cleverdecode:': tocrlf
144 }
148 }
145
149
146 class eolfile(object):
150 class eolfile(object):
147 def __init__(self, ui, root, data):
151 def __init__(self, ui, root, data):
148 self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
152 self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
149 self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
153 self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
150
154
151 self.cfg = config.config()
155 self.cfg = config.config()
152 # Our files should not be touched. The pattern must be
156 # Our files should not be touched. The pattern must be
153 # inserted first override a '** = native' pattern.
157 # inserted first override a '** = native' pattern.
154 self.cfg.set('patterns', '.hg*', 'BIN', 'eol')
158 self.cfg.set('patterns', '.hg*', 'BIN', 'eol')
155 # We can then parse the user's patterns.
159 # We can then parse the user's patterns.
156 self.cfg.parse('.hgeol', data)
160 self.cfg.parse('.hgeol', data)
157
161
158 isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
162 isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
159 self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
163 self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
160 iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
164 iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
161 self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
165 self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
162
166
163 include = []
167 include = []
164 exclude = []
168 exclude = []
165 for pattern, style in self.cfg.items('patterns'):
169 for pattern, style in self.cfg.items('patterns'):
166 key = style.upper()
170 key = style.upper()
167 if key == 'BIN':
171 if key == 'BIN':
168 exclude.append(pattern)
172 exclude.append(pattern)
169 else:
173 else:
170 include.append(pattern)
174 include.append(pattern)
171 # This will match the files for which we need to care
175 # This will match the files for which we need to care
172 # about inconsistent newlines.
176 # about inconsistent newlines.
173 self.match = match.match(root, '', [], include, exclude)
177 self.match = match.match(root, '', [], include, exclude)
174
178
175 def copytoui(self, ui):
179 def copytoui(self, ui):
176 for pattern, style in self.cfg.items('patterns'):
180 for pattern, style in self.cfg.items('patterns'):
177 key = style.upper()
181 key = style.upper()
178 try:
182 try:
179 ui.setconfig('decode', pattern, self._decode[key], 'eol')
183 ui.setconfig('decode', pattern, self._decode[key], 'eol')
180 ui.setconfig('encode', pattern, self._encode[key], 'eol')
184 ui.setconfig('encode', pattern, self._encode[key], 'eol')
181 except KeyError:
185 except KeyError:
182 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
186 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
183 % (style, self.cfg.source('patterns', pattern)))
187 % (style, self.cfg.source('patterns', pattern)))
184 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
188 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
185 for k, v in self.cfg.items('eol'):
189 for k, v in self.cfg.items('eol'):
186 ui.setconfig('eol', k, v, 'eol')
190 ui.setconfig('eol', k, v, 'eol')
187
191
188 def checkrev(self, repo, ctx, files):
192 def checkrev(self, repo, ctx, files):
189 failed = []
193 failed = []
190 for f in (files or ctx.files()):
194 for f in (files or ctx.files()):
191 if f not in ctx:
195 if f not in ctx:
192 continue
196 continue
193 for pattern, style in self.cfg.items('patterns'):
197 for pattern, style in self.cfg.items('patterns'):
194 if not match.match(repo.root, '', [pattern])(f):
198 if not match.match(repo.root, '', [pattern])(f):
195 continue
199 continue
196 target = self._encode[style.upper()]
200 target = self._encode[style.upper()]
197 data = ctx[f].data()
201 data = ctx[f].data()
198 if (target == "to-lf" and "\r\n" in data
202 if (target == "to-lf" and "\r\n" in data
199 or target == "to-crlf" and singlelf.search(data)):
203 or target == "to-crlf" and singlelf.search(data)):
200 failed.append((str(ctx), target, f))
204 failed.append((str(ctx), target, f))
201 break
205 break
202 return failed
206 return failed
203
207
204 def parseeol(ui, repo, nodes):
208 def parseeol(ui, repo, nodes):
205 try:
209 try:
206 for node in nodes:
210 for node in nodes:
207 try:
211 try:
208 if node is None:
212 if node is None:
209 # Cannot use workingctx.data() since it would load
213 # Cannot use workingctx.data() since it would load
210 # and cache the filters before we configure them.
214 # and cache the filters before we configure them.
211 data = repo.wfile('.hgeol').read()
215 data = repo.wfile('.hgeol').read()
212 else:
216 else:
213 data = repo[node]['.hgeol'].data()
217 data = repo[node]['.hgeol'].data()
214 return eolfile(ui, repo.root, data)
218 return eolfile(ui, repo.root, data)
215 except (IOError, LookupError):
219 except (IOError, LookupError):
216 pass
220 pass
217 except error.ParseError, inst:
221 except error.ParseError, inst:
218 ui.warn(_("warning: ignoring .hgeol file due to parse error "
222 ui.warn(_("warning: ignoring .hgeol file due to parse error "
219 "at %s: %s\n") % (inst.args[1], inst.args[0]))
223 "at %s: %s\n") % (inst.args[1], inst.args[0]))
220 return None
224 return None
221
225
222 def _checkhook(ui, repo, node, headsonly):
226 def _checkhook(ui, repo, node, headsonly):
223 # Get revisions to check and touched files at the same time
227 # Get revisions to check and touched files at the same time
224 files = set()
228 files = set()
225 revs = set()
229 revs = set()
226 for rev in xrange(repo[node].rev(), len(repo)):
230 for rev in xrange(repo[node].rev(), len(repo)):
227 revs.add(rev)
231 revs.add(rev)
228 if headsonly:
232 if headsonly:
229 ctx = repo[rev]
233 ctx = repo[rev]
230 files.update(ctx.files())
234 files.update(ctx.files())
231 for pctx in ctx.parents():
235 for pctx in ctx.parents():
232 revs.discard(pctx.rev())
236 revs.discard(pctx.rev())
233 failed = []
237 failed = []
234 for rev in revs:
238 for rev in revs:
235 ctx = repo[rev]
239 ctx = repo[rev]
236 eol = parseeol(ui, repo, [ctx.node()])
240 eol = parseeol(ui, repo, [ctx.node()])
237 if eol:
241 if eol:
238 failed.extend(eol.checkrev(repo, ctx, files))
242 failed.extend(eol.checkrev(repo, ctx, files))
239
243
240 if failed:
244 if failed:
241 eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
245 eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
242 msgs = []
246 msgs = []
243 for node, target, f in failed:
247 for node, target, f in failed:
244 msgs.append(_(" %s in %s should not have %s line endings") %
248 msgs.append(_(" %s in %s should not have %s line endings") %
245 (f, node, eols[target]))
249 (f, node, eols[target]))
246 raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
250 raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
247
251
248 def checkallhook(ui, repo, node, hooktype, **kwargs):
252 def checkallhook(ui, repo, node, hooktype, **kwargs):
249 """verify that files have expected EOLs"""
253 """verify that files have expected EOLs"""
250 _checkhook(ui, repo, node, False)
254 _checkhook(ui, repo, node, False)
251
255
252 def checkheadshook(ui, repo, node, hooktype, **kwargs):
256 def checkheadshook(ui, repo, node, hooktype, **kwargs):
253 """verify that files have expected EOLs"""
257 """verify that files have expected EOLs"""
254 _checkhook(ui, repo, node, True)
258 _checkhook(ui, repo, node, True)
255
259
256 # "checkheadshook" used to be called "hook"
260 # "checkheadshook" used to be called "hook"
257 hook = checkheadshook
261 hook = checkheadshook
258
262
259 def preupdate(ui, repo, hooktype, parent1, parent2):
263 def preupdate(ui, repo, hooktype, parent1, parent2):
260 repo.loadeol([parent1])
264 repo.loadeol([parent1])
261 return False
265 return False
262
266
263 def uisetup(ui):
267 def uisetup(ui):
264 ui.setconfig('hooks', 'preupdate.eol', preupdate, 'eol')
268 ui.setconfig('hooks', 'preupdate.eol', preupdate, 'eol')
265
269
266 def extsetup(ui):
270 def extsetup(ui):
267 try:
271 try:
268 extensions.find('win32text')
272 extensions.find('win32text')
269 ui.warn(_("the eol extension is incompatible with the "
273 ui.warn(_("the eol extension is incompatible with the "
270 "win32text extension\n"))
274 "win32text extension\n"))
271 except KeyError:
275 except KeyError:
272 pass
276 pass
273
277
274
278
275 def reposetup(ui, repo):
279 def reposetup(ui, repo):
276 uisetup(repo.ui)
280 uisetup(repo.ui)
277
281
278 if not repo.local():
282 if not repo.local():
279 return
283 return
280 for name, fn in filters.iteritems():
284 for name, fn in filters.iteritems():
281 repo.adddatafilter(name, fn)
285 repo.adddatafilter(name, fn)
282
286
283 ui.setconfig('patch', 'eol', 'auto', 'eol')
287 ui.setconfig('patch', 'eol', 'auto', 'eol')
284
288
285 class eolrepo(repo.__class__):
289 class eolrepo(repo.__class__):
286
290
287 def loadeol(self, nodes):
291 def loadeol(self, nodes):
288 eol = parseeol(self.ui, self, nodes)
292 eol = parseeol(self.ui, self, nodes)
289 if eol is None:
293 if eol is None:
290 return None
294 return None
291 eol.copytoui(self.ui)
295 eol.copytoui(self.ui)
292 return eol.match
296 return eol.match
293
297
294 def _hgcleardirstate(self):
298 def _hgcleardirstate(self):
295 self._eolfile = self.loadeol([None, 'tip'])
299 self._eolfile = self.loadeol([None, 'tip'])
296 if not self._eolfile:
300 if not self._eolfile:
297 self._eolfile = util.never
301 self._eolfile = util.never
298 return
302 return
299
303
300 try:
304 try:
301 cachemtime = os.path.getmtime(self.join("eol.cache"))
305 cachemtime = os.path.getmtime(self.join("eol.cache"))
302 except OSError:
306 except OSError:
303 cachemtime = 0
307 cachemtime = 0
304
308
305 try:
309 try:
306 eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
310 eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
307 except OSError:
311 except OSError:
308 eolmtime = 0
312 eolmtime = 0
309
313
310 if eolmtime > cachemtime:
314 if eolmtime > cachemtime:
311 self.ui.debug("eol: detected change in .hgeol\n")
315 self.ui.debug("eol: detected change in .hgeol\n")
312 wlock = None
316 wlock = None
313 try:
317 try:
314 wlock = self.wlock()
318 wlock = self.wlock()
315 for f in self.dirstate:
319 for f in self.dirstate:
316 if self.dirstate[f] == 'n':
320 if self.dirstate[f] == 'n':
317 # all normal files need to be looked at
321 # all normal files need to be looked at
318 # again since the new .hgeol file might no
322 # again since the new .hgeol file might no
319 # longer match a file it matched before
323 # longer match a file it matched before
320 self.dirstate.normallookup(f)
324 self.dirstate.normallookup(f)
321 # Create or touch the cache to update mtime
325 # Create or touch the cache to update mtime
322 self.vfs("eol.cache", "w").close()
326 self.vfs("eol.cache", "w").close()
323 wlock.release()
327 wlock.release()
324 except error.LockUnavailable:
328 except error.LockUnavailable:
325 # If we cannot lock the repository and clear the
329 # If we cannot lock the repository and clear the
326 # dirstate, then a commit might not see all files
330 # dirstate, then a commit might not see all files
327 # as modified. But if we cannot lock the
331 # as modified. But if we cannot lock the
328 # repository, then we can also not make a commit,
332 # repository, then we can also not make a commit,
329 # so ignore the error.
333 # so ignore the error.
330 pass
334 pass
331
335
332 def commitctx(self, ctx, error=False):
336 def commitctx(self, ctx, error=False):
333 for f in sorted(ctx.added() + ctx.modified()):
337 for f in sorted(ctx.added() + ctx.modified()):
334 if not self._eolfile(f):
338 if not self._eolfile(f):
335 continue
339 continue
336 fctx = ctx[f]
340 fctx = ctx[f]
337 if fctx is None:
341 if fctx is None:
338 continue
342 continue
339 data = fctx.data()
343 data = fctx.data()
340 if util.binary(data):
344 if util.binary(data):
341 # We should not abort here, since the user should
345 # We should not abort here, since the user should
342 # be able to say "** = native" to automatically
346 # be able to say "** = native" to automatically
343 # have all non-binary files taken care of.
347 # have all non-binary files taken care of.
344 continue
348 continue
345 if inconsistenteol(data):
349 if inconsistenteol(data):
346 raise util.Abort(_("inconsistent newline style "
350 raise util.Abort(_("inconsistent newline style "
347 "in %s\n") % f)
351 "in %s\n") % f)
348 return super(eolrepo, self).commitctx(ctx, error)
352 return super(eolrepo, self).commitctx(ctx, error)
349 repo.__class__ = eolrepo
353 repo.__class__ = eolrepo
350 repo._hgcleardirstate()
354 repo._hgcleardirstate()
@@ -1,340 +1,344 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or a revision with the working directory. The external
11 to compare revisions, or a revision with the working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 The extdiff extension also allows you to configure new diff commands, so
16 The extdiff extension also allows you to configure new diff commands, so
17 you do not always need to type :hg:`extdiff -p kdiff3`. ::
17 you do not always need to type :hg:`extdiff -p kdiff3`. ::
18
18
19 [extdiff]
19 [extdiff]
20 # add new command that runs GNU diff(1) in 'context diff' mode
20 # add new command that runs GNU diff(1) in 'context diff' mode
21 cdiff = gdiff -Nprc5
21 cdiff = gdiff -Nprc5
22 ## or the old way:
22 ## or the old way:
23 #cmd.cdiff = gdiff
23 #cmd.cdiff = gdiff
24 #opts.cdiff = -Nprc5
24 #opts.cdiff = -Nprc5
25
25
26 # add new command called meld, runs meld (no need to name twice). If
26 # add new command called meld, runs meld (no need to name twice). If
27 # the meld executable is not available, the meld tool in [merge-tools]
27 # the meld executable is not available, the meld tool in [merge-tools]
28 # will be used, if available
28 # will be used, if available
29 meld =
29 meld =
30
30
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
32 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
33 # users should be sure to put "let g:DirDiffDynamicDiffText = 1" in
33 # users should be sure to put "let g:DirDiffDynamicDiffText = 1" in
34 # your .vimrc
34 # your .vimrc
35 vimdiff = gvim -f "+next" \\
35 vimdiff = gvim -f "+next" \\
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
37
37
38 Tool arguments can include variables that are expanded at runtime::
38 Tool arguments can include variables that are expanded at runtime::
39
39
40 $parent1, $plabel1 - filename, descriptive label of first parent
40 $parent1, $plabel1 - filename, descriptive label of first parent
41 $child, $clabel - filename, descriptive label of child revision
41 $child, $clabel - filename, descriptive label of child revision
42 $parent2, $plabel2 - filename, descriptive label of second parent
42 $parent2, $plabel2 - filename, descriptive label of second parent
43 $root - repository root
43 $root - repository root
44 $parent is an alias for $parent1.
44 $parent is an alias for $parent1.
45
45
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
47 sections for diff tool arguments, when none are specified in [extdiff].
47 sections for diff tool arguments, when none are specified in [extdiff].
48
48
49 ::
49 ::
50
50
51 [extdiff]
51 [extdiff]
52 kdiff3 =
52 kdiff3 =
53
53
54 [diff-tools]
54 [diff-tools]
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
56
56
57 You can use -I/-X and a list of file or directory names as with the
57 You can use -I/-X and a list of file or directory names as with the
58 normal :hg:`diff` command. The extdiff extension makes snapshots of only
58 normal :hg:`diff` command. The extdiff extension makes snapshots of only
59 needed files, so running the external diff program will actually be
59 needed files, so running the external diff program will actually be
60 pretty fast (at least faster than having to compare the entire tree).
60 pretty fast (at least faster than having to compare the entire tree).
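As a sketch (the ``1.0`` tag is hypothetical and kdiff3 must be
installed), a comparison restricted to Python files between two
revisions could look like::

  hg extdiff -p kdiff3 -I 'glob:**.py' -r 1.0 -r tip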
61 '''
61 '''
62
62
63 from mercurial.i18n import _
63 from mercurial.i18n import _
64 from mercurial.node import short, nullid
64 from mercurial.node import short, nullid
65 from mercurial import cmdutil, scmutil, util, commands, encoding, filemerge
65 from mercurial import cmdutil, scmutil, util, commands, encoding, filemerge
66 import os, shlex, shutil, tempfile, re
66 import os, shlex, shutil, tempfile, re
67
67
68 cmdtable = {}
68 cmdtable = {}
69 command = cmdutil.command(cmdtable)
69 command = cmdutil.command(cmdtable)
70 # Note for extension authors: ONLY specify testedwith = 'internal' for
71 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
72 # specify the version(s) of Mercurial they are tested with, or
73 # leave the attribute unspecified.
70 testedwith = 'internal'
74 testedwith = 'internal'
71
75
72 def snapshot(ui, repo, files, node, tmproot):
76 def snapshot(ui, repo, files, node, tmproot):
73 '''snapshot files as of some revision
77 '''snapshot files as of some revision
74 if not using snapshot, -I/-X does not work and recursive diff
78 if not using snapshot, -I/-X does not work and recursive diff
75 in tools like kdiff3 and meld displays too many files.'''
79 in tools like kdiff3 and meld displays too many files.'''
76 dirname = os.path.basename(repo.root)
80 dirname = os.path.basename(repo.root)
77 if dirname == "":
81 if dirname == "":
78 dirname = "root"
82 dirname = "root"
79 if node is not None:
83 if node is not None:
80 dirname = '%s.%s' % (dirname, short(node))
84 dirname = '%s.%s' % (dirname, short(node))
81 base = os.path.join(tmproot, dirname)
85 base = os.path.join(tmproot, dirname)
82 os.mkdir(base)
86 os.mkdir(base)
83 if node is not None:
87 if node is not None:
84 ui.note(_('making snapshot of %d files from rev %s\n') %
88 ui.note(_('making snapshot of %d files from rev %s\n') %
85 (len(files), short(node)))
89 (len(files), short(node)))
86 else:
90 else:
87 ui.note(_('making snapshot of %d files from working directory\n') %
91 ui.note(_('making snapshot of %d files from working directory\n') %
88 (len(files)))
92 (len(files)))
89 wopener = scmutil.opener(base)
93 wopener = scmutil.opener(base)
90 fns_and_mtime = []
94 fns_and_mtime = []
91 ctx = repo[node]
95 ctx = repo[node]
92 for fn in sorted(files):
96 for fn in sorted(files):
93 wfn = util.pconvert(fn)
97 wfn = util.pconvert(fn)
94 if wfn not in ctx:
98 if wfn not in ctx:
95 # File doesn't exist; could be a bogus modify
99 # File doesn't exist; could be a bogus modify
96 continue
100 continue
97 ui.note(' %s\n' % wfn)
101 ui.note(' %s\n' % wfn)
98 dest = os.path.join(base, wfn)
102 dest = os.path.join(base, wfn)
99 fctx = ctx[wfn]
103 fctx = ctx[wfn]
100 data = repo.wwritedata(wfn, fctx.data())
104 data = repo.wwritedata(wfn, fctx.data())
101 if 'l' in fctx.flags():
105 if 'l' in fctx.flags():
102 wopener.symlink(data, wfn)
106 wopener.symlink(data, wfn)
103 else:
107 else:
104 wopener.write(wfn, data)
108 wopener.write(wfn, data)
105 if 'x' in fctx.flags():
109 if 'x' in fctx.flags():
106 util.setflags(dest, False, True)
110 util.setflags(dest, False, True)
107 if node is None:
111 if node is None:
108 fns_and_mtime.append((dest, repo.wjoin(fn),
112 fns_and_mtime.append((dest, repo.wjoin(fn),
109 os.lstat(dest).st_mtime))
113 os.lstat(dest).st_mtime))
110 return dirname, fns_and_mtime
114 return dirname, fns_and_mtime
111
115
112 def dodiff(ui, repo, cmdline, pats, opts):
116 def dodiff(ui, repo, cmdline, pats, opts):
113 '''Do the actual diff:
117 '''Do the actual diff:
114
118
115 - copy to a temp structure if diffing 2 internal revisions
119 - copy to a temp structure if diffing 2 internal revisions
116 - copy to a temp structure if diffing working revision with
120 - copy to a temp structure if diffing working revision with
117 another one and more than 1 file is changed
121 another one and more than 1 file is changed
118 - just invoke the diff for a single file in the working dir
122 - just invoke the diff for a single file in the working dir
119 '''
123 '''
120
124
121 revs = opts.get('rev')
125 revs = opts.get('rev')
122 change = opts.get('change')
126 change = opts.get('change')
123 do3way = '$parent2' in cmdline
127 do3way = '$parent2' in cmdline
124
128
125 if revs and change:
129 if revs and change:
126 msg = _('cannot specify --rev and --change at the same time')
130 msg = _('cannot specify --rev and --change at the same time')
127 raise util.Abort(msg)
131 raise util.Abort(msg)
128 elif change:
132 elif change:
129 node2 = scmutil.revsingle(repo, change, None).node()
133 node2 = scmutil.revsingle(repo, change, None).node()
130 node1a, node1b = repo.changelog.parents(node2)
134 node1a, node1b = repo.changelog.parents(node2)
131 else:
135 else:
132 node1a, node2 = scmutil.revpair(repo, revs)
136 node1a, node2 = scmutil.revpair(repo, revs)
133 if not revs:
137 if not revs:
134 node1b = repo.dirstate.p2()
138 node1b = repo.dirstate.p2()
135 else:
139 else:
136 node1b = nullid
140 node1b = nullid
137
141
138 # Disable 3-way merge if there is only one parent
142 # Disable 3-way merge if there is only one parent
139 if do3way:
143 if do3way:
140 if node1b == nullid:
144 if node1b == nullid:
141 do3way = False
145 do3way = False
142
146
143 matcher = scmutil.match(repo[node2], pats, opts)
147 matcher = scmutil.match(repo[node2], pats, opts)
144 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
148 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
145 if do3way:
149 if do3way:
146 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
150 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
147 else:
151 else:
148 mod_b, add_b, rem_b = set(), set(), set()
152 mod_b, add_b, rem_b = set(), set(), set()
149 modadd = mod_a | add_a | mod_b | add_b
153 modadd = mod_a | add_a | mod_b | add_b
150 common = modadd | rem_a | rem_b
154 common = modadd | rem_a | rem_b
151 if not common:
155 if not common:
152 return 0
156 return 0
153
157
154 tmproot = tempfile.mkdtemp(prefix='extdiff.')
158 tmproot = tempfile.mkdtemp(prefix='extdiff.')
155 try:
159 try:
156 # Always make a copy of node1a (and node1b, if applicable)
160 # Always make a copy of node1a (and node1b, if applicable)
157 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
161 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
158 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
162 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
159 rev1a = '@%d' % repo[node1a].rev()
163 rev1a = '@%d' % repo[node1a].rev()
160 if do3way:
164 if do3way:
161 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
165 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
162 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
166 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
163 rev1b = '@%d' % repo[node1b].rev()
167 rev1b = '@%d' % repo[node1b].rev()
164 else:
168 else:
165 dir1b = None
169 dir1b = None
166 rev1b = ''
170 rev1b = ''
167
171
168 fns_and_mtime = []
172 fns_and_mtime = []
169
173
170 # If node2 is not the wc or there is >1 change, copy it
174 # If node2 is not the wc or there is >1 change, copy it
171 dir2root = ''
175 dir2root = ''
172 rev2 = ''
176 rev2 = ''
173 if node2:
177 if node2:
174 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
178 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
175 rev2 = '@%d' % repo[node2].rev()
179 rev2 = '@%d' % repo[node2].rev()
176 elif len(common) > 1:
180 elif len(common) > 1:
177 # we only actually need to get the files to copy back to
181 # we only actually need to get the files to copy back to
178 # the working dir in this case (because the other cases
182 # the working dir in this case (because the other cases
179 # are: diffing 2 revisions or single file -- in which case
183 # are: diffing 2 revisions or single file -- in which case
180 # the file is already directly passed to the diff tool).
184 # the file is already directly passed to the diff tool).
181 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
185 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
182 else:
186 else:
183 # This lets the diff tool open the changed file directly
187 # This lets the diff tool open the changed file directly
184 dir2 = ''
188 dir2 = ''
185 dir2root = repo.root
189 dir2root = repo.root
186
190
187 label1a = rev1a
191 label1a = rev1a
188 label1b = rev1b
192 label1b = rev1b
189 label2 = rev2
193 label2 = rev2
190
194
191 # If only one change, diff the files instead of the directories
195 # If only one change, diff the files instead of the directories
192 # Handle bogus modifies correctly by checking if the files exist
196 # Handle bogus modifies correctly by checking if the files exist
193 if len(common) == 1:
197 if len(common) == 1:
194 common_file = util.localpath(common.pop())
198 common_file = util.localpath(common.pop())
195 dir1a = os.path.join(tmproot, dir1a, common_file)
199 dir1a = os.path.join(tmproot, dir1a, common_file)
196 label1a = common_file + rev1a
200 label1a = common_file + rev1a
197 if not os.path.isfile(dir1a):
201 if not os.path.isfile(dir1a):
198 dir1a = os.devnull
202 dir1a = os.devnull
199 if do3way:
203 if do3way:
200 dir1b = os.path.join(tmproot, dir1b, common_file)
204 dir1b = os.path.join(tmproot, dir1b, common_file)
201 label1b = common_file + rev1b
205 label1b = common_file + rev1b
202 if not os.path.isfile(dir1b):
206 if not os.path.isfile(dir1b):
203 dir1b = os.devnull
207 dir1b = os.devnull
204 dir2 = os.path.join(dir2root, dir2, common_file)
208 dir2 = os.path.join(dir2root, dir2, common_file)
205 label2 = common_file + rev2
209 label2 = common_file + rev2
206
210
207 # Function to quote file/dir names in the argument string.
211 # Function to quote file/dir names in the argument string.
208 # When not operating in 3-way mode, an empty string is
212 # When not operating in 3-way mode, an empty string is
209 # returned for parent2
213 # returned for parent2
210 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
214 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
211 'plabel1': label1a, 'plabel2': label1b,
215 'plabel1': label1a, 'plabel2': label1b,
212 'clabel': label2, 'child': dir2,
216 'clabel': label2, 'child': dir2,
213 'root': repo.root}
217 'root': repo.root}
214 def quote(match):
218 def quote(match):
215 pre = match.group(2)
219 pre = match.group(2)
216 key = match.group(3)
220 key = match.group(3)
217 if not do3way and key == 'parent2':
221 if not do3way and key == 'parent2':
218 return pre
222 return pre
219 return pre + util.shellquote(replace[key])
223 return pre + util.shellquote(replace[key])
220
224
221 # Match parent2 first, so 'parent1?' will match both parent1 and parent
225 # Match parent2 first, so 'parent1?' will match both parent1 and parent
222 regex = (r'''(['"]?)([^\s'"$]*)'''
226 regex = (r'''(['"]?)([^\s'"$]*)'''
223 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
227 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
224 if not do3way and not re.search(regex, cmdline):
228 if not do3way and not re.search(regex, cmdline):
225 cmdline += ' $parent1 $child'
229 cmdline += ' $parent1 $child'
226 cmdline = re.sub(regex, quote, cmdline)
230 cmdline = re.sub(regex, quote, cmdline)
227
231
228 ui.debug('running %r in %s\n' % (cmdline, tmproot))
232 ui.debug('running %r in %s\n' % (cmdline, tmproot))
229 ui.system(cmdline, cwd=tmproot)
233 ui.system(cmdline, cwd=tmproot)
230
234
231 for copy_fn, working_fn, mtime in fns_and_mtime:
235 for copy_fn, working_fn, mtime in fns_and_mtime:
232 if os.lstat(copy_fn).st_mtime != mtime:
236 if os.lstat(copy_fn).st_mtime != mtime:
233 ui.debug('file changed while diffing. '
237 ui.debug('file changed while diffing. '
234 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
238 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
235 util.copyfile(copy_fn, working_fn)
239 util.copyfile(copy_fn, working_fn)
236
240
237 return 1
241 return 1
238 finally:
242 finally:
239 ui.note(_('cleaning up temp directory\n'))
243 ui.note(_('cleaning up temp directory\n'))
240 shutil.rmtree(tmproot)
244 shutil.rmtree(tmproot)
241
245
242 @command('extdiff',
246 @command('extdiff',
243 [('p', 'program', '',
247 [('p', 'program', '',
244 _('comparison program to run'), _('CMD')),
248 _('comparison program to run'), _('CMD')),
245 ('o', 'option', [],
249 ('o', 'option', [],
246 _('pass option to comparison program'), _('OPT')),
250 _('pass option to comparison program'), _('OPT')),
247 ('r', 'rev', [], _('revision'), _('REV')),
251 ('r', 'rev', [], _('revision'), _('REV')),
248 ('c', 'change', '', _('change made by revision'), _('REV')),
252 ('c', 'change', '', _('change made by revision'), _('REV')),
249 ] + commands.walkopts,
253 ] + commands.walkopts,
250 _('hg extdiff [OPT]... [FILE]...'),
254 _('hg extdiff [OPT]... [FILE]...'),
251 inferrepo=True)
255 inferrepo=True)
252 def extdiff(ui, repo, *pats, **opts):
256 def extdiff(ui, repo, *pats, **opts):
253 '''use external program to diff repository (or selected files)
257 '''use external program to diff repository (or selected files)
254
258
255 Show differences between revisions for the specified files, using
259 Show differences between revisions for the specified files, using
256 an external program. The default program used is diff, with
260 an external program. The default program used is diff, with
257 default options "-Npru".
261 default options "-Npru".
258
262
259 To select a different program, use the -p/--program option. The
263 To select a different program, use the -p/--program option. The
260 program will be passed the names of two directories to compare. To
264 program will be passed the names of two directories to compare. To
261 pass additional options to the program, use -o/--option. These
265 pass additional options to the program, use -o/--option. These
262 will be passed before the names of the directories to compare.
266 will be passed before the names of the directories to compare.
263
267
264 When two revision arguments are given, then changes are shown
268 When two revision arguments are given, then changes are shown
265 between those revisions. If only one revision is specified then
269 between those revisions. If only one revision is specified then
266 that revision is compared to the working directory, and, when no
270 that revision is compared to the working directory, and, when no
267 revisions are specified, the working directory files are compared
271 revisions are specified, the working directory files are compared
268 to its parent.'''
272 to its parent.'''
269 program = opts.get('program')
273 program = opts.get('program')
270 option = opts.get('option')
274 option = opts.get('option')
271 if not program:
275 if not program:
272 program = 'diff'
276 program = 'diff'
273 option = option or ['-Npru']
277 option = option or ['-Npru']
274 cmdline = ' '.join(map(util.shellquote, [program] + option))
278 cmdline = ' '.join(map(util.shellquote, [program] + option))
275 return dodiff(ui, repo, cmdline, pats, opts)
279 return dodiff(ui, repo, cmdline, pats, opts)
276
280
277 def uisetup(ui):
281 def uisetup(ui):
278 for cmd, path in ui.configitems('extdiff'):
282 for cmd, path in ui.configitems('extdiff'):
279 path = util.expandpath(path)
283 path = util.expandpath(path)
280 if cmd.startswith('cmd.'):
284 if cmd.startswith('cmd.'):
281 cmd = cmd[4:]
285 cmd = cmd[4:]
282 if not path:
286 if not path:
283 path = util.findexe(cmd)
287 path = util.findexe(cmd)
284 if path is None:
288 if path is None:
285 path = filemerge.findexternaltool(ui, cmd) or cmd
289 path = filemerge.findexternaltool(ui, cmd) or cmd
286 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
290 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
287 cmdline = util.shellquote(path)
291 cmdline = util.shellquote(path)
288 if diffopts:
292 if diffopts:
289 cmdline += ' ' + diffopts
293 cmdline += ' ' + diffopts
290 elif cmd.startswith('opts.'):
294 elif cmd.startswith('opts.'):
291 continue
295 continue
292 else:
296 else:
293 if path:
297 if path:
294 # case "cmd = path opts"
298 # case "cmd = path opts"
295 cmdline = path
299 cmdline = path
296 diffopts = len(shlex.split(cmdline)) > 1
300 diffopts = len(shlex.split(cmdline)) > 1
297 else:
301 else:
298 # case "cmd ="
302 # case "cmd ="
299 path = util.findexe(cmd)
303 path = util.findexe(cmd)
300 if path is None:
304 if path is None:
301 path = filemerge.findexternaltool(ui, cmd) or cmd
305 path = filemerge.findexternaltool(ui, cmd) or cmd
302 cmdline = util.shellquote(path)
306 cmdline = util.shellquote(path)
303 diffopts = False
307 diffopts = False
304 # look for diff arguments in [diff-tools] then [merge-tools]
308 # look for diff arguments in [diff-tools] then [merge-tools]
305 if not diffopts:
309 if not diffopts:
306 args = ui.config('diff-tools', cmd+'.diffargs') or \
310 args = ui.config('diff-tools', cmd+'.diffargs') or \
307 ui.config('merge-tools', cmd+'.diffargs')
311 ui.config('merge-tools', cmd+'.diffargs')
308 if args:
312 if args:
309 cmdline += ' ' + args
313 cmdline += ' ' + args
310 def save(cmdline):
314 def save(cmdline):
311 '''use closure to save diff command to use'''
315 '''use closure to save diff command to use'''
312 def mydiff(ui, repo, *pats, **opts):
316 def mydiff(ui, repo, *pats, **opts):
313 options = ' '.join(map(util.shellquote, opts['option']))
317 options = ' '.join(map(util.shellquote, opts['option']))
314 if options:
318 if options:
315 options = ' ' + options
319 options = ' ' + options
316 return dodiff(ui, repo, cmdline + options, pats, opts)
320 return dodiff(ui, repo, cmdline + options, pats, opts)
317 doc = _('''\
321 doc = _('''\
318 use %(path)s to diff repository (or selected files)
322 use %(path)s to diff repository (or selected files)
319
323
320 Show differences between revisions for the specified files, using
324 Show differences between revisions for the specified files, using
321 the %(path)s program.
325 the %(path)s program.
322
326
323 When two revision arguments are given, then changes are shown
327 When two revision arguments are given, then changes are shown
324 between those revisions. If only one revision is specified then
328 between those revisions. If only one revision is specified then
325 that revision is compared to the working directory, and, when no
329 that revision is compared to the working directory, and, when no
326 revisions are specified, the working directory files are compared
330 revisions are specified, the working directory files are compared
327 to its parent.\
331 to its parent.\
328 ''') % {'path': util.uirepr(path)}
332 ''') % {'path': util.uirepr(path)}
329
333
330 # We must translate the docstring right away since it is
334 # We must translate the docstring right away since it is
331 # used as a format string. The string will unfortunately
335 # used as a format string. The string will unfortunately
332 # be translated again in commands.helpcmd and this will
336 # be translated again in commands.helpcmd and this will
333 # fail when the docstring contains non-ASCII characters.
337 # fail when the docstring contains non-ASCII characters.
334 # Decoding the string to a Unicode string here (using the
338 # Decoding the string to a Unicode string here (using the
335 # right encoding) prevents that.
339 # right encoding) prevents that.
336 mydiff.__doc__ = doc.decode(encoding.encoding)
340 mydiff.__doc__ = doc.decode(encoding.encoding)
337 return mydiff
341 return mydiff
338 cmdtable[cmd] = (save(cmdline),
342 cmdtable[cmd] = (save(cmdline),
339 cmdtable['extdiff'][1][1:],
343 cmdtable['extdiff'][1][1:],
340 _('hg %s [OPTION]... [FILE]...') % cmd)
344 _('hg %s [OPTION]... [FILE]...') % cmd)
@@ -1,150 +1,154 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial.node import short
11 from mercurial.node import short
12 from mercurial import commands, cmdutil, hg, util, error
12 from mercurial import commands, cmdutil, hg, util, error
13 from mercurial.lock import release
13 from mercurial.lock import release
14 from mercurial import exchange
14 from mercurial import exchange
15
15
16 cmdtable = {}
16 cmdtable = {}
17 command = cmdutil.command(cmdtable)
17 command = cmdutil.command(cmdtable)
18 # Note for extension authors: ONLY specify testedwith = 'internal' for
19 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
20 # be specifying the version(s) of Mercurial they are tested with, or
21 # leave the attribute unspecified.
18 testedwith = 'internal'
22 testedwith = 'internal'
19
23
20 @command('fetch',
24 @command('fetch',
21 [('r', 'rev', [],
25 [('r', 'rev', [],
22 _('a specific revision you would like to pull'), _('REV')),
26 _('a specific revision you would like to pull'), _('REV')),
23 ('e', 'edit', None, _('invoke editor on commit messages')),
27 ('e', 'edit', None, _('invoke editor on commit messages')),
24 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
28 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
25 ('', 'switch-parent', None, _('switch parents when merging')),
29 ('', 'switch-parent', None, _('switch parents when merging')),
26 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
30 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
27 _('hg fetch [SOURCE]'))
31 _('hg fetch [SOURCE]'))
28 def fetch(ui, repo, source='default', **opts):
32 def fetch(ui, repo, source='default', **opts):
29 '''pull changes from a remote repository, merge new changes if needed.
33 '''pull changes from a remote repository, merge new changes if needed.
30
34
31 This finds all changes from the repository at the specified path
35 This finds all changes from the repository at the specified path
32 or URL and adds them to the local repository.
36 or URL and adds them to the local repository.
33
37
34 If the pulled changes add a new branch head, the head is
38 If the pulled changes add a new branch head, the head is
35 automatically merged, and the result of the merge is committed.
39 automatically merged, and the result of the merge is committed.
36 Otherwise, the working directory is updated to include the new
40 Otherwise, the working directory is updated to include the new
37 changes.
41 changes.
38
42
39 When a merge is needed, the working directory is first updated to
43 When a merge is needed, the working directory is first updated to
40 the newly pulled changes. Local changes are then merged into the
44 the newly pulled changes. Local changes are then merged into the
41 pulled changes. To switch the merge order, use --switch-parent.
45 pulled changes. To switch the merge order, use --switch-parent.
42
46
43 See :hg:`help dates` for a list of formats valid for -d/--date.
47 See :hg:`help dates` for a list of formats valid for -d/--date.
44
48
45 Returns 0 on success.
49 Returns 0 on success.
46 '''
50 '''
47
51
48 date = opts.get('date')
52 date = opts.get('date')
49 if date:
53 if date:
50 opts['date'] = util.parsedate(date)
54 opts['date'] = util.parsedate(date)
51
55
52 parent, _p2 = repo.dirstate.parents()
56 parent, _p2 = repo.dirstate.parents()
53 branch = repo.dirstate.branch()
57 branch = repo.dirstate.branch()
54 try:
58 try:
55 branchnode = repo.branchtip(branch)
59 branchnode = repo.branchtip(branch)
56 except error.RepoLookupError:
60 except error.RepoLookupError:
57 branchnode = None
61 branchnode = None
58 if parent != branchnode:
62 if parent != branchnode:
59 raise util.Abort(_('working directory not at branch tip'),
63 raise util.Abort(_('working directory not at branch tip'),
60 hint=_('use "hg update" to check out branch tip'))
64 hint=_('use "hg update" to check out branch tip'))
61
65
62 wlock = lock = None
66 wlock = lock = None
63 try:
67 try:
64 wlock = repo.wlock()
68 wlock = repo.wlock()
65 lock = repo.lock()
69 lock = repo.lock()
66
70
67 cmdutil.bailifchanged(repo)
71 cmdutil.bailifchanged(repo)
68
72
69 bheads = repo.branchheads(branch)
73 bheads = repo.branchheads(branch)
70 bheads = [head for head in bheads if len(repo[head].children()) == 0]
74 bheads = [head for head in bheads if len(repo[head].children()) == 0]
71 if len(bheads) > 1:
75 if len(bheads) > 1:
72 raise util.Abort(_('multiple heads in this branch '
76 raise util.Abort(_('multiple heads in this branch '
73 '(use "hg heads ." and "hg merge" to merge)'))
77 '(use "hg heads ." and "hg merge" to merge)'))
74
78
75 other = hg.peer(repo, opts, ui.expandpath(source))
79 other = hg.peer(repo, opts, ui.expandpath(source))
76 ui.status(_('pulling from %s\n') %
80 ui.status(_('pulling from %s\n') %
77 util.hidepassword(ui.expandpath(source)))
81 util.hidepassword(ui.expandpath(source)))
78 revs = None
82 revs = None
79 if opts['rev']:
83 if opts['rev']:
80 try:
84 try:
81 revs = [other.lookup(rev) for rev in opts['rev']]
85 revs = [other.lookup(rev) for rev in opts['rev']]
82 except error.CapabilityError:
86 except error.CapabilityError:
83 err = _("other repository doesn't support revision lookup, "
87 err = _("other repository doesn't support revision lookup, "
84 "so a rev cannot be specified.")
88 "so a rev cannot be specified.")
85 raise util.Abort(err)
89 raise util.Abort(err)
86
90
87 # Are there any changes at all?
91 # Are there any changes at all?
88 modheads = exchange.pull(repo, other, heads=revs).cgresult
92 modheads = exchange.pull(repo, other, heads=revs).cgresult
89 if modheads == 0:
93 if modheads == 0:
90 return 0
94 return 0
91
95
92 # Is this a simple fast-forward along the current branch?
96 # Is this a simple fast-forward along the current branch?
93 newheads = repo.branchheads(branch)
97 newheads = repo.branchheads(branch)
94 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
98 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
95 if len(newheads) == 1 and len(newchildren):
99 if len(newheads) == 1 and len(newchildren):
96 if newchildren[0] != parent:
100 if newchildren[0] != parent:
97 return hg.update(repo, newchildren[0])
101 return hg.update(repo, newchildren[0])
98 else:
102 else:
99 return 0
103 return 0
100
104
101 # Are there more than one additional branch heads?
105 # Are there more than one additional branch heads?
102 newchildren = [n for n in newchildren if n != parent]
106 newchildren = [n for n in newchildren if n != parent]
103 newparent = parent
107 newparent = parent
104 if newchildren:
108 if newchildren:
105 newparent = newchildren[0]
109 newparent = newchildren[0]
106 hg.clean(repo, newparent)
110 hg.clean(repo, newparent)
107 newheads = [n for n in newheads if n != newparent]
111 newheads = [n for n in newheads if n != newparent]
108 if len(newheads) > 1:
112 if len(newheads) > 1:
109 ui.status(_('not merging with %d other new branch heads '
113 ui.status(_('not merging with %d other new branch heads '
110 '(use "hg heads ." and "hg merge" to merge them)\n') %
114 '(use "hg heads ." and "hg merge" to merge them)\n') %
111 (len(newheads) - 1))
115 (len(newheads) - 1))
112 return 1
116 return 1
113
117
114 if not newheads:
118 if not newheads:
115 return 0
119 return 0
116
120
117 # Otherwise, let's merge.
121 # Otherwise, let's merge.
118 err = False
122 err = False
119 if newheads:
123 if newheads:
120 # By default, we consider the repository we're pulling
124 # By default, we consider the repository we're pulling
121 # *from* as authoritative, so we merge our changes into
125 # *from* as authoritative, so we merge our changes into
122 # theirs.
126 # theirs.
123 if opts['switch_parent']:
127 if opts['switch_parent']:
124 firstparent, secondparent = newparent, newheads[0]
128 firstparent, secondparent = newparent, newheads[0]
125 else:
129 else:
126 firstparent, secondparent = newheads[0], newparent
130 firstparent, secondparent = newheads[0], newparent
127 ui.status(_('updating to %d:%s\n') %
131 ui.status(_('updating to %d:%s\n') %
128 (repo.changelog.rev(firstparent),
132 (repo.changelog.rev(firstparent),
129 short(firstparent)))
133 short(firstparent)))
130 hg.clean(repo, firstparent)
134 hg.clean(repo, firstparent)
131 ui.status(_('merging with %d:%s\n') %
135 ui.status(_('merging with %d:%s\n') %
132 (repo.changelog.rev(secondparent), short(secondparent)))
136 (repo.changelog.rev(secondparent), short(secondparent)))
133 err = hg.merge(repo, secondparent, remind=False)
137 err = hg.merge(repo, secondparent, remind=False)
134
138
135 if not err:
139 if not err:
136 # we don't translate commit messages
140 # we don't translate commit messages
137 message = (cmdutil.logmessage(ui, opts) or
141 message = (cmdutil.logmessage(ui, opts) or
138 ('Automated merge with %s' %
142 ('Automated merge with %s' %
139 util.removeauth(other.url())))
143 util.removeauth(other.url())))
140 editopt = opts.get('edit') or opts.get('force_editor')
144 editopt = opts.get('edit') or opts.get('force_editor')
141 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
145 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
142 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
146 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
143 ui.status(_('new changeset %d:%s merges remote changes '
147 ui.status(_('new changeset %d:%s merges remote changes '
144 'with local\n') % (repo.changelog.rev(n),
148 'with local\n') % (repo.changelog.rev(n),
145 short(n)))
149 short(n)))
146
150
147 return err
151 return err
148
152
149 finally:
153 finally:
150 release(lock, wlock)
154 release(lock, wlock)
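
Every file touched by this changeset gains the same four-line note above `testedwith = 'internal'`: only extensions shipped with Mercurial should use that value. For an extension maintained outside the tree, the attribute should instead list the Mercurial versions actually tested, or be left unset. A hypothetical out-of-tree extension header, assuming the same pre-3.4 extension API used throughout this diff (the extension name, command and version string are made up for illustration):

    # myextension.py - hypothetical third-party extension (illustrative only)
    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdtable = {}
    command = cmdutil.command(cmdtable)
    # Not shipped with Mercurial, so do NOT use 'internal'; list the versions
    # this extension was actually tested against (or leave testedwith unset).
    testedwith = '3.3 3.4'

    @command('hello', [], _('hg hello'))
    def hello(ui, repo):
        """print a friendly greeting (demo command)"""
        ui.write("hello from myextension\n")
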
@@ -1,297 +1,301 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match, cmdutil
9 from mercurial import util, commands, match, cmdutil
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 cmdtable = {}
13 cmdtable = {}
14 command = cmdutil.command(cmdtable)
14 command = cmdutil.command(cmdtable)
15 # Note for extension authors: ONLY specify testedwith = 'internal' for
16 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
17 # be specifying the version(s) of Mercurial they are tested with, or
18 # leave the attribute unspecified.
15 testedwith = 'internal'
19 testedwith = 'internal'
16
20
17 class gpg(object):
21 class gpg(object):
18 def __init__(self, path, key=None):
22 def __init__(self, path, key=None):
19 self.path = path
23 self.path = path
20 self.key = (key and " --local-user \"%s\"" % key) or ""
24 self.key = (key and " --local-user \"%s\"" % key) or ""
21
25
22 def sign(self, data):
26 def sign(self, data):
23 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
27 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
24 return util.filter(data, gpgcmd)
28 return util.filter(data, gpgcmd)
25
29
26 def verify(self, data, sig):
30 def verify(self, data, sig):
27 """return the good and bad signatures"""
31 """return the good and bad signatures"""
28 sigfile = datafile = None
32 sigfile = datafile = None
29 try:
33 try:
30 # create temporary files
34 # create temporary files
31 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
35 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
32 fp = os.fdopen(fd, 'wb')
36 fp = os.fdopen(fd, 'wb')
33 fp.write(sig)
37 fp.write(sig)
34 fp.close()
38 fp.close()
35 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
39 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
36 fp = os.fdopen(fd, 'wb')
40 fp = os.fdopen(fd, 'wb')
37 fp.write(data)
41 fp.write(data)
38 fp.close()
42 fp.close()
39 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
43 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
40 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
44 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
41 ret = util.filter("", gpgcmd)
45 ret = util.filter("", gpgcmd)
42 finally:
46 finally:
43 for f in (sigfile, datafile):
47 for f in (sigfile, datafile):
44 try:
48 try:
45 if f:
49 if f:
46 os.unlink(f)
50 os.unlink(f)
47 except OSError:
51 except OSError:
48 pass
52 pass
49 keys = []
53 keys = []
50 key, fingerprint = None, None
54 key, fingerprint = None, None
51 for l in ret.splitlines():
55 for l in ret.splitlines():
52 # see DETAILS in the gnupg documentation
56 # see DETAILS in the gnupg documentation
53 # filter the logger output
57 # filter the logger output
54 if not l.startswith("[GNUPG:]"):
58 if not l.startswith("[GNUPG:]"):
55 continue
59 continue
56 l = l[9:]
60 l = l[9:]
57 if l.startswith("VALIDSIG"):
61 if l.startswith("VALIDSIG"):
58 # fingerprint of the primary key
62 # fingerprint of the primary key
59 fingerprint = l.split()[10]
63 fingerprint = l.split()[10]
60 elif l.startswith("ERRSIG"):
64 elif l.startswith("ERRSIG"):
61 key = l.split(" ", 3)[:2]
65 key = l.split(" ", 3)[:2]
62 key.append("")
66 key.append("")
63 fingerprint = None
67 fingerprint = None
64 elif (l.startswith("GOODSIG") or
68 elif (l.startswith("GOODSIG") or
65 l.startswith("EXPSIG") or
69 l.startswith("EXPSIG") or
66 l.startswith("EXPKEYSIG") or
70 l.startswith("EXPKEYSIG") or
67 l.startswith("BADSIG")):
71 l.startswith("BADSIG")):
68 if key is not None:
72 if key is not None:
69 keys.append(key + [fingerprint])
73 keys.append(key + [fingerprint])
70 key = l.split(" ", 2)
74 key = l.split(" ", 2)
71 fingerprint = None
75 fingerprint = None
72 if key is not None:
76 if key is not None:
73 keys.append(key + [fingerprint])
77 keys.append(key + [fingerprint])
74 return keys
78 return keys
75
79
76 def newgpg(ui, **opts):
80 def newgpg(ui, **opts):
77 """create a new gpg instance"""
81 """create a new gpg instance"""
78 gpgpath = ui.config("gpg", "cmd", "gpg")
82 gpgpath = ui.config("gpg", "cmd", "gpg")
79 gpgkey = opts.get('key')
83 gpgkey = opts.get('key')
80 if not gpgkey:
84 if not gpgkey:
81 gpgkey = ui.config("gpg", "key", None)
85 gpgkey = ui.config("gpg", "key", None)
82 return gpg(gpgpath, gpgkey)
86 return gpg(gpgpath, gpgkey)
83
87
84 def sigwalk(repo):
88 def sigwalk(repo):
85 """
89 """
86 walk over every signature, yielding a pair
90 walk over every signature, yielding a pair
87 ((node, version, sig), (filename, linenumber))
91 ((node, version, sig), (filename, linenumber))
88 """
92 """
89 def parsefile(fileiter, context):
93 def parsefile(fileiter, context):
90 ln = 1
94 ln = 1
91 for l in fileiter:
95 for l in fileiter:
92 if not l:
96 if not l:
93 continue
97 continue
94 yield (l.split(" ", 2), (context, ln))
98 yield (l.split(" ", 2), (context, ln))
95 ln += 1
99 ln += 1
96
100
97 # read the heads
101 # read the heads
98 fl = repo.file(".hgsigs")
102 fl = repo.file(".hgsigs")
99 for r in reversed(fl.heads()):
103 for r in reversed(fl.heads()):
100 fn = ".hgsigs|%s" % hgnode.short(r)
104 fn = ".hgsigs|%s" % hgnode.short(r)
101 for item in parsefile(fl.read(r).splitlines(), fn):
105 for item in parsefile(fl.read(r).splitlines(), fn):
102 yield item
106 yield item
103 try:
107 try:
104 # read local signatures
108 # read local signatures
105 fn = "localsigs"
109 fn = "localsigs"
106 for item in parsefile(repo.vfs(fn), fn):
110 for item in parsefile(repo.vfs(fn), fn):
107 yield item
111 yield item
108 except IOError:
112 except IOError:
109 pass
113 pass
110
114
111 def getkeys(ui, repo, mygpg, sigdata, context):
115 def getkeys(ui, repo, mygpg, sigdata, context):
112 """get the keys that signed the data"""
116 """get the keys that signed the data"""
113 fn, ln = context
117 fn, ln = context
114 node, version, sig = sigdata
118 node, version, sig = sigdata
115 prefix = "%s:%d" % (fn, ln)
119 prefix = "%s:%d" % (fn, ln)
116 node = hgnode.bin(node)
120 node = hgnode.bin(node)
117
121
118 data = node2txt(repo, node, version)
122 data = node2txt(repo, node, version)
119 sig = binascii.a2b_base64(sig)
123 sig = binascii.a2b_base64(sig)
120 keys = mygpg.verify(data, sig)
124 keys = mygpg.verify(data, sig)
121
125
122 validkeys = []
126 validkeys = []
123 # warn for expired key and/or sigs
127 # warn for expired key and/or sigs
124 for key in keys:
128 for key in keys:
125 if key[0] == "ERRSIG":
129 if key[0] == "ERRSIG":
126 ui.write(_("%s Unknown key ID \"%s\"\n")
130 ui.write(_("%s Unknown key ID \"%s\"\n")
127 % (prefix, shortkey(ui, key[1][:15])))
131 % (prefix, shortkey(ui, key[1][:15])))
128 continue
132 continue
129 if key[0] == "BADSIG":
133 if key[0] == "BADSIG":
130 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
134 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
131 continue
135 continue
132 if key[0] == "EXPSIG":
136 if key[0] == "EXPSIG":
133 ui.write(_("%s Note: Signature has expired"
137 ui.write(_("%s Note: Signature has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
138 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 elif key[0] == "EXPKEYSIG":
139 elif key[0] == "EXPKEYSIG":
136 ui.write(_("%s Note: This key has expired"
140 ui.write(_("%s Note: This key has expired"
137 " (signed by: \"%s\")\n") % (prefix, key[2]))
141 " (signed by: \"%s\")\n") % (prefix, key[2]))
138 validkeys.append((key[1], key[2], key[3]))
142 validkeys.append((key[1], key[2], key[3]))
139 return validkeys
143 return validkeys
140
144
141 @command("sigs", [], _('hg sigs'))
145 @command("sigs", [], _('hg sigs'))
142 def sigs(ui, repo):
146 def sigs(ui, repo):
143 """list signed changesets"""
147 """list signed changesets"""
144 mygpg = newgpg(ui)
148 mygpg = newgpg(ui)
145 revs = {}
149 revs = {}
146
150
147 for data, context in sigwalk(repo):
151 for data, context in sigwalk(repo):
148 node, version, sig = data
152 node, version, sig = data
149 fn, ln = context
153 fn, ln = context
150 try:
154 try:
151 n = repo.lookup(node)
155 n = repo.lookup(node)
152 except KeyError:
156 except KeyError:
153 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
157 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
154 continue
158 continue
155 r = repo.changelog.rev(n)
159 r = repo.changelog.rev(n)
156 keys = getkeys(ui, repo, mygpg, data, context)
160 keys = getkeys(ui, repo, mygpg, data, context)
157 if not keys:
161 if not keys:
158 continue
162 continue
159 revs.setdefault(r, [])
163 revs.setdefault(r, [])
160 revs[r].extend(keys)
164 revs[r].extend(keys)
161 for rev in sorted(revs, reverse=True):
165 for rev in sorted(revs, reverse=True):
162 for k in revs[rev]:
166 for k in revs[rev]:
163 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
167 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
164 ui.write("%-30s %s\n" % (keystr(ui, k), r))
168 ui.write("%-30s %s\n" % (keystr(ui, k), r))
165
169
166 @command("sigcheck", [], _('hg sigcheck REV'))
170 @command("sigcheck", [], _('hg sigcheck REV'))
167 def check(ui, repo, rev):
171 def check(ui, repo, rev):
168 """verify all the signatures there may be for a particular revision"""
172 """verify all the signatures there may be for a particular revision"""
169 mygpg = newgpg(ui)
173 mygpg = newgpg(ui)
170 rev = repo.lookup(rev)
174 rev = repo.lookup(rev)
171 hexrev = hgnode.hex(rev)
175 hexrev = hgnode.hex(rev)
172 keys = []
176 keys = []
173
177
174 for data, context in sigwalk(repo):
178 for data, context in sigwalk(repo):
175 node, version, sig = data
179 node, version, sig = data
176 if node == hexrev:
180 if node == hexrev:
177 k = getkeys(ui, repo, mygpg, data, context)
181 k = getkeys(ui, repo, mygpg, data, context)
178 if k:
182 if k:
179 keys.extend(k)
183 keys.extend(k)
180
184
181 if not keys:
185 if not keys:
182 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
186 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
183 return
187 return
184
188
185 # print summary
189 # print summary
186 ui.write("%s is signed by:\n" % hgnode.short(rev))
190 ui.write("%s is signed by:\n" % hgnode.short(rev))
187 for key in keys:
191 for key in keys:
188 ui.write(" %s\n" % keystr(ui, key))
192 ui.write(" %s\n" % keystr(ui, key))
189
193
190 def keystr(ui, key):
194 def keystr(ui, key):
191 """associate a string to a key (username, comment)"""
195 """associate a string to a key (username, comment)"""
192 keyid, user, fingerprint = key
196 keyid, user, fingerprint = key
193 comment = ui.config("gpg", fingerprint, None)
197 comment = ui.config("gpg", fingerprint, None)
194 if comment:
198 if comment:
195 return "%s (%s)" % (user, comment)
199 return "%s (%s)" % (user, comment)
196 else:
200 else:
197 return user
201 return user
198
202
199 @command("sign",
203 @command("sign",
200 [('l', 'local', None, _('make the signature local')),
204 [('l', 'local', None, _('make the signature local')),
201 ('f', 'force', None, _('sign even if the sigfile is modified')),
205 ('f', 'force', None, _('sign even if the sigfile is modified')),
202 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
206 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
203 ('k', 'key', '',
207 ('k', 'key', '',
204 _('the key id to sign with'), _('ID')),
208 _('the key id to sign with'), _('ID')),
205 ('m', 'message', '',
209 ('m', 'message', '',
206 _('use text as commit message'), _('TEXT')),
210 _('use text as commit message'), _('TEXT')),
207 ('e', 'edit', False, _('invoke editor on commit messages')),
211 ('e', 'edit', False, _('invoke editor on commit messages')),
208 ] + commands.commitopts2,
212 ] + commands.commitopts2,
209 _('hg sign [OPTION]... [REV]...'))
213 _('hg sign [OPTION]... [REV]...'))
210 def sign(ui, repo, *revs, **opts):
214 def sign(ui, repo, *revs, **opts):
211 """add a signature for the current or given revision
215 """add a signature for the current or given revision
212
216
213 If no revision is given, the parent of the working directory is used,
217 If no revision is given, the parent of the working directory is used,
214 or tip if no revision is checked out.
218 or tip if no revision is checked out.
215
219
216 See :hg:`help dates` for a list of formats valid for -d/--date.
220 See :hg:`help dates` for a list of formats valid for -d/--date.
217 """
221 """
218
222
219 mygpg = newgpg(ui, **opts)
223 mygpg = newgpg(ui, **opts)
220 sigver = "0"
224 sigver = "0"
221 sigmessage = ""
225 sigmessage = ""
222
226
223 date = opts.get('date')
227 date = opts.get('date')
224 if date:
228 if date:
225 opts['date'] = util.parsedate(date)
229 opts['date'] = util.parsedate(date)
226
230
227 if revs:
231 if revs:
228 nodes = [repo.lookup(n) for n in revs]
232 nodes = [repo.lookup(n) for n in revs]
229 else:
233 else:
230 nodes = [node for node in repo.dirstate.parents()
234 nodes = [node for node in repo.dirstate.parents()
231 if node != hgnode.nullid]
235 if node != hgnode.nullid]
232 if len(nodes) > 1:
236 if len(nodes) > 1:
233 raise util.Abort(_('uncommitted merge - please provide a '
237 raise util.Abort(_('uncommitted merge - please provide a '
234 'specific revision'))
238 'specific revision'))
235 if not nodes:
239 if not nodes:
236 nodes = [repo.changelog.tip()]
240 nodes = [repo.changelog.tip()]
237
241
238 for n in nodes:
242 for n in nodes:
239 hexnode = hgnode.hex(n)
243 hexnode = hgnode.hex(n)
240 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
244 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
241 hgnode.short(n)))
245 hgnode.short(n)))
242 # build data
246 # build data
243 data = node2txt(repo, n, sigver)
247 data = node2txt(repo, n, sigver)
244 sig = mygpg.sign(data)
248 sig = mygpg.sign(data)
245 if not sig:
249 if not sig:
246 raise util.Abort(_("error while signing"))
250 raise util.Abort(_("error while signing"))
247 sig = binascii.b2a_base64(sig)
251 sig = binascii.b2a_base64(sig)
248 sig = sig.replace("\n", "")
252 sig = sig.replace("\n", "")
249 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
253 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
250
254
251 # write it
255 # write it
252 if opts['local']:
256 if opts['local']:
253 repo.vfs.append("localsigs", sigmessage)
257 repo.vfs.append("localsigs", sigmessage)
254 return
258 return
255
259
256 if not opts["force"]:
260 if not opts["force"]:
257 msigs = match.exact(repo.root, '', ['.hgsigs'])
261 msigs = match.exact(repo.root, '', ['.hgsigs'])
258 if any(repo.status(match=msigs, unknown=True, ignored=True)):
262 if any(repo.status(match=msigs, unknown=True, ignored=True)):
259 raise util.Abort(_("working copy of .hgsigs is changed "),
263 raise util.Abort(_("working copy of .hgsigs is changed "),
260 hint=_("please commit .hgsigs manually"))
264 hint=_("please commit .hgsigs manually"))
261
265
262 sigsfile = repo.wfile(".hgsigs", "ab")
266 sigsfile = repo.wfile(".hgsigs", "ab")
263 sigsfile.write(sigmessage)
267 sigsfile.write(sigmessage)
264 sigsfile.close()
268 sigsfile.close()
265
269
266 if '.hgsigs' not in repo.dirstate:
270 if '.hgsigs' not in repo.dirstate:
267 repo[None].add([".hgsigs"])
271 repo[None].add([".hgsigs"])
268
272
269 if opts["no_commit"]:
273 if opts["no_commit"]:
270 return
274 return
271
275
272 message = opts['message']
276 message = opts['message']
273 if not message:
277 if not message:
274 # we don't translate commit messages
278 # we don't translate commit messages
275 message = "\n".join(["Added signature for changeset %s"
279 message = "\n".join(["Added signature for changeset %s"
276 % hgnode.short(n)
280 % hgnode.short(n)
277 for n in nodes])
281 for n in nodes])
278 try:
282 try:
279 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
283 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
280 repo.commit(message, opts['user'], opts['date'], match=msigs,
284 repo.commit(message, opts['user'], opts['date'], match=msigs,
281 editor=editor)
285 editor=editor)
282 except ValueError, inst:
286 except ValueError, inst:
283 raise util.Abort(str(inst))
287 raise util.Abort(str(inst))
284
288
285 def shortkey(ui, key):
289 def shortkey(ui, key):
286 if len(key) != 16:
290 if len(key) != 16:
287 ui.debug("key ID \"%s\" format error\n" % key)
291 ui.debug("key ID \"%s\" format error\n" % key)
288 return key
292 return key
289
293
290 return key[-8:]
294 return key[-8:]
291
295
292 def node2txt(repo, node, ver):
296 def node2txt(repo, node, ver):
293 """map a manifest into some text"""
297 """map a manifest into some text"""
294 if ver == "0":
298 if ver == "0":
295 return "%s\n" % hgnode.hex(node)
299 return "%s\n" % hgnode.hex(node)
296 else:
300 else:
297 raise util.Abort(_("unknown signature version"))
301 raise util.Abort(_("unknown signature version"))
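
gpg.py above stores one signature per line, in the shape `<hex node> <version> <base64 signature>`, in `.hgsigs` (or `localsigs`); both sigwalk() and sign() rely on that shape via `l.split(" ", 2)`. A small standalone sketch of reading that format — the sample line below is fabricated, not a real signature:

    import binascii

    def parsesigline(line):
        """split one .hgsigs line into (hex node, version, raw signature bytes)"""
        node, version, sig = line.split(" ", 2)
        return node, version, binascii.a2b_base64(sig)

    # fabricated line with the same shape sign() appends above (not a real signature)
    fakesig = binascii.b2a_base64(b"not-a-real-signature").decode().replace("\n", "")
    sample = "a" * 40 + " 0 " + fakesig
    print(parsesigline(sample))
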
@@ -1,58 +1,62 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell (DEPRECATED)
8 '''command to view revision graphs from a shell (DEPRECATED)
9
9
10 The functionality of this extension has been included in core Mercurial
10 The functionality of this extension has been included in core Mercurial
11 since version 2.3.
11 since version 2.3.
12
12
13 This extension adds a --graph option to the incoming, outgoing and log
13 This extension adds a --graph option to the incoming, outgoing and log
14 commands. When this option is given, an ASCII representation of the
14 commands. When this option is given, an ASCII representation of the
15 revision graph is also shown.
15 revision graph is also shown.
16 '''
16 '''
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial import cmdutil, commands
19 from mercurial import cmdutil, commands
20
20
21 cmdtable = {}
21 cmdtable = {}
22 command = cmdutil.command(cmdtable)
22 command = cmdutil.command(cmdtable)
23 # Note for extension authors: ONLY specify testedwith = 'internal' for
24 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
25 # be specifying the version(s) of Mercurial they are tested with, or
26 # leave the attribute unspecified.
23 testedwith = 'internal'
27 testedwith = 'internal'
24
28
25 @command('glog',
29 @command('glog',
26 [('f', 'follow', None,
30 [('f', 'follow', None,
27 _('follow changeset history, or file history across copies and renames')),
31 _('follow changeset history, or file history across copies and renames')),
28 ('', 'follow-first', None,
32 ('', 'follow-first', None,
29 _('only follow the first parent of merge changesets (DEPRECATED)')),
33 _('only follow the first parent of merge changesets (DEPRECATED)')),
30 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
34 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
31 ('C', 'copies', None, _('show copied files')),
35 ('C', 'copies', None, _('show copied files')),
32 ('k', 'keyword', [],
36 ('k', 'keyword', [],
33 _('do case-insensitive search for a given text'), _('TEXT')),
37 _('do case-insensitive search for a given text'), _('TEXT')),
34 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
38 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
35 ('', 'removed', None, _('include revisions where files were removed')),
39 ('', 'removed', None, _('include revisions where files were removed')),
36 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
40 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
37 ('u', 'user', [], _('revisions committed by user'), _('USER')),
41 ('u', 'user', [], _('revisions committed by user'), _('USER')),
38 ('', 'only-branch', [],
42 ('', 'only-branch', [],
39 _('show only changesets within the given named branch (DEPRECATED)'),
43 _('show only changesets within the given named branch (DEPRECATED)'),
40 _('BRANCH')),
44 _('BRANCH')),
41 ('b', 'branch', [],
45 ('b', 'branch', [],
42 _('show changesets within the given named branch'), _('BRANCH')),
46 _('show changesets within the given named branch'), _('BRANCH')),
43 ('P', 'prune', [],
47 ('P', 'prune', [],
44 _('do not display revision or any of its ancestors'), _('REV')),
48 _('do not display revision or any of its ancestors'), _('REV')),
45 ] + commands.logopts + commands.walkopts,
49 ] + commands.logopts + commands.walkopts,
46 _('[OPTION]... [FILE]'),
50 _('[OPTION]... [FILE]'),
47 inferrepo=True)
51 inferrepo=True)
48 def graphlog(ui, repo, *pats, **opts):
52 def graphlog(ui, repo, *pats, **opts):
49 """show revision history alongside an ASCII revision graph
53 """show revision history alongside an ASCII revision graph
50
54
51 Print a revision history alongside a revision graph drawn with
55 Print a revision history alongside a revision graph drawn with
52 ASCII characters.
56 ASCII characters.
53
57
54 Nodes printed as an @ character are parents of the working
58 Nodes printed as an @ character are parents of the working
55 directory.
59 directory.
56 """
60 """
57 opts['graph'] = True
61 opts['graph'] = True
58 return commands.log(ui, repo, *pats, **opts)
62 return commands.log(ui, repo, *pats, **opts)
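
The glog command above is now only a thin wrapper: it forces `opts['graph'] = True` and hands everything else to `commands.log`. A minimal sketch of that delegation pattern outside Mercurial — the fakeui class and both function bodies are invented for illustration:

    import sys

    def log(ui, *pats, **opts):
        # stand-in for commands.log: just report what it was asked to do
        ui.write("log called with graph=%r pats=%r\n" % (opts.get('graph'), pats))
        return 0

    def graphlog(ui, *pats, **opts):
        """show history as a graph by delegating to the plain log command"""
        opts['graph'] = True           # the only thing this wrapper adds
        return log(ui, *pats, **opts)  # everything else passes through untouched

    class fakeui(object):
        def write(self, s):
            sys.stdout.write(s)

    graphlog(fakeui(), 'src/', rev=['tip'])
    # -> log called with graph=True pats=('src/',)
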
@@ -1,281 +1,285 b''
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """hooks for integrating with the CIA.vc notification service
6 """hooks for integrating with the CIA.vc notification service
7
7
8 This is meant to be run as a changegroup or incoming hook. To
8 This is meant to be run as a changegroup or incoming hook. To
9 configure it, set the following options in your hgrc::
9 configure it, set the following options in your hgrc::
10
10
11 [cia]
11 [cia]
12 # your registered CIA user name
12 # your registered CIA user name
13 user = foo
13 user = foo
14 # the name of the project in CIA
14 # the name of the project in CIA
15 project = foo
15 project = foo
16 # the module (subproject) (optional)
16 # the module (subproject) (optional)
17 #module = foo
17 #module = foo
18 # Append a diffstat to the log message (optional)
18 # Append a diffstat to the log message (optional)
19 #diffstat = False
19 #diffstat = False
20 # Template to use for log messages (optional)
20 # Template to use for log messages (optional)
21 #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
21 #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
22 # Style to use (optional)
22 # Style to use (optional)
23 #style = foo
23 #style = foo
24 # The URL of the CIA notification service (optional)
24 # The URL of the CIA notification service (optional)
25 # You can use mailto: URLs to send by email, e.g.
25 # You can use mailto: URLs to send by email, e.g.
26 # mailto:cia@cia.vc
26 # mailto:cia@cia.vc
27 # Make sure to set email.from if you do this.
27 # Make sure to set email.from if you do this.
28 #url = http://cia.vc/
28 #url = http://cia.vc/
29 # print message instead of sending it (optional)
29 # print message instead of sending it (optional)
30 #test = False
30 #test = False
31 # number of slashes to strip for url paths
31 # number of slashes to strip for url paths
32 #strip = 0
32 #strip = 0
33
33
34 [hooks]
34 [hooks]
35 # one of these:
35 # one of these:
36 changegroup.cia = python:hgcia.hook
36 changegroup.cia = python:hgcia.hook
37 #incoming.cia = python:hgcia.hook
37 #incoming.cia = python:hgcia.hook
38
38
39 [web]
39 [web]
40 # If you want hyperlinks (optional)
40 # If you want hyperlinks (optional)
41 baseurl = http://server/path/to/repo
41 baseurl = http://server/path/to/repo
42 """
42 """
43
43
44 from mercurial.i18n import _
44 from mercurial.i18n import _
45 from mercurial.node import bin, short
45 from mercurial.node import bin, short
46 from mercurial import cmdutil, patch, util, mail
46 from mercurial import cmdutil, patch, util, mail
47 import email.Parser
47 import email.Parser
48
48
49 import socket, xmlrpclib
49 import socket, xmlrpclib
50 from xml.sax import saxutils
50 from xml.sax import saxutils
51 # Note for extension authors: ONLY specify testedwith = 'internal' for
52 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
53 # be specifying the version(s) of Mercurial they are tested with, or
54 # leave the attribute unspecified.
51 testedwith = 'internal'
55 testedwith = 'internal'
52
56
53 socket_timeout = 30 # seconds
57 socket_timeout = 30 # seconds
54 if util.safehasattr(socket, 'setdefaulttimeout'):
58 if util.safehasattr(socket, 'setdefaulttimeout'):
55 # set a timeout for the socket so you don't have to wait so looooong
59 # set a timeout for the socket so you don't have to wait so looooong
56 # when cia.vc is having problems. requires python >= 2.3:
60 # when cia.vc is having problems. requires python >= 2.3:
57 socket.setdefaulttimeout(socket_timeout)
61 socket.setdefaulttimeout(socket_timeout)
58
62
59 HGCIA_VERSION = '0.1'
63 HGCIA_VERSION = '0.1'
60 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
64 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
61
65
62
66
63 class ciamsg(object):
67 class ciamsg(object):
64 """ A CIA message """
68 """ A CIA message """
65 def __init__(self, cia, ctx):
69 def __init__(self, cia, ctx):
66 self.cia = cia
70 self.cia = cia
67 self.ctx = ctx
71 self.ctx = ctx
68 self.url = self.cia.url
72 self.url = self.cia.url
69 if self.url:
73 if self.url:
70 self.url += self.cia.root
74 self.url += self.cia.root
71
75
72 def fileelem(self, path, uri, action):
76 def fileelem(self, path, uri, action):
73 if uri:
77 if uri:
74 uri = ' uri=%s' % saxutils.quoteattr(uri)
78 uri = ' uri=%s' % saxutils.quoteattr(uri)
75 return '<file%s action=%s>%s</file>' % (
79 return '<file%s action=%s>%s</file>' % (
76 uri, saxutils.quoteattr(action), saxutils.escape(path))
80 uri, saxutils.quoteattr(action), saxutils.escape(path))
77
81
78 def fileelems(self):
82 def fileelems(self):
79 n = self.ctx.node()
83 n = self.ctx.node()
80 f = self.cia.repo.status(self.ctx.p1().node(), n)
84 f = self.cia.repo.status(self.ctx.p1().node(), n)
81 url = self.url or ''
85 url = self.url or ''
82 if url and url[-1] == '/':
86 if url and url[-1] == '/':
83 url = url[:-1]
87 url = url[:-1]
84 elems = []
88 elems = []
85 for path in f.modified:
89 for path in f.modified:
86 uri = '%s/diff/%s/%s' % (url, short(n), path)
90 uri = '%s/diff/%s/%s' % (url, short(n), path)
87 elems.append(self.fileelem(path, url and uri, 'modify'))
91 elems.append(self.fileelem(path, url and uri, 'modify'))
88 for path in f.added:
92 for path in f.added:
89 # TODO: copy/rename ?
93 # TODO: copy/rename ?
90 uri = '%s/file/%s/%s' % (url, short(n), path)
94 uri = '%s/file/%s/%s' % (url, short(n), path)
91 elems.append(self.fileelem(path, url and uri, 'add'))
95 elems.append(self.fileelem(path, url and uri, 'add'))
92 for path in f.removed:
96 for path in f.removed:
93 elems.append(self.fileelem(path, '', 'remove'))
97 elems.append(self.fileelem(path, '', 'remove'))
94
98
95 return '\n'.join(elems)
99 return '\n'.join(elems)
96
100
97 def sourceelem(self, project, module=None, branch=None):
101 def sourceelem(self, project, module=None, branch=None):
98 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
102 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
99 if module:
103 if module:
100 msg.append('<module>%s</module>' % saxutils.escape(module))
104 msg.append('<module>%s</module>' % saxutils.escape(module))
101 if branch:
105 if branch:
102 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
106 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
103 msg.append('</source>')
107 msg.append('</source>')
104
108
105 return '\n'.join(msg)
109 return '\n'.join(msg)
106
110
107 def diffstat(self):
111 def diffstat(self):
108 class patchbuf(object):
112 class patchbuf(object):
109 def __init__(self):
113 def __init__(self):
110 self.lines = []
114 self.lines = []
111 # diffstat is stupid
115 # diffstat is stupid
112 self.name = 'cia'
116 self.name = 'cia'
113 def write(self, data):
117 def write(self, data):
114 self.lines += data.splitlines(True)
118 self.lines += data.splitlines(True)
115 def close(self):
119 def close(self):
116 pass
120 pass
117
121
118 n = self.ctx.node()
122 n = self.ctx.node()
119 pbuf = patchbuf()
123 pbuf = patchbuf()
120 cmdutil.export(self.cia.repo, [n], fp=pbuf)
124 cmdutil.export(self.cia.repo, [n], fp=pbuf)
121 return patch.diffstat(pbuf.lines) or ''
125 return patch.diffstat(pbuf.lines) or ''
122
126
123 def logmsg(self):
127 def logmsg(self):
124 if self.cia.diffstat:
128 if self.cia.diffstat:
125 diffstat = self.diffstat()
129 diffstat = self.diffstat()
126 else:
130 else:
127 diffstat = ''
131 diffstat = ''
128 self.cia.ui.pushbuffer()
132 self.cia.ui.pushbuffer()
129 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
133 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
130 baseurl=self.cia.ui.config('web', 'baseurl'),
134 baseurl=self.cia.ui.config('web', 'baseurl'),
131 url=self.url, diffstat=diffstat,
135 url=self.url, diffstat=diffstat,
132 webroot=self.cia.root)
136 webroot=self.cia.root)
133 return self.cia.ui.popbuffer()
137 return self.cia.ui.popbuffer()
134
138
135 def xml(self):
139 def xml(self):
136 n = short(self.ctx.node())
140 n = short(self.ctx.node())
137 src = self.sourceelem(self.cia.project, module=self.cia.module,
141 src = self.sourceelem(self.cia.project, module=self.cia.module,
138 branch=self.ctx.branch())
142 branch=self.ctx.branch())
139 # unix timestamp
143 # unix timestamp
140 dt = self.ctx.date()
144 dt = self.ctx.date()
141 timestamp = dt[0]
145 timestamp = dt[0]
142
146
143 author = saxutils.escape(self.ctx.user())
147 author = saxutils.escape(self.ctx.user())
144 rev = '%d:%s' % (self.ctx.rev(), n)
148 rev = '%d:%s' % (self.ctx.rev(), n)
145 log = saxutils.escape(self.logmsg())
149 log = saxutils.escape(self.logmsg())
146
150
147 url = self.url
151 url = self.url
148 if url and url[-1] == '/':
152 if url and url[-1] == '/':
149 url = url[:-1]
153 url = url[:-1]
150 url = url and '<url>%s/rev/%s</url>' % (saxutils.escape(url), n) or ''
154 url = url and '<url>%s/rev/%s</url>' % (saxutils.escape(url), n) or ''
151
155
152 msg = """
156 msg = """
153 <message>
157 <message>
154 <generator>
158 <generator>
155 <name>Mercurial (hgcia)</name>
159 <name>Mercurial (hgcia)</name>
156 <version>%s</version>
160 <version>%s</version>
157 <url>%s</url>
161 <url>%s</url>
158 <user>%s</user>
162 <user>%s</user>
159 </generator>
163 </generator>
160 %s
164 %s
161 <body>
165 <body>
162 <commit>
166 <commit>
163 <author>%s</author>
167 <author>%s</author>
164 <version>%s</version>
168 <version>%s</version>
165 <log>%s</log>
169 <log>%s</log>
166 %s
170 %s
167 <files>%s</files>
171 <files>%s</files>
168 </commit>
172 </commit>
169 </body>
173 </body>
170 <timestamp>%d</timestamp>
174 <timestamp>%d</timestamp>
171 </message>
175 </message>
172 """ % \
176 """ % \
173 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
177 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
174 saxutils.escape(self.cia.user), src, author, rev, log, url,
178 saxutils.escape(self.cia.user), src, author, rev, log, url,
175 self.fileelems(), timestamp)
179 self.fileelems(), timestamp)
176
180
177 return msg
181 return msg
178
182
179
183
180 class hgcia(object):
184 class hgcia(object):
181 """ CIA notification class """
185 """ CIA notification class """
182
186
183 deftemplate = '{desc}'
187 deftemplate = '{desc}'
184 dstemplate = '{desc}\n-- \n{diffstat}'
188 dstemplate = '{desc}\n-- \n{diffstat}'
185
189
186 def __init__(self, ui, repo):
190 def __init__(self, ui, repo):
187 self.ui = ui
191 self.ui = ui
188 self.repo = repo
192 self.repo = repo
189
193
190 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
194 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
191 self.user = self.ui.config('cia', 'user')
195 self.user = self.ui.config('cia', 'user')
192 self.project = self.ui.config('cia', 'project')
196 self.project = self.ui.config('cia', 'project')
193 self.module = self.ui.config('cia', 'module')
197 self.module = self.ui.config('cia', 'module')
194 self.diffstat = self.ui.configbool('cia', 'diffstat')
198 self.diffstat = self.ui.configbool('cia', 'diffstat')
195 self.emailfrom = self.ui.config('email', 'from')
199 self.emailfrom = self.ui.config('email', 'from')
196 self.dryrun = self.ui.configbool('cia', 'test')
200 self.dryrun = self.ui.configbool('cia', 'test')
197 self.url = self.ui.config('web', 'baseurl')
201 self.url = self.ui.config('web', 'baseurl')
198 # Default to -1 for backward compatibility
202 # Default to -1 for backward compatibility
199 self.stripcount = int(self.ui.config('cia', 'strip', -1))
203 self.stripcount = int(self.ui.config('cia', 'strip', -1))
200 self.root = self.strip(self.repo.root)
204 self.root = self.strip(self.repo.root)
201
205
202 style = self.ui.config('cia', 'style')
206 style = self.ui.config('cia', 'style')
203 template = self.ui.config('cia', 'template')
207 template = self.ui.config('cia', 'template')
204 if not template:
208 if not template:
205 if self.diffstat:
209 if self.diffstat:
206 template = self.dstemplate
210 template = self.dstemplate
207 else:
211 else:
208 template = self.deftemplate
212 template = self.deftemplate
209 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
213 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
210 template, style, False)
214 template, style, False)
211 self.templater = t
215 self.templater = t
212
216
213 def strip(self, path):
217 def strip(self, path):
214 '''strip leading slashes from local path, turn into web-safe path.'''
218 '''strip leading slashes from local path, turn into web-safe path.'''
215
219
216 path = util.pconvert(path)
220 path = util.pconvert(path)
217 count = self.stripcount
221 count = self.stripcount
218 if count < 0:
222 if count < 0:
219 return ''
223 return ''
220 while count > 0:
224 while count > 0:
221 c = path.find('/')
225 c = path.find('/')
222 if c == -1:
226 if c == -1:
223 break
227 break
224 path = path[c + 1:]
228 path = path[c + 1:]
225 count -= 1
229 count -= 1
226 return path
230 return path
227
231
228 def sendrpc(self, msg):
232 def sendrpc(self, msg):
229 srv = xmlrpclib.Server(self.ciaurl)
233 srv = xmlrpclib.Server(self.ciaurl)
230 res = srv.hub.deliver(msg)
234 res = srv.hub.deliver(msg)
231 if res is not True and res != 'queued.':
235 if res is not True and res != 'queued.':
232 raise util.Abort(_('%s returned an error: %s') %
236 raise util.Abort(_('%s returned an error: %s') %
233 (self.ciaurl, res))
237 (self.ciaurl, res))
234
238
235 def sendemail(self, address, data):
239 def sendemail(self, address, data):
236 p = email.Parser.Parser()
240 p = email.Parser.Parser()
237 msg = p.parsestr(data)
241 msg = p.parsestr(data)
238 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
242 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
239 msg['To'] = address
243 msg['To'] = address
240 msg['From'] = self.emailfrom
244 msg['From'] = self.emailfrom
241 msg['Subject'] = 'DeliverXML'
245 msg['Subject'] = 'DeliverXML'
242 msg['Content-type'] = 'text/xml'
246 msg['Content-type'] = 'text/xml'
243 msgtext = msg.as_string()
247 msgtext = msg.as_string()
244
248
245 self.ui.status(_('hgcia: sending update to %s\n') % address)
249 self.ui.status(_('hgcia: sending update to %s\n') % address)
246 mail.sendmail(self.ui, util.email(self.emailfrom),
250 mail.sendmail(self.ui, util.email(self.emailfrom),
247 [address], msgtext)
251 [address], msgtext)
248
252
249
253
250 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
254 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
251 """ send CIA notification """
255 """ send CIA notification """
252 def sendmsg(cia, ctx):
256 def sendmsg(cia, ctx):
253 msg = ciamsg(cia, ctx).xml()
257 msg = ciamsg(cia, ctx).xml()
254 if cia.dryrun:
258 if cia.dryrun:
255 ui.write(msg)
259 ui.write(msg)
256 elif cia.ciaurl.startswith('mailto:'):
260 elif cia.ciaurl.startswith('mailto:'):
257 if not cia.emailfrom:
261 if not cia.emailfrom:
258 raise util.Abort(_('email.from must be defined when '
262 raise util.Abort(_('email.from must be defined when '
259 'sending by email'))
263 'sending by email'))
260 cia.sendemail(cia.ciaurl[7:], msg)
264 cia.sendemail(cia.ciaurl[7:], msg)
261 else:
265 else:
262 cia.sendrpc(msg)
266 cia.sendrpc(msg)
263
267
264 n = bin(node)
268 n = bin(node)
265 cia = hgcia(ui, repo)
269 cia = hgcia(ui, repo)
266 if not cia.user:
270 if not cia.user:
267 ui.debug('cia: no user specified')
271 ui.debug('cia: no user specified')
268 return
272 return
269 if not cia.project:
273 if not cia.project:
270 ui.debug('cia: no project specified')
274 ui.debug('cia: no project specified')
271 return
275 return
272 if hooktype == 'changegroup':
276 if hooktype == 'changegroup':
273 start = repo.changelog.rev(n)
277 start = repo.changelog.rev(n)
274 end = len(repo.changelog)
278 end = len(repo.changelog)
275 for rev in xrange(start, end):
279 for rev in xrange(start, end):
276 n = repo.changelog.node(rev)
280 n = repo.changelog.node(rev)
277 ctx = repo.changectx(n)
281 ctx = repo.changectx(n)
278 sendmsg(cia, ctx)
282 sendmsg(cia, ctx)
279 else:
283 else:
280 ctx = repo.changectx(n)
284 ctx = repo.changectx(n)
281 sendmsg(cia, ctx)
285 sendmsg(cia, ctx)
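
hgcia's strip() method drops `cia.strip` leading path components from the repository root to build a web-safe path, and returns an empty string for the backward-compatible default of -1. The same loop extracted into a standalone sketch, with made-up example paths:

    def strippath(path, count):
        """drop the first `count` '/'-separated components; '' if count < 0"""
        if count < 0:
            return ''
        while count > 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c + 1:]
            count -= 1
        return path

    # hypothetical repository roots, just to show the effect of cia.strip
    print(strippath('home/user/repos/project', 2))   # -> 'repos/project'
    print(strippath('home/user/repos/project', -1))  # -> ''
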
@@ -1,331 +1,335 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''browse the repository in a graphical way
8 '''browse the repository in a graphical way
9
9
10 The hgk extension allows browsing the history of a repository in a
10 The hgk extension allows browsing the history of a repository in a
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
12 distributed with Mercurial.)
12 distributed with Mercurial.)
13
13
14 hgk consists of two parts: a Tcl script that does the displaying and
14 hgk consists of two parts: a Tcl script that does the displaying and
15 querying of information, and an extension to Mercurial named hgk.py,
15 querying of information, and an extension to Mercurial named hgk.py,
16 which provides hooks for hgk to get information. hgk can be found in
16 which provides hooks for hgk to get information. hgk can be found in
17 the contrib directory, while the extension is shipped in the hgext
17 the contrib directory, while the extension is shipped in the hgext
18 repository and needs to be enabled.
18 repository and needs to be enabled.
19
19
20 The :hg:`view` command will launch the hgk Tcl script. For this command
20 The :hg:`view` command will launch the hgk Tcl script. For this command
21 to work, hgk must be in your search path. Alternately, you can specify
21 to work, hgk must be in your search path. Alternately, you can specify
22 the path to hgk in your configuration file::
22 the path to hgk in your configuration file::
23
23
24 [hgk]
24 [hgk]
25 path=/location/of/hgk
25 path=/location/of/hgk
26
26
27 hgk can make use of the extdiff extension to visualize revisions.
27 hgk can make use of the extdiff extension to visualize revisions.
28 Assuming you had already configured extdiff vdiff command, just add::
28 Assuming you had already configured extdiff vdiff command, just add::
29
29
30 [hgk]
30 [hgk]
31 vdiff=vdiff
31 vdiff=vdiff
32
32
33 The revisions context menu will now display additional entries to fire
33 The revisions context menu will now display additional entries to fire
34 vdiff on hovered and selected revisions.
34 vdiff on hovered and selected revisions.
35 '''
35 '''
36
36
37 import os
37 import os
38 from mercurial import cmdutil, commands, patch, scmutil, obsolete
38 from mercurial import cmdutil, commands, patch, scmutil, obsolete
39 from mercurial.node import nullid, nullrev, short
39 from mercurial.node import nullid, nullrev, short
40 from mercurial.i18n import _
40 from mercurial.i18n import _
41
41
42 cmdtable = {}
42 cmdtable = {}
43 command = cmdutil.command(cmdtable)
43 command = cmdutil.command(cmdtable)
44 # Note for extension authors: ONLY specify testedwith = 'internal' for
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # be specifying the version(s) of Mercurial they are tested with, or
47 # leave the attribute unspecified.
44 testedwith = 'internal'
48 testedwith = 'internal'
45
49
46 @command('debug-diff-tree',
50 @command('debug-diff-tree',
47 [('p', 'patch', None, _('generate patch')),
51 [('p', 'patch', None, _('generate patch')),
48 ('r', 'recursive', None, _('recursive')),
52 ('r', 'recursive', None, _('recursive')),
49 ('P', 'pretty', None, _('pretty')),
53 ('P', 'pretty', None, _('pretty')),
50 ('s', 'stdin', None, _('stdin')),
54 ('s', 'stdin', None, _('stdin')),
51 ('C', 'copy', None, _('detect copies')),
55 ('C', 'copy', None, _('detect copies')),
52 ('S', 'search', "", _('search'))],
56 ('S', 'search', "", _('search'))],
53 ('[OPTION]... NODE1 NODE2 [FILE]...'),
57 ('[OPTION]... NODE1 NODE2 [FILE]...'),
54 inferrepo=True)
58 inferrepo=True)
55 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
59 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
56 """diff trees from two commits"""
60 """diff trees from two commits"""
57 def __difftree(repo, node1, node2, files=[]):
61 def __difftree(repo, node1, node2, files=[]):
58 assert node2 is not None
62 assert node2 is not None
59 mmap = repo[node1].manifest()
63 mmap = repo[node1].manifest()
60 mmap2 = repo[node2].manifest()
64 mmap2 = repo[node2].manifest()
61 m = scmutil.match(repo[node1], files)
65 m = scmutil.match(repo[node1], files)
62 modified, added, removed = repo.status(node1, node2, m)[:3]
66 modified, added, removed = repo.status(node1, node2, m)[:3]
63 empty = short(nullid)
67 empty = short(nullid)
64
68
65 for f in modified:
69 for f in modified:
66 # TODO get file permissions
70 # TODO get file permissions
67 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
71 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
68 (short(mmap[f]), short(mmap2[f]), f, f))
72 (short(mmap[f]), short(mmap2[f]), f, f))
69 for f in added:
73 for f in added:
70 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
74 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
71 (empty, short(mmap2[f]), f, f))
75 (empty, short(mmap2[f]), f, f))
72 for f in removed:
76 for f in removed:
73 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
77 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
74 (short(mmap[f]), empty, f, f))
78 (short(mmap[f]), empty, f, f))
75 ##
79 ##
76
80
77 while True:
81 while True:
78 if opts['stdin']:
82 if opts['stdin']:
79 try:
83 try:
80 line = raw_input().split(' ')
84 line = raw_input().split(' ')
81 node1 = line[0]
85 node1 = line[0]
82 if len(line) > 1:
86 if len(line) > 1:
83 node2 = line[1]
87 node2 = line[1]
84 else:
88 else:
85 node2 = None
89 node2 = None
86 except EOFError:
90 except EOFError:
87 break
91 break
88 node1 = repo.lookup(node1)
92 node1 = repo.lookup(node1)
89 if node2:
93 if node2:
90 node2 = repo.lookup(node2)
94 node2 = repo.lookup(node2)
91 else:
95 else:
92 node2 = node1
96 node2 = node1
93 node1 = repo.changelog.parents(node1)[0]
97 node1 = repo.changelog.parents(node1)[0]
94 if opts['patch']:
98 if opts['patch']:
95 if opts['pretty']:
99 if opts['pretty']:
96 catcommit(ui, repo, node2, "")
100 catcommit(ui, repo, node2, "")
97 m = scmutil.match(repo[node1], files)
101 m = scmutil.match(repo[node1], files)
98 diffopts = patch.difffeatureopts(ui)
102 diffopts = patch.difffeatureopts(ui)
99 diffopts.git = True
103 diffopts.git = True
100 chunks = patch.diff(repo, node1, node2, match=m,
104 chunks = patch.diff(repo, node1, node2, match=m,
101 opts=diffopts)
105 opts=diffopts)
102 for chunk in chunks:
106 for chunk in chunks:
103 ui.write(chunk)
107 ui.write(chunk)
104 else:
108 else:
105 __difftree(repo, node1, node2, files=files)
109 __difftree(repo, node1, node2, files=files)
106 if not opts['stdin']:
110 if not opts['stdin']:
107 break
111 break
108
112
109 def catcommit(ui, repo, n, prefix, ctx=None):
113 def catcommit(ui, repo, n, prefix, ctx=None):
110 nlprefix = '\n' + prefix
114 nlprefix = '\n' + prefix
111 if ctx is None:
115 if ctx is None:
112 ctx = repo[n]
116 ctx = repo[n]
113 # use ctx.node() instead ??
117 # use ctx.node() instead ??
114 ui.write(("tree %s\n" % short(ctx.changeset()[0])))
118 ui.write(("tree %s\n" % short(ctx.changeset()[0])))
115 for p in ctx.parents():
119 for p in ctx.parents():
116 ui.write(("parent %s\n" % p))
120 ui.write(("parent %s\n" % p))
117
121
118 date = ctx.date()
122 date = ctx.date()
119 description = ctx.description().replace("\0", "")
123 description = ctx.description().replace("\0", "")
120 ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
124 ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
121
125
122 if 'committer' in ctx.extra():
126 if 'committer' in ctx.extra():
123 ui.write(("committer %s\n" % ctx.extra()['committer']))
127 ui.write(("committer %s\n" % ctx.extra()['committer']))
124
128
125 ui.write(("revision %d\n" % ctx.rev()))
129 ui.write(("revision %d\n" % ctx.rev()))
126 ui.write(("branch %s\n" % ctx.branch()))
130 ui.write(("branch %s\n" % ctx.branch()))
127 if obsolete.isenabled(repo, obsolete.createmarkersopt):
131 if obsolete.isenabled(repo, obsolete.createmarkersopt):
128 if ctx.obsolete():
132 if ctx.obsolete():
129 ui.write(("obsolete\n"))
133 ui.write(("obsolete\n"))
130 ui.write(("phase %s\n\n" % ctx.phasestr()))
134 ui.write(("phase %s\n\n" % ctx.phasestr()))
131
135
132 if prefix != "":
136 if prefix != "":
133 ui.write("%s%s\n" % (prefix,
137 ui.write("%s%s\n" % (prefix,
134 description.replace('\n', nlprefix).strip()))
138 description.replace('\n', nlprefix).strip()))
135 else:
139 else:
136 ui.write(description + "\n")
140 ui.write(description + "\n")
137 if prefix:
141 if prefix:
138 ui.write('\0')
142 ui.write('\0')
139
143
140 @command('debug-merge-base', [], _('REV REV'))
144 @command('debug-merge-base', [], _('REV REV'))
141 def base(ui, repo, node1, node2):
145 def base(ui, repo, node1, node2):
142 """output common ancestor information"""
146 """output common ancestor information"""
143 node1 = repo.lookup(node1)
147 node1 = repo.lookup(node1)
144 node2 = repo.lookup(node2)
148 node2 = repo.lookup(node2)
145 n = repo.changelog.ancestor(node1, node2)
149 n = repo.changelog.ancestor(node1, node2)
146 ui.write(short(n) + "\n")
150 ui.write(short(n) + "\n")
147
151
148 @command('debug-cat-file',
152 @command('debug-cat-file',
149 [('s', 'stdin', None, _('stdin'))],
153 [('s', 'stdin', None, _('stdin'))],
150 _('[OPTION]... TYPE FILE'),
154 _('[OPTION]... TYPE FILE'),
151 inferrepo=True)
155 inferrepo=True)
152 def catfile(ui, repo, type=None, r=None, **opts):
156 def catfile(ui, repo, type=None, r=None, **opts):
153 """cat a specific revision"""
157 """cat a specific revision"""
154 # in stdin mode, every line except the commit is prefixed with two
158 # in stdin mode, every line except the commit is prefixed with two
155 # spaces. This way our caller can find the commit without magic
159 # spaces. This way our caller can find the commit without magic
156 # strings
160 # strings
157 #
161 #
158 prefix = ""
162 prefix = ""
159 if opts['stdin']:
163 if opts['stdin']:
160 try:
164 try:
161 (type, r) = raw_input().split(' ')
165 (type, r) = raw_input().split(' ')
162 prefix = " "
166 prefix = " "
163 except EOFError:
167 except EOFError:
164 return
168 return
165
169
166 else:
170 else:
167 if not type or not r:
171 if not type or not r:
168 ui.warn(_("cat-file: type or revision not supplied\n"))
172 ui.warn(_("cat-file: type or revision not supplied\n"))
169 commands.help_(ui, 'cat-file')
173 commands.help_(ui, 'cat-file')
170
174
171 while r:
175 while r:
172 if type != "commit":
176 if type != "commit":
173 ui.warn(_("aborting hg cat-file only understands commits\n"))
177 ui.warn(_("aborting hg cat-file only understands commits\n"))
174 return 1
178 return 1
175 n = repo.lookup(r)
179 n = repo.lookup(r)
176 catcommit(ui, repo, n, prefix)
180 catcommit(ui, repo, n, prefix)
177 if opts['stdin']:
181 if opts['stdin']:
178 try:
182 try:
179 (type, r) = raw_input().split(' ')
183 (type, r) = raw_input().split(' ')
180 except EOFError:
184 except EOFError:
181 break
185 break
182 else:
186 else:
183 break
187 break
184
188
185 # git rev-tree is a confusing thing. You can supply a number of
189 # git rev-tree is a confusing thing. You can supply a number of
186 # commit sha1s on the command line, and it walks the commit history
190 # commit sha1s on the command line, and it walks the commit history
187 # telling you which commits are reachable from the supplied ones via
191 # telling you which commits are reachable from the supplied ones via
188 # a bitmask based on arg position.
192 # a bitmask based on arg position.
189 # You can specify a commit to stop at by prefixing its sha1 with ^.
193 # You can specify a commit to stop at by prefixing its sha1 with ^.
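# (Editor's sketch, not part of the original file: a worked example of the
# bitmask described above. With args ['A', 'B'] and reachability sets
# r = [set reachable from A, set reachable from B], a node n gets
#
#     mask = sum(1 << i for i, s in enumerate(r) if n in s)
#
# so mask 1 means "reachable from A only", 2 "from B only", 3 "from both";
# this matches the `mask |= 1 << i` computed by is_reachable() below.)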
190 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
194 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
191 def chlogwalk():
195 def chlogwalk():
192 count = len(repo)
196 count = len(repo)
193 i = count
197 i = count
194 l = [0] * 100
198 l = [0] * 100
195 chunk = 100
199 chunk = 100
196 while True:
200 while True:
197 if chunk > i:
201 if chunk > i:
198 chunk = i
202 chunk = i
199 i = 0
203 i = 0
200 else:
204 else:
201 i -= chunk
205 i -= chunk
202
206
203 for x in xrange(chunk):
207 for x in xrange(chunk):
204 if i + x >= count:
208 if i + x >= count:
205 l[chunk - x:] = [0] * (chunk - x)
209 l[chunk - x:] = [0] * (chunk - x)
206 break
210 break
207 if full is not None:
211 if full is not None:
208 if (i + x) in repo:
212 if (i + x) in repo:
209 l[x] = repo[i + x]
213 l[x] = repo[i + x]
210 l[x].changeset() # force reading
214 l[x].changeset() # force reading
211 else:
215 else:
212 if (i + x) in repo:
216 if (i + x) in repo:
213 l[x] = 1
217 l[x] = 1
214 for x in xrange(chunk - 1, -1, -1):
218 for x in xrange(chunk - 1, -1, -1):
215 if l[x] != 0:
219 if l[x] != 0:
216 yield (i + x, full is not None and l[x] or None)
220 yield (i + x, full is not None and l[x] or None)
217 if i == 0:
221 if i == 0:
218 break
222 break
219
223
220 # calculate and return the reachability bitmask for sha
224 # calculate and return the reachability bitmask for sha
221 def is_reachable(ar, reachable, sha):
225 def is_reachable(ar, reachable, sha):
222 if len(ar) == 0:
226 if len(ar) == 0:
223 return 1
227 return 1
224 mask = 0
228 mask = 0
225 for i in xrange(len(ar)):
229 for i in xrange(len(ar)):
226 if sha in reachable[i]:
230 if sha in reachable[i]:
227 mask |= 1 << i
231 mask |= 1 << i
228
232
229 return mask
233 return mask
230
234
231 reachable = []
235 reachable = []
232 stop_sha1 = []
236 stop_sha1 = []
233 want_sha1 = []
237 want_sha1 = []
234 count = 0
238 count = 0
235
239
236 # figure out which commits they are asking for and which ones they
240 # figure out which commits they are asking for and which ones they
237 # want us to stop on
241 # want us to stop on
238 for i, arg in enumerate(args):
242 for i, arg in enumerate(args):
239 if arg.startswith('^'):
243 if arg.startswith('^'):
240 s = repo.lookup(arg[1:])
244 s = repo.lookup(arg[1:])
241 stop_sha1.append(s)
245 stop_sha1.append(s)
242 want_sha1.append(s)
246 want_sha1.append(s)
243 elif arg != 'HEAD':
247 elif arg != 'HEAD':
244 want_sha1.append(repo.lookup(arg))
248 want_sha1.append(repo.lookup(arg))
245
249
246 # calculate the graph for the supplied commits
250 # calculate the graph for the supplied commits
247 for i, n in enumerate(want_sha1):
251 for i, n in enumerate(want_sha1):
248 reachable.append(set())
252 reachable.append(set())
249 visit = [n]
253 visit = [n]
250 reachable[i].add(n)
254 reachable[i].add(n)
251 while visit:
255 while visit:
252 n = visit.pop(0)
256 n = visit.pop(0)
253 if n in stop_sha1:
257 if n in stop_sha1:
254 continue
258 continue
255 for p in repo.changelog.parents(n):
259 for p in repo.changelog.parents(n):
256 if p not in reachable[i]:
260 if p not in reachable[i]:
257 reachable[i].add(p)
261 reachable[i].add(p)
258 visit.append(p)
262 visit.append(p)
259 if p in stop_sha1:
263 if p in stop_sha1:
260 continue
264 continue
261
265
262 # walk the repository looking for commits that are in our
266 # walk the repository looking for commits that are in our
263 # reachability graph
267 # reachability graph
264 for i, ctx in chlogwalk():
268 for i, ctx in chlogwalk():
265 if i not in repo:
269 if i not in repo:
266 continue
270 continue
267 n = repo.changelog.node(i)
271 n = repo.changelog.node(i)
268 mask = is_reachable(want_sha1, reachable, n)
272 mask = is_reachable(want_sha1, reachable, n)
269 if mask:
273 if mask:
270 parentstr = ""
274 parentstr = ""
271 if parents:
275 if parents:
272 pp = repo.changelog.parents(n)
276 pp = repo.changelog.parents(n)
273 if pp[0] != nullid:
277 if pp[0] != nullid:
274 parentstr += " " + short(pp[0])
278 parentstr += " " + short(pp[0])
275 if pp[1] != nullid:
279 if pp[1] != nullid:
276 parentstr += " " + short(pp[1])
280 parentstr += " " + short(pp[1])
277 if not full:
281 if not full:
278 ui.write("%s%s\n" % (short(n), parentstr))
282 ui.write("%s%s\n" % (short(n), parentstr))
279 elif full == "commit":
283 elif full == "commit":
280 ui.write("%s%s\n" % (short(n), parentstr))
284 ui.write("%s%s\n" % (short(n), parentstr))
281 catcommit(ui, repo, n, ' ', ctx)
285 catcommit(ui, repo, n, ' ', ctx)
282 else:
286 else:
283 (p1, p2) = repo.changelog.parents(n)
287 (p1, p2) = repo.changelog.parents(n)
284 (h, h1, h2) = map(short, (n, p1, p2))
288 (h, h1, h2) = map(short, (n, p1, p2))
285 (i1, i2) = map(repo.changelog.rev, (p1, p2))
289 (i1, i2) = map(repo.changelog.rev, (p1, p2))
286
290
287 date = ctx.date()[0]
291 date = ctx.date()[0]
288 ui.write("%s %s:%s" % (date, h, mask))
292 ui.write("%s %s:%s" % (date, h, mask))
289 mask = is_reachable(want_sha1, reachable, p1)
293 mask = is_reachable(want_sha1, reachable, p1)
290 if i1 != nullrev and mask > 0:
294 if i1 != nullrev and mask > 0:
291 ui.write("%s:%s " % (h1, mask)),
295 ui.write("%s:%s " % (h1, mask)),
292 mask = is_reachable(want_sha1, reachable, p2)
296 mask = is_reachable(want_sha1, reachable, p2)
293 if i2 != nullrev and mask > 0:
297 if i2 != nullrev and mask > 0:
294 ui.write("%s:%s " % (h2, mask))
298 ui.write("%s:%s " % (h2, mask))
295 ui.write("\n")
299 ui.write("\n")
296 if maxnr and count >= maxnr:
300 if maxnr and count >= maxnr:
297 break
301 break
298 count += 1
302 count += 1
299
303
300 # git rev-list tries to order things by date, and has the ability to stop
304 # git rev-list tries to order things by date, and has the ability to stop
301 # at a given commit without walking the whole repo. TODO add the stop
305 # at a given commit without walking the whole repo. TODO add the stop
302 # parameter
306 # parameter
303 @command('debug-rev-list',
307 @command('debug-rev-list',
304 [('H', 'header', None, _('header')),
308 [('H', 'header', None, _('header')),
305 ('t', 'topo-order', None, _('topo-order')),
309 ('t', 'topo-order', None, _('topo-order')),
306 ('p', 'parents', None, _('parents')),
310 ('p', 'parents', None, _('parents')),
307 ('n', 'max-count', 0, _('max-count'))],
311 ('n', 'max-count', 0, _('max-count'))],
308 ('[OPTION]... REV...'))
312 ('[OPTION]... REV...'))
309 def revlist(ui, repo, *revs, **opts):
313 def revlist(ui, repo, *revs, **opts):
310 """print revisions"""
314 """print revisions"""
311 if opts['header']:
315 if opts['header']:
312 full = "commit"
316 full = "commit"
313 else:
317 else:
314 full = None
318 full = None
315 copy = [x for x in revs]
319 copy = [x for x in revs]
316 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
320 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
317
321
318 @command('view',
322 @command('view',
319 [('l', 'limit', '',
323 [('l', 'limit', '',
320 _('limit number of changes displayed'), _('NUM'))],
324 _('limit number of changes displayed'), _('NUM'))],
321 _('[-l LIMIT] [REVRANGE]'))
325 _('[-l LIMIT] [REVRANGE]'))
322 def view(ui, repo, *etc, **opts):
326 def view(ui, repo, *etc, **opts):
323 "start interactive history viewer"
327 "start interactive history viewer"
324 os.chdir(repo.root)
328 os.chdir(repo.root)
325 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
329 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
326 if repo.filtername is None:
330 if repo.filtername is None:
327 optstr += ' --hidden'
331 optstr += ' --hidden'
328
332
329 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
333 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
330 ui.debug("running %s\n" % cmd)
334 ui.debug("running %s\n" % cmd)
331 ui.system(cmd)
335 ui.system(cmd)
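# (Editor's sketch, not in the original file: the `view` command above looks
# up the hgk script via ui.config("hgk", "path", "hgk"), so a user whose hgk
# lives outside PATH could point to it from an hgrc; the path shown here is
# purely illustrative:
#
#     [hgk]
#     path = /usr/local/bin/hgk
# )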
@@ -1,64 +1,68 b''
1 # highlight - syntax highlighting in hgweb, based on Pygments
1 # highlight - syntax highlighting in hgweb, based on Pygments
2 #
2 #
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # The original module was split into an interface and an implementation
8 # The original module was split into an interface and an implementation
9 # file to defer pygments loading and speed up extension setup.
9 # file to defer pygments loading and speed up extension setup.
10
10
11 """syntax highlighting for hgweb (requires Pygments)
11 """syntax highlighting for hgweb (requires Pygments)
12
12
13 It depends on the Pygments syntax highlighting library:
13 It depends on the Pygments syntax highlighting library:
14 http://pygments.org/
14 http://pygments.org/
15
15
16 There is a single configuration option::
16 There is a single configuration option::
17
17
18 [web]
18 [web]
19 pygments_style = <style>
19 pygments_style = <style>
20
20
21 The default is 'colorful'.
21 The default is 'colorful'.
22 """
22 """
23
23
24 import highlight
24 import highlight
25 from mercurial.hgweb import webcommands, webutil, common
25 from mercurial.hgweb import webcommands, webutil, common
26 from mercurial import extensions, encoding
26 from mercurial import extensions, encoding
27 # Note for extension authors: ONLY specify testedwith = 'internal' for
28 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # specify the version(s) of Mercurial they are tested with, or
30 # leave the attribute unspecified.
27 testedwith = 'internal'
31 testedwith = 'internal'
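# (Editor's sketch, not part of the original change: per the note above, a
# hypothetical non-mainline extension would instead declare the Mercurial
# releases it was actually tested against, e.g.
#
#     testedwith = '3.3 3.4'
#
# where the space-separated version numbers are purely illustrative.)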
28
32
29 def filerevision_highlight(orig, web, tmpl, fctx):
33 def filerevision_highlight(orig, web, tmpl, fctx):
30 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
34 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
31 # only pygmentize for mimetype containing 'html' so we both match
35 # only pygmentize for mimetype containing 'html' so we both match
32 # 'text/html' and possibly 'application/xhtml+xml' in the future
36 # 'text/html' and possibly 'application/xhtml+xml' in the future
33 # so that we don't have to touch the extension when the mimetype
37 # so that we don't have to touch the extension when the mimetype
34 # for a template changes; also hgweb optimizes the case that a
38 # for a template changes; also hgweb optimizes the case that a
35 # raw file is sent using rawfile() and doesn't call us, so we
39 # raw file is sent using rawfile() and doesn't call us, so we
36 # can't clash with the file's content-type here in case we
40 # can't clash with the file's content-type here in case we
37 # pygmentize an HTML file
41 # pygmentize an HTML file
38 if 'html' in mt:
42 if 'html' in mt:
39 style = web.config('web', 'pygments_style', 'colorful')
43 style = web.config('web', 'pygments_style', 'colorful')
40 highlight.pygmentize('fileline', fctx, style, tmpl)
44 highlight.pygmentize('fileline', fctx, style, tmpl)
41 return orig(web, tmpl, fctx)
45 return orig(web, tmpl, fctx)
42
46
43 def annotate_highlight(orig, web, req, tmpl):
47 def annotate_highlight(orig, web, req, tmpl):
44 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
48 mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
45 if 'html' in mt:
49 if 'html' in mt:
46 fctx = webutil.filectx(web.repo, req)
50 fctx = webutil.filectx(web.repo, req)
47 style = web.config('web', 'pygments_style', 'colorful')
51 style = web.config('web', 'pygments_style', 'colorful')
48 highlight.pygmentize('annotateline', fctx, style, tmpl)
52 highlight.pygmentize('annotateline', fctx, style, tmpl)
49 return orig(web, req, tmpl)
53 return orig(web, req, tmpl)
50
54
51 def generate_css(web, req, tmpl):
55 def generate_css(web, req, tmpl):
52 pg_style = web.config('web', 'pygments_style', 'colorful')
56 pg_style = web.config('web', 'pygments_style', 'colorful')
53 fmter = highlight.HtmlFormatter(style=pg_style)
57 fmter = highlight.HtmlFormatter(style=pg_style)
54 req.respond(common.HTTP_OK, 'text/css')
58 req.respond(common.HTTP_OK, 'text/css')
55 return ['/* pygments_style = %s */\n\n' % pg_style,
59 return ['/* pygments_style = %s */\n\n' % pg_style,
56 fmter.get_style_defs('')]
60 fmter.get_style_defs('')]
57
61
58 def extsetup():
62 def extsetup():
59 # monkeypatch in the new version
63 # monkeypatch in the new version
60 extensions.wrapfunction(webcommands, '_filerevision',
64 extensions.wrapfunction(webcommands, '_filerevision',
61 filerevision_highlight)
65 filerevision_highlight)
62 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
66 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
63 webcommands.highlightcss = generate_css
67 webcommands.highlightcss = generate_css
64 webcommands.__all__.append('highlightcss')
68 webcommands.__all__.append('highlightcss')
@@ -1,1151 +1,1155 b''
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
25 file open in your editor::
26
26
27 pick c561b4e977df Add beta
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
29 pick 7c2fd3b9020c Add delta
30
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
32 #
33 # Commits are listed from least to most recent
33 # Commits are listed from least to most recent
34 #
34 #
35 # Commands:
35 # Commands:
36 # p, pick = use commit
36 # p, pick = use commit
37 # e, edit = use commit, but stop for amending
37 # e, edit = use commit, but stop for amending
38 # f, fold = use commit, but combine it with the one above
38 # f, fold = use commit, but combine it with the one above
39 # r, roll = like fold, but discard this commit's description
39 # r, roll = like fold, but discard this commit's description
40 # d, drop = remove commit from history
40 # d, drop = remove commit from history
41 # m, mess = edit message without changing commit content
41 # m, mess = edit message without changing commit content
42 #
42 #
43
43
44 In this file, lines beginning with ``#`` are ignored. You must specify a rule
44 In this file, lines beginning with ``#`` are ignored. You must specify a rule
45 for each revision in your history. For example, if you had meant to add gamma
45 for each revision in your history. For example, if you had meant to add gamma
46 before beta, and then wanted to add delta in the same revision as beta, you
46 before beta, and then wanted to add delta in the same revision as beta, you
47 would reorganize the file to look like this::
47 would reorganize the file to look like this::
48
48
49 pick 030b686bedc4 Add gamma
49 pick 030b686bedc4 Add gamma
50 pick c561b4e977df Add beta
50 pick c561b4e977df Add beta
51 fold 7c2fd3b9020c Add delta
51 fold 7c2fd3b9020c Add delta
52
52
53 # Edit history between c561b4e977df and 7c2fd3b9020c
53 # Edit history between c561b4e977df and 7c2fd3b9020c
54 #
54 #
55 # Commits are listed from least to most recent
55 # Commits are listed from least to most recent
56 #
56 #
57 # Commands:
57 # Commands:
58 # p, pick = use commit
58 # p, pick = use commit
59 # e, edit = use commit, but stop for amending
59 # e, edit = use commit, but stop for amending
60 # f, fold = use commit, but combine it with the one above
60 # f, fold = use commit, but combine it with the one above
61 # r, roll = like fold, but discard this commit's description
61 # r, roll = like fold, but discard this commit's description
62 # d, drop = remove commit from history
62 # d, drop = remove commit from history
63 # m, mess = edit message without changing commit content
63 # m, mess = edit message without changing commit content
64 #
64 #
65
65
66 At which point you close the editor and ``histedit`` starts working. When you
66 At which point you close the editor and ``histedit`` starts working. When you
67 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
67 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
68 those revisions together, offering you a chance to clean up the commit message::
68 those revisions together, offering you a chance to clean up the commit message::
69
69
70 Add beta
70 Add beta
71 ***
71 ***
72 Add delta
72 Add delta
73
73
74 Edit the commit message to your liking, then close the editor. For
74 Edit the commit message to your liking, then close the editor. For
75 this example, let's assume that the commit message was changed to
75 this example, let's assume that the commit message was changed to
76 ``Add beta and delta.`` After histedit has run and had a chance to
76 ``Add beta and delta.`` After histedit has run and had a chance to
77 remove any old or temporary revisions it needed, the history looks
77 remove any old or temporary revisions it needed, the history looks
78 like this::
78 like this::
79
79
80 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
80 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
81 | Add beta and delta.
81 | Add beta and delta.
82 |
82 |
83 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
83 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
84 | Add gamma
84 | Add gamma
85 |
85 |
86 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
86 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
87 Add alpha
87 Add alpha
88
88
89 Note that ``histedit`` does *not* remove any revisions (even its own temporary
89 Note that ``histedit`` does *not* remove any revisions (even its own temporary
90 ones) until after it has completed all the editing operations, so it will
90 ones) until after it has completed all the editing operations, so it will
91 probably perform several strip operations when it's done. For the above example,
91 probably perform several strip operations when it's done. For the above example,
92 it had to run strip twice. Strip can be slow depending on a variety of factors,
92 it had to run strip twice. Strip can be slow depending on a variety of factors,
93 so you might need to be a little patient. You can choose to keep the original
93 so you might need to be a little patient. You can choose to keep the original
94 revisions by passing the ``--keep`` flag.
94 revisions by passing the ``--keep`` flag.
95
95
96 The ``edit`` operation will drop you back to a command prompt,
96 The ``edit`` operation will drop you back to a command prompt,
97 allowing you to edit files freely, or even use ``hg record`` to commit
97 allowing you to edit files freely, or even use ``hg record`` to commit
98 some changes as a separate commit. When you're done, any remaining
98 some changes as a separate commit. When you're done, any remaining
99 uncommitted changes will be committed as well. When done, run ``hg
99 uncommitted changes will be committed as well. When done, run ``hg
100 histedit --continue`` to finish this step. You'll be prompted for a
100 histedit --continue`` to finish this step. You'll be prompted for a
101 new commit message, but the default commit message will be the
101 new commit message, but the default commit message will be the
102 original message for the ``edit``-ed revision.
102 original message for the ``edit``-ed revision.
103
103
104 The ``message`` operation will give you a chance to revise a commit
104 The ``message`` operation will give you a chance to revise a commit
105 message without changing the contents. It's a shortcut for doing
105 message without changing the contents. It's a shortcut for doing
106 ``edit`` immediately followed by ``hg histedit --continue``.
106 ``edit`` immediately followed by ``hg histedit --continue``.
107
107
108 If ``histedit`` encounters a conflict when moving a revision (while
108 If ``histedit`` encounters a conflict when moving a revision (while
109 handling ``pick`` or ``fold``), it'll stop in a similar manner to
109 handling ``pick`` or ``fold``), it'll stop in a similar manner to
110 ``edit`` with the difference that it won't prompt you for a commit
110 ``edit`` with the difference that it won't prompt you for a commit
111 message when done. If you decide at this point that you don't like how
111 message when done. If you decide at this point that you don't like how
112 much work it will be to rearrange history, or that you made a mistake,
112 much work it will be to rearrange history, or that you made a mistake,
113 you can use ``hg histedit --abort`` to abandon the new changes you
113 you can use ``hg histedit --abort`` to abandon the new changes you
114 have made and return to the state before you attempted to edit your
114 have made and return to the state before you attempted to edit your
115 history.
115 history.
116
116
117 If we clone the histedit-ed example repository above and add four more
117 If we clone the histedit-ed example repository above and add four more
118 changes, such that we have the following history::
118 changes, such that we have the following history::
119
119
120 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
120 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
121 | Add theta
121 | Add theta
122 |
122 |
123 o 5 140988835471 2009-04-27 18:04 -0500 stefan
123 o 5 140988835471 2009-04-27 18:04 -0500 stefan
124 | Add eta
124 | Add eta
125 |
125 |
126 o 4 122930637314 2009-04-27 18:04 -0500 stefan
126 o 4 122930637314 2009-04-27 18:04 -0500 stefan
127 | Add zeta
127 | Add zeta
128 |
128 |
129 o 3 836302820282 2009-04-27 18:04 -0500 stefan
129 o 3 836302820282 2009-04-27 18:04 -0500 stefan
130 | Add epsilon
130 | Add epsilon
131 |
131 |
132 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
132 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
133 | Add beta and delta.
133 | Add beta and delta.
134 |
134 |
135 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
135 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
136 | Add gamma
136 | Add gamma
137 |
137 |
138 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
138 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
139 Add alpha
139 Add alpha
140
140
141 If you run ``hg histedit --outgoing`` on the clone then it is the same
141 If you run ``hg histedit --outgoing`` on the clone then it is the same
142 as running ``hg histedit 836302820282``. If you plan to push to a
142 as running ``hg histedit 836302820282``. If you plan to push to a
143 repository that Mercurial does not detect to be related to the source
143 repository that Mercurial does not detect to be related to the source
144 repo, you can add a ``--force`` option.
144 repo, you can add a ``--force`` option.
145
145
146 Histedit rule lines are truncated to 80 characters by default. You
146 Histedit rule lines are truncated to 80 characters by default. You
147 can customise this behaviour by setting a different length in your
147 can customise this behaviour by setting a different length in your
148 configuration file::
148 configuration file::
149
149
150 [histedit]
150 [histedit]
151 linelen = 120 # truncate rule lines at 120 characters
151 linelen = 120 # truncate rule lines at 120 characters
152 """
152 """
153
153
154 try:
154 try:
155 import cPickle as pickle
155 import cPickle as pickle
156 pickle.dump # import now
156 pickle.dump # import now
157 except ImportError:
157 except ImportError:
158 import pickle
158 import pickle
159 import errno
159 import errno
160 import os
160 import os
161 import sys
161 import sys
162
162
163 from mercurial import cmdutil
163 from mercurial import cmdutil
164 from mercurial import discovery
164 from mercurial import discovery
165 from mercurial import error
165 from mercurial import error
166 from mercurial import changegroup
166 from mercurial import changegroup
167 from mercurial import copies
167 from mercurial import copies
168 from mercurial import context
168 from mercurial import context
169 from mercurial import exchange
169 from mercurial import exchange
170 from mercurial import extensions
170 from mercurial import extensions
171 from mercurial import hg
171 from mercurial import hg
172 from mercurial import node
172 from mercurial import node
173 from mercurial import repair
173 from mercurial import repair
174 from mercurial import scmutil
174 from mercurial import scmutil
175 from mercurial import util
175 from mercurial import util
176 from mercurial import obsolete
176 from mercurial import obsolete
177 from mercurial import merge as mergemod
177 from mercurial import merge as mergemod
178 from mercurial.lock import release
178 from mercurial.lock import release
179 from mercurial.i18n import _
179 from mercurial.i18n import _
180
180
181 cmdtable = {}
181 cmdtable = {}
182 command = cmdutil.command(cmdtable)
182 command = cmdutil.command(cmdtable)
183
183
184 # Note for extension authors: ONLY specify testedwith = 'internal' for
185 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
186 # specify the version(s) of Mercurial they are tested with, or
187 # leave the attribute unspecified.
184 testedwith = 'internal'
188 testedwith = 'internal'
185
189
186 # i18n: command names and abbreviations must remain untranslated
190 # i18n: command names and abbreviations must remain untranslated
187 editcomment = _("""# Edit history between %s and %s
191 editcomment = _("""# Edit history between %s and %s
188 #
192 #
189 # Commits are listed from least to most recent
193 # Commits are listed from least to most recent
190 #
194 #
191 # Commands:
195 # Commands:
192 # p, pick = use commit
196 # p, pick = use commit
193 # e, edit = use commit, but stop for amending
197 # e, edit = use commit, but stop for amending
194 # f, fold = use commit, but combine it with the one above
198 # f, fold = use commit, but combine it with the one above
195 # r, roll = like fold, but discard this commit's description
199 # r, roll = like fold, but discard this commit's description
196 # d, drop = remove commit from history
200 # d, drop = remove commit from history
197 # m, mess = edit message without changing commit content
201 # m, mess = edit message without changing commit content
198 #
202 #
199 """)
203 """)
200
204
201 class histeditstate(object):
205 class histeditstate(object):
202 def __init__(self, repo, parentctxnode=None, rules=None, keep=None,
206 def __init__(self, repo, parentctxnode=None, rules=None, keep=None,
203 topmost=None, replacements=None, lock=None, wlock=None):
207 topmost=None, replacements=None, lock=None, wlock=None):
204 self.repo = repo
208 self.repo = repo
205 self.rules = rules
209 self.rules = rules
206 self.keep = keep
210 self.keep = keep
207 self.topmost = topmost
211 self.topmost = topmost
208 self.parentctxnode = parentctxnode
212 self.parentctxnode = parentctxnode
209 self.lock = lock
213 self.lock = lock
210 self.wlock = wlock
214 self.wlock = wlock
211 self.backupfile = None
215 self.backupfile = None
212 if replacements is None:
216 if replacements is None:
213 self.replacements = []
217 self.replacements = []
214 else:
218 else:
215 self.replacements = replacements
219 self.replacements = replacements
216
220
217 def read(self):
221 def read(self):
218 """Load histedit state from disk and set fields appropriately."""
222 """Load histedit state from disk and set fields appropriately."""
219 try:
223 try:
220 fp = self.repo.vfs('histedit-state', 'r')
224 fp = self.repo.vfs('histedit-state', 'r')
221 except IOError, err:
225 except IOError, err:
222 if err.errno != errno.ENOENT:
226 if err.errno != errno.ENOENT:
223 raise
227 raise
224 raise util.Abort(_('no histedit in progress'))
228 raise util.Abort(_('no histedit in progress'))
225
229
226 try:
230 try:
227 data = pickle.load(fp)
231 data = pickle.load(fp)
228 parentctxnode, rules, keep, topmost, replacements = data
232 parentctxnode, rules, keep, topmost, replacements = data
229 backupfile = None
233 backupfile = None
230 except pickle.UnpicklingError:
234 except pickle.UnpicklingError:
231 data = self._load()
235 data = self._load()
232 parentctxnode, rules, keep, topmost, replacements, backupfile = data
236 parentctxnode, rules, keep, topmost, replacements, backupfile = data
233
237
234 self.parentctxnode = parentctxnode
238 self.parentctxnode = parentctxnode
235 self.rules = rules
239 self.rules = rules
236 self.keep = keep
240 self.keep = keep
237 self.topmost = topmost
241 self.topmost = topmost
238 self.replacements = replacements
242 self.replacements = replacements
239 self.backupfile = backupfile
243 self.backupfile = backupfile
240
244
241 def write(self):
245 def write(self):
242 fp = self.repo.vfs('histedit-state', 'w')
246 fp = self.repo.vfs('histedit-state', 'w')
243 fp.write('v1\n')
247 fp.write('v1\n')
244 fp.write('%s\n' % node.hex(self.parentctxnode))
248 fp.write('%s\n' % node.hex(self.parentctxnode))
245 fp.write('%s\n' % node.hex(self.topmost))
249 fp.write('%s\n' % node.hex(self.topmost))
246 fp.write('%s\n' % self.keep)
250 fp.write('%s\n' % self.keep)
247 fp.write('%d\n' % len(self.rules))
251 fp.write('%d\n' % len(self.rules))
248 for rule in self.rules:
252 for rule in self.rules:
249 fp.write('%s\n' % rule[0]) # action
253 fp.write('%s\n' % rule[0]) # action
250 fp.write('%s\n' % rule[1]) # remainder
254 fp.write('%s\n' % rule[1]) # remainder
251 fp.write('%d\n' % len(self.replacements))
255 fp.write('%d\n' % len(self.replacements))
252 for replacement in self.replacements:
256 for replacement in self.replacements:
253 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
257 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
254 for r in replacement[1])))
258 for r in replacement[1])))
255 backupfile = self.backupfile
259 backupfile = self.backupfile
256 if not backupfile:
260 if not backupfile:
257 backupfile = ''
261 backupfile = ''
258 fp.write('%s\n' % backupfile)
262 fp.write('%s\n' % backupfile)
259 fp.close()
263 fp.close()
260
264
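# (Editor's sketch, not in the original file: as emitted by write() above and
# parsed by _load() below, a v1 histedit-state file is a line-oriented record,
# roughly:
#
#     v1
#     <40-hex parentctxnode>
#     <40-hex topmost>
#     False                  <- keep
#     1                      <- number of rules
#     pick                   <- rule action
#     c561b4e977df Add beta  <- rule remainder
#     0                      <- number of replacements
#                            <- backupfile name, '' if none
#
# the concrete values are illustrative only.)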
261 def _load(self):
265 def _load(self):
262 fp = self.repo.vfs('histedit-state', 'r')
266 fp = self.repo.vfs('histedit-state', 'r')
263 lines = [l[:-1] for l in fp.readlines()]
267 lines = [l[:-1] for l in fp.readlines()]
264
268
265 index = 0
269 index = 0
266 lines[index] # version number
270 lines[index] # version number
267 index += 1
271 index += 1
268
272
269 parentctxnode = node.bin(lines[index])
273 parentctxnode = node.bin(lines[index])
270 index += 1
274 index += 1
271
275
272 topmost = node.bin(lines[index])
276 topmost = node.bin(lines[index])
273 index += 1
277 index += 1
274
278
275 keep = lines[index] == 'True'
279 keep = lines[index] == 'True'
276 index += 1
280 index += 1
277
281
278 # Rules
282 # Rules
279 rules = []
283 rules = []
280 rulelen = int(lines[index])
284 rulelen = int(lines[index])
281 index += 1
285 index += 1
282 for i in xrange(rulelen):
286 for i in xrange(rulelen):
283 ruleaction = lines[index]
287 ruleaction = lines[index]
284 index += 1
288 index += 1
285 rule = lines[index]
289 rule = lines[index]
286 index += 1
290 index += 1
287 rules.append((ruleaction, rule))
291 rules.append((ruleaction, rule))
288
292
289 # Replacements
293 # Replacements
290 replacements = []
294 replacements = []
291 replacementlen = int(lines[index])
295 replacementlen = int(lines[index])
292 index += 1
296 index += 1
293 for i in xrange(replacementlen):
297 for i in xrange(replacementlen):
294 replacement = lines[index]
298 replacement = lines[index]
295 original = node.bin(replacement[:40])
299 original = node.bin(replacement[:40])
296 succ = [node.bin(replacement[i:i + 40]) for i in
300 succ = [node.bin(replacement[i:i + 40]) for i in
297 range(40, len(replacement), 40)]
301 range(40, len(replacement), 40)]
298 replacements.append((original, succ))
302 replacements.append((original, succ))
299 index += 1
303 index += 1
300
304
301 backupfile = lines[index]
305 backupfile = lines[index]
302 index += 1
306 index += 1
303
307
304 fp.close()
308 fp.close()
305
309
306 return parentctxnode, rules, keep, topmost, replacements, backupfile
310 return parentctxnode, rules, keep, topmost, replacements, backupfile
307
311
308 def clear(self):
312 def clear(self):
309 self.repo.vfs.unlink('histedit-state')
313 self.repo.vfs.unlink('histedit-state')
310
314
311 class histeditaction(object):
315 class histeditaction(object):
312 def __init__(self, state, node):
316 def __init__(self, state, node):
313 self.state = state
317 self.state = state
314 self.repo = state.repo
318 self.repo = state.repo
315 self.node = node
319 self.node = node
316
320
317 @classmethod
321 @classmethod
318 def fromrule(cls, state, rule):
322 def fromrule(cls, state, rule):
319 """Parses the given rule, returning an instance of the histeditaction.
323 """Parses the given rule, returning an instance of the histeditaction.
320 """
324 """
321 repo = state.repo
325 repo = state.repo
322 rulehash = rule.strip().split(' ', 1)[0]
326 rulehash = rule.strip().split(' ', 1)[0]
323 try:
327 try:
324 node = repo[rulehash].node()
328 node = repo[rulehash].node()
325 except error.RepoError:
329 except error.RepoError:
326 raise util.Abort(_('unknown changeset %s listed') % rulehash[:12])
330 raise util.Abort(_('unknown changeset %s listed') % rulehash[:12])
327 return cls(state, node)
331 return cls(state, node)
328
332
329 def run(self):
333 def run(self):
330 """Runs the action. The default behavior is simply apply the action's
334 """Runs the action. The default behavior is simply apply the action's
331 rulectx onto the current parentctx."""
335 rulectx onto the current parentctx."""
332 self.applychange()
336 self.applychange()
333 self.continuedirty()
337 self.continuedirty()
334 return self.continueclean()
338 return self.continueclean()
335
339
336 def applychange(self):
340 def applychange(self):
337 """Applies the changes from this action's rulectx onto the current
341 """Applies the changes from this action's rulectx onto the current
338 parentctx, but does not commit them."""
342 parentctx, but does not commit them."""
339 repo = self.repo
343 repo = self.repo
340 rulectx = repo[self.node]
344 rulectx = repo[self.node]
341 hg.update(repo, self.state.parentctxnode)
345 hg.update(repo, self.state.parentctxnode)
342 stats = applychanges(repo.ui, repo, rulectx, {})
346 stats = applychanges(repo.ui, repo, rulectx, {})
343 if stats and stats[3] > 0:
347 if stats and stats[3] > 0:
344 raise error.InterventionRequired(_('Fix up the change and run '
348 raise error.InterventionRequired(_('Fix up the change and run '
345 'hg histedit --continue'))
349 'hg histedit --continue'))
346
350
347 def continuedirty(self):
351 def continuedirty(self):
348 """Continues the action when changes have been applied to the working
352 """Continues the action when changes have been applied to the working
349 copy. The default behavior is to commit the dirty changes."""
353 copy. The default behavior is to commit the dirty changes."""
350 repo = self.repo
354 repo = self.repo
351 rulectx = repo[self.node]
355 rulectx = repo[self.node]
352
356
353 editor = self.commiteditor()
357 editor = self.commiteditor()
354 commit = commitfuncfor(repo, rulectx)
358 commit = commitfuncfor(repo, rulectx)
355
359
356 commit(text=rulectx.description(), user=rulectx.user(),
360 commit(text=rulectx.description(), user=rulectx.user(),
357 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
361 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
358
362
359 def commiteditor(self):
363 def commiteditor(self):
360 """The editor to be used to edit the commit message."""
364 """The editor to be used to edit the commit message."""
361 return False
365 return False
362
366
363 def continueclean(self):
367 def continueclean(self):
364 """Continues the action when the working copy is clean. The default
368 """Continues the action when the working copy is clean. The default
365 behavior is to accept the current commit as the new version of the
369 behavior is to accept the current commit as the new version of the
366 rulectx."""
370 rulectx."""
367 ctx = self.repo['.']
371 ctx = self.repo['.']
368 if ctx.node() == self.state.parentctxnode:
372 if ctx.node() == self.state.parentctxnode:
369 self.repo.ui.warn(_('%s: empty changeset\n') %
373 self.repo.ui.warn(_('%s: empty changeset\n') %
370 node.short(self.node))
374 node.short(self.node))
371 return ctx, [(self.node, tuple())]
375 return ctx, [(self.node, tuple())]
372 if ctx.node() == self.node:
376 if ctx.node() == self.node:
373 # Nothing changed
377 # Nothing changed
374 return ctx, []
378 return ctx, []
375 return ctx, [(self.node, (ctx.node(),))]
379 return ctx, [(self.node, (ctx.node(),))]
376
380
377 def commitfuncfor(repo, src):
381 def commitfuncfor(repo, src):
378 """Build a commit function for the replacement of <src>
382 """Build a commit function for the replacement of <src>
379
383
380 This function ensures we apply the same treatment to all changesets.
384 This function ensures we apply the same treatment to all changesets.
381
385
382 - Add a 'histedit_source' entry in extra.
386 - Add a 'histedit_source' entry in extra.
383
387
384 Note that fold has its own separate logic because its handling is a bit
388 Note that fold has its own separate logic because its handling is a bit
385 different and not easily factored out of the fold method.
389 different and not easily factored out of the fold method.
386 """
390 """
387 phasemin = src.phase()
391 phasemin = src.phase()
388 def commitfunc(**kwargs):
392 def commitfunc(**kwargs):
389 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
393 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
390 try:
394 try:
391 repo.ui.setconfig('phases', 'new-commit', phasemin,
395 repo.ui.setconfig('phases', 'new-commit', phasemin,
392 'histedit')
396 'histedit')
393 extra = kwargs.get('extra', {}).copy()
397 extra = kwargs.get('extra', {}).copy()
394 extra['histedit_source'] = src.hex()
398 extra['histedit_source'] = src.hex()
395 kwargs['extra'] = extra
399 kwargs['extra'] = extra
396 return repo.commit(**kwargs)
400 return repo.commit(**kwargs)
397 finally:
401 finally:
398 repo.ui.restoreconfig(phasebackup)
402 repo.ui.restoreconfig(phasebackup)
399 return commitfunc
403 return commitfunc
400
404
401 def applychanges(ui, repo, ctx, opts):
405 def applychanges(ui, repo, ctx, opts):
402 """Merge changeset from ctx (only) in the current working directory"""
406 """Merge changeset from ctx (only) in the current working directory"""
403 wcpar = repo.dirstate.parents()[0]
407 wcpar = repo.dirstate.parents()[0]
404 if ctx.p1().node() == wcpar:
408 if ctx.p1().node() == wcpar:
405 # edition ar "in place" we do not need to make any merge,
409 # edition ar "in place" we do not need to make any merge,
406 # just applies changes on parent for edition
410 # just applies changes on parent for edition
407 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
411 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
408 stats = None
412 stats = None
409 else:
413 else:
410 try:
414 try:
411 # ui.forcemerge is an internal variable, do not document
415 # ui.forcemerge is an internal variable, do not document
412 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
416 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
413 'histedit')
417 'histedit')
414 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
418 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
415 finally:
419 finally:
416 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
420 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
417 return stats
421 return stats
418
422
419 def collapse(repo, first, last, commitopts, skipprompt=False):
423 def collapse(repo, first, last, commitopts, skipprompt=False):
420 """collapse the set of revisions from first to last as new one.
424 """collapse the set of revisions from first to last as new one.
421
425
422 Expected commit options are:
426 Expected commit options are:
423 - message
427 - message
424 - date
428 - date
425 - username
429 - username
426 Commit message is edited in all cases.
430 Commit message is edited in all cases.
427
431
428 This function works in memory."""
432 This function works in memory."""
429 ctxs = list(repo.set('%d::%d', first, last))
433 ctxs = list(repo.set('%d::%d', first, last))
430 if not ctxs:
434 if not ctxs:
431 return None
435 return None
432 base = first.parents()[0]
436 base = first.parents()[0]
433
437
434 # commit a new version of the old changeset, including the update
438 # commit a new version of the old changeset, including the update
435 # collect all files which might be affected
439 # collect all files which might be affected
436 files = set()
440 files = set()
437 for ctx in ctxs:
441 for ctx in ctxs:
438 files.update(ctx.files())
442 files.update(ctx.files())
439
443
440 # Recompute copies (avoid recording a -> b -> a)
444 # Recompute copies (avoid recording a -> b -> a)
441 copied = copies.pathcopies(base, last)
445 copied = copies.pathcopies(base, last)
442
446
443 # prune files which were reverted by the updates
447 # prune files which were reverted by the updates
444 def samefile(f):
448 def samefile(f):
445 if f in last.manifest():
449 if f in last.manifest():
446 a = last.filectx(f)
450 a = last.filectx(f)
447 if f in base.manifest():
451 if f in base.manifest():
448 b = base.filectx(f)
452 b = base.filectx(f)
449 return (a.data() == b.data()
453 return (a.data() == b.data()
450 and a.flags() == b.flags())
454 and a.flags() == b.flags())
451 else:
455 else:
452 return False
456 return False
453 else:
457 else:
454 return f not in base.manifest()
458 return f not in base.manifest()
455 files = [f for f in files if not samefile(f)]
459 files = [f for f in files if not samefile(f)]
456 # commit version of these files as defined by head
460 # commit version of these files as defined by head
457 headmf = last.manifest()
461 headmf = last.manifest()
458 def filectxfn(repo, ctx, path):
462 def filectxfn(repo, ctx, path):
459 if path in headmf:
463 if path in headmf:
460 fctx = last[path]
464 fctx = last[path]
461 flags = fctx.flags()
465 flags = fctx.flags()
462 mctx = context.memfilectx(repo,
466 mctx = context.memfilectx(repo,
463 fctx.path(), fctx.data(),
467 fctx.path(), fctx.data(),
464 islink='l' in flags,
468 islink='l' in flags,
465 isexec='x' in flags,
469 isexec='x' in flags,
466 copied=copied.get(path))
470 copied=copied.get(path))
467 return mctx
471 return mctx
468 return None
472 return None
469
473
470 if commitopts.get('message'):
474 if commitopts.get('message'):
471 message = commitopts['message']
475 message = commitopts['message']
472 else:
476 else:
473 message = first.description()
477 message = first.description()
474 user = commitopts.get('user')
478 user = commitopts.get('user')
475 date = commitopts.get('date')
479 date = commitopts.get('date')
476 extra = commitopts.get('extra')
480 extra = commitopts.get('extra')
477
481
478 parents = (first.p1().node(), first.p2().node())
482 parents = (first.p1().node(), first.p2().node())
479 editor = None
483 editor = None
480 if not skipprompt:
484 if not skipprompt:
481 editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
485 editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
482 new = context.memctx(repo,
486 new = context.memctx(repo,
483 parents=parents,
487 parents=parents,
484 text=message,
488 text=message,
485 files=files,
489 files=files,
486 filectxfn=filectxfn,
490 filectxfn=filectxfn,
487 user=user,
491 user=user,
488 date=date,
492 date=date,
489 extra=extra,
493 extra=extra,
490 editor=editor)
494 editor=editor)
491 return repo.commitctx(new)
495 return repo.commitctx(new)
492
496
493 class pick(histeditaction):
497 class pick(histeditaction):
494 def run(self):
498 def run(self):
495 rulectx = self.repo[self.node]
499 rulectx = self.repo[self.node]
496 if rulectx.parents()[0].node() == self.state.parentctxnode:
500 if rulectx.parents()[0].node() == self.state.parentctxnode:
497 self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
501 self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
498 return rulectx, []
502 return rulectx, []
499
503
500 return super(pick, self).run()
504 return super(pick, self).run()
501
505
502 class edit(histeditaction):
506 class edit(histeditaction):
503 def run(self):
507 def run(self):
504 repo = self.repo
508 repo = self.repo
505 rulectx = repo[self.node]
509 rulectx = repo[self.node]
506 hg.update(repo, self.state.parentctxnode)
510 hg.update(repo, self.state.parentctxnode)
507 applychanges(repo.ui, repo, rulectx, {})
511 applychanges(repo.ui, repo, rulectx, {})
508 raise error.InterventionRequired(
512 raise error.InterventionRequired(
509 _('Make changes as needed, you may commit or record as needed '
513 _('Make changes as needed, you may commit or record as needed '
510 'now.\nWhen you are finished, run hg histedit --continue to '
514 'now.\nWhen you are finished, run hg histedit --continue to '
511 'resume.'))
515 'resume.'))
512
516
513 def commiteditor(self):
517 def commiteditor(self):
514 return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
518 return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
515
519
516 class fold(histeditaction):
520 class fold(histeditaction):
517 def continuedirty(self):
521 def continuedirty(self):
518 repo = self.repo
522 repo = self.repo
519 rulectx = repo[self.node]
523 rulectx = repo[self.node]
520
524
521 commit = commitfuncfor(repo, rulectx)
525 commit = commitfuncfor(repo, rulectx)
522 commit(text='fold-temp-revision %s' % node.short(self.node),
526 commit(text='fold-temp-revision %s' % node.short(self.node),
523 user=rulectx.user(), date=rulectx.date(),
527 user=rulectx.user(), date=rulectx.date(),
524 extra=rulectx.extra())
528 extra=rulectx.extra())
525
529
526 def continueclean(self):
530 def continueclean(self):
527 repo = self.repo
531 repo = self.repo
528 ctx = repo['.']
532 ctx = repo['.']
529 rulectx = repo[self.node]
533 rulectx = repo[self.node]
530 parentctxnode = self.state.parentctxnode
534 parentctxnode = self.state.parentctxnode
531 if ctx.node() == parentctxnode:
535 if ctx.node() == parentctxnode:
532 repo.ui.warn(_('%s: empty changeset\n') %
536 repo.ui.warn(_('%s: empty changeset\n') %
533 node.short(self.node))
537 node.short(self.node))
534 return ctx, [(self.node, (parentctxnode,))]
538 return ctx, [(self.node, (parentctxnode,))]
535
539
536 parentctx = repo[parentctxnode]
540 parentctx = repo[parentctxnode]
537 newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
541 newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
538 parentctx))
542 parentctx))
539 if not newcommits:
543 if not newcommits:
540 repo.ui.warn(_('%s: cannot fold - working copy is not a '
544 repo.ui.warn(_('%s: cannot fold - working copy is not a '
541 'descendant of previous commit %s\n') %
545 'descendant of previous commit %s\n') %
542 (node.short(self.node), node.short(parentctxnode)))
546 (node.short(self.node), node.short(parentctxnode)))
543 return ctx, [(self.node, (ctx.node(),))]
547 return ctx, [(self.node, (ctx.node(),))]
544
548
545 middlecommits = newcommits.copy()
549 middlecommits = newcommits.copy()
546 middlecommits.discard(ctx.node())
550 middlecommits.discard(ctx.node())
547
551
548 return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
552 return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
549 middlecommits)
553 middlecommits)
550
554
551 def skipprompt(self):
555 def skipprompt(self):
552 return False
556 return False
553
557
554 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
558 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
555 parent = ctx.parents()[0].node()
559 parent = ctx.parents()[0].node()
556 hg.update(repo, parent)
560 hg.update(repo, parent)
557 ### prepare new commit data
561 ### prepare new commit data
558 commitopts = {}
562 commitopts = {}
559 commitopts['user'] = ctx.user()
563 commitopts['user'] = ctx.user()
560 # commit message
564 # commit message
561 if self.skipprompt():
565 if self.skipprompt():
562 newmessage = ctx.description()
566 newmessage = ctx.description()
563 else:
567 else:
564 newmessage = '\n***\n'.join(
568 newmessage = '\n***\n'.join(
565 [ctx.description()] +
569 [ctx.description()] +
566 [repo[r].description() for r in internalchanges] +
570 [repo[r].description() for r in internalchanges] +
567 [oldctx.description()]) + '\n'
571 [oldctx.description()]) + '\n'
568 commitopts['message'] = newmessage
572 commitopts['message'] = newmessage
569 # date
573 # date
570 commitopts['date'] = max(ctx.date(), oldctx.date())
574 commitopts['date'] = max(ctx.date(), oldctx.date())
571 extra = ctx.extra().copy()
575 extra = ctx.extra().copy()
572 # histedit_source
576 # histedit_source
573 # note: ctx is likely a temporary commit but that's the best we can do
577 # note: ctx is likely a temporary commit but that's the best we can do
574 # here. This is sufficient to solve issue3681 anyway.
578 # here. This is sufficient to solve issue3681 anyway.
575 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
579 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
576 commitopts['extra'] = extra
580 commitopts['extra'] = extra
577 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
581 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
578 try:
582 try:
579 phasemin = max(ctx.phase(), oldctx.phase())
583 phasemin = max(ctx.phase(), oldctx.phase())
580 repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
584 repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
581 n = collapse(repo, ctx, repo[newnode], commitopts,
585 n = collapse(repo, ctx, repo[newnode], commitopts,
582 skipprompt=self.skipprompt())
586 skipprompt=self.skipprompt())
583 finally:
587 finally:
584 repo.ui.restoreconfig(phasebackup)
588 repo.ui.restoreconfig(phasebackup)
585 if n is None:
589 if n is None:
586 return ctx, []
590 return ctx, []
587 hg.update(repo, n)
591 hg.update(repo, n)
588 replacements = [(oldctx.node(), (newnode,)),
592 replacements = [(oldctx.node(), (newnode,)),
589 (ctx.node(), (n,)),
593 (ctx.node(), (n,)),
590 (newnode, (n,)),
594 (newnode, (n,)),
591 ]
595 ]
592 for ich in internalchanges:
596 for ich in internalchanges:
593 replacements.append((ich, (n,)))
597 replacements.append((ich, (n,)))
594 return repo[n], replacements
598 return repo[n], replacements
595
599
596 class rollup(fold):
600 class rollup(fold):
597 def skipprompt(self):
601 def skipprompt(self):
598 return True
602 return True
599
603
600 class drop(histeditaction):
604 class drop(histeditaction):
601 def run(self):
605 def run(self):
602 parentctx = self.repo[self.state.parentctxnode]
606 parentctx = self.repo[self.state.parentctxnode]
603 return parentctx, [(self.node, tuple())]
607 return parentctx, [(self.node, tuple())]
604
608
605 class message(histeditaction):
609 class message(histeditaction):
606 def commiteditor(self):
610 def commiteditor(self):
607 return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
611 return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
608
612
609 def findoutgoing(ui, repo, remote=None, force=False, opts={}):
613 def findoutgoing(ui, repo, remote=None, force=False, opts={}):
610 """utility function to find the first outgoing changeset
614 """utility function to find the first outgoing changeset
611
615
612 Used by initialisation code"""
616 Used by initialisation code"""
613 dest = ui.expandpath(remote or 'default-push', remote or 'default')
617 dest = ui.expandpath(remote or 'default-push', remote or 'default')
614 dest, revs = hg.parseurl(dest, None)[:2]
618 dest, revs = hg.parseurl(dest, None)[:2]
615 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
619 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
616
620
617 revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
621 revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
618 other = hg.peer(repo, opts, dest)
622 other = hg.peer(repo, opts, dest)
619
623
620 if revs:
624 if revs:
621 revs = [repo.lookup(rev) for rev in revs]
625 revs = [repo.lookup(rev) for rev in revs]
622
626
623 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
627 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
624 if not outgoing.missing:
628 if not outgoing.missing:
625 raise util.Abort(_('no outgoing ancestors'))
629 raise util.Abort(_('no outgoing ancestors'))
626 roots = list(repo.revs("roots(%ln)", outgoing.missing))
630 roots = list(repo.revs("roots(%ln)", outgoing.missing))
627 if 1 < len(roots):
631 if 1 < len(roots):
628 msg = _('there are ambiguous outgoing revisions')
632 msg = _('there are ambiguous outgoing revisions')
629 hint = _('see "hg help histedit" for more detail')
633 hint = _('see "hg help histedit" for more detail')
630 raise util.Abort(msg, hint=hint)
634 raise util.Abort(msg, hint=hint)
631 return repo.lookup(roots[0])
635 return repo.lookup(roots[0])
632
636
633 actiontable = {'p': pick,
637 actiontable = {'p': pick,
634 'pick': pick,
638 'pick': pick,
635 'e': edit,
639 'e': edit,
636 'edit': edit,
640 'edit': edit,
637 'f': fold,
641 'f': fold,
638 'fold': fold,
642 'fold': fold,
639 'r': rollup,
643 'r': rollup,
640 'roll': rollup,
644 'roll': rollup,
641 'd': drop,
645 'd': drop,
642 'drop': drop,
646 'drop': drop,
643 'm': message,
647 'm': message,
644 'mess': message,
648 'mess': message,
645 }
649 }
646
650
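For orientation, the verbs in the actiontable above are exactly what a histedit plan contains: one <action> <hash> <rev> <summary> line per changeset, in the format produced by makedesc() later in this file. A hypothetical plan (hashes and summaries invented for illustration) might look like:

    pick 252798d6b5d5 7 add beta
    fold 6e0cf7a95a49 8 fix typo in beta
    roll b1f3c9a7c1e0 9 more beta fixups
    drop 0e9c3a44b1de 10 temporary debugging hack
    mess 9f8e7d6c5b4a 11 reword release notes

Such a plan is either edited interactively via ruleeditor() or supplied as a file through the --commands option of the command defined next.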
647 @command('histedit',
651 @command('histedit',
648 [('', 'commands', '',
652 [('', 'commands', '',
649 _('read history edits from the specified file'), _('FILE')),
653 _('read history edits from the specified file'), _('FILE')),
650 ('c', 'continue', False, _('continue an edit already in progress')),
654 ('c', 'continue', False, _('continue an edit already in progress')),
651 ('', 'edit-plan', False, _('edit remaining actions list')),
655 ('', 'edit-plan', False, _('edit remaining actions list')),
652 ('k', 'keep', False,
656 ('k', 'keep', False,
653 _("don't strip old nodes after edit is complete")),
657 _("don't strip old nodes after edit is complete")),
654 ('', 'abort', False, _('abort an edit in progress')),
658 ('', 'abort', False, _('abort an edit in progress')),
655 ('o', 'outgoing', False, _('changesets not found in destination')),
659 ('o', 'outgoing', False, _('changesets not found in destination')),
656 ('f', 'force', False,
660 ('f', 'force', False,
657 _('force outgoing even for unrelated repositories')),
661 _('force outgoing even for unrelated repositories')),
658 ('r', 'rev', [], _('first revision to be edited'), _('REV'))],
662 ('r', 'rev', [], _('first revision to be edited'), _('REV'))],
659 _("ANCESTOR | --outgoing [URL]"))
663 _("ANCESTOR | --outgoing [URL]"))
660 def histedit(ui, repo, *freeargs, **opts):
664 def histedit(ui, repo, *freeargs, **opts):
661 """interactively edit changeset history
665 """interactively edit changeset history
662
666
663 This command edits changesets between ANCESTOR and the parent of
667 This command edits changesets between ANCESTOR and the parent of
664 the working directory.
668 the working directory.
665
669
666 With --outgoing, this edits changesets not found in the
670 With --outgoing, this edits changesets not found in the
667 destination repository. If URL of the destination is omitted, the
671 destination repository. If URL of the destination is omitted, the
668 'default-push' (or 'default') path will be used.
672 'default-push' (or 'default') path will be used.
669
673
670 For safety, this command also aborts if there are ambiguous
674 For safety, this command also aborts if there are ambiguous
671 outgoing revisions which may confuse users: for example, when
675 outgoing revisions which may confuse users: for example, when
672 multiple branches contain outgoing revisions.
676 multiple branches contain outgoing revisions.
673
677
674 Use "min(outgoing() and ::.)" or similar revset specification
678 Use "min(outgoing() and ::.)" or similar revset specification
675 instead of --outgoing to specify the edit target revision exactly in
679 instead of --outgoing to specify the edit target revision exactly in
676 such an ambiguous situation. See :hg:`help revsets` for details about
680 such an ambiguous situation. See :hg:`help revsets` for details about
677 selecting revisions.
681 selecting revisions.
678
682
679 Returns 0 on success, 1 if user intervention is required (not only
683 Returns 0 on success, 1 if user intervention is required (not only
680 for intentional "edit" command, but also for resolving unexpected
684 for intentional "edit" command, but also for resolving unexpected
681 conflicts).
685 conflicts).
682 """
686 """
683 state = histeditstate(repo)
687 state = histeditstate(repo)
684 try:
688 try:
685 state.wlock = repo.wlock()
689 state.wlock = repo.wlock()
686 state.lock = repo.lock()
690 state.lock = repo.lock()
687 _histedit(ui, repo, state, *freeargs, **opts)
691 _histedit(ui, repo, state, *freeargs, **opts)
688 finally:
692 finally:
689 release(state.lock, state.wlock)
693 release(state.lock, state.wlock)
690
694
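A few illustrative invocations of the command defined above; the revset is the one suggested in the docstring, and everything here is an example rather than part of the source:

    hg histedit "min(outgoing() and ::.)"   # choose the edit root explicitly
    hg histedit --outgoing                  # edit changesets not in default-push
    hg histedit --continue                  # resume after an interruption
    hg histedit --abort                     # undo the edit in progress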
691 def _histedit(ui, repo, state, *freeargs, **opts):
695 def _histedit(ui, repo, state, *freeargs, **opts):
692 # TODO only abort if we try to histedit mq patches, not just
696 # TODO only abort if we try to histedit mq patches, not just
693 # blanket if mq patches are applied somewhere
697 # blanket if mq patches are applied somewhere
694 mq = getattr(repo, 'mq', None)
698 mq = getattr(repo, 'mq', None)
695 if mq and mq.applied:
699 if mq and mq.applied:
696 raise util.Abort(_('source has mq patches applied'))
700 raise util.Abort(_('source has mq patches applied'))
697
701
698 # basic argument incompatibility processing
702 # basic argument incompatibility processing
699 outg = opts.get('outgoing')
703 outg = opts.get('outgoing')
700 cont = opts.get('continue')
704 cont = opts.get('continue')
701 editplan = opts.get('edit_plan')
705 editplan = opts.get('edit_plan')
702 abort = opts.get('abort')
706 abort = opts.get('abort')
703 force = opts.get('force')
707 force = opts.get('force')
704 rules = opts.get('commands', '')
708 rules = opts.get('commands', '')
705 revs = opts.get('rev', [])
709 revs = opts.get('rev', [])
706 goal = 'new' # This invocation's goal: one of new, continue, abort
710 goal = 'new' # This invocation's goal: one of new, continue, abort
707 if force and not outg:
711 if force and not outg:
708 raise util.Abort(_('--force only allowed with --outgoing'))
712 raise util.Abort(_('--force only allowed with --outgoing'))
709 if cont:
713 if cont:
710 if any((outg, abort, revs, freeargs, rules, editplan)):
714 if any((outg, abort, revs, freeargs, rules, editplan)):
711 raise util.Abort(_('no arguments allowed with --continue'))
715 raise util.Abort(_('no arguments allowed with --continue'))
712 goal = 'continue'
716 goal = 'continue'
713 elif abort:
717 elif abort:
714 if any((outg, revs, freeargs, rules, editplan)):
718 if any((outg, revs, freeargs, rules, editplan)):
715 raise util.Abort(_('no arguments allowed with --abort'))
719 raise util.Abort(_('no arguments allowed with --abort'))
716 goal = 'abort'
720 goal = 'abort'
717 elif editplan:
721 elif editplan:
718 if any((outg, revs, freeargs)):
722 if any((outg, revs, freeargs)):
719 raise util.Abort(_('only --commands argument allowed with '
723 raise util.Abort(_('only --commands argument allowed with '
720 '--edit-plan'))
724 '--edit-plan'))
721 goal = 'edit-plan'
725 goal = 'edit-plan'
722 else:
726 else:
723 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
727 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
724 raise util.Abort(_('history edit already in progress, try '
728 raise util.Abort(_('history edit already in progress, try '
725 '--continue or --abort'))
729 '--continue or --abort'))
726 if outg:
730 if outg:
727 if revs:
731 if revs:
728 raise util.Abort(_('no revisions allowed with --outgoing'))
732 raise util.Abort(_('no revisions allowed with --outgoing'))
729 if len(freeargs) > 1:
733 if len(freeargs) > 1:
730 raise util.Abort(
734 raise util.Abort(
731 _('only one repo argument allowed with --outgoing'))
735 _('only one repo argument allowed with --outgoing'))
732 else:
736 else:
733 revs.extend(freeargs)
737 revs.extend(freeargs)
734 if len(revs) == 0:
738 if len(revs) == 0:
735 histeditdefault = ui.config('histedit', 'defaultrev')
739 histeditdefault = ui.config('histedit', 'defaultrev')
736 if histeditdefault:
740 if histeditdefault:
737 revs.append(histeditdefault)
741 revs.append(histeditdefault)
738 if len(revs) != 1:
742 if len(revs) != 1:
739 raise util.Abort(
743 raise util.Abort(
740 _('histedit requires exactly one ancestor revision'))
744 _('histedit requires exactly one ancestor revision'))
741
745
742
746
743 replacements = []
747 replacements = []
744 keep = opts.get('keep', False)
748 keep = opts.get('keep', False)
745
749
746 # rebuild state
750 # rebuild state
747 if goal == 'continue':
751 if goal == 'continue':
748 state.read()
752 state.read()
749 state = bootstrapcontinue(ui, state, opts)
753 state = bootstrapcontinue(ui, state, opts)
750 elif goal == 'edit-plan':
754 elif goal == 'edit-plan':
751 state.read()
755 state.read()
752 if not rules:
756 if not rules:
753 comment = editcomment % (node.short(state.parentctxnode),
757 comment = editcomment % (node.short(state.parentctxnode),
754 node.short(state.topmost))
758 node.short(state.topmost))
755 rules = ruleeditor(repo, ui, state.rules, comment)
759 rules = ruleeditor(repo, ui, state.rules, comment)
756 else:
760 else:
757 if rules == '-':
761 if rules == '-':
758 f = sys.stdin
762 f = sys.stdin
759 else:
763 else:
760 f = open(rules)
764 f = open(rules)
761 rules = f.read()
765 rules = f.read()
762 f.close()
766 f.close()
763 rules = [l for l in (r.strip() for r in rules.splitlines())
767 rules = [l for l in (r.strip() for r in rules.splitlines())
764 if l and not l.startswith('#')]
768 if l and not l.startswith('#')]
765 rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules])
769 rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules])
766 state.rules = rules
770 state.rules = rules
767 state.write()
771 state.write()
768 return
772 return
769 elif goal == 'abort':
773 elif goal == 'abort':
770 state.read()
774 state.read()
771 mapping, tmpnodes, leafs, _ntm = processreplacement(state)
775 mapping, tmpnodes, leafs, _ntm = processreplacement(state)
772 ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
776 ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
773
777
774 # Recover our old commits if necessary
778 # Recover our old commits if necessary
775 if not state.topmost in repo and state.backupfile:
779 if not state.topmost in repo and state.backupfile:
776 backupfile = repo.join(state.backupfile)
780 backupfile = repo.join(state.backupfile)
777 f = hg.openpath(ui, backupfile)
781 f = hg.openpath(ui, backupfile)
778 gen = exchange.readbundle(ui, f, backupfile)
782 gen = exchange.readbundle(ui, f, backupfile)
779 changegroup.addchangegroup(repo, gen, 'histedit',
783 changegroup.addchangegroup(repo, gen, 'histedit',
780 'bundle:' + backupfile)
784 'bundle:' + backupfile)
781 os.remove(backupfile)
785 os.remove(backupfile)
782
786
783 # check whether we should update away
787 # check whether we should update away
784 parentnodes = [c.node() for c in repo[None].parents()]
788 parentnodes = [c.node() for c in repo[None].parents()]
785 for n in leafs | set([state.parentctxnode]):
789 for n in leafs | set([state.parentctxnode]):
786 if n in parentnodes:
790 if n in parentnodes:
787 hg.clean(repo, state.topmost)
791 hg.clean(repo, state.topmost)
788 break
792 break
789 else:
793 else:
790 pass
794 pass
791 cleanupnode(ui, repo, 'created', tmpnodes)
795 cleanupnode(ui, repo, 'created', tmpnodes)
792 cleanupnode(ui, repo, 'temp', leafs)
796 cleanupnode(ui, repo, 'temp', leafs)
793 state.clear()
797 state.clear()
794 return
798 return
795 else:
799 else:
796 cmdutil.checkunfinished(repo)
800 cmdutil.checkunfinished(repo)
797 cmdutil.bailifchanged(repo)
801 cmdutil.bailifchanged(repo)
798
802
799 topmost, empty = repo.dirstate.parents()
803 topmost, empty = repo.dirstate.parents()
800 if outg:
804 if outg:
801 if freeargs:
805 if freeargs:
802 remote = freeargs[0]
806 remote = freeargs[0]
803 else:
807 else:
804 remote = None
808 remote = None
805 root = findoutgoing(ui, repo, remote, force, opts)
809 root = findoutgoing(ui, repo, remote, force, opts)
806 else:
810 else:
807 rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
811 rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
808 if len(rr) != 1:
812 if len(rr) != 1:
809 raise util.Abort(_('The specified revisions must have '
813 raise util.Abort(_('The specified revisions must have '
810 'exactly one common root'))
814 'exactly one common root'))
811 root = rr[0].node()
815 root = rr[0].node()
812
816
813 revs = between(repo, root, topmost, keep)
817 revs = between(repo, root, topmost, keep)
814 if not revs:
818 if not revs:
815 raise util.Abort(_('%s is not an ancestor of working directory') %
819 raise util.Abort(_('%s is not an ancestor of working directory') %
816 node.short(root))
820 node.short(root))
817
821
818 ctxs = [repo[r] for r in revs]
822 ctxs = [repo[r] for r in revs]
819 if not rules:
823 if not rules:
820 comment = editcomment % (node.short(root), node.short(topmost))
824 comment = editcomment % (node.short(root), node.short(topmost))
821 rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment)
825 rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment)
822 else:
826 else:
823 if rules == '-':
827 if rules == '-':
824 f = sys.stdin
828 f = sys.stdin
825 else:
829 else:
826 f = open(rules)
830 f = open(rules)
827 rules = f.read()
831 rules = f.read()
828 f.close()
832 f.close()
829 rules = [l for l in (r.strip() for r in rules.splitlines())
833 rules = [l for l in (r.strip() for r in rules.splitlines())
830 if l and not l.startswith('#')]
834 if l and not l.startswith('#')]
831 rules = verifyrules(rules, repo, ctxs)
835 rules = verifyrules(rules, repo, ctxs)
832
836
833 parentctxnode = repo[root].parents()[0].node()
837 parentctxnode = repo[root].parents()[0].node()
834
838
835 state.parentctxnode = parentctxnode
839 state.parentctxnode = parentctxnode
836 state.rules = rules
840 state.rules = rules
837 state.keep = keep
841 state.keep = keep
838 state.topmost = topmost
842 state.topmost = topmost
839 state.replacements = replacements
843 state.replacements = replacements
840
844
841 # Create a backup so we can always abort completely.
845 # Create a backup so we can always abort completely.
842 backupfile = None
846 backupfile = None
843 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
847 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
844 backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
848 backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
845 'histedit')
849 'histedit')
846 state.backupfile = backupfile
850 state.backupfile = backupfile
847
851
848 while state.rules:
852 while state.rules:
849 state.write()
853 state.write()
850 action, ha = state.rules.pop(0)
854 action, ha = state.rules.pop(0)
851 ui.debug('histedit: processing %s %s\n' % (action, ha[:12]))
855 ui.debug('histedit: processing %s %s\n' % (action, ha[:12]))
852 actobj = actiontable[action].fromrule(state, ha)
856 actobj = actiontable[action].fromrule(state, ha)
853 parentctx, replacement_ = actobj.run()
857 parentctx, replacement_ = actobj.run()
854 state.parentctxnode = parentctx.node()
858 state.parentctxnode = parentctx.node()
855 state.replacements.extend(replacement_)
859 state.replacements.extend(replacement_)
856 state.write()
860 state.write()
857
861
858 hg.update(repo, state.parentctxnode)
862 hg.update(repo, state.parentctxnode)
859
863
860 mapping, tmpnodes, created, ntm = processreplacement(state)
864 mapping, tmpnodes, created, ntm = processreplacement(state)
861 if mapping:
865 if mapping:
862 for prec, succs in mapping.iteritems():
866 for prec, succs in mapping.iteritems():
863 if not succs:
867 if not succs:
864 ui.debug('histedit: %s is dropped\n' % node.short(prec))
868 ui.debug('histedit: %s is dropped\n' % node.short(prec))
865 else:
869 else:
866 ui.debug('histedit: %s is replaced by %s\n' % (
870 ui.debug('histedit: %s is replaced by %s\n' % (
867 node.short(prec), node.short(succs[0])))
871 node.short(prec), node.short(succs[0])))
868 if len(succs) > 1:
872 if len(succs) > 1:
869 m = 'histedit: %s'
873 m = 'histedit: %s'
870 for n in succs[1:]:
874 for n in succs[1:]:
871 ui.debug(m % node.short(n))
875 ui.debug(m % node.short(n))
872
876
873 if not keep:
877 if not keep:
874 if mapping:
878 if mapping:
875 movebookmarks(ui, repo, mapping, state.topmost, ntm)
879 movebookmarks(ui, repo, mapping, state.topmost, ntm)
876 # TODO update mq state
880 # TODO update mq state
877 if obsolete.isenabled(repo, obsolete.createmarkersopt):
881 if obsolete.isenabled(repo, obsolete.createmarkersopt):
878 markers = []
882 markers = []
879 # sort by revision number because it sounds "right"
883 # sort by revision number because it sounds "right"
880 for prec in sorted(mapping, key=repo.changelog.rev):
884 for prec in sorted(mapping, key=repo.changelog.rev):
881 succs = mapping[prec]
885 succs = mapping[prec]
882 markers.append((repo[prec],
886 markers.append((repo[prec],
883 tuple(repo[s] for s in succs)))
887 tuple(repo[s] for s in succs)))
884 if markers:
888 if markers:
885 obsolete.createmarkers(repo, markers)
889 obsolete.createmarkers(repo, markers)
886 else:
890 else:
887 cleanupnode(ui, repo, 'replaced', mapping)
891 cleanupnode(ui, repo, 'replaced', mapping)
888
892
889 cleanupnode(ui, repo, 'temp', tmpnodes)
893 cleanupnode(ui, repo, 'temp', tmpnodes)
890 state.clear()
894 state.clear()
891 if os.path.exists(repo.sjoin('undo')):
895 if os.path.exists(repo.sjoin('undo')):
892 os.unlink(repo.sjoin('undo'))
896 os.unlink(repo.sjoin('undo'))
893
897
894 def bootstrapcontinue(ui, state, opts):
898 def bootstrapcontinue(ui, state, opts):
895 repo = state.repo
899 repo = state.repo
896 if state.rules:
900 if state.rules:
897 action, currentnode = state.rules.pop(0)
901 action, currentnode = state.rules.pop(0)
898
902
899 actobj = actiontable[action].fromrule(state, currentnode)
903 actobj = actiontable[action].fromrule(state, currentnode)
900
904
901 s = repo.status()
905 s = repo.status()
902 if s.modified or s.added or s.removed or s.deleted:
906 if s.modified or s.added or s.removed or s.deleted:
903 actobj.continuedirty()
907 actobj.continuedirty()
904 s = repo.status()
908 s = repo.status()
905 if s.modified or s.added or s.removed or s.deleted:
909 if s.modified or s.added or s.removed or s.deleted:
906 raise util.Abort(_("working copy still dirty"))
910 raise util.Abort(_("working copy still dirty"))
907
911
908 parentctx, replacements = actobj.continueclean()
912 parentctx, replacements = actobj.continueclean()
909
913
910 state.parentctxnode = parentctx.node()
914 state.parentctxnode = parentctx.node()
911 state.replacements.extend(replacements)
915 state.replacements.extend(replacements)
912
916
913 return state
917 return state
914
918
915 def between(repo, old, new, keep):
919 def between(repo, old, new, keep):
916 """select and validate the set of revision to edit
920 """select and validate the set of revision to edit
917
921
918 When keep is false, the specified set can't have children."""
922 When keep is false, the specified set can't have children."""
919 ctxs = list(repo.set('%n::%n', old, new))
923 ctxs = list(repo.set('%n::%n', old, new))
920 if ctxs and not keep:
924 if ctxs and not keep:
921 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
925 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
922 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
926 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
923 raise util.Abort(_('cannot edit history that would orphan nodes'))
927 raise util.Abort(_('cannot edit history that would orphan nodes'))
924 if repo.revs('(%ld) and merge()', ctxs):
928 if repo.revs('(%ld) and merge()', ctxs):
925 raise util.Abort(_('cannot edit history that contains merges'))
929 raise util.Abort(_('cannot edit history that contains merges'))
926 root = ctxs[0] # list is already sorted by repo.set
930 root = ctxs[0] # list is already sorted by repo.set
927 if not root.mutable():
931 if not root.mutable():
928 raise util.Abort(_('cannot edit immutable changeset: %s') % root)
932 raise util.Abort(_('cannot edit immutable changeset: %s') % root)
929 return [c.node() for c in ctxs]
933 return [c.node() for c in ctxs]
930
934
931 def makedesc(repo, action, rev):
935 def makedesc(repo, action, rev):
932 """build a initial action line for a ctx
936 """build a initial action line for a ctx
933
937
934 line are in the form:
938 line are in the form:
935
939
936 <action> <hash> <rev> <summary>
940 <action> <hash> <rev> <summary>
937 """
941 """
938 ctx = repo[rev]
942 ctx = repo[rev]
939 summary = ''
943 summary = ''
940 if ctx.description():
944 if ctx.description():
941 summary = ctx.description().splitlines()[0]
945 summary = ctx.description().splitlines()[0]
942 line = '%s %s %d %s' % (action, ctx, ctx.rev(), summary)
946 line = '%s %s %d %s' % (action, ctx, ctx.rev(), summary)
943 # trim to 80 columns so it's not stupidly wide in my editor
947 # trim to 80 columns so it's not stupidly wide in my editor
944 maxlen = repo.ui.configint('histedit', 'linelen', default=80)
948 maxlen = repo.ui.configint('histedit', 'linelen', default=80)
945 maxlen = max(maxlen, 22) # avoid truncating hash
949 maxlen = max(maxlen, 22) # avoid truncating hash
946 return util.ellipsis(line, maxlen)
950 return util.ellipsis(line, maxlen)
947
951
948 def ruleeditor(repo, ui, rules, editcomment=""):
952 def ruleeditor(repo, ui, rules, editcomment=""):
949 """open an editor to edit rules
953 """open an editor to edit rules
950
954
951 rules are in the format [ [act, ctx], ...] like in state.rules
955 rules are in the format [ [act, ctx], ...] like in state.rules
952 """
956 """
953 rules = '\n'.join([makedesc(repo, act, rev) for [act, rev] in rules])
957 rules = '\n'.join([makedesc(repo, act, rev) for [act, rev] in rules])
954 rules += '\n\n'
958 rules += '\n\n'
955 rules += editcomment
959 rules += editcomment
956 rules = ui.edit(rules, ui.username())
960 rules = ui.edit(rules, ui.username())
957
961
958 # Save edit rules in .hg/histedit-last-edit.txt in case
962 # Save edit rules in .hg/histedit-last-edit.txt in case
959 # the user needs to ask for help after something
963 # the user needs to ask for help after something
960 # surprising happens.
964 # surprising happens.
961 f = open(repo.join('histedit-last-edit.txt'), 'w')
965 f = open(repo.join('histedit-last-edit.txt'), 'w')
962 f.write(rules)
966 f.write(rules)
963 f.close()
967 f.close()
964
968
965 return rules
969 return rules
966
970
967 def verifyrules(rules, repo, ctxs):
971 def verifyrules(rules, repo, ctxs):
968 """Verify that there exists exactly one edit rule per given changeset.
972 """Verify that there exists exactly one edit rule per given changeset.
969
973
970 Will abort if there are too many or too few rules, a malformed rule,
974 Will abort if there are too many or too few rules, a malformed rule,
971 or a rule on a changeset outside of the user-given range.
975 or a rule on a changeset outside of the user-given range.
972 """
976 """
973 parsed = []
977 parsed = []
974 expected = set(c.hex() for c in ctxs)
978 expected = set(c.hex() for c in ctxs)
975 seen = set()
979 seen = set()
976 for r in rules:
980 for r in rules:
977 if ' ' not in r:
981 if ' ' not in r:
978 raise util.Abort(_('malformed line "%s"') % r)
982 raise util.Abort(_('malformed line "%s"') % r)
979 action, rest = r.split(' ', 1)
983 action, rest = r.split(' ', 1)
980 ha = rest.strip().split(' ', 1)[0]
984 ha = rest.strip().split(' ', 1)[0]
981 try:
985 try:
982 ha = repo[ha].hex()
986 ha = repo[ha].hex()
983 except error.RepoError:
987 except error.RepoError:
984 raise util.Abort(_('unknown changeset %s listed') % ha[:12])
988 raise util.Abort(_('unknown changeset %s listed') % ha[:12])
985 if ha not in expected:
989 if ha not in expected:
986 raise util.Abort(
990 raise util.Abort(
987 _('may not use changesets other than the ones listed'))
991 _('may not use changesets other than the ones listed'))
988 if ha in seen:
992 if ha in seen:
989 raise util.Abort(_('duplicated command for changeset %s') %
993 raise util.Abort(_('duplicated command for changeset %s') %
990 ha[:12])
994 ha[:12])
991 seen.add(ha)
995 seen.add(ha)
992 if action not in actiontable:
996 if action not in actiontable:
993 raise util.Abort(_('unknown action "%s"') % action)
997 raise util.Abort(_('unknown action "%s"') % action)
994 parsed.append([action, ha])
998 parsed.append([action, ha])
995 missing = sorted(expected - seen) # sort to stabilize output
999 missing = sorted(expected - seen) # sort to stabilize output
996 if missing:
1000 if missing:
997 raise util.Abort(_('missing rules for changeset %s') %
1001 raise util.Abort(_('missing rules for changeset %s') %
998 missing[0][:12],
1002 missing[0][:12],
999 hint=_('do you want to use the drop action?'))
1003 hint=_('do you want to use the drop action?'))
1000 return parsed
1004 return parsed
1001
1005
1002 def processreplacement(state):
1006 def processreplacement(state):
1003 """process the list of replacements to return
1007 """process the list of replacements to return
1004
1008
1005 1) the final mapping between original and created nodes
1009 1) the final mapping between original and created nodes
1006 2) the list of temporary node created by histedit
1010 2) the list of temporary node created by histedit
1007 3) the list of new commit created by histedit"""
1011 3) the list of new commit created by histedit"""
1008 replacements = state.replacements
1012 replacements = state.replacements
1009 allsuccs = set()
1013 allsuccs = set()
1010 replaced = set()
1014 replaced = set()
1011 fullmapping = {}
1015 fullmapping = {}
1012 # initialise basic set
1016 # initialise basic set
1013 # fullmapping records all operations recorded in replacements
1017 # fullmapping records all operations recorded in replacements
1014 for rep in replacements:
1018 for rep in replacements:
1015 allsuccs.update(rep[1])
1019 allsuccs.update(rep[1])
1016 replaced.add(rep[0])
1020 replaced.add(rep[0])
1017 fullmapping.setdefault(rep[0], set()).update(rep[1])
1021 fullmapping.setdefault(rep[0], set()).update(rep[1])
1018 new = allsuccs - replaced
1022 new = allsuccs - replaced
1019 tmpnodes = allsuccs & replaced
1023 tmpnodes = allsuccs & replaced
1020 # Reduce fullmapping into a direct relation between original nodes
1024 # Reduce fullmapping into a direct relation between original nodes
1021 # and the final nodes created during history editing.
1025 # and the final nodes created during history editing.
1022 # Dropped changesets are replaced by an empty list
1026 # Dropped changesets are replaced by an empty list
1023 toproceed = set(fullmapping)
1027 toproceed = set(fullmapping)
1024 final = {}
1028 final = {}
1025 while toproceed:
1029 while toproceed:
1026 for x in list(toproceed):
1030 for x in list(toproceed):
1027 succs = fullmapping[x]
1031 succs = fullmapping[x]
1028 for s in list(succs):
1032 for s in list(succs):
1029 if s in toproceed:
1033 if s in toproceed:
1030 # non final node with unknown closure
1034 # non final node with unknown closure
1031 # We can't process this now
1035 # We can't process this now
1032 break
1036 break
1033 elif s in final:
1037 elif s in final:
1034 # non final node, replace with closure
1038 # non final node, replace with closure
1035 succs.remove(s)
1039 succs.remove(s)
1036 succs.update(final[s])
1040 succs.update(final[s])
1037 else:
1041 else:
1038 final[x] = succs
1042 final[x] = succs
1039 toproceed.remove(x)
1043 toproceed.remove(x)
1040 # remove tmpnodes from final mapping
1044 # remove tmpnodes from final mapping
1041 for n in tmpnodes:
1045 for n in tmpnodes:
1042 del final[n]
1046 del final[n]
1043 # we expect all changes involved in final to exist in the repo
1047 # we expect all changes involved in final to exist in the repo
1044 # turn `final` into a list (topologically sorted)
1048 # turn `final` into a list (topologically sorted)
1045 nm = state.repo.changelog.nodemap
1049 nm = state.repo.changelog.nodemap
1046 for prec, succs in final.items():
1050 for prec, succs in final.items():
1047 final[prec] = sorted(succs, key=nm.get)
1051 final[prec] = sorted(succs, key=nm.get)
1048
1052
1049 # compute the topmost element (necessary for bookmarks)
1053 # compute the topmost element (necessary for bookmarks)
1050 if new:
1054 if new:
1051 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
1055 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
1052 elif not final:
1056 elif not final:
1053 # Nothing was rewritten at all. We won't need `newtopmost`:
1057 # Nothing was rewritten at all. We won't need `newtopmost`:
1054 # it is the same as `oldtopmost`, and `processreplacement` callers know it
1058 # it is the same as `oldtopmost`, and `processreplacement` callers know it
1055 newtopmost = None
1059 newtopmost = None
1056 else:
1060 else:
1057 # everybody died. The newtopmost is the parent of the root.
1061 # everybody died. The newtopmost is the parent of the root.
1058 r = state.repo.changelog.rev
1062 r = state.repo.changelog.rev
1059 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
1063 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
1060
1064
1061 return final, tmpnodes, new, newtopmost
1065 return final, tmpnodes, new, newtopmost
1062
1066
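A minimal worked example of the reduction performed above, using hypothetical single-letter stand-ins for binary node ids: suppose histedit recorded that A was replaced by a temporary commit T, and T by the final commit B.

    # illustration only -- 'A', 'T', 'B' stand in for real node ids
    replacements = [('A', ('T',)), ('T', ('B',))]
    # the reduction collapses the chain into a direct relation, roughly:
    #   final    == {'A': ['B']}    original node -> final successors
    #   tmpnodes == set(['T'])      intermediate commit, stripped later
    #   new      == set(['B'])      commit created by histedit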
1063 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
1067 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
1064 """Move bookmark from old to newly created node"""
1068 """Move bookmark from old to newly created node"""
1065 if not mapping:
1069 if not mapping:
1066 # if nothing got rewritten, there is no purpose for this function
1070 # if nothing got rewritten, there is no purpose for this function
1067 return
1071 return
1068 moves = []
1072 moves = []
1069 for bk, old in sorted(repo._bookmarks.iteritems()):
1073 for bk, old in sorted(repo._bookmarks.iteritems()):
1070 if old == oldtopmost:
1074 if old == oldtopmost:
1071 # special case: ensure the bookmark stays on tip.
1075 # special case: ensure the bookmark stays on tip.
1072 #
1076 #
1073 # This is arguably a feature and we may only want that for the
1077 # This is arguably a feature and we may only want that for the
1074 # active bookmark. But the behavior is kept compatible with the old
1078 # active bookmark. But the behavior is kept compatible with the old
1075 # version for now.
1079 # version for now.
1076 moves.append((bk, newtopmost))
1080 moves.append((bk, newtopmost))
1077 continue
1081 continue
1078 base = old
1082 base = old
1079 new = mapping.get(base, None)
1083 new = mapping.get(base, None)
1080 if new is None:
1084 if new is None:
1081 continue
1085 continue
1082 while not new:
1086 while not new:
1083 # base is killed, trying with parent
1087 # base is killed, trying with parent
1084 base = repo[base].p1().node()
1088 base = repo[base].p1().node()
1085 new = mapping.get(base, (base,))
1089 new = mapping.get(base, (base,))
1086 # nothing to move
1090 # nothing to move
1087 moves.append((bk, new[-1]))
1091 moves.append((bk, new[-1]))
1088 if moves:
1092 if moves:
1089 marks = repo._bookmarks
1093 marks = repo._bookmarks
1090 for mark, new in moves:
1094 for mark, new in moves:
1091 old = marks[mark]
1095 old = marks[mark]
1092 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
1096 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
1093 % (mark, node.short(old), node.short(new)))
1097 % (mark, node.short(old), node.short(new)))
1094 marks[mark] = new
1098 marks[mark] = new
1095 marks.write()
1099 marks.write()
1096
1100
1097 def cleanupnode(ui, repo, name, nodes):
1101 def cleanupnode(ui, repo, name, nodes):
1098 """strip a group of nodes from the repository
1102 """strip a group of nodes from the repository
1099
1103
1100 The set of nodes to strip may contain unknown nodes."""
1104 The set of nodes to strip may contain unknown nodes."""
1101 ui.debug('should strip %s nodes %s\n' %
1105 ui.debug('should strip %s nodes %s\n' %
1102 (name, ', '.join([node.short(n) for n in nodes])))
1106 (name, ', '.join([node.short(n) for n in nodes])))
1103 lock = None
1107 lock = None
1104 try:
1108 try:
1105 lock = repo.lock()
1109 lock = repo.lock()
1106 # Find all nodes that need to be stripped
1110 # Find all nodes that need to be stripped
1107 # (we use %lr instead of %ln to silently ignore unknown items)
1111 # (we use %lr instead of %ln to silently ignore unknown items)
1108 nm = repo.changelog.nodemap
1112 nm = repo.changelog.nodemap
1109 nodes = sorted(n for n in nodes if n in nm)
1113 nodes = sorted(n for n in nodes if n in nm)
1110 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1114 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1111 for c in roots:
1115 for c in roots:
1112 # We should process node in reverse order to strip tip most first.
1116 # We should process node in reverse order to strip tip most first.
1113 # but this trigger a bug in changegroup hook.
1117 # but this trigger a bug in changegroup hook.
1114 # This would reduce bundle overhead
1118 # This would reduce bundle overhead
1115 repair.strip(ui, repo, c)
1119 repair.strip(ui, repo, c)
1116 finally:
1120 finally:
1117 release(lock)
1121 release(lock)
1118
1122
1119 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1123 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1120 if isinstance(nodelist, str):
1124 if isinstance(nodelist, str):
1121 nodelist = [nodelist]
1125 nodelist = [nodelist]
1122 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1126 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1123 state = histeditstate(repo)
1127 state = histeditstate(repo)
1124 state.read()
1128 state.read()
1125 histedit_nodes = set([repo[rulehash].node() for (action, rulehash)
1129 histedit_nodes = set([repo[rulehash].node() for (action, rulehash)
1126 in state.rules if rulehash in repo])
1130 in state.rules if rulehash in repo])
1127 strip_nodes = set([repo[n].node() for n in nodelist])
1131 strip_nodes = set([repo[n].node() for n in nodelist])
1128 common_nodes = histedit_nodes & strip_nodes
1132 common_nodes = histedit_nodes & strip_nodes
1129 if common_nodes:
1133 if common_nodes:
1130 raise util.Abort(_("histedit in progress, can't strip %s")
1134 raise util.Abort(_("histedit in progress, can't strip %s")
1131 % ', '.join(node.short(x) for x in common_nodes))
1135 % ', '.join(node.short(x) for x in common_nodes))
1132 return orig(ui, repo, nodelist, *args, **kwargs)
1136 return orig(ui, repo, nodelist, *args, **kwargs)
1133
1137
1134 extensions.wrapfunction(repair, 'strip', stripwrapper)
1138 extensions.wrapfunction(repair, 'strip', stripwrapper)
1135
1139
1136 def summaryhook(ui, repo):
1140 def summaryhook(ui, repo):
1137 if not os.path.exists(repo.join('histedit-state')):
1141 if not os.path.exists(repo.join('histedit-state')):
1138 return
1142 return
1139 state = histeditstate(repo)
1143 state = histeditstate(repo)
1140 state.read()
1144 state.read()
1141 if state.rules:
1145 if state.rules:
1142 # i18n: column positioning for "hg summary"
1146 # i18n: column positioning for "hg summary"
1143 ui.write(_('hist: %s (histedit --continue)\n') %
1147 ui.write(_('hist: %s (histedit --continue)\n') %
1144 (ui.label(_('%d remaining'), 'histedit.remaining') %
1148 (ui.label(_('%d remaining'), 'histedit.remaining') %
1145 len(state.rules)))
1149 len(state.rules)))
1146
1150
1147 def extsetup(ui):
1151 def extsetup(ui):
1148 cmdutil.summaryhooks.add('histedit', summaryhook)
1152 cmdutil.summaryhooks.add('histedit', summaryhook)
1149 cmdutil.unfinishedstates.append(
1153 cmdutil.unfinishedstates.append(
1150 ['histedit-state', False, True, _('histedit in progress'),
1154 ['histedit-state', False, True, _('histedit in progress'),
1151 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
1155 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
@@ -1,742 +1,746 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM; still, it may be useful in very small projects based on single
13 # SCM; still, it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific your filename patterns are, the less speed you
57 The more specific your filename patterns are, the less speed you
58 lose in huge repositories.
58 lose in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
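To make the customization described above concrete, here is a hypothetical hgrc fragment that replaces the default mappings; the right-hand sides are ordinary changeset template expressions, the same ones used by _defaultkwmaps() below:

    [keywordmaps]
    Log = {desc}
    Revision = {node|short}
    Author = {author|user}
    LastChangedDate = {date|svnisodate}

Note that any [keywordmaps] section overrides the defaults entirely, as kwtemplater.__init__ shows further down.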
85 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
85 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
86 from mercurial import localrepo, match, patch, templatefilters, util
86 from mercurial import localrepo, match, patch, templatefilters, util
87 from mercurial import scmutil, pathutil
87 from mercurial import scmutil, pathutil
88 from mercurial.hgweb import webcommands
88 from mercurial.hgweb import webcommands
89 from mercurial.i18n import _
89 from mercurial.i18n import _
90 import os, re, tempfile
90 import os, re, tempfile
91
91
92 cmdtable = {}
92 cmdtable = {}
93 command = cmdutil.command(cmdtable)
93 command = cmdutil.command(cmdtable)
94 # Note for extension authors: ONLY specify testedwith = 'internal' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
94 testedwith = 'internal'
98 testedwith = 'internal'
95
99
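As a sketch of what the note above asks of out-of-tree extensions (version strings invented for illustration):

    # in a third-party extension, i.e. one not shipped with Mercurial:
    testedwith = '3.3 3.4'

or leave the attribute out altogether if no particular release has been tested.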
96 # hg commands that do not act on keywords
100 # hg commands that do not act on keywords
97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
101 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
98 ' outgoing push tip verify convert email glog')
102 ' outgoing push tip verify convert email glog')
99
103
100 # hg commands that trigger expansion only when writing to working dir,
104 # hg commands that trigger expansion only when writing to working dir,
101 # not when reading filelog, and unexpand when reading from working dir
105 # not when reading filelog, and unexpand when reading from working dir
102 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
106 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
103 ' unshelve rebase graft backout histedit fetch')
107 ' unshelve rebase graft backout histedit fetch')
104
108
105 # names of extensions using dorecord
109 # names of extensions using dorecord
106 recordextensions = 'record'
110 recordextensions = 'record'
107
111
108 colortable = {
112 colortable = {
109 'kwfiles.enabled': 'green bold',
113 'kwfiles.enabled': 'green bold',
110 'kwfiles.deleted': 'cyan bold underline',
114 'kwfiles.deleted': 'cyan bold underline',
111 'kwfiles.enabledunknown': 'green',
115 'kwfiles.enabledunknown': 'green',
112 'kwfiles.ignored': 'bold',
116 'kwfiles.ignored': 'bold',
113 'kwfiles.ignoredunknown': 'none'
117 'kwfiles.ignoredunknown': 'none'
114 }
118 }
115
119
116 # date like in cvs' $Date
120 # date like in cvs' $Date
117 def utcdate(text):
121 def utcdate(text):
118 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
122 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
119 '''
123 '''
120 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
124 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
121 # date like in svn's $Date
125 # date like in svn's $Date
122 def svnisodate(text):
126 def svnisodate(text):
123 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
127 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
124 +0200 (Tue, 18 Aug 2009)".
128 +0200 (Tue, 18 Aug 2009)".
125 '''
129 '''
126 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
130 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
127 # date like in svn's $Id
131 # date like in svn's $Id
128 def svnutcdate(text):
132 def svnutcdate(text):
129 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
133 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
130 11:00:13Z".
134 11:00:13Z".
131 '''
135 '''
132 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
136 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
133
137
134 templatefilters.filters.update({'utcdate': utcdate,
138 templatefilters.filters.update({'utcdate': utcdate,
135 'svnisodate': svnisodate,
139 'svnisodate': svnisodate,
136 'svnutcdate': svnutcdate})
140 'svnutcdate': svnutcdate})
137
141
138 # make keyword tools accessible
142 # make keyword tools accessible
139 kwtools = {'templater': None, 'hgcmd': ''}
143 kwtools = {'templater': None, 'hgcmd': ''}
140
144
141 def _defaultkwmaps(ui):
145 def _defaultkwmaps(ui):
142 '''Returns default keywordmaps according to keywordset configuration.'''
146 '''Returns default keywordmaps according to keywordset configuration.'''
143 templates = {
147 templates = {
144 'Revision': '{node|short}',
148 'Revision': '{node|short}',
145 'Author': '{author|user}',
149 'Author': '{author|user}',
146 }
150 }
147 kwsets = ({
151 kwsets = ({
148 'Date': '{date|utcdate}',
152 'Date': '{date|utcdate}',
149 'RCSfile': '{file|basename},v',
153 'RCSfile': '{file|basename},v',
150 'RCSFile': '{file|basename},v', # kept for backwards compatibility
154 'RCSFile': '{file|basename},v', # kept for backwards compatibility
151 # with hg-keyword
155 # with hg-keyword
152 'Source': '{root}/{file},v',
156 'Source': '{root}/{file},v',
153 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
157 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
154 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
158 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
155 }, {
159 }, {
156 'Date': '{date|svnisodate}',
160 'Date': '{date|svnisodate}',
157 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
161 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
158 'LastChangedRevision': '{node|short}',
162 'LastChangedRevision': '{node|short}',
159 'LastChangedBy': '{author|user}',
163 'LastChangedBy': '{author|user}',
160 'LastChangedDate': '{date|svnisodate}',
164 'LastChangedDate': '{date|svnisodate}',
161 })
165 })
162 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
166 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
163 return templates
167 return templates
164
168
165 def _shrinktext(text, subfunc):
169 def _shrinktext(text, subfunc):
166 '''Helper for keyword expansion removal in text.
170 '''Helper for keyword expansion removal in text.
167 Depending on subfunc also returns number of substitutions.'''
171 Depending on subfunc also returns number of substitutions.'''
168 return subfunc(r'$\1$', text)
172 return subfunc(r'$\1$', text)
169
173
170 def _preselect(wstatus, changed):
174 def _preselect(wstatus, changed):
171 '''Retrieves modified and added files from a working directory state
175 '''Retrieves modified and added files from a working directory state
172 and returns the subset of each contained in given changed files
176 and returns the subset of each contained in given changed files
173 retrieved from a change context.'''
177 retrieved from a change context.'''
174 modified = [f for f in wstatus.modified if f in changed]
178 modified = [f for f in wstatus.modified if f in changed]
175 added = [f for f in wstatus.added if f in changed]
179 added = [f for f in wstatus.added if f in changed]
176 return modified, added
180 return modified, added
177
181
178
182
179 class kwtemplater(object):
183 class kwtemplater(object):
180 '''
184 '''
181 Sets up keyword templates, corresponding keyword regex, and
185 Sets up keyword templates, corresponding keyword regex, and
182 provides keyword substitution functions.
186 provides keyword substitution functions.
183 '''
187 '''
184
188
185 def __init__(self, ui, repo, inc, exc):
189 def __init__(self, ui, repo, inc, exc):
186 self.ui = ui
190 self.ui = ui
187 self.repo = repo
191 self.repo = repo
188 self.match = match.match(repo.root, '', [], inc, exc)
192 self.match = match.match(repo.root, '', [], inc, exc)
189 self.restrict = kwtools['hgcmd'] in restricted.split()
193 self.restrict = kwtools['hgcmd'] in restricted.split()
190 self.postcommit = False
194 self.postcommit = False
191
195
192 kwmaps = self.ui.configitems('keywordmaps')
196 kwmaps = self.ui.configitems('keywordmaps')
193 if kwmaps: # override default templates
197 if kwmaps: # override default templates
194 self.templates = dict(kwmaps)
198 self.templates = dict(kwmaps)
195 else:
199 else:
196 self.templates = _defaultkwmaps(self.ui)
200 self.templates = _defaultkwmaps(self.ui)
197
201
198 @util.propertycache
202 @util.propertycache
199 def escape(self):
203 def escape(self):
200 '''Returns bar-separated and escaped keywords.'''
204 '''Returns bar-separated and escaped keywords.'''
201 return '|'.join(map(re.escape, self.templates.keys()))
205 return '|'.join(map(re.escape, self.templates.keys()))
202
206
203 @util.propertycache
207 @util.propertycache
204 def rekw(self):
208 def rekw(self):
205 '''Returns regex for unexpanded keywords.'''
209 '''Returns regex for unexpanded keywords.'''
206 return re.compile(r'\$(%s)\$' % self.escape)
210 return re.compile(r'\$(%s)\$' % self.escape)
207
211
208 @util.propertycache
212 @util.propertycache
209 def rekwexp(self):
213 def rekwexp(self):
210 '''Returns regex for expanded keywords.'''
214 '''Returns regex for expanded keywords.'''
211 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
215 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
212
216
213 def substitute(self, data, path, ctx, subfunc):
217 def substitute(self, data, path, ctx, subfunc):
214 '''Replaces keywords in data with expanded template.'''
218 '''Replaces keywords in data with expanded template.'''
215 def kwsub(mobj):
219 def kwsub(mobj):
216 kw = mobj.group(1)
220 kw = mobj.group(1)
217 ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
221 ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
218 self.templates[kw], '', False)
222 self.templates[kw], '', False)
219 self.ui.pushbuffer()
223 self.ui.pushbuffer()
220 ct.show(ctx, root=self.repo.root, file=path)
224 ct.show(ctx, root=self.repo.root, file=path)
221 ekw = templatefilters.firstline(self.ui.popbuffer())
225 ekw = templatefilters.firstline(self.ui.popbuffer())
222 return '$%s: %s $' % (kw, ekw)
226 return '$%s: %s $' % (kw, ekw)
223 return subfunc(kwsub, data)
227 return subfunc(kwsub, data)
224
228
225 def linkctx(self, path, fileid):
229 def linkctx(self, path, fileid):
226 '''Similar to filelog.linkrev, but returns a changectx.'''
230 '''Similar to filelog.linkrev, but returns a changectx.'''
227 return self.repo.filectx(path, fileid=fileid).changectx()
231 return self.repo.filectx(path, fileid=fileid).changectx()
228
232
229 def expand(self, path, node, data):
233 def expand(self, path, node, data):
230 '''Returns data with keywords expanded.'''
234 '''Returns data with keywords expanded.'''
231 if not self.restrict and self.match(path) and not util.binary(data):
235 if not self.restrict and self.match(path) and not util.binary(data):
232 ctx = self.linkctx(path, node)
236 ctx = self.linkctx(path, node)
233 return self.substitute(data, path, ctx, self.rekw.sub)
237 return self.substitute(data, path, ctx, self.rekw.sub)
234 return data
238 return data
235
239
236 def iskwfile(self, cand, ctx):
240 def iskwfile(self, cand, ctx):
237 '''Returns subset of candidates which are configured for keyword
241 '''Returns subset of candidates which are configured for keyword
238 expansion but are not symbolic links.'''
242 expansion but are not symbolic links.'''
239 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
243 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
240
244
241 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
245 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
242 '''Overwrites selected files expanding/shrinking keywords.'''
246 '''Overwrites selected files expanding/shrinking keywords.'''
243 if self.restrict or lookup or self.postcommit: # exclude kw_copy
247 if self.restrict or lookup or self.postcommit: # exclude kw_copy
244 candidates = self.iskwfile(candidates, ctx)
248 candidates = self.iskwfile(candidates, ctx)
245 if not candidates:
249 if not candidates:
246 return
250 return
247 kwcmd = self.restrict and lookup # kwexpand/kwshrink
251 kwcmd = self.restrict and lookup # kwexpand/kwshrink
248 if self.restrict or expand and lookup:
252 if self.restrict or expand and lookup:
249 mf = ctx.manifest()
253 mf = ctx.manifest()
250 if self.restrict or rekw:
254 if self.restrict or rekw:
251 re_kw = self.rekw
255 re_kw = self.rekw
252 else:
256 else:
253 re_kw = self.rekwexp
257 re_kw = self.rekwexp
254 if expand:
258 if expand:
255 msg = _('overwriting %s expanding keywords\n')
259 msg = _('overwriting %s expanding keywords\n')
256 else:
260 else:
257 msg = _('overwriting %s shrinking keywords\n')
261 msg = _('overwriting %s shrinking keywords\n')
258 for f in candidates:
262 for f in candidates:
259 if self.restrict:
263 if self.restrict:
260 data = self.repo.file(f).read(mf[f])
264 data = self.repo.file(f).read(mf[f])
261 else:
265 else:
262 data = self.repo.wread(f)
266 data = self.repo.wread(f)
263 if util.binary(data):
267 if util.binary(data):
264 continue
268 continue
265 if expand:
269 if expand:
266 parents = ctx.parents()
270 parents = ctx.parents()
267 if lookup:
271 if lookup:
268 ctx = self.linkctx(f, mf[f])
272 ctx = self.linkctx(f, mf[f])
269 elif self.restrict and len(parents) > 1:
273 elif self.restrict and len(parents) > 1:
270 # merge commit
274 # merge commit
271 # in case of conflict f is in modified state during
275 # in case of conflict f is in modified state during
272 # merge, even if f does not differ from f in parent
276 # merge, even if f does not differ from f in parent
273 for p in parents:
277 for p in parents:
274 if f in p and not p[f].cmp(ctx[f]):
278 if f in p and not p[f].cmp(ctx[f]):
275 ctx = p[f].changectx()
279 ctx = p[f].changectx()
276 break
280 break
277 data, found = self.substitute(data, f, ctx, re_kw.subn)
281 data, found = self.substitute(data, f, ctx, re_kw.subn)
278 elif self.restrict:
282 elif self.restrict:
279 found = re_kw.search(data)
283 found = re_kw.search(data)
280 else:
284 else:
281 data, found = _shrinktext(data, re_kw.subn)
285 data, found = _shrinktext(data, re_kw.subn)
282 if found:
286 if found:
283 self.ui.note(msg % f)
287 self.ui.note(msg % f)
284 fp = self.repo.wvfs(f, "wb", atomictemp=True)
288 fp = self.repo.wvfs(f, "wb", atomictemp=True)
285 fp.write(data)
289 fp.write(data)
286 fp.close()
290 fp.close()
287 if kwcmd:
291 if kwcmd:
288 self.repo.dirstate.normal(f)
292 self.repo.dirstate.normal(f)
289 elif self.postcommit:
293 elif self.postcommit:
290 self.repo.dirstate.normallookup(f)
294 self.repo.dirstate.normallookup(f)
291
295
292 def shrink(self, fname, text):
296 def shrink(self, fname, text):
293 '''Returns text with all keyword substitutions removed.'''
297 '''Returns text with all keyword substitutions removed.'''
294 if self.match(fname) and not util.binary(text):
298 if self.match(fname) and not util.binary(text):
295 return _shrinktext(text, self.rekwexp.sub)
299 return _shrinktext(text, self.rekwexp.sub)
296 return text
300 return text
297
301
298 def shrinklines(self, fname, lines):
302 def shrinklines(self, fname, lines):
299 '''Returns lines with keyword substitutions removed.'''
303 '''Returns lines with keyword substitutions removed.'''
300 if self.match(fname):
304 if self.match(fname):
301 text = ''.join(lines)
305 text = ''.join(lines)
302 if not util.binary(text):
306 if not util.binary(text):
303 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
307 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
304 return lines
308 return lines
305
309
306 def wread(self, fname, data):
310 def wread(self, fname, data):
307 '''If in restricted mode returns data read from wdir with
311 '''If in restricted mode returns data read from wdir with
308 keyword substitutions removed.'''
312 keyword substitutions removed.'''
309 if self.restrict:
313 if self.restrict:
310 return self.shrink(fname, data)
314 return self.shrink(fname, data)
311 return data
315 return data
312
316
313 class kwfilelog(filelog.filelog):
317 class kwfilelog(filelog.filelog):
314 '''
318 '''
315 Subclass of filelog to hook into its read, add, cmp methods.
319 Subclass of filelog to hook into its read, add, cmp methods.
316 Keywords are "stored" unexpanded, and processed on reading.
320 Keywords are "stored" unexpanded, and processed on reading.
317 '''
321 '''
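# Illustrative round trip (simplified, not part of this changeset): for a
# file matching the [keyword] patterns,
#
#     stored in the filelog : '$Id$'        (kept unexpanded)
#     returned by read()    : '$Id: ...$'   (expanded via kwt.expand)
#
# while add() and cmp() shrink the text again, so hashes and comparisons
# always operate on the unexpanded form.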
318 def __init__(self, opener, kwt, path):
322 def __init__(self, opener, kwt, path):
319 super(kwfilelog, self).__init__(opener, path)
323 super(kwfilelog, self).__init__(opener, path)
320 self.kwt = kwt
324 self.kwt = kwt
321 self.path = path
325 self.path = path
322
326
323 def read(self, node):
327 def read(self, node):
324 '''Expands keywords when reading filelog.'''
328 '''Expands keywords when reading filelog.'''
325 data = super(kwfilelog, self).read(node)
329 data = super(kwfilelog, self).read(node)
326 if self.renamed(node):
330 if self.renamed(node):
327 return data
331 return data
328 return self.kwt.expand(self.path, node, data)
332 return self.kwt.expand(self.path, node, data)
329
333
330 def add(self, text, meta, tr, link, p1=None, p2=None):
334 def add(self, text, meta, tr, link, p1=None, p2=None):
331 '''Removes keyword substitutions when adding to filelog.'''
335 '''Removes keyword substitutions when adding to filelog.'''
332 text = self.kwt.shrink(self.path, text)
336 text = self.kwt.shrink(self.path, text)
333 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
337 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
334
338
335 def cmp(self, node, text):
339 def cmp(self, node, text):
336 '''Removes keyword substitutions for comparison.'''
340 '''Removes keyword substitutions for comparison.'''
337 text = self.kwt.shrink(self.path, text)
341 text = self.kwt.shrink(self.path, text)
338 return super(kwfilelog, self).cmp(node, text)
342 return super(kwfilelog, self).cmp(node, text)
339
343
340 def _status(ui, repo, wctx, kwt, *pats, **opts):
344 def _status(ui, repo, wctx, kwt, *pats, **opts):
341 '''Bails out if [keyword] configuration is not active.
345 '''Bails out if [keyword] configuration is not active.
342 Returns status of working directory.'''
346 Returns status of working directory.'''
343 if kwt:
347 if kwt:
344 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
348 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
345 unknown=opts.get('unknown') or opts.get('all'))
349 unknown=opts.get('unknown') or opts.get('all'))
346 if ui.configitems('keyword'):
350 if ui.configitems('keyword'):
347 raise util.Abort(_('[keyword] patterns cannot match'))
351 raise util.Abort(_('[keyword] patterns cannot match'))
348 raise util.Abort(_('no [keyword] patterns configured'))
352 raise util.Abort(_('no [keyword] patterns configured'))
349
353
350 def _kwfwrite(ui, repo, expand, *pats, **opts):
354 def _kwfwrite(ui, repo, expand, *pats, **opts):
351 '''Selects files and passes them to kwtemplater.overwrite.'''
355 '''Selects files and passes them to kwtemplater.overwrite.'''
352 wctx = repo[None]
356 wctx = repo[None]
353 if len(wctx.parents()) > 1:
357 if len(wctx.parents()) > 1:
354 raise util.Abort(_('outstanding uncommitted merge'))
358 raise util.Abort(_('outstanding uncommitted merge'))
355 kwt = kwtools['templater']
359 kwt = kwtools['templater']
356 wlock = repo.wlock()
360 wlock = repo.wlock()
357 try:
361 try:
358 status = _status(ui, repo, wctx, kwt, *pats, **opts)
362 status = _status(ui, repo, wctx, kwt, *pats, **opts)
359 if status.modified or status.added or status.removed or status.deleted:
363 if status.modified or status.added or status.removed or status.deleted:
360 raise util.Abort(_('outstanding uncommitted changes'))
364 raise util.Abort(_('outstanding uncommitted changes'))
361 kwt.overwrite(wctx, status.clean, True, expand)
365 kwt.overwrite(wctx, status.clean, True, expand)
362 finally:
366 finally:
363 wlock.release()
367 wlock.release()
364
368
365 @command('kwdemo',
369 @command('kwdemo',
366 [('d', 'default', None, _('show default keyword template maps')),
370 [('d', 'default', None, _('show default keyword template maps')),
367 ('f', 'rcfile', '',
371 ('f', 'rcfile', '',
368 _('read maps from rcfile'), _('FILE'))],
372 _('read maps from rcfile'), _('FILE'))],
369 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
373 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
370 optionalrepo=True)
374 optionalrepo=True)
371 def demo(ui, repo, *args, **opts):
375 def demo(ui, repo, *args, **opts):
372 '''print [keywordmaps] configuration and an expansion example
376 '''print [keywordmaps] configuration and an expansion example
373
377
374 Show current, custom, or default keyword template maps and their
378 Show current, custom, or default keyword template maps and their
375 expansions.
379 expansions.
376
380
377 Extend the current configuration by specifying maps as arguments
381 Extend the current configuration by specifying maps as arguments
378 and using -f/--rcfile to source an external hgrc file.
382 and using -f/--rcfile to source an external hgrc file.
379
383
380 Use -d/--default to disable current configuration.
384 Use -d/--default to disable current configuration.
381
385
382 See :hg:`help templates` for information on templates and filters.
386 See :hg:`help templates` for information on templates and filters.
383 '''
387 '''
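# Illustration (not part of this changeset): a hypothetical session
# exercising the options documented above might look like
#
#     $ hg kwdemo --default               # show the default keyword maps
#     $ hg kwdemo "Branch = {branches}"   # extend current maps inline
#     $ hg kwdemo -f extra.rc             # read additional maps from a file
#
# the template map and the rc file name are made up for the example.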
384 def demoitems(section, items):
388 def demoitems(section, items):
385 ui.write('[%s]\n' % section)
389 ui.write('[%s]\n' % section)
386 for k, v in sorted(items):
390 for k, v in sorted(items):
387 ui.write('%s = %s\n' % (k, v))
391 ui.write('%s = %s\n' % (k, v))
388
392
389 fn = 'demo.txt'
393 fn = 'demo.txt'
390 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
394 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
391 ui.note(_('creating temporary repository at %s\n') % tmpdir)
395 ui.note(_('creating temporary repository at %s\n') % tmpdir)
392 repo = localrepo.localrepository(repo.baseui, tmpdir, True)
396 repo = localrepo.localrepository(repo.baseui, tmpdir, True)
393 ui.setconfig('keyword', fn, '', 'keyword')
397 ui.setconfig('keyword', fn, '', 'keyword')
394 svn = ui.configbool('keywordset', 'svn')
398 svn = ui.configbool('keywordset', 'svn')
395 # explicitly set keywordset for demo output
399 # explicitly set keywordset for demo output
396 ui.setconfig('keywordset', 'svn', svn, 'keyword')
400 ui.setconfig('keywordset', 'svn', svn, 'keyword')
397
401
398 uikwmaps = ui.configitems('keywordmaps')
402 uikwmaps = ui.configitems('keywordmaps')
399 if args or opts.get('rcfile'):
403 if args or opts.get('rcfile'):
400 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
404 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
401 if uikwmaps:
405 if uikwmaps:
402 ui.status(_('\textending current template maps\n'))
406 ui.status(_('\textending current template maps\n'))
403 if opts.get('default') or not uikwmaps:
407 if opts.get('default') or not uikwmaps:
404 if svn:
408 if svn:
405 ui.status(_('\toverriding default svn keywordset\n'))
409 ui.status(_('\toverriding default svn keywordset\n'))
406 else:
410 else:
407 ui.status(_('\toverriding default cvs keywordset\n'))
411 ui.status(_('\toverriding default cvs keywordset\n'))
408 if opts.get('rcfile'):
412 if opts.get('rcfile'):
409 ui.readconfig(opts.get('rcfile'))
413 ui.readconfig(opts.get('rcfile'))
410 if args:
414 if args:
411 # simulate hgrc parsing
415 # simulate hgrc parsing
412 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
416 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
413 fp = repo.vfs('hgrc', 'w')
417 fp = repo.vfs('hgrc', 'w')
414 fp.writelines(rcmaps)
418 fp.writelines(rcmaps)
415 fp.close()
419 fp.close()
416 ui.readconfig(repo.join('hgrc'))
420 ui.readconfig(repo.join('hgrc'))
417 kwmaps = dict(ui.configitems('keywordmaps'))
421 kwmaps = dict(ui.configitems('keywordmaps'))
418 elif opts.get('default'):
422 elif opts.get('default'):
419 if svn:
423 if svn:
420 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
424 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
421 else:
425 else:
422 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
426 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
423 kwmaps = _defaultkwmaps(ui)
427 kwmaps = _defaultkwmaps(ui)
424 if uikwmaps:
428 if uikwmaps:
425 ui.status(_('\tdisabling current template maps\n'))
429 ui.status(_('\tdisabling current template maps\n'))
426 for k, v in kwmaps.iteritems():
430 for k, v in kwmaps.iteritems():
427 ui.setconfig('keywordmaps', k, v, 'keyword')
431 ui.setconfig('keywordmaps', k, v, 'keyword')
428 else:
432 else:
429 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
433 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
430 if uikwmaps:
434 if uikwmaps:
431 kwmaps = dict(uikwmaps)
435 kwmaps = dict(uikwmaps)
432 else:
436 else:
433 kwmaps = _defaultkwmaps(ui)
437 kwmaps = _defaultkwmaps(ui)
434
438
435 uisetup(ui)
439 uisetup(ui)
436 reposetup(ui, repo)
440 reposetup(ui, repo)
437 ui.write('[extensions]\nkeyword =\n')
441 ui.write('[extensions]\nkeyword =\n')
438 demoitems('keyword', ui.configitems('keyword'))
442 demoitems('keyword', ui.configitems('keyword'))
439 demoitems('keywordset', ui.configitems('keywordset'))
443 demoitems('keywordset', ui.configitems('keywordset'))
440 demoitems('keywordmaps', kwmaps.iteritems())
444 demoitems('keywordmaps', kwmaps.iteritems())
441 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
445 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
442 repo.wvfs.write(fn, keywords)
446 repo.wvfs.write(fn, keywords)
443 repo[None].add([fn])
447 repo[None].add([fn])
444 ui.note(_('\nkeywords written to %s:\n') % fn)
448 ui.note(_('\nkeywords written to %s:\n') % fn)
445 ui.note(keywords)
449 ui.note(keywords)
446 wlock = repo.wlock()
450 wlock = repo.wlock()
447 try:
451 try:
448 repo.dirstate.setbranch('demobranch')
452 repo.dirstate.setbranch('demobranch')
449 finally:
453 finally:
450 wlock.release()
454 wlock.release()
451 for name, cmd in ui.configitems('hooks'):
455 for name, cmd in ui.configitems('hooks'):
452 if name.split('.', 1)[0].find('commit') > -1:
456 if name.split('.', 1)[0].find('commit') > -1:
453 repo.ui.setconfig('hooks', name, '', 'keyword')
457 repo.ui.setconfig('hooks', name, '', 'keyword')
454 msg = _('hg keyword configuration and expansion example')
458 msg = _('hg keyword configuration and expansion example')
455 ui.note(("hg ci -m '%s'\n" % msg))
459 ui.note(("hg ci -m '%s'\n" % msg))
456 repo.commit(text=msg)
460 repo.commit(text=msg)
457 ui.status(_('\n\tkeywords expanded\n'))
461 ui.status(_('\n\tkeywords expanded\n'))
458 ui.write(repo.wread(fn))
462 ui.write(repo.wread(fn))
459 repo.wvfs.rmtree(repo.root)
463 repo.wvfs.rmtree(repo.root)
460
464
461 @command('kwexpand',
465 @command('kwexpand',
462 commands.walkopts,
466 commands.walkopts,
463 _('hg kwexpand [OPTION]... [FILE]...'),
467 _('hg kwexpand [OPTION]... [FILE]...'),
464 inferrepo=True)
468 inferrepo=True)
465 def expand(ui, repo, *pats, **opts):
469 def expand(ui, repo, *pats, **opts):
466 '''expand keywords in the working directory
470 '''expand keywords in the working directory
467
471
468 Run after (re)enabling keyword expansion.
472 Run after (re)enabling keyword expansion.
469
473
470 kwexpand refuses to run if given files contain local changes.
474 kwexpand refuses to run if given files contain local changes.
471 '''
475 '''
472 # 3rd argument sets expansion to True
476 # 3rd argument sets expansion to True
473 _kwfwrite(ui, repo, True, *pats, **opts)
477 _kwfwrite(ui, repo, True, *pats, **opts)
474
478
475 @command('kwfiles',
479 @command('kwfiles',
476 [('A', 'all', None, _('show keyword status flags of all files')),
480 [('A', 'all', None, _('show keyword status flags of all files')),
477 ('i', 'ignore', None, _('show files excluded from expansion')),
481 ('i', 'ignore', None, _('show files excluded from expansion')),
478 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
482 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
479 ] + commands.walkopts,
483 ] + commands.walkopts,
480 _('hg kwfiles [OPTION]... [FILE]...'),
484 _('hg kwfiles [OPTION]... [FILE]...'),
481 inferrepo=True)
485 inferrepo=True)
482 def files(ui, repo, *pats, **opts):
486 def files(ui, repo, *pats, **opts):
483 '''show files configured for keyword expansion
487 '''show files configured for keyword expansion
484
488
485 List which files in the working directory are matched by the
489 List which files in the working directory are matched by the
486 [keyword] configuration patterns.
490 [keyword] configuration patterns.
487
491
488 Useful to prevent inadvertent keyword expansion and to speed up
492 Useful to prevent inadvertent keyword expansion and to speed up
489 execution by including only files that are actual candidates for
493 execution by including only files that are actual candidates for
490 expansion.
494 expansion.
491
495
492 See :hg:`help keyword` on how to construct patterns both for
496 See :hg:`help keyword` on how to construct patterns both for
493 inclusion and exclusion of files.
497 inclusion and exclusion of files.
494
498
495 With -A/--all and -v/--verbose the codes used to show the status
499 With -A/--all and -v/--verbose the codes used to show the status
496 of files are::
500 of files are::
497
501
498 K = keyword expansion candidate
502 K = keyword expansion candidate
499 k = keyword expansion candidate (not tracked)
503 k = keyword expansion candidate (not tracked)
500 I = ignored
504 I = ignored
501 i = ignored (not tracked)
505 i = ignored (not tracked)
502 '''
506 '''
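# Illustration (not part of this changeset): using the codes documented
# above, a hypothetical `hg kwfiles -A -v` run could print
#
#     K demo.txt
#     I images/logo.png
#
# i.e. one status character plus the path, matching the formatter output
# below; the file names are invented.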
503 kwt = kwtools['templater']
507 kwt = kwtools['templater']
504 wctx = repo[None]
508 wctx = repo[None]
505 status = _status(ui, repo, wctx, kwt, *pats, **opts)
509 status = _status(ui, repo, wctx, kwt, *pats, **opts)
506 if pats:
510 if pats:
507 cwd = repo.getcwd()
511 cwd = repo.getcwd()
508 else:
512 else:
509 cwd = ''
513 cwd = ''
510 files = []
514 files = []
511 if not opts.get('unknown') or opts.get('all'):
515 if not opts.get('unknown') or opts.get('all'):
512 files = sorted(status.modified + status.added + status.clean)
516 files = sorted(status.modified + status.added + status.clean)
513 kwfiles = kwt.iskwfile(files, wctx)
517 kwfiles = kwt.iskwfile(files, wctx)
514 kwdeleted = kwt.iskwfile(status.deleted, wctx)
518 kwdeleted = kwt.iskwfile(status.deleted, wctx)
515 kwunknown = kwt.iskwfile(status.unknown, wctx)
519 kwunknown = kwt.iskwfile(status.unknown, wctx)
516 if not opts.get('ignore') or opts.get('all'):
520 if not opts.get('ignore') or opts.get('all'):
517 showfiles = kwfiles, kwdeleted, kwunknown
521 showfiles = kwfiles, kwdeleted, kwunknown
518 else:
522 else:
519 showfiles = [], [], []
523 showfiles = [], [], []
520 if opts.get('all') or opts.get('ignore'):
524 if opts.get('all') or opts.get('ignore'):
521 showfiles += ([f for f in files if f not in kwfiles],
525 showfiles += ([f for f in files if f not in kwfiles],
522 [f for f in status.unknown if f not in kwunknown])
526 [f for f in status.unknown if f not in kwunknown])
523 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
527 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
524 kwstates = zip(kwlabels, 'K!kIi', showfiles)
528 kwstates = zip(kwlabels, 'K!kIi', showfiles)
525 fm = ui.formatter('kwfiles', opts)
529 fm = ui.formatter('kwfiles', opts)
526 fmt = '%.0s%s\n'
530 fmt = '%.0s%s\n'
527 if opts.get('all') or ui.verbose:
531 if opts.get('all') or ui.verbose:
528 fmt = '%s %s\n'
532 fmt = '%s %s\n'
529 for kwstate, char, filenames in kwstates:
533 for kwstate, char, filenames in kwstates:
530 label = 'kwfiles.' + kwstate
534 label = 'kwfiles.' + kwstate
531 for f in filenames:
535 for f in filenames:
532 fm.startitem()
536 fm.startitem()
533 fm.write('kwstatus path', fmt, char,
537 fm.write('kwstatus path', fmt, char,
534 repo.pathto(f, cwd), label=label)
538 repo.pathto(f, cwd), label=label)
535 fm.end()
539 fm.end()
536
540
537 @command('kwshrink',
541 @command('kwshrink',
538 commands.walkopts,
542 commands.walkopts,
539 _('hg kwshrink [OPTION]... [FILE]...'),
543 _('hg kwshrink [OPTION]... [FILE]...'),
540 inferrepo=True)
544 inferrepo=True)
541 def shrink(ui, repo, *pats, **opts):
545 def shrink(ui, repo, *pats, **opts):
542 '''revert expanded keywords in the working directory
546 '''revert expanded keywords in the working directory
543
547
544 Must be run before changing/disabling active keywords.
548 Must be run before changing/disabling active keywords.
545
549
546 kwshrink refuses to run if given files contain local changes.
550 kwshrink refuses to run if given files contain local changes.
547 '''
551 '''
548 # 3rd argument sets expansion to False
552 # 3rd argument sets expansion to False
549 _kwfwrite(ui, repo, False, *pats, **opts)
553 _kwfwrite(ui, repo, False, *pats, **opts)
550
554
551
555
552 def uisetup(ui):
556 def uisetup(ui):
553 ''' Monkeypatches dispatch._parse to retrieve user command.'''
557 ''' Monkeypatches dispatch._parse to retrieve user command.'''
554
558
555 def kwdispatch_parse(orig, ui, args):
559 def kwdispatch_parse(orig, ui, args):
556 '''Monkeypatch dispatch._parse to obtain running hg command.'''
560 '''Monkeypatch dispatch._parse to obtain running hg command.'''
557 cmd, func, args, options, cmdoptions = orig(ui, args)
561 cmd, func, args, options, cmdoptions = orig(ui, args)
558 kwtools['hgcmd'] = cmd
562 kwtools['hgcmd'] = cmd
559 return cmd, func, args, options, cmdoptions
563 return cmd, func, args, options, cmdoptions
560
564
561 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
565 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
562
566
563 def reposetup(ui, repo):
567 def reposetup(ui, repo):
564 '''Sets up repo as kwrepo for keyword substitution.
568 '''Sets up repo as kwrepo for keyword substitution.
565 Overrides file method to return kwfilelog instead of filelog
569 Overrides file method to return kwfilelog instead of filelog
566 if file matches user configuration.
570 if file matches user configuration.
567 Wraps commit to overwrite configured files with updated
571 Wraps commit to overwrite configured files with updated
568 keyword substitutions.
572 keyword substitutions.
569 Monkeypatches patch and webcommands.'''
573 Monkeypatches patch and webcommands.'''
570
574
571 try:
575 try:
572 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
576 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
573 or '.hg' in util.splitpath(repo.root)
577 or '.hg' in util.splitpath(repo.root)
574 or repo._url.startswith('bundle:')):
578 or repo._url.startswith('bundle:')):
575 return
579 return
576 except AttributeError:
580 except AttributeError:
577 pass
581 pass
578
582
579 inc, exc = [], ['.hg*']
583 inc, exc = [], ['.hg*']
580 for pat, opt in ui.configitems('keyword'):
584 for pat, opt in ui.configitems('keyword'):
581 if opt != 'ignore':
585 if opt != 'ignore':
582 inc.append(pat)
586 inc.append(pat)
583 else:
587 else:
584 exc.append(pat)
588 exc.append(pat)
585 if not inc:
589 if not inc:
586 return
590 return
587
591
588 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
592 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
589
593
590 class kwrepo(repo.__class__):
594 class kwrepo(repo.__class__):
591 def file(self, f):
595 def file(self, f):
592 if f[0] == '/':
596 if f[0] == '/':
593 f = f[1:]
597 f = f[1:]
594 return kwfilelog(self.svfs, kwt, f)
598 return kwfilelog(self.svfs, kwt, f)
595
599
596 def wread(self, filename):
600 def wread(self, filename):
597 data = super(kwrepo, self).wread(filename)
601 data = super(kwrepo, self).wread(filename)
598 return kwt.wread(filename, data)
602 return kwt.wread(filename, data)
599
603
600 def commit(self, *args, **opts):
604 def commit(self, *args, **opts):
601 # use custom commitctx for user commands
605 # use custom commitctx for user commands
602 # other extensions can still wrap repo.commitctx directly
606 # other extensions can still wrap repo.commitctx directly
603 self.commitctx = self.kwcommitctx
607 self.commitctx = self.kwcommitctx
604 try:
608 try:
605 return super(kwrepo, self).commit(*args, **opts)
609 return super(kwrepo, self).commit(*args, **opts)
606 finally:
610 finally:
607 del self.commitctx
611 del self.commitctx
608
612
609 def kwcommitctx(self, ctx, error=False):
613 def kwcommitctx(self, ctx, error=False):
610 n = super(kwrepo, self).commitctx(ctx, error)
614 n = super(kwrepo, self).commitctx(ctx, error)
611 # no lock needed, only called from repo.commit() which already locks
615 # no lock needed, only called from repo.commit() which already locks
612 if not kwt.postcommit:
616 if not kwt.postcommit:
613 restrict = kwt.restrict
617 restrict = kwt.restrict
614 kwt.restrict = True
618 kwt.restrict = True
615 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
619 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
616 False, True)
620 False, True)
617 kwt.restrict = restrict
621 kwt.restrict = restrict
618 return n
622 return n
619
623
620 def rollback(self, dryrun=False, force=False):
624 def rollback(self, dryrun=False, force=False):
621 wlock = self.wlock()
625 wlock = self.wlock()
622 try:
626 try:
623 if not dryrun:
627 if not dryrun:
624 changed = self['.'].files()
628 changed = self['.'].files()
625 ret = super(kwrepo, self).rollback(dryrun, force)
629 ret = super(kwrepo, self).rollback(dryrun, force)
626 if not dryrun:
630 if not dryrun:
627 ctx = self['.']
631 ctx = self['.']
628 modified, added = _preselect(ctx.status(), changed)
632 modified, added = _preselect(ctx.status(), changed)
629 kwt.overwrite(ctx, modified, True, True)
633 kwt.overwrite(ctx, modified, True, True)
630 kwt.overwrite(ctx, added, True, False)
634 kwt.overwrite(ctx, added, True, False)
631 return ret
635 return ret
632 finally:
636 finally:
633 wlock.release()
637 wlock.release()
634
638
635 # monkeypatches
639 # monkeypatches
636 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
640 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
637 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
641 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
638 rejects or conflicts due to expanded keywords in working dir.'''
642 rejects or conflicts due to expanded keywords in working dir.'''
639 orig(self, ui, gp, backend, store, eolmode)
643 orig(self, ui, gp, backend, store, eolmode)
640 # shrink keywords read from working dir
644 # shrink keywords read from working dir
641 self.lines = kwt.shrinklines(self.fname, self.lines)
645 self.lines = kwt.shrinklines(self.fname, self.lines)
642
646
643 def kwdiff(orig, *args, **kwargs):
647 def kwdiff(orig, *args, **kwargs):
644 '''Monkeypatch patch.diff to avoid expansion.'''
648 '''Monkeypatch patch.diff to avoid expansion.'''
645 kwt.restrict = True
649 kwt.restrict = True
646 return orig(*args, **kwargs)
650 return orig(*args, **kwargs)
647
651
648 def kwweb_skip(orig, web, req, tmpl):
652 def kwweb_skip(orig, web, req, tmpl):
649 '''Wraps webcommands.x turning off keyword expansion.'''
653 '''Wraps webcommands.x turning off keyword expansion.'''
650 kwt.match = util.never
654 kwt.match = util.never
651 return orig(web, req, tmpl)
655 return orig(web, req, tmpl)
652
656
653 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
657 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
654 '''Wraps cmdutil.amend expanding keywords after amend.'''
658 '''Wraps cmdutil.amend expanding keywords after amend.'''
655 wlock = repo.wlock()
659 wlock = repo.wlock()
656 try:
660 try:
657 kwt.postcommit = True
661 kwt.postcommit = True
658 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
662 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
659 if newid != old.node():
663 if newid != old.node():
660 ctx = repo[newid]
664 ctx = repo[newid]
661 kwt.restrict = True
665 kwt.restrict = True
662 kwt.overwrite(ctx, ctx.files(), False, True)
666 kwt.overwrite(ctx, ctx.files(), False, True)
663 kwt.restrict = False
667 kwt.restrict = False
664 return newid
668 return newid
665 finally:
669 finally:
666 wlock.release()
670 wlock.release()
667
671
668 def kw_copy(orig, ui, repo, pats, opts, rename=False):
672 def kw_copy(orig, ui, repo, pats, opts, rename=False):
669 '''Wraps cmdutil.copy so that copy/rename destinations do not
673 '''Wraps cmdutil.copy so that copy/rename destinations do not
670 contain expanded keywords.
674 contain expanded keywords.
671 Note that the source of a regular file destination may also be a
675 Note that the source of a regular file destination may also be a
672 symlink:
676 symlink:
673 hg cp sym x -> x is symlink
677 hg cp sym x -> x is symlink
674 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
678 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
675 For the latter we have to follow the symlink to find out whether its
679 For the latter we have to follow the symlink to find out whether its
676 target is configured for expansion and we therefore must unexpand the
680 target is configured for expansion and we therefore must unexpand the
677 keywords in the destination.'''
681 keywords in the destination.'''
678 wlock = repo.wlock()
682 wlock = repo.wlock()
679 try:
683 try:
680 orig(ui, repo, pats, opts, rename)
684 orig(ui, repo, pats, opts, rename)
681 if opts.get('dry_run'):
685 if opts.get('dry_run'):
682 return
686 return
683 wctx = repo[None]
687 wctx = repo[None]
684 cwd = repo.getcwd()
688 cwd = repo.getcwd()
685
689
686 def haskwsource(dest):
690 def haskwsource(dest):
687 '''Returns true if dest is a regular file and configured for
691 '''Returns true if dest is a regular file and configured for
688 expansion or a symlink which points to a file configured for
692 expansion or a symlink which points to a file configured for
689 expansion. '''
693 expansion. '''
690 source = repo.dirstate.copied(dest)
694 source = repo.dirstate.copied(dest)
691 if 'l' in wctx.flags(source):
695 if 'l' in wctx.flags(source):
692 source = pathutil.canonpath(repo.root, cwd,
696 source = pathutil.canonpath(repo.root, cwd,
693 os.path.realpath(source))
697 os.path.realpath(source))
694 return kwt.match(source)
698 return kwt.match(source)
695
699
696 candidates = [f for f in repo.dirstate.copies() if
700 candidates = [f for f in repo.dirstate.copies() if
697 'l' not in wctx.flags(f) and haskwsource(f)]
701 'l' not in wctx.flags(f) and haskwsource(f)]
698 kwt.overwrite(wctx, candidates, False, False)
702 kwt.overwrite(wctx, candidates, False, False)
699 finally:
703 finally:
700 wlock.release()
704 wlock.release()
701
705
702 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
706 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
703 '''Wraps record.dorecord expanding keywords after recording.'''
707 '''Wraps record.dorecord expanding keywords after recording.'''
704 wlock = repo.wlock()
708 wlock = repo.wlock()
705 try:
709 try:
706 # record returns 0 even when nothing has changed
710 # record returns 0 even when nothing has changed
707 # therefore compare nodes before and after
711 # therefore compare nodes before and after
708 kwt.postcommit = True
712 kwt.postcommit = True
709 ctx = repo['.']
713 ctx = repo['.']
710 wstatus = ctx.status()
714 wstatus = ctx.status()
711 ret = orig(ui, repo, commitfunc, *pats, **opts)
715 ret = orig(ui, repo, commitfunc, *pats, **opts)
712 recctx = repo['.']
716 recctx = repo['.']
713 if ctx != recctx:
717 if ctx != recctx:
714 modified, added = _preselect(wstatus, recctx.files())
718 modified, added = _preselect(wstatus, recctx.files())
715 kwt.restrict = False
719 kwt.restrict = False
716 kwt.overwrite(recctx, modified, False, True)
720 kwt.overwrite(recctx, modified, False, True)
717 kwt.overwrite(recctx, added, False, True, True)
721 kwt.overwrite(recctx, added, False, True, True)
718 kwt.restrict = True
722 kwt.restrict = True
719 return ret
723 return ret
720 finally:
724 finally:
721 wlock.release()
725 wlock.release()
722
726
723 def kwfilectx_cmp(orig, self, fctx):
727 def kwfilectx_cmp(orig, self, fctx):
724 # keyword affects data size, comparing wdir and filelog size does
728 # keyword affects data size, comparing wdir and filelog size does
725 # not make sense
729 # not make sense
726 if (fctx._filerev is None and
730 if (fctx._filerev is None and
727 (self._repo._encodefilterpats or
731 (self._repo._encodefilterpats or
728 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
732 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
729 self.size() - 4 == fctx.size()) or
733 self.size() - 4 == fctx.size()) or
730 self.size() == fctx.size()):
734 self.size() == fctx.size()):
731 return self._filelog.cmp(self._filenode, fctx.data())
735 return self._filelog.cmp(self._filenode, fctx.data())
732 return True
736 return True
733
737
734 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
738 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
735 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
739 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
736 extensions.wrapfunction(patch, 'diff', kwdiff)
740 extensions.wrapfunction(patch, 'diff', kwdiff)
737 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
741 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
738 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
742 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
739 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
743 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
740 for c in 'annotate changeset rev filediff diff'.split():
744 for c in 'annotate changeset rev filediff diff'.split():
741 extensions.wrapfunction(webcommands, c, kwweb_skip)
745 extensions.wrapfunction(webcommands, c, kwweb_skip)
742 repo.__class__ = kwrepo
746 repo.__class__ = kwrepo
@@ -1,128 +1,132 @@
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''track large binary files
9 '''track large binary files
10
10
11 Large binary files tend to be not very compressible, not very
11 Large binary files tend to be not very compressible, not very
12 diffable, and not at all mergeable. Such files are not handled
12 diffable, and not at all mergeable. Such files are not handled
13 efficiently by Mercurial's storage format (revlog), which is based on
13 efficiently by Mercurial's storage format (revlog), which is based on
14 compressed binary deltas; storing large binary files as regular
14 compressed binary deltas; storing large binary files as regular
15 Mercurial files wastes bandwidth and disk space and increases
15 Mercurial files wastes bandwidth and disk space and increases
16 Mercurial's memory usage. The largefiles extension addresses these
16 Mercurial's memory usage. The largefiles extension addresses these
17 problems by adding a centralized client-server layer on top of
17 problems by adding a centralized client-server layer on top of
18 Mercurial: largefiles live in a *central store* out on the network
18 Mercurial: largefiles live in a *central store* out on the network
19 somewhere, and you only fetch the revisions that you need when you
19 somewhere, and you only fetch the revisions that you need when you
20 need them.
20 need them.
21
21
22 largefiles works by maintaining a "standin file" in .hglf/ for each
22 largefiles works by maintaining a "standin file" in .hglf/ for each
23 largefile. The standins are small (41 bytes: an SHA-1 hash plus
23 largefile. The standins are small (41 bytes: an SHA-1 hash plus
24 newline) and are tracked by Mercurial. Largefile revisions are
24 newline) and are tracked by Mercurial. Largefile revisions are
25 identified by the SHA-1 hash of their contents, which is written to
25 identified by the SHA-1 hash of their contents, which is written to
26 the standin. largefiles uses that revision ID to get/put largefile
26 the standin. largefiles uses that revision ID to get/put largefile
27 revisions from/to the central store. This saves both disk space and
27 revisions from/to the central store. This saves both disk space and
28 bandwidth, since you don't need to retrieve all historical revisions
28 bandwidth, since you don't need to retrieve all historical revisions
29 of large files when you clone or pull.
29 of large files when you clone or pull.
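
As a rough sketch (a simplified illustration, not the extension's actual
code), a standin's 41-byte content is essentially the SHA-1 of the
largefile plus a newline::

    import hashlib

    def standin_contents(data):
        # 40 hex characters of SHA-1 plus a trailing newline = 41 bytes
        return hashlib.sha1(data).hexdigest() + '\n'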
30
30
31 To start a new repository or add new large binary files, just add
31 To start a new repository or add new large binary files, just add
32 --large to your :hg:`add` command. For example::
32 --large to your :hg:`add` command. For example::
33
33
34 $ dd if=/dev/urandom of=randomdata count=2000
34 $ dd if=/dev/urandom of=randomdata count=2000
35 $ hg add --large randomdata
35 $ hg add --large randomdata
36 $ hg commit -m 'add randomdata as a largefile'
36 $ hg commit -m 'add randomdata as a largefile'
37
37
38 When you push a changeset that adds/modifies largefiles to a remote
38 When you push a changeset that adds/modifies largefiles to a remote
39 repository, its largefile revisions will be uploaded along with it.
39 repository, its largefile revisions will be uploaded along with it.
40 Note that the remote Mercurial must also have the largefiles extension
40 Note that the remote Mercurial must also have the largefiles extension
41 enabled for this to work.
41 enabled for this to work.
42
42
43 When you pull a changeset that affects largefiles from a remote
43 When you pull a changeset that affects largefiles from a remote
44 repository, the largefiles for the changeset will by default not be
44 repository, the largefiles for the changeset will by default not be
45 pulled down. However, when you update to such a revision, any
45 pulled down. However, when you update to such a revision, any
46 largefiles needed by that revision are downloaded and cached (if
46 largefiles needed by that revision are downloaded and cached (if
47 they have never been downloaded before). One way to pull largefiles
47 they have never been downloaded before). One way to pull largefiles
48 when pulling is thus to use --update, which will update your working
48 when pulling is thus to use --update, which will update your working
49 copy to the latest pulled revision (thereby downloading any new
49 copy to the latest pulled revision (thereby downloading any new
50 largefiles).
50 largefiles).
51
51
52 If you want to pull largefiles you don't need for update yet, then
52 If you want to pull largefiles you don't need for update yet, then
53 you can use pull with the `--lfrev` option or the :hg:`lfpull` command.
53 you can use pull with the `--lfrev` option or the :hg:`lfpull` command.
54
54
55 If you know you are pulling from a non-default location and want to
55 If you know you are pulling from a non-default location and want to
56 download all the largefiles that correspond to the new changesets at
56 download all the largefiles that correspond to the new changesets at
57 the same time, then you can pull with `--lfrev "pulled()"`.
57 the same time, then you can pull with `--lfrev "pulled()"`.
58
58
59 If you just want to ensure that you will have the largefiles needed to
59 If you just want to ensure that you will have the largefiles needed to
60 merge or rebase with new heads that you are pulling, then you can pull
60 merge or rebase with new heads that you are pulling, then you can pull
61 with the `--lfrev "head(pulled())"` flag to pre-emptively download any largefiles
61 with the `--lfrev "head(pulled())"` flag to pre-emptively download any largefiles
62 that are new in the heads you are pulling.
62 that are new in the heads you are pulling.
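
For example (illustrative command line; see :hg:`help pull` for the
authoritative syntax)::

    $ hg pull --lfrev "head(pulled())"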
63
63
64 Keep in mind that network access may now be required to update to
64 Keep in mind that network access may now be required to update to
65 changesets that you have not previously updated to. The nature of the
65 changesets that you have not previously updated to. The nature of the
66 largefiles extension means that updating is no longer guaranteed to
66 largefiles extension means that updating is no longer guaranteed to
67 be a local-only operation.
67 be a local-only operation.
68
68
69 If you already have large files tracked by Mercurial without the
69 If you already have large files tracked by Mercurial without the
70 largefiles extension, you will need to convert your repository in
70 largefiles extension, you will need to convert your repository in
71 order to benefit from largefiles. This is done with the
71 order to benefit from largefiles. This is done with the
72 :hg:`lfconvert` command::
72 :hg:`lfconvert` command::
73
73
74 $ hg lfconvert --size 10 oldrepo newrepo
74 $ hg lfconvert --size 10 oldrepo newrepo
75
75
76 In repositories that already have largefiles in them, any new file
76 In repositories that already have largefiles in them, any new file
77 over 10MB will automatically be added as a largefile. To change this
77 over 10MB will automatically be added as a largefile. To change this
78 threshold, set ``largefiles.minsize`` in your Mercurial config file
78 threshold, set ``largefiles.minsize`` in your Mercurial config file
79 to the minimum size in megabytes to track as a largefile, or use the
79 to the minimum size in megabytes to track as a largefile, or use the
80 --lfsize option to the add command (also in megabytes)::
80 --lfsize option to the add command (also in megabytes)::
81
81
82 [largefiles]
82 [largefiles]
83 minsize = 2
83 minsize = 2
84
84
85 $ hg add --lfsize 2
85 $ hg add --lfsize 2
86
86
87 The ``largefiles.patterns`` config option allows you to specify a list
87 The ``largefiles.patterns`` config option allows you to specify a list
88 of filename patterns (see :hg:`help patterns`) that should always be
88 of filename patterns (see :hg:`help patterns`) that should always be
89 tracked as largefiles::
89 tracked as largefiles::
90
90
91 [largefiles]
91 [largefiles]
92 patterns =
92 patterns =
93 *.jpg
93 *.jpg
94 re:.*\.(png|bmp)$
94 re:.*\.(png|bmp)$
95 library.zip
95 library.zip
96 content/audio/*
96 content/audio/*
97
97
98 Files that match one of these patterns will be added as largefiles
98 Files that match one of these patterns will be added as largefiles
99 regardless of their size.
99 regardless of their size.
100
100
101 The ``largefiles.minsize`` and ``largefiles.patterns`` config options
101 The ``largefiles.minsize`` and ``largefiles.patterns`` config options
102 will be ignored for any repositories not already containing a
102 will be ignored for any repositories not already containing a
103 largefile. To add the first largefile to a repository, you must
103 largefile. To add the first largefile to a repository, you must
104 explicitly do so with the --large flag passed to the :hg:`add`
104 explicitly do so with the --large flag passed to the :hg:`add`
105 command.
105 command.
106 '''
106 '''
107
107
108 from mercurial import hg, localrepo
108 from mercurial import hg, localrepo
109
109
110 import lfcommands
110 import lfcommands
111 import proto
111 import proto
112 import reposetup
112 import reposetup
113 import uisetup as uisetupmod
113 import uisetup as uisetupmod
114
114
115 # Note for extension authors: ONLY specify testedwith = 'internal' for
116 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
117 # be specifying the version(s) of Mercurial they are tested with, or
118 # leave the attribute unspecified.
115 testedwith = 'internal'
119 testedwith = 'internal'
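# Illustration (not part of this changeset): per the note above, a
# hypothetical third-party extension would instead declare the Mercurial
# releases it was tested against, for example
#
#     testedwith = '3.3 3.4'    # version numbers are made up here
#
# or leave the attribute unspecified altogether.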
116
120
117 reposetup = reposetup.reposetup
121 reposetup = reposetup.reposetup
118
122
119 def featuresetup(ui, supported):
123 def featuresetup(ui, supported):
120 # don't die on seeing a repo with the largefiles requirement
124 # don't die on seeing a repo with the largefiles requirement
121 supported |= set(['largefiles'])
125 supported |= set(['largefiles'])
122
126
123 def uisetup(ui):
127 def uisetup(ui):
124 localrepo.localrepository.featuresetupfuncs.add(featuresetup)
128 localrepo.localrepository.featuresetupfuncs.add(featuresetup)
125 hg.wirepeersetupfuncs.append(proto.wirereposetup)
129 hg.wirepeersetupfuncs.append(proto.wirereposetup)
126 uisetupmod.uisetup(ui)
130 uisetupmod.uisetup(ui)
127
131
128 cmdtable = lfcommands.cmdtable
132 cmdtable = lfcommands.cmdtable
@@ -1,3579 +1,3583 @@
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 file creations or deletions. This behaviour can be configured with::
31 file creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 makes them behave as if --keep-changes were passed, and non-conflicting
57 makes them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from mercurial.i18n import _
65 from mercurial.i18n import _
66 from mercurial.node import bin, hex, short, nullid, nullrev
66 from mercurial.node import bin, hex, short, nullid, nullrev
67 from mercurial.lock import release
67 from mercurial.lock import release
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
69 from mercurial import extensions, error, phases
69 from mercurial import extensions, error, phases
70 from mercurial import patch as patchmod
70 from mercurial import patch as patchmod
71 from mercurial import localrepo
71 from mercurial import localrepo
72 from mercurial import subrepo
72 from mercurial import subrepo
73 import os, re, errno, shutil
73 import os, re, errno, shutil
74
74
75 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
75 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
76
76
77 cmdtable = {}
77 cmdtable = {}
78 command = cmdutil.command(cmdtable)
78 command = cmdutil.command(cmdtable)
79 # Note for extension authors: ONLY specify testedwith = 'internal' for
80 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 # be specifying the version(s) of Mercurial they are tested with, or
82 # leave the attribute unspecified.
79 testedwith = 'internal'
83 testedwith = 'internal'
80
84
81 # force load strip extension formerly included in mq and import some utility
85 # force load strip extension formerly included in mq and import some utility
82 try:
86 try:
83 stripext = extensions.find('strip')
87 stripext = extensions.find('strip')
84 except KeyError:
88 except KeyError:
85 # note: load is lazy so we could avoid the try-except,
89 # note: load is lazy so we could avoid the try-except,
86 # but I (marmoute) prefer this explicit code.
90 # but I (marmoute) prefer this explicit code.
87 class dummyui(object):
91 class dummyui(object):
88 def debug(self, msg):
92 def debug(self, msg):
89 pass
93 pass
90 stripext = extensions.load(dummyui(), 'strip', '')
94 stripext = extensions.load(dummyui(), 'strip', '')
91
95
92 strip = stripext.strip
96 strip = stripext.strip
93 checksubstate = stripext.checksubstate
97 checksubstate = stripext.checksubstate
94 checklocalchanges = stripext.checklocalchanges
98 checklocalchanges = stripext.checklocalchanges
95
99
96
100
97 # Patch names look like unix-file names.
101 # Patch names look like unix-file names.
98 # They must be joinable with the queue directory and result in the patch path.
102 # They must be joinable with the queue directory and result in the patch path.
99 normname = util.normpath
103 normname = util.normpath
100
104
101 class statusentry(object):
105 class statusentry(object):
102 def __init__(self, node, name):
106 def __init__(self, node, name):
103 self.node, self.name = node, name
107 self.node, self.name = node, name
104 def __repr__(self):
108 def __repr__(self):
105 return hex(self.node) + ':' + self.name
109 return hex(self.node) + ':' + self.name
106
110
107 # The order of the headers in 'hg export' HG patches:
111 # The order of the headers in 'hg export' HG patches:
108 HGHEADERS = [
112 HGHEADERS = [
109 # '# HG changeset patch',
113 # '# HG changeset patch',
110 '# User ',
114 '# User ',
111 '# Date ',
115 '# Date ',
112 '# ',
116 '# ',
113 '# Branch ',
117 '# Branch ',
114 '# Node ID ',
118 '# Node ID ',
115 '# Parent ', # can occur twice for merges - but that is not relevant for mq
119 '# Parent ', # can occur twice for merges - but that is not relevant for mq
116 ]
120 ]
117 # The order of headers in plain 'mail style' patches:
121 # The order of headers in plain 'mail style' patches:
118 PLAINHEADERS = {
122 PLAINHEADERS = {
119 'from': 0,
123 'from': 0,
120 'date': 1,
124 'date': 1,
121 'subject': 2,
125 'subject': 2,
122 }
126 }
123
127
124 def inserthgheader(lines, header, value):
128 def inserthgheader(lines, header, value):
125 """Assuming lines contains a HG patch header, add a header line with value.
129 """Assuming lines contains a HG patch header, add a header line with value.
126 >>> try: inserthgheader([], '# Date ', 'z')
130 >>> try: inserthgheader([], '# Date ', 'z')
127 ... except ValueError, inst: print "oops"
131 ... except ValueError, inst: print "oops"
128 oops
132 oops
129 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
133 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
130 ['# HG changeset patch', '# Date z']
134 ['# HG changeset patch', '# Date z']
131 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
135 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
132 ['# HG changeset patch', '# Date z', '']
136 ['# HG changeset patch', '# Date z', '']
133 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
137 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
134 ['# HG changeset patch', '# User y', '# Date z']
138 ['# HG changeset patch', '# User y', '# Date z']
135 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
139 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
136 ... '# User ', 'z')
140 ... '# User ', 'z')
137 ['# HG changeset patch', '# Date x', '# User z']
141 ['# HG changeset patch', '# Date x', '# User z']
138 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
142 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
139 ['# HG changeset patch', '# Date z']
143 ['# HG changeset patch', '# Date z']
140 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
144 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
141 ['# HG changeset patch', '# Date z', '', '# Date y']
145 ['# HG changeset patch', '# Date z', '', '# Date y']
142 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
146 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
143 ['# HG changeset patch', '# Date z', '# Parent y']
147 ['# HG changeset patch', '# Date z', '# Parent y']
144 """
148 """
145 start = lines.index('# HG changeset patch') + 1
149 start = lines.index('# HG changeset patch') + 1
146 newindex = HGHEADERS.index(header)
150 newindex = HGHEADERS.index(header)
147 bestpos = len(lines)
151 bestpos = len(lines)
148 for i in range(start, len(lines)):
152 for i in range(start, len(lines)):
149 line = lines[i]
153 line = lines[i]
150 if not line.startswith('# '):
154 if not line.startswith('# '):
151 bestpos = min(bestpos, i)
155 bestpos = min(bestpos, i)
152 break
156 break
153 for lineindex, h in enumerate(HGHEADERS):
157 for lineindex, h in enumerate(HGHEADERS):
154 if line.startswith(h):
158 if line.startswith(h):
155 if lineindex == newindex:
159 if lineindex == newindex:
156 lines[i] = header + value
160 lines[i] = header + value
157 return lines
161 return lines
158 if lineindex > newindex:
162 if lineindex > newindex:
159 bestpos = min(bestpos, i)
163 bestpos = min(bestpos, i)
160 break # next line
164 break # next line
161 lines.insert(bestpos, header + value)
165 lines.insert(bestpos, header + value)
162 return lines
166 return lines
163
167
def insertplainheader(lines, header, value):
    """For lines containing a plain patch header, add a header line with value.
    >>> insertplainheader([], 'Date', 'z')
    ['Date: z']
    >>> insertplainheader([''], 'Date', 'z')
    ['Date: z', '']
    >>> insertplainheader(['x'], 'Date', 'z')
    ['Date: z', '', 'x']
    >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
    ['From: y', 'Date: z', '', 'x']
    >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
    [' date : x', 'From: z', '']
    >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
    ['Date: z', '', 'Date: y']
    >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
    ['From: y', 'foo: bar', 'DATE: z', '', 'x']
    """
    newprio = PLAINHEADERS[header.lower()]
    bestpos = len(lines)
    for i, line in enumerate(lines):
        if ':' in line:
            lheader = line.split(':', 1)[0].strip().lower()
            lprio = PLAINHEADERS.get(lheader, newprio + 1)
            if lprio == newprio:
                lines[i] = '%s: %s' % (header, value)
                return lines
            if lprio > newprio and i < bestpos:
                bestpos = i
        else:
            if line:
                lines.insert(i, '')
            if i < bestpos:
                bestpos = i
            break
    lines.insert(bestpos, '%s: %s' % (header, value))
    return lines

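# Note (editor's annotation, not part of the original source): plain headers
# are placed by the priorities in PLAINHEADERS and matched case-insensitively;
# the doctests above show 'Date' landing after 'From', and an existing
# ' from : y' line being replaced in place regardless of case and spacing.
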
class patchheader(object):
    def __init__(self, pf, plainmode=False):
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            while lines:
                if not lines[-1].strip():
                    del lines[-1]
                else:
                    break

        message = []
        comments = []
        user = None
        date = None
        parent = None
        format = None
        subject = None
        branch = None
        nodeid = None
        diffstart = 0

        for line in file(pf):
            line = line.rstrip()
            if (line.startswith('diff --git')
                or (diffstart and line.startswith('+++ '))):
                diffstart = 2
                break
            diffstart = 0 # reset
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif line.startswith("# Parent "):
                    parent = line[9:].lstrip() # handle double trailing space
                elif line.startswith("# Branch "):
                    branch = line[9:]
                elif line.startswith("# Node ID "):
                    nodeid = line[10:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                message = []
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("Date: ") or
                                           line.startswith("date: "))):
                date = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        # Remember the exact starting line of the patch diffs before consuming
        # empty lines, for external use by TortoiseHg and others
        self.diffstartline = len(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, subject)

        self.message = message
        self.comments = comments
        self.user = user
        self.date = date
        self.parent = parent
        # nodeid and branch are for external use by TortoiseHg and others
        self.nodeid = nodeid
        self.branch = branch
        self.haspatch = diffstart > 1
        self.plainmode = (plainmode or
                          '# HG changeset patch' not in self.comments and
                          any(c.startswith('Date: ') or
                              c.startswith('From: ')
                              for c in self.comments))

    def setuser(self, user):
        try:
            inserthgheader(self.comments, '# User ', user)
        except ValueError:
            if self.plainmode:
                insertplainheader(self.comments, 'From', user)
            else:
                tmp = ['# HG changeset patch', '# User ' + user]
                self.comments = tmp + self.comments
        self.user = user

    def setdate(self, date):
        try:
            inserthgheader(self.comments, '# Date ', date)
        except ValueError:
            if self.plainmode:
                insertplainheader(self.comments, 'Date', date)
            else:
                tmp = ['# HG changeset patch', '# Date ' + date]
                self.comments = tmp + self.comments
        self.date = date

    def setparent(self, parent):
        try:
            inserthgheader(self.comments, '# Parent ', parent)
        except ValueError:
            if not self.plainmode:
                tmp = ['# HG changeset patch', '# Parent ' + parent]
                self.comments = tmp + self.comments
        self.parent = parent

    def setmessage(self, message):
        if self.comments:
            self._delmsg()
        self.message = [message]
        if message:
            if self.plainmode and self.comments and self.comments[-1]:
                self.comments.append('')
            self.comments.append(message)

    def __str__(self):
        s = '\n'.join(self.comments).rstrip()
        if not s:
            return ''
        return s + '\n\n'

    def _delmsg(self):
        '''Remove the existing message, keeping the rest of the comments
        fields. If the comments contain a 'subject: ' line, the message is
        expected to begin with that field followed by a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    self.message = self.message[2:]
                    break
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]

def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensuring a commit respects the mq.secret setting

    It should be used instead of repo.commit inside the mq source for
    operations creating new changesets.
    """
    repo = repo.unfiltered()
    if phase is None:
        if repo.ui.configbool('mq', 'secret', False):
            phase = phases.secret
    if phase is not None:
        phasebackup = repo.ui.backupconfig('phases', 'new-commit')
    allowemptybackup = repo.ui.backupconfig('ui', 'allowemptycommit')
    try:
        if phase is not None:
            repo.ui.setconfig('phases', 'new-commit', phase, 'mq')
        repo.ui.setconfig('ui', 'allowemptycommit', True)
        return repo.commit(*args, **kwargs)
    finally:
        repo.ui.restoreconfig(allowemptybackup)
        if phase is not None:
            repo.ui.restoreconfig(phasebackup)

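# Illustrative usage (editor's sketch, not in the original source): callers in
# this file invoke newcommit() instead of repo.commit() so that mq.secret and
# phases.new-commit are honoured, e.g.:
#
#     n = newcommit(repo, None, '[mq]: my-patch', user, date,
#                   match=match, force=True)
#
# Passing phase=None lets the mq.secret configuration decide whether the new
# changeset is created secret.
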
class AbortNoCleanup(error.Abort):
    pass

class queue(object):
    def __init__(self, ui, baseui, path, patchdir=None):
        self.basepath = path
        try:
            fh = open(os.path.join(path, 'patches.queue'))
            cur = fh.read().rstrip()
            fh.close()
            if not cur:
                curpath = os.path.join(path, 'patches')
            else:
                curpath = os.path.join(path, 'patches-' + cur)
        except IOError:
            curpath = os.path.join(path, 'patches')
        self.path = patchdir or curpath
        self.opener = scmutil.opener(self.path)
        self.ui = ui
        self.baseui = baseui
        self.applieddirty = False
        self.seriesdirty = False
        self.added = []
        self.seriespath = "series"
        self.statuspath = "status"
        self.guardspath = "guards"
        self.activeguards = None
        self.guardsdirty = False
        # Handle mq.git as a bool with extended values
        try:
            gitmode = ui.configbool('mq', 'git', None)
            if gitmode is None:
                raise error.ConfigError
            if gitmode:
                self.gitmode = 'yes'
            else:
                self.gitmode = 'no'
        except error.ConfigError:
            self.gitmode = ui.config('mq', 'git', 'auto').lower()
        self.plainmode = ui.configbool('mq', 'plain', False)
        self.checkapplied = True

    @util.propertycache
    def applied(self):
        def parselines(lines):
            for l in lines:
                entry = l.split(':', 1)
                if len(entry) > 1:
                    n, name = entry
                    yield statusentry(bin(n), name)
                elif l.strip():
                    self.ui.warn(_('malformed mq status line: %s\n') % entry)
                # else we ignore empty lines
        try:
            lines = self.opener.read(self.statuspath).splitlines()
            return list(parselines(lines))
        except IOError, e:
            if e.errno == errno.ENOENT:
                return []
            raise

    @util.propertycache
    def fullseries(self):
        try:
            return self.opener.read(self.seriespath).splitlines()
        except IOError, e:
            if e.errno == errno.ENOENT:
                return []
            raise

    @util.propertycache
    def series(self):
        self.parseseries()
        return self.series

    @util.propertycache
    def seriesguards(self):
        self.parseseries()
        return self.seriesguards

    def invalidate(self):
        for a in 'applied fullseries series seriesguards'.split():
            if a in self.__dict__:
                delattr(self, a)
        self.applieddirty = False
        self.seriesdirty = False
        self.guardsdirty = False
        self.activeguards = None

    def diffopts(self, opts={}, patchfn=None):
        diffopts = patchmod.diffopts(self.ui, opts)
        if self.gitmode == 'auto':
            diffopts.upgrade = True
        elif self.gitmode == 'keep':
            pass
        elif self.gitmode in ('yes', 'no'):
            diffopts.git = self.gitmode == 'yes'
        else:
            raise util.Abort(_('mq.git option can be auto/keep/yes/no'
                               ' got %s') % self.gitmode)
        if patchfn:
            diffopts = self.patchopts(diffopts, patchfn)
        return diffopts

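    # Illustrative configuration (editor's note, not part of the original
    # source): the mq.git handling above maps an hgrc entry such as
    #
    #     [mq]
    #     git = keep
    #
    # onto self.gitmode; boolean values become 'yes'/'no', any other value is
    # taken verbatim (lower-cased), and 'auto' asks the diff code to upgrade
    # to git format only when needed.
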
    def patchopts(self, diffopts, *patches):
        """Return a copy of input diff options with git set to true if
        referenced patch is a git patch and should be preserved as such.
        """
        diffopts = diffopts.copy()
        if not diffopts.git and self.gitmode == 'keep':
            for patchfn in patches:
                patchf = self.opener(patchfn, 'r')
                # if the patch was a git patch, refresh it as a git patch
                for line in patchf:
                    if line.startswith('diff --git'):
                        diffopts.git = True
                        break
                patchf.close()
        return diffopts

    def join(self, *p):
        return os.path.join(self.path, *p)

    def findseries(self, patch):
        def matchpatch(l):
            l = l.split('#', 1)[0]
            return l.strip() == patch
        for index, l in enumerate(self.fullseries):
            if matchpatch(l):
                return index
        return None

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

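    # Illustrative example (editor's annotation, not in the original source):
    # guard_re captures guard tokens that follow a '#' in a series line, e.g.
    #
    #     guard_re.findall('fix-encoding.patch #+experimental #-stable')
    #     ==> ['+experimental', '-stable']
    #
    # A bare comment such as '# notes for reviewers' contains no '+'/'-' token
    # after the '#' and therefore yields no guards.
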
    def parseseries(self):
        self.series = []
        self.seriesguards = []
        for l in self.fullseries:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.seriespath)))
                self.series.append(patch)
                self.seriesguards.append(self.guard_re.findall(comment))

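    # Illustrative series file (editor's sketch, not part of the original
    # source):
    #
    #     # full-line comments are skipped entirely
    #     first.patch
    #     second.patch #+experimental
    #
    # parses into series = ['first.patch', 'second.patch'] and
    # seriesguards = [[], ['+experimental']]; listing the same patch name
    # twice raises util.Abort.
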
    def checkguard(self, guard):
        if not guard:
            return _('guard cannot be an empty string')
        bad_chars = '# \t\r\n\f'
        first = guard[0]
        if first in '-+':
            return (_('guard %r starts with invalid character: %r') %
                    (guard, first))
        for c in bad_chars:
            if c in guard:
                return _('invalid character in guard %r: %r') % (guard, c)

    def setactive(self, guards):
        for guard in guards:
            bad = self.checkguard(guard)
            if bad:
                raise util.Abort(bad)
        guards = sorted(set(guards))
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.activeguards = guards
        self.guardsdirty = True

    def active(self):
        if self.activeguards is None:
            self.activeguards = []
            try:
                guards = self.opener.read(self.guardspath).split()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.checkguard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guardspath), i + 1, bad))
                else:
                    self.activeguards.append(guard)
        return self.activeguards

    def setguards(self, idx, guards):
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.checkguard(g[1:])
            if bad:
                raise util.Abort(bad)
        drop = self.guard_re.sub('', self.fullseries[idx])
        self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
        self.parseseries()
        self.seriesdirty = True

    def pushable(self, idx):
        if isinstance(idx, str):
            idx = self.series.index(idx)
        patchguards = self.seriesguards[idx]
        if not patchguards:
            return True, None
        guards = self.active()
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
        if exactneg:
            return False, repr(exactneg[0])
        pos = [g for g in patchguards if g[0] == '+']
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, repr(exactpos[0])
            return False, ' '.join(map(repr, pos))
        return True, ''

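    # Illustrative guard semantics (editor's annotation, not in the original
    # source): with active guards ['stable'], a patch guarded '#+stable' is
    # pushable, one guarded '#-stable' is skipped, and one guarded only
    # '#+experimental' is skipped because no positive guard matches. A patch
    # with no guards at all is always pushable.
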
    def explainpushable(self, idx, all_patches=False):
        if all_patches:
            write = self.ui.write
        else:
            write = self.ui.warn

        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %s\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %s\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])

    def savedirty(self):
        def writelist(items, path):
            fp = self.opener(path, 'w')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applieddirty:
            writelist(map(str, self.applied), self.statuspath)
            self.applieddirty = False
        if self.seriesdirty:
            writelist(self.fullseries, self.seriespath)
            self.seriesdirty = False
        if self.guardsdirty:
            writelist(self.activeguards, self.guardspath)
            self.guardsdirty = False
        if self.added:
            qrepo = self.qrepo()
            if qrepo:
                qrepo[None].add(f for f in self.added if f not in qrepo[None])
            self.added = []

    def removeundo(self, repo):
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn(_('error removing undo: %s\n') % str(inst))

    def backup(self, repo, files, copy=False):
        # backup local changes in --force case
        for f in sorted(files):
            absf = repo.wjoin(f)
            if os.path.lexists(absf):
                self.ui.note(_('saving current version of %s as %s\n') %
                             (f, f + '.orig'))
                if copy:
                    util.copyfile(absf, absf + '.orig')
                else:
                    util.rename(absf, absf + '.orig')

    def printdiff(self, repo, diffopts, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        stat = opts.get('stat')
        m = scmutil.match(repo[node1], files, opts)
        cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
                               changes, stat, fp)

    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise util.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise util.Abort(_("unable to read %s") % patch)

        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = str(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)

    def qparents(self, repo, rev=None):
        """return the mq handled parent or p1

        In some cases where mq ends up being the parent of a merge, the
        appropriate parent may be p2
        (e.g. an in-progress merge started with mq disabled).

        If no parent is managed by mq, p1 is returned.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1

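    # Illustrative behaviour (editor's annotation, not in the original source):
    # for a working-directory merge whose second parent is the topmost applied
    # patch, qparents(repo) returns that applied node rather than p1; outside a
    # merge it simply returns the dirstate's first parent.
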
    def mergepatch(self, repo, mergeq, series, diffopts):
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)

    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file'''
        files = set()
        try:
            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                                  files=files, eolmode=None)
            return (True, list(files), fuzz)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            self.ui.traceback()
            return (False, list(files), False)

    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        wlock = dsguard = lock = tr = None
        try:
            wlock = repo.wlock()
            dsguard = cmdutil.dirstateguard(repo, 'mq.apply')
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                dsguard.close()
                return ret
            except AbortNoCleanup:
                tr.close()
                self.savedirty()
                dsguard.close()
                raise
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    repo.invalidate()
                    self.invalidate()
                raise
        finally:
            release(tr, lock, dsguard, wlock)
            self.removeundo(repo)

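    # Editor's note (not part of the original source): apply() acquires the
    # working-directory lock, a dirstate guard, the store lock and a 'qpush'
    # transaction in that order, and release(tr, lock, dsguard, wlock) in the
    # finally block tears them down in reverse order, so a failed _apply()
    # aborts the transaction and discards the dirstate changes.
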
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("conflicting local changes found"),
                            hint=_("did you forget to qrefresh?"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.beginparentchange()
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)
                repo.dirstate.endparentchange()

            if all_files and '.hgsubstate' in all_files:
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                                                  overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise util.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working "
                               "directory\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)

    def _cleanup(self, patches, numrevs, keep=False):
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError, inst:
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise util.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]

    def _revpatches(self, repo, revs):
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise util.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise util.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches

    def finish(self, repo, revs):
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret', False):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = repo.ui.config('phases', 'new-commit', phases.draft)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                tr = repo.transaction('qfinish')
                try:
                    phases.advanceboundary(repo, tr, tphase, qfinished)
                    tr.close()
                finally:
                    tr.release()

    def delete(self, repo, patches, opts):
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            revs.sort()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))

    def checktoppatch(self, repo):
        '''check that working directory is at qtip'''
        if self.applied:
            top = self.applied[-1].node
            patch = self.applied[-1].name
            if repo.dirstate.p1() != top:
                raise util.Abort(_("working directory revision is not qtip"))
            return top, patch
        return None, None

    def putsubstate2changes(self, substatestate, changes):
        for files in changes[:3]:
            if '.hgsubstate' in files:
                return # already listed
        # not yet listed
        if substatestate in 'a?':
            changes[1].append('.hgsubstate')
        elif substatestate in 'r':
            changes[2].append('.hgsubstate')
        else: # modified
            changes[0].append('.hgsubstate')

    def checklocalchanges(self, repo, force=False, refresh=True):
        excsuffix = ''
        if refresh:
            excsuffix = ', refresh first'
            # plain versions for i18n tool to detect them
            _("local changes found, refresh first")
            _("local changed subrepos found, refresh first")
        return checklocalchanges(repo, force, excsuffix)

    _reserved = ('series', 'status', 'guards', '.', '..')
    def checkreservedname(self, name):
        if name in self._reserved:
            raise util.Abort(_('"%s" cannot be used as the name of a patch')
                             % name)
        for prefix in ('.hg', '.mq'):
            if name.startswith(prefix):
                raise util.Abort(_('patch name cannot begin with "%s"')
                                 % prefix)
        for c in ('#', ':'):
            if c in name:
                raise util.Abort(_('"%s" cannot be used in the name of a patch')
                                 % c)

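    # Illustrative rejects (editor's annotation, not in the original source):
    # checkreservedname() aborts on names such as 'series' or 'status'
    # (reserved files in the patch directory), on names starting with '.hg'
    # or '.mq', and on names containing '#' or ':' -- e.g. 'fix#1' or
    # 'wip:foo'.
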
    def checkpatchname(self, name, force=False):
        self.checkreservedname(name)
        if not force and os.path.exists(self.join(name)):
            if os.path.isdir(self.join(name)):
                raise util.Abort(_('"%s" already exists as a directory')
                                 % name)
            else:
                raise util.Abort(_('patch "%s" already exists') % name)

    def checkkeepchanges(self, keepchanges, force):
        if force and keepchanges:
            raise util.Abort(_('cannot use both --force and --keep-changes'))

    def new(self, repo, patchfn, *pats, **opts):
        """options:
        msg: a string or a no-argument function returning a string
        """
        msg = opts.get('msg')
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qnew')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = checksubstate(repo)
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            match = scmutil.match(repo[None], pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            changes = repo.status(match=match)
        else:
            changes = self.checklocalchanges(repo, force=True)
        commitfiles = list(inclsubs)
        for files in changes[:3]:
            commitfiles.extend(files)
        match = scmutil.matchfiles(repo, commitfiles)
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot manage merge changesets'))
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        wlock = repo.wlock()
        try:
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError, e:
                raise util.Abort(_('cannot write patch "%s": %s')
                                 % (patchfn, e.strerror))
            try:
                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        if desc.rstrip():
                            return desc
                        else:
                            return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    commitmsg = msg
                else:
                    commitmsg = msg or defaultmsg

                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True, editor=editor)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    nctx = repo[n]
1194 nctx = repo[n]
1191 ph = patchheader(self.join(patchfn), self.plainmode)
1195 ph = patchheader(self.join(patchfn), self.plainmode)
1192 if user:
1196 if user:
1193 ph.setuser(user)
1197 ph.setuser(user)
1194 if date:
1198 if date:
1195 ph.setdate('%s %s' % date)
1199 ph.setdate('%s %s' % date)
1196 ph.setparent(hex(nctx.p1().node()))
1200 ph.setparent(hex(nctx.p1().node()))
1197 msg = nctx.description().strip()
1201 msg = nctx.description().strip()
1198 if msg == defaultmsg.strip():
1202 if msg == defaultmsg.strip():
1199 msg = ''
1203 msg = ''
1200 ph.setmessage(msg)
1204 ph.setmessage(msg)
1201 p.write(str(ph))
1205 p.write(str(ph))
1202 if commitfiles:
1206 if commitfiles:
1203 parent = self.qparents(repo, n)
1207 parent = self.qparents(repo, n)
1204 if inclsubs:
1208 if inclsubs:
1205 self.putsubstate2changes(substatestate, changes)
1209 self.putsubstate2changes(substatestate, changes)
1206 chunks = patchmod.diff(repo, node1=parent, node2=n,
1210 chunks = patchmod.diff(repo, node1=parent, node2=n,
1207 changes=changes, opts=diffopts)
1211 changes=changes, opts=diffopts)
1208 for chunk in chunks:
1212 for chunk in chunks:
1209 p.write(chunk)
1213 p.write(chunk)
1210 p.close()
1214 p.close()
1211 r = self.qrepo()
1215 r = self.qrepo()
1212 if r:
1216 if r:
1213 r[None].add([patchfn])
1217 r[None].add([patchfn])
1214 except: # re-raises
1218 except: # re-raises
1215 repo.rollback()
1219 repo.rollback()
1216 raise
1220 raise
1217 except Exception:
1221 except Exception:
1218 patchpath = self.join(patchfn)
1222 patchpath = self.join(patchfn)
1219 try:
1223 try:
1220 os.unlink(patchpath)
1224 os.unlink(patchpath)
1221 except OSError:
1225 except OSError:
1222 self.ui.warn(_('error unlinking %s\n') % patchpath)
1226 self.ui.warn(_('error unlinking %s\n') % patchpath)
1223 raise
1227 raise
1224 self.removeundo(repo)
1228 self.removeundo(repo)
1225 finally:
1229 finally:
1226 release(wlock)
1230 release(wlock)
1227
1231
1228 def isapplied(self, patch):
1232 def isapplied(self, patch):
1229 """returns (index, rev, patch)"""
1233 """returns (index, rev, patch)"""
1230 for i, a in enumerate(self.applied):
1234 for i, a in enumerate(self.applied):
1231 if a.name == patch:
1235 if a.name == patch:
1232 return (i, a.node, a.name)
1236 return (i, a.node, a.name)
1233 return None
1237 return None
1234
1238
1235 # if the exact patch name does not exist, we try a few
1239 # if the exact patch name does not exist, we try a few
1236 # variations. If strict is passed, we try only #1
1240 # variations. If strict is passed, we try only #1
1237 #
1241 #
1238 # 1) a number (as string) to indicate an offset in the series file
1242 # 1) a number (as string) to indicate an offset in the series file
1239 # 2) a unique substring of the patch name was given
1243 # 2) a unique substring of the patch name was given
1240 # 3) patchname[-+]num to indicate an offset in the series file
1244 # 3) patchname[-+]num to indicate an offset in the series file
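# A sketch of how these variations resolve, assuming a hypothetical
# series containing "foo.patch" and "bar.patch" with "foo.patch" applied:
#   lookup('1')           -> 'bar.patch'   # offset into the series file
#   lookup('bar')         -> 'bar.patch'   # unique substring match
#   lookup('foo.patch+1') -> 'bar.patch'   # patch name plus an offset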
1241 def lookup(self, patch, strict=False):
1245 def lookup(self, patch, strict=False):
1242 def partialname(s):
1246 def partialname(s):
1243 if s in self.series:
1247 if s in self.series:
1244 return s
1248 return s
1245 matches = [x for x in self.series if s in x]
1249 matches = [x for x in self.series if s in x]
1246 if len(matches) > 1:
1250 if len(matches) > 1:
1247 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1251 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1248 for m in matches:
1252 for m in matches:
1249 self.ui.warn(' %s\n' % m)
1253 self.ui.warn(' %s\n' % m)
1250 return None
1254 return None
1251 if matches:
1255 if matches:
1252 return matches[0]
1256 return matches[0]
1253 if self.series and self.applied:
1257 if self.series and self.applied:
1254 if s == 'qtip':
1258 if s == 'qtip':
1255 return self.series[self.seriesend(True) - 1]
1259 return self.series[self.seriesend(True) - 1]
1256 if s == 'qbase':
1260 if s == 'qbase':
1257 return self.series[0]
1261 return self.series[0]
1258 return None
1262 return None
1259
1263
1260 if patch in self.series:
1264 if patch in self.series:
1261 return patch
1265 return patch
1262
1266
1263 if not os.path.isfile(self.join(patch)):
1267 if not os.path.isfile(self.join(patch)):
1264 try:
1268 try:
1265 sno = int(patch)
1269 sno = int(patch)
1266 except (ValueError, OverflowError):
1270 except (ValueError, OverflowError):
1267 pass
1271 pass
1268 else:
1272 else:
1269 if -len(self.series) <= sno < len(self.series):
1273 if -len(self.series) <= sno < len(self.series):
1270 return self.series[sno]
1274 return self.series[sno]
1271
1275
1272 if not strict:
1276 if not strict:
1273 res = partialname(patch)
1277 res = partialname(patch)
1274 if res:
1278 if res:
1275 return res
1279 return res
1276 minus = patch.rfind('-')
1280 minus = patch.rfind('-')
1277 if minus >= 0:
1281 if minus >= 0:
1278 res = partialname(patch[:minus])
1282 res = partialname(patch[:minus])
1279 if res:
1283 if res:
1280 i = self.series.index(res)
1284 i = self.series.index(res)
1281 try:
1285 try:
1282 off = int(patch[minus + 1:] or 1)
1286 off = int(patch[minus + 1:] or 1)
1283 except (ValueError, OverflowError):
1287 except (ValueError, OverflowError):
1284 pass
1288 pass
1285 else:
1289 else:
1286 if i - off >= 0:
1290 if i - off >= 0:
1287 return self.series[i - off]
1291 return self.series[i - off]
1288 plus = patch.rfind('+')
1292 plus = patch.rfind('+')
1289 if plus >= 0:
1293 if plus >= 0:
1290 res = partialname(patch[:plus])
1294 res = partialname(patch[:plus])
1291 if res:
1295 if res:
1292 i = self.series.index(res)
1296 i = self.series.index(res)
1293 try:
1297 try:
1294 off = int(patch[plus + 1:] or 1)
1298 off = int(patch[plus + 1:] or 1)
1295 except (ValueError, OverflowError):
1299 except (ValueError, OverflowError):
1296 pass
1300 pass
1297 else:
1301 else:
1298 if i + off < len(self.series):
1302 if i + off < len(self.series):
1299 return self.series[i + off]
1303 return self.series[i + off]
1300 raise util.Abort(_("patch %s not in series") % patch)
1304 raise util.Abort(_("patch %s not in series") % patch)
1301
1305
1302 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1306 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1303 all=False, move=False, exact=False, nobackup=False,
1307 all=False, move=False, exact=False, nobackup=False,
1304 keepchanges=False):
1308 keepchanges=False):
1305 self.checkkeepchanges(keepchanges, force)
1309 self.checkkeepchanges(keepchanges, force)
1306 diffopts = self.diffopts()
1310 diffopts = self.diffopts()
1307 wlock = repo.wlock()
1311 wlock = repo.wlock()
1308 try:
1312 try:
1309 heads = []
1313 heads = []
1310 for hs in repo.branchmap().itervalues():
1314 for hs in repo.branchmap().itervalues():
1311 heads.extend(hs)
1315 heads.extend(hs)
1312 if not heads:
1316 if not heads:
1313 heads = [nullid]
1317 heads = [nullid]
1314 if repo.dirstate.p1() not in heads and not exact:
1318 if repo.dirstate.p1() not in heads and not exact:
1315 self.ui.status(_("(working directory not at a head)\n"))
1319 self.ui.status(_("(working directory not at a head)\n"))
1316
1320
1317 if not self.series:
1321 if not self.series:
1318 self.ui.warn(_('no patches in series\n'))
1322 self.ui.warn(_('no patches in series\n'))
1319 return 0
1323 return 0
1320
1324
1321 # Suppose our series file is: A B C and the current 'top'
1325 # Suppose our series file is: A B C and the current 'top'
1322 # patch is B. qpush C should be performed (moving forward),
1326 # patch is B. qpush C should be performed (moving forward),
1323 # qpush B is a NOP (no change), and qpush A is an error (can't
1327 # qpush B is a NOP (no change), and qpush A is an error (can't
1324 # go backwards with qpush)
1328 # go backwards with qpush)
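# Concretely (hypothetical patch names, matching the code paths below):
#   hg qpush C   -> pushes C
#   hg qpush B   -> warns "qpush: B is already at the top" and returns 0
#   hg qpush A   -> aborts with "cannot push to a previous patch: A"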
1325 if patch:
1329 if patch:
1326 patch = self.lookup(patch)
1330 patch = self.lookup(patch)
1327 info = self.isapplied(patch)
1331 info = self.isapplied(patch)
1328 if info and info[0] >= len(self.applied) - 1:
1332 if info and info[0] >= len(self.applied) - 1:
1329 self.ui.warn(
1333 self.ui.warn(
1330 _('qpush: %s is already at the top\n') % patch)
1334 _('qpush: %s is already at the top\n') % patch)
1331 return 0
1335 return 0
1332
1336
1333 pushable, reason = self.pushable(patch)
1337 pushable, reason = self.pushable(patch)
1334 if pushable:
1338 if pushable:
1335 if self.series.index(patch) < self.seriesend():
1339 if self.series.index(patch) < self.seriesend():
1336 raise util.Abort(
1340 raise util.Abort(
1337 _("cannot push to a previous patch: %s") % patch)
1341 _("cannot push to a previous patch: %s") % patch)
1338 else:
1342 else:
1339 if reason:
1343 if reason:
1340 reason = _('guarded by %s') % reason
1344 reason = _('guarded by %s') % reason
1341 else:
1345 else:
1342 reason = _('no matching guards')
1346 reason = _('no matching guards')
1343 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1347 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1344 return 1
1348 return 1
1345 elif all:
1349 elif all:
1346 patch = self.series[-1]
1350 patch = self.series[-1]
1347 if self.isapplied(patch):
1351 if self.isapplied(patch):
1348 self.ui.warn(_('all patches are currently applied\n'))
1352 self.ui.warn(_('all patches are currently applied\n'))
1349 return 0
1353 return 0
1350
1354
1351 # Following the above example, starting at 'top' of B:
1355 # Following the above example, starting at 'top' of B:
1352 # qpush should be performed (pushes C), but a subsequent
1356 # qpush should be performed (pushes C), but a subsequent
1353 # qpush without an argument is an error (nothing to
1357 # qpush without an argument is an error (nothing to
1354 # apply). This allows a loop of "...while hg qpush..." to
1358 # apply). This allows a loop of "...while hg qpush..." to
1355 # work, since it detects an error once everything has been applied
1359 # work, since it detects an error once everything has been applied
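# A rough, hypothetical session illustrating that loop (output
# abbreviated; assumes patch C is the only unapplied patch):
#   $ while hg qpush; do :; done
#   applying C
#   now at: C
#   patch series already fully applied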
1356 start = self.seriesend()
1360 start = self.seriesend()
1357 if start == len(self.series):
1361 if start == len(self.series):
1358 self.ui.warn(_('patch series already fully applied\n'))
1362 self.ui.warn(_('patch series already fully applied\n'))
1359 return 1
1363 return 1
1360 if not force and not keepchanges:
1364 if not force and not keepchanges:
1361 self.checklocalchanges(repo, refresh=self.applied)
1365 self.checklocalchanges(repo, refresh=self.applied)
1362
1366
1363 if exact:
1367 if exact:
1364 if keepchanges:
1368 if keepchanges:
1365 raise util.Abort(
1369 raise util.Abort(
1366 _("cannot use --exact and --keep-changes together"))
1370 _("cannot use --exact and --keep-changes together"))
1367 if move:
1371 if move:
1368 raise util.Abort(_('cannot use --exact and --move '
1372 raise util.Abort(_('cannot use --exact and --move '
1369 'together'))
1373 'together'))
1370 if self.applied:
1374 if self.applied:
1371 raise util.Abort(_('cannot push --exact with applied '
1375 raise util.Abort(_('cannot push --exact with applied '
1372 'patches'))
1376 'patches'))
1373 root = self.series[start]
1377 root = self.series[start]
1374 target = patchheader(self.join(root), self.plainmode).parent
1378 target = patchheader(self.join(root), self.plainmode).parent
1375 if not target:
1379 if not target:
1376 raise util.Abort(
1380 raise util.Abort(
1377 _("%s does not have a parent recorded") % root)
1381 _("%s does not have a parent recorded") % root)
1378 if not repo[target] == repo['.']:
1382 if not repo[target] == repo['.']:
1379 hg.update(repo, target)
1383 hg.update(repo, target)
1380
1384
1381 if move:
1385 if move:
1382 if not patch:
1386 if not patch:
1383 raise util.Abort(_("please specify the patch to move"))
1387 raise util.Abort(_("please specify the patch to move"))
1384 for fullstart, rpn in enumerate(self.fullseries):
1388 for fullstart, rpn in enumerate(self.fullseries):
1385 # strip markers for patch guards
1389 # strip markers for patch guards
1386 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1390 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1387 break
1391 break
1388 for i, rpn in enumerate(self.fullseries[fullstart:]):
1392 for i, rpn in enumerate(self.fullseries[fullstart:]):
1389 # strip markers for patch guards
1393 # strip markers for patch guards
1390 if self.guard_re.split(rpn, 1)[0] == patch:
1394 if self.guard_re.split(rpn, 1)[0] == patch:
1391 break
1395 break
1392 index = fullstart + i
1396 index = fullstart + i
1393 assert index < len(self.fullseries)
1397 assert index < len(self.fullseries)
1394 fullpatch = self.fullseries[index]
1398 fullpatch = self.fullseries[index]
1395 del self.fullseries[index]
1399 del self.fullseries[index]
1396 self.fullseries.insert(fullstart, fullpatch)
1400 self.fullseries.insert(fullstart, fullpatch)
1397 self.parseseries()
1401 self.parseseries()
1398 self.seriesdirty = True
1402 self.seriesdirty = True
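# (A fullseries entry may carry a guard annotation, e.g. a hypothetical
# "foo.patch #+stable"; guard_re strips that suffix before the name
# comparisons in the loop above.)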
1399
1403
1400 self.applieddirty = True
1404 self.applieddirty = True
1401 if start > 0:
1405 if start > 0:
1402 self.checktoppatch(repo)
1406 self.checktoppatch(repo)
1403 if not patch:
1407 if not patch:
1404 patch = self.series[start]
1408 patch = self.series[start]
1405 end = start + 1
1409 end = start + 1
1406 else:
1410 else:
1407 end = self.series.index(patch, start) + 1
1411 end = self.series.index(patch, start) + 1
1408
1412
1409 tobackup = set()
1413 tobackup = set()
1410 if (not nobackup and force) or keepchanges:
1414 if (not nobackup and force) or keepchanges:
1411 status = self.checklocalchanges(repo, force=True)
1415 status = self.checklocalchanges(repo, force=True)
1412 if keepchanges:
1416 if keepchanges:
1413 tobackup.update(status.modified + status.added +
1417 tobackup.update(status.modified + status.added +
1414 status.removed + status.deleted)
1418 status.removed + status.deleted)
1415 else:
1419 else:
1416 tobackup.update(status.modified + status.added)
1420 tobackup.update(status.modified + status.added)
1417
1421
1418 s = self.series[start:end]
1422 s = self.series[start:end]
1419 all_files = set()
1423 all_files = set()
1420 try:
1424 try:
1421 if mergeq:
1425 if mergeq:
1422 ret = self.mergepatch(repo, mergeq, s, diffopts)
1426 ret = self.mergepatch(repo, mergeq, s, diffopts)
1423 else:
1427 else:
1424 ret = self.apply(repo, s, list, all_files=all_files,
1428 ret = self.apply(repo, s, list, all_files=all_files,
1425 tobackup=tobackup, keepchanges=keepchanges)
1429 tobackup=tobackup, keepchanges=keepchanges)
1426 except AbortNoCleanup:
1430 except AbortNoCleanup:
1427 raise
1431 raise
1428 except: # re-raises
1432 except: # re-raises
1429 self.ui.warn(_('cleaning up working directory...'))
1433 self.ui.warn(_('cleaning up working directory...'))
1430 node = repo.dirstate.p1()
1434 node = repo.dirstate.p1()
1431 hg.revert(repo, node, None)
1435 hg.revert(repo, node, None)
1432 # only remove unknown files that we know we touched or
1436 # only remove unknown files that we know we touched or
1433 # created while patching
1437 # created while patching
1434 for f in all_files:
1438 for f in all_files:
1435 if f not in repo.dirstate:
1439 if f not in repo.dirstate:
1436 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1440 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1437 self.ui.warn(_('done\n'))
1441 self.ui.warn(_('done\n'))
1438 raise
1442 raise
1439
1443
1440 if not self.applied:
1444 if not self.applied:
1441 return ret[0]
1445 return ret[0]
1442 top = self.applied[-1].name
1446 top = self.applied[-1].name
1443 if ret[0] and ret[0] > 1:
1447 if ret[0] and ret[0] > 1:
1444 msg = _("errors during apply, please fix and refresh %s\n")
1448 msg = _("errors during apply, please fix and refresh %s\n")
1445 self.ui.write(msg % top)
1449 self.ui.write(msg % top)
1446 else:
1450 else:
1447 self.ui.write(_("now at: %s\n") % top)
1451 self.ui.write(_("now at: %s\n") % top)
1448 return ret[0]
1452 return ret[0]
1449
1453
1450 finally:
1454 finally:
1451 wlock.release()
1455 wlock.release()
1452
1456
1453 def pop(self, repo, patch=None, force=False, update=True, all=False,
1457 def pop(self, repo, patch=None, force=False, update=True, all=False,
1454 nobackup=False, keepchanges=False):
1458 nobackup=False, keepchanges=False):
1455 self.checkkeepchanges(keepchanges, force)
1459 self.checkkeepchanges(keepchanges, force)
1456 wlock = repo.wlock()
1460 wlock = repo.wlock()
1457 try:
1461 try:
1458 if patch:
1462 if patch:
1459 # index, rev, patch
1463 # index, rev, patch
1460 info = self.isapplied(patch)
1464 info = self.isapplied(patch)
1461 if not info:
1465 if not info:
1462 patch = self.lookup(patch)
1466 patch = self.lookup(patch)
1463 info = self.isapplied(patch)
1467 info = self.isapplied(patch)
1464 if not info:
1468 if not info:
1465 raise util.Abort(_("patch %s is not applied") % patch)
1469 raise util.Abort(_("patch %s is not applied") % patch)
1466
1470
1467 if not self.applied:
1471 if not self.applied:
1468 # Allow qpop -a to work repeatedly,
1472 # Allow qpop -a to work repeatedly,
1469 # but not qpop without an argument
1473 # but not qpop without an argument
1470 self.ui.warn(_("no patches applied\n"))
1474 self.ui.warn(_("no patches applied\n"))
1471 return not all
1475 return not all
1472
1476
1473 if all:
1477 if all:
1474 start = 0
1478 start = 0
1475 elif patch:
1479 elif patch:
1476 start = info[0] + 1
1480 start = info[0] + 1
1477 else:
1481 else:
1478 start = len(self.applied) - 1
1482 start = len(self.applied) - 1
1479
1483
1480 if start >= len(self.applied):
1484 if start >= len(self.applied):
1481 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1485 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1482 return
1486 return
1483
1487
1484 if not update:
1488 if not update:
1485 parents = repo.dirstate.parents()
1489 parents = repo.dirstate.parents()
1486 rr = [x.node for x in self.applied]
1490 rr = [x.node for x in self.applied]
1487 for p in parents:
1491 for p in parents:
1488 if p in rr:
1492 if p in rr:
1489 self.ui.warn(_("qpop: forcing dirstate update\n"))
1493 self.ui.warn(_("qpop: forcing dirstate update\n"))
1490 update = True
1494 update = True
1491 else:
1495 else:
1492 parents = [p.node() for p in repo[None].parents()]
1496 parents = [p.node() for p in repo[None].parents()]
1493 needupdate = False
1497 needupdate = False
1494 for entry in self.applied[start:]:
1498 for entry in self.applied[start:]:
1495 if entry.node in parents:
1499 if entry.node in parents:
1496 needupdate = True
1500 needupdate = True
1497 break
1501 break
1498 update = needupdate
1502 update = needupdate
1499
1503
1500 tobackup = set()
1504 tobackup = set()
1501 if update:
1505 if update:
1502 s = self.checklocalchanges(repo, force=force or keepchanges)
1506 s = self.checklocalchanges(repo, force=force or keepchanges)
1503 if force:
1507 if force:
1504 if not nobackup:
1508 if not nobackup:
1505 tobackup.update(s.modified + s.added)
1509 tobackup.update(s.modified + s.added)
1506 elif keepchanges:
1510 elif keepchanges:
1507 tobackup.update(s.modified + s.added +
1511 tobackup.update(s.modified + s.added +
1508 s.removed + s.deleted)
1512 s.removed + s.deleted)
1509
1513
1510 self.applieddirty = True
1514 self.applieddirty = True
1511 end = len(self.applied)
1515 end = len(self.applied)
1512 rev = self.applied[start].node
1516 rev = self.applied[start].node
1513
1517
1514 try:
1518 try:
1515 heads = repo.changelog.heads(rev)
1519 heads = repo.changelog.heads(rev)
1516 except error.LookupError:
1520 except error.LookupError:
1517 node = short(rev)
1521 node = short(rev)
1518 raise util.Abort(_('trying to pop unknown node %s') % node)
1522 raise util.Abort(_('trying to pop unknown node %s') % node)
1519
1523
1520 if heads != [self.applied[-1].node]:
1524 if heads != [self.applied[-1].node]:
1521 raise util.Abort(_("popping would remove a revision not "
1525 raise util.Abort(_("popping would remove a revision not "
1522 "managed by this patch queue"))
1526 "managed by this patch queue"))
1523 if not repo[self.applied[-1].node].mutable():
1527 if not repo[self.applied[-1].node].mutable():
1524 raise util.Abort(
1528 raise util.Abort(
1525 _("popping would remove an immutable revision"),
1529 _("popping would remove an immutable revision"),
1526 hint=_('see "hg help phases" for details'))
1530 hint=_('see "hg help phases" for details'))
1527
1531
1528 # we know there are no local changes, so we can make a simplified
1532 # we know there are no local changes, so we can make a simplified
1529 # form of hg.update.
1533 # form of hg.update.
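# (The update block below deletes files the popped patches added,
# restores modified/removed files from the queue parent, and finally
# re-points the dirstate parents at that revision.)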
1530 if update:
1534 if update:
1531 qp = self.qparents(repo, rev)
1535 qp = self.qparents(repo, rev)
1532 ctx = repo[qp]
1536 ctx = repo[qp]
1533 m, a, r, d = repo.status(qp, '.')[:4]
1537 m, a, r, d = repo.status(qp, '.')[:4]
1534 if d:
1538 if d:
1535 raise util.Abort(_("deletions found between repo revs"))
1539 raise util.Abort(_("deletions found between repo revs"))
1536
1540
1537 tobackup = set(a + m + r) & tobackup
1541 tobackup = set(a + m + r) & tobackup
1538 if keepchanges and tobackup:
1542 if keepchanges and tobackup:
1539 raise util.Abort(_("local changes found, refresh first"))
1543 raise util.Abort(_("local changes found, refresh first"))
1540 self.backup(repo, tobackup)
1544 self.backup(repo, tobackup)
1541 repo.dirstate.beginparentchange()
1545 repo.dirstate.beginparentchange()
1542 for f in a:
1546 for f in a:
1543 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1547 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1544 repo.dirstate.drop(f)
1548 repo.dirstate.drop(f)
1545 for f in m + r:
1549 for f in m + r:
1546 fctx = ctx[f]
1550 fctx = ctx[f]
1547 repo.wwrite(f, fctx.data(), fctx.flags())
1551 repo.wwrite(f, fctx.data(), fctx.flags())
1548 repo.dirstate.normal(f)
1552 repo.dirstate.normal(f)
1549 repo.setparents(qp, nullid)
1553 repo.setparents(qp, nullid)
1550 repo.dirstate.endparentchange()
1554 repo.dirstate.endparentchange()
1551 for patch in reversed(self.applied[start:end]):
1555 for patch in reversed(self.applied[start:end]):
1552 self.ui.status(_("popping %s\n") % patch.name)
1556 self.ui.status(_("popping %s\n") % patch.name)
1553 del self.applied[start:end]
1557 del self.applied[start:end]
1554 strip(self.ui, repo, [rev], update=False, backup=False)
1558 strip(self.ui, repo, [rev], update=False, backup=False)
1555 for s, state in repo['.'].substate.items():
1559 for s, state in repo['.'].substate.items():
1556 repo['.'].sub(s).get(state)
1560 repo['.'].sub(s).get(state)
1557 if self.applied:
1561 if self.applied:
1558 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1562 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1559 else:
1563 else:
1560 self.ui.write(_("patch queue now empty\n"))
1564 self.ui.write(_("patch queue now empty\n"))
1561 finally:
1565 finally:
1562 wlock.release()
1566 wlock.release()
1563
1567
1564 def diff(self, repo, pats, opts):
1568 def diff(self, repo, pats, opts):
1565 top, patch = self.checktoppatch(repo)
1569 top, patch = self.checktoppatch(repo)
1566 if not top:
1570 if not top:
1567 self.ui.write(_("no patches applied\n"))
1571 self.ui.write(_("no patches applied\n"))
1568 return
1572 return
1569 qp = self.qparents(repo, top)
1573 qp = self.qparents(repo, top)
1570 if opts.get('reverse'):
1574 if opts.get('reverse'):
1571 node1, node2 = None, qp
1575 node1, node2 = None, qp
1572 else:
1576 else:
1573 node1, node2 = qp, None
1577 node1, node2 = qp, None
1574 diffopts = self.diffopts(opts, patch)
1578 diffopts = self.diffopts(opts, patch)
1575 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1579 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1576
1580
1577 def refresh(self, repo, pats=None, **opts):
1581 def refresh(self, repo, pats=None, **opts):
1578 if not self.applied:
1582 if not self.applied:
1579 self.ui.write(_("no patches applied\n"))
1583 self.ui.write(_("no patches applied\n"))
1580 return 1
1584 return 1
1581 msg = opts.get('msg', '').rstrip()
1585 msg = opts.get('msg', '').rstrip()
1582 edit = opts.get('edit')
1586 edit = opts.get('edit')
1583 editform = opts.get('editform', 'mq.qrefresh')
1587 editform = opts.get('editform', 'mq.qrefresh')
1584 newuser = opts.get('user')
1588 newuser = opts.get('user')
1585 newdate = opts.get('date')
1589 newdate = opts.get('date')
1586 if newdate:
1590 if newdate:
1587 newdate = '%d %d' % util.parsedate(newdate)
1591 newdate = '%d %d' % util.parsedate(newdate)
1588 wlock = repo.wlock()
1592 wlock = repo.wlock()
1589
1593
1590 try:
1594 try:
1591 self.checktoppatch(repo)
1595 self.checktoppatch(repo)
1592 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1596 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1593 if repo.changelog.heads(top) != [top]:
1597 if repo.changelog.heads(top) != [top]:
1594 raise util.Abort(_("cannot refresh a revision with children"))
1598 raise util.Abort(_("cannot refresh a revision with children"))
1595 if not repo[top].mutable():
1599 if not repo[top].mutable():
1596 raise util.Abort(_("cannot refresh immutable revision"),
1600 raise util.Abort(_("cannot refresh immutable revision"),
1597 hint=_('see "hg help phases" for details'))
1601 hint=_('see "hg help phases" for details'))
1598
1602
1599 cparents = repo.changelog.parents(top)
1603 cparents = repo.changelog.parents(top)
1600 patchparent = self.qparents(repo, top)
1604 patchparent = self.qparents(repo, top)
1601
1605
1602 inclsubs = checksubstate(repo, hex(patchparent))
1606 inclsubs = checksubstate(repo, hex(patchparent))
1603 if inclsubs:
1607 if inclsubs:
1604 substatestate = repo.dirstate['.hgsubstate']
1608 substatestate = repo.dirstate['.hgsubstate']
1605
1609
1606 ph = patchheader(self.join(patchfn), self.plainmode)
1610 ph = patchheader(self.join(patchfn), self.plainmode)
1607 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1611 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1608 if newuser:
1612 if newuser:
1609 ph.setuser(newuser)
1613 ph.setuser(newuser)
1610 if newdate:
1614 if newdate:
1611 ph.setdate(newdate)
1615 ph.setdate(newdate)
1612 ph.setparent(hex(patchparent))
1616 ph.setparent(hex(patchparent))
1613
1617
1614 # only commit new patch when write is complete
1618 # only commit new patch when write is complete
1615 patchf = self.opener(patchfn, 'w', atomictemp=True)
1619 patchf = self.opener(patchfn, 'w', atomictemp=True)
1616
1620
1617 # update the dirstate in place, strip off the qtip commit
1621 # update the dirstate in place, strip off the qtip commit
1618 # and then commit.
1622 # and then commit.
1619 #
1623 #
1620 # this should really read:
1624 # this should really read:
1621 # mm, dd, aa = repo.status(top, patchparent)[:3]
1625 # mm, dd, aa = repo.status(top, patchparent)[:3]
1622 # but we do it backwards to take advantage of manifest/changelog
1626 # but we do it backwards to take advantage of manifest/changelog
1623 # caching against the next repo.status call
1627 # caching against the next repo.status call
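# (Swapping the endpoints swaps the "added" and "removed" lists while
# leaving "modified" untouched, which is why aa and dd trade places.)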
1624 mm, aa, dd = repo.status(patchparent, top)[:3]
1628 mm, aa, dd = repo.status(patchparent, top)[:3]
1625 changes = repo.changelog.read(top)
1629 changes = repo.changelog.read(top)
1626 man = repo.manifest.read(changes[0])
1630 man = repo.manifest.read(changes[0])
1627 aaa = aa[:]
1631 aaa = aa[:]
1628 matchfn = scmutil.match(repo[None], pats, opts)
1632 matchfn = scmutil.match(repo[None], pats, opts)
1629 # in short mode, we only diff the files included in the
1633 # in short mode, we only diff the files included in the
1630 # patch already plus specified files
1634 # patch already plus specified files
1631 if opts.get('short'):
1635 if opts.get('short'):
1632 # if amending a patch, we start with existing
1636 # if amending a patch, we start with existing
1633 # files plus specified files - unfiltered
1637 # files plus specified files - unfiltered
1634 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1638 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1635 # filter with include/exclude options
1639 # filter with include/exclude options
1636 matchfn = scmutil.match(repo[None], opts=opts)
1640 matchfn = scmutil.match(repo[None], opts=opts)
1637 else:
1641 else:
1638 match = scmutil.matchall(repo)
1642 match = scmutil.matchall(repo)
1639 m, a, r, d = repo.status(match=match)[:4]
1643 m, a, r, d = repo.status(match=match)[:4]
1640 mm = set(mm)
1644 mm = set(mm)
1641 aa = set(aa)
1645 aa = set(aa)
1642 dd = set(dd)
1646 dd = set(dd)
1643
1647
1644 # we might end up with files that were added between
1648 # we might end up with files that were added between
1645 # qtip and the dirstate parent, but then changed in the
1649 # qtip and the dirstate parent, but then changed in the
1646 # local dirstate. In this case, we want them to only
1650 # local dirstate. In this case, we want them to only
1647 # show up in the added section
1651 # show up in the added section
1648 for x in m:
1652 for x in m:
1649 if x not in aa:
1653 if x not in aa:
1650 mm.add(x)
1654 mm.add(x)
1651 # we might end up with files added by the local dirstate that
1655 # we might end up with files added by the local dirstate that
1652 # were deleted by the patch. In this case, they should only
1656 # were deleted by the patch. In this case, they should only
1653 # show up in the changed section.
1657 # show up in the changed section.
1654 for x in a:
1658 for x in a:
1655 if x in dd:
1659 if x in dd:
1656 dd.remove(x)
1660 dd.remove(x)
1657 mm.add(x)
1661 mm.add(x)
1658 else:
1662 else:
1659 aa.add(x)
1663 aa.add(x)
1660 # make sure any files deleted in the local dirstate
1664 # make sure any files deleted in the local dirstate
1661 # are not in the add or change column of the patch
1665 # are not in the add or change column of the patch
1662 forget = []
1666 forget = []
1663 for x in d + r:
1667 for x in d + r:
1664 if x in aa:
1668 if x in aa:
1665 aa.remove(x)
1669 aa.remove(x)
1666 forget.append(x)
1670 forget.append(x)
1667 continue
1671 continue
1668 else:
1672 else:
1669 mm.discard(x)
1673 mm.discard(x)
1670 dd.add(x)
1674 dd.add(x)
1671
1675
1672 m = list(mm)
1676 m = list(mm)
1673 r = list(dd)
1677 r = list(dd)
1674 a = list(aa)
1678 a = list(aa)
1675
1679
1676 # create 'match' that includes the files to be recommitted.
1680 # create 'match' that includes the files to be recommitted.
1677 # apply matchfn via repo.status to ensure correct case handling.
1681 # apply matchfn via repo.status to ensure correct case handling.
1678 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1682 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1679 allmatches = set(cm + ca + cr + cd)
1683 allmatches = set(cm + ca + cr + cd)
1680 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1684 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1681
1685
1682 files = set(inclsubs)
1686 files = set(inclsubs)
1683 for x in refreshchanges:
1687 for x in refreshchanges:
1684 files.update(x)
1688 files.update(x)
1685 match = scmutil.matchfiles(repo, files)
1689 match = scmutil.matchfiles(repo, files)
1686
1690
1687 bmlist = repo[top].bookmarks()
1691 bmlist = repo[top].bookmarks()
1688
1692
1689 dsguard = None
1693 dsguard = None
1690 try:
1694 try:
1691 dsguard = cmdutil.dirstateguard(repo, 'mq.refresh')
1695 dsguard = cmdutil.dirstateguard(repo, 'mq.refresh')
1692 if diffopts.git or diffopts.upgrade:
1696 if diffopts.git or diffopts.upgrade:
1693 copies = {}
1697 copies = {}
1694 for dst in a:
1698 for dst in a:
1695 src = repo.dirstate.copied(dst)
1699 src = repo.dirstate.copied(dst)
1696 # during qfold, the source file for copies may
1700 # during qfold, the source file for copies may
1697 # be removed. Treat this as a simple add.
1701 # be removed. Treat this as a simple add.
1698 if src is not None and src in repo.dirstate:
1702 if src is not None and src in repo.dirstate:
1699 copies.setdefault(src, []).append(dst)
1703 copies.setdefault(src, []).append(dst)
1700 repo.dirstate.add(dst)
1704 repo.dirstate.add(dst)
1701 # remember the copies between patchparent and qtip
1705 # remember the copies between patchparent and qtip
1702 for dst in aaa:
1706 for dst in aaa:
1703 f = repo.file(dst)
1707 f = repo.file(dst)
1704 src = f.renamed(man[dst])
1708 src = f.renamed(man[dst])
1705 if src:
1709 if src:
1706 copies.setdefault(src[0], []).extend(
1710 copies.setdefault(src[0], []).extend(
1707 copies.get(dst, []))
1711 copies.get(dst, []))
1708 if dst in a:
1712 if dst in a:
1709 copies[src[0]].append(dst)
1713 copies[src[0]].append(dst)
1710 # we can't copy a file created by the patch itself
1714 # we can't copy a file created by the patch itself
1711 if dst in copies:
1715 if dst in copies:
1712 del copies[dst]
1716 del copies[dst]
1713 for src, dsts in copies.iteritems():
1717 for src, dsts in copies.iteritems():
1714 for dst in dsts:
1718 for dst in dsts:
1715 repo.dirstate.copy(src, dst)
1719 repo.dirstate.copy(src, dst)
1716 else:
1720 else:
1717 for dst in a:
1721 for dst in a:
1718 repo.dirstate.add(dst)
1722 repo.dirstate.add(dst)
1719 # Drop useless copy information
1723 # Drop useless copy information
1720 for f in list(repo.dirstate.copies()):
1724 for f in list(repo.dirstate.copies()):
1721 repo.dirstate.copy(None, f)
1725 repo.dirstate.copy(None, f)
1722 for f in r:
1726 for f in r:
1723 repo.dirstate.remove(f)
1727 repo.dirstate.remove(f)
1724 # if the patch excludes a modified file, mark that
1728 # if the patch excludes a modified file, mark that
1725 # file with mtime=0 so status can see it.
1729 # file with mtime=0 so status can see it.
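# (normallookup() below records the entry without a cached size/mtime,
# so the next status run re-reads the file instead of trusting the
# dirstate.)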
1726 mm = []
1730 mm = []
1727 for i in xrange(len(m) - 1, -1, -1):
1731 for i in xrange(len(m) - 1, -1, -1):
1728 if not matchfn(m[i]):
1732 if not matchfn(m[i]):
1729 mm.append(m[i])
1733 mm.append(m[i])
1730 del m[i]
1734 del m[i]
1731 for f in m:
1735 for f in m:
1732 repo.dirstate.normal(f)
1736 repo.dirstate.normal(f)
1733 for f in mm:
1737 for f in mm:
1734 repo.dirstate.normallookup(f)
1738 repo.dirstate.normallookup(f)
1735 for f in forget:
1739 for f in forget:
1736 repo.dirstate.drop(f)
1740 repo.dirstate.drop(f)
1737
1741
1738 user = ph.user or changes[1]
1742 user = ph.user or changes[1]
1739
1743
1740 oldphase = repo[top].phase()
1744 oldphase = repo[top].phase()
1741
1745
1742 # assumes strip can roll itself back if interrupted
1746 # assumes strip can roll itself back if interrupted
1743 repo.setparents(*cparents)
1747 repo.setparents(*cparents)
1744 self.applied.pop()
1748 self.applied.pop()
1745 self.applieddirty = True
1749 self.applieddirty = True
1746 strip(self.ui, repo, [top], update=False, backup=False)
1750 strip(self.ui, repo, [top], update=False, backup=False)
1747 dsguard.close()
1751 dsguard.close()
1748 finally:
1752 finally:
1749 release(dsguard)
1753 release(dsguard)
1750
1754
1751 try:
1755 try:
1752 # might be nice to attempt to roll back strip after this
1756 # might be nice to attempt to roll back strip after this
1753
1757
1754 defaultmsg = "[mq]: %s" % patchfn
1758 defaultmsg = "[mq]: %s" % patchfn
1755 editor = cmdutil.getcommiteditor(editform=editform)
1759 editor = cmdutil.getcommiteditor(editform=editform)
1756 if edit:
1760 if edit:
1757 def finishdesc(desc):
1761 def finishdesc(desc):
1758 if desc.rstrip():
1762 if desc.rstrip():
1759 ph.setmessage(desc)
1763 ph.setmessage(desc)
1760 return desc
1764 return desc
1761 return defaultmsg
1765 return defaultmsg
1762 # i18n: this message is shown in editor with "HG: " prefix
1766 # i18n: this message is shown in editor with "HG: " prefix
1763 extramsg = _('Leave message empty to use default message.')
1767 extramsg = _('Leave message empty to use default message.')
1764 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1768 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1765 extramsg=extramsg,
1769 extramsg=extramsg,
1766 editform=editform)
1770 editform=editform)
1767 message = msg or "\n".join(ph.message)
1771 message = msg or "\n".join(ph.message)
1768 elif not msg:
1772 elif not msg:
1769 if not ph.message:
1773 if not ph.message:
1770 message = defaultmsg
1774 message = defaultmsg
1771 else:
1775 else:
1772 message = "\n".join(ph.message)
1776 message = "\n".join(ph.message)
1773 else:
1777 else:
1774 message = msg
1778 message = msg
1775 ph.setmessage(msg)
1779 ph.setmessage(msg)
1776
1780
1777 # Ensure we create a new changeset in the same phase as
1781 # Ensure we create a new changeset in the same phase as
1778 # the old one.
1782 # the old one.
1779 n = newcommit(repo, oldphase, message, user, ph.date,
1783 n = newcommit(repo, oldphase, message, user, ph.date,
1780 match=match, force=True, editor=editor)
1784 match=match, force=True, editor=editor)
1781 # only write patch after a successful commit
1785 # only write patch after a successful commit
1782 c = [list(x) for x in refreshchanges]
1786 c = [list(x) for x in refreshchanges]
1783 if inclsubs:
1787 if inclsubs:
1784 self.putsubstate2changes(substatestate, c)
1788 self.putsubstate2changes(substatestate, c)
1785 chunks = patchmod.diff(repo, patchparent,
1789 chunks = patchmod.diff(repo, patchparent,
1786 changes=c, opts=diffopts)
1790 changes=c, opts=diffopts)
1787 comments = str(ph)
1791 comments = str(ph)
1788 if comments:
1792 if comments:
1789 patchf.write(comments)
1793 patchf.write(comments)
1790 for chunk in chunks:
1794 for chunk in chunks:
1791 patchf.write(chunk)
1795 patchf.write(chunk)
1792 patchf.close()
1796 patchf.close()
1793
1797
1794 marks = repo._bookmarks
1798 marks = repo._bookmarks
1795 for bm in bmlist:
1799 for bm in bmlist:
1796 marks[bm] = n
1800 marks[bm] = n
1797 marks.write()
1801 marks.write()
1798
1802
1799 self.applied.append(statusentry(n, patchfn))
1803 self.applied.append(statusentry(n, patchfn))
1800 except: # re-raises
1804 except: # re-raises
1801 ctx = repo[cparents[0]]
1805 ctx = repo[cparents[0]]
1802 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1806 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1803 self.savedirty()
1807 self.savedirty()
1804 self.ui.warn(_('refresh interrupted while patch was popped! '
1808 self.ui.warn(_('refresh interrupted while patch was popped! '
1805 '(revert --all, qpush to recover)\n'))
1809 '(revert --all, qpush to recover)\n'))
1806 raise
1810 raise
1807 finally:
1811 finally:
1808 wlock.release()
1812 wlock.release()
1809 self.removeundo(repo)
1813 self.removeundo(repo)
1810
1814
1811 def init(self, repo, create=False):
1815 def init(self, repo, create=False):
1812 if not create and os.path.isdir(self.path):
1816 if not create and os.path.isdir(self.path):
1813 raise util.Abort(_("patch queue directory already exists"))
1817 raise util.Abort(_("patch queue directory already exists"))
1814 try:
1818 try:
1815 os.mkdir(self.path)
1819 os.mkdir(self.path)
1816 except OSError, inst:
1820 except OSError, inst:
1817 if inst.errno != errno.EEXIST or not create:
1821 if inst.errno != errno.EEXIST or not create:
1818 raise
1822 raise
1819 if create:
1823 if create:
1820 return self.qrepo(create=True)
1824 return self.qrepo(create=True)
1821
1825
1822 def unapplied(self, repo, patch=None):
1826 def unapplied(self, repo, patch=None):
1823 if patch and patch not in self.series:
1827 if patch and patch not in self.series:
1824 raise util.Abort(_("patch %s is not in series file") % patch)
1828 raise util.Abort(_("patch %s is not in series file") % patch)
1825 if not patch:
1829 if not patch:
1826 start = self.seriesend()
1830 start = self.seriesend()
1827 else:
1831 else:
1828 start = self.series.index(patch) + 1
1832 start = self.series.index(patch) + 1
1829 unapplied = []
1833 unapplied = []
1830 for i in xrange(start, len(self.series)):
1834 for i in xrange(start, len(self.series)):
1831 pushable, reason = self.pushable(i)
1835 pushable, reason = self.pushable(i)
1832 if pushable:
1836 if pushable:
1833 unapplied.append((i, self.series[i]))
1837 unapplied.append((i, self.series[i]))
1834 self.explainpushable(i)
1838 self.explainpushable(i)
1835 return unapplied
1839 return unapplied
1836
1840
1837 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1841 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1838 summary=False):
1842 summary=False):
1839 def displayname(pfx, patchname, state):
1843 def displayname(pfx, patchname, state):
1840 if pfx:
1844 if pfx:
1841 self.ui.write(pfx)
1845 self.ui.write(pfx)
1842 if summary:
1846 if summary:
1843 ph = patchheader(self.join(patchname), self.plainmode)
1847 ph = patchheader(self.join(patchname), self.plainmode)
1844 if ph.message:
1848 if ph.message:
1845 msg = ph.message[0]
1849 msg = ph.message[0]
1846 else:
1850 else:
1847 msg = ''
1851 msg = ''
1848
1852
1849 if self.ui.formatted():
1853 if self.ui.formatted():
1850 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1854 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1851 if width > 0:
1855 if width > 0:
1852 msg = util.ellipsis(msg, width)
1856 msg = util.ellipsis(msg, width)
1853 else:
1857 else:
1854 msg = ''
1858 msg = ''
1855 self.ui.write(patchname, label='qseries.' + state)
1859 self.ui.write(patchname, label='qseries.' + state)
1856 self.ui.write(': ')
1860 self.ui.write(': ')
1857 self.ui.write(msg, label='qseries.message.' + state)
1861 self.ui.write(msg, label='qseries.message.' + state)
1858 else:
1862 else:
1859 self.ui.write(patchname, label='qseries.' + state)
1863 self.ui.write(patchname, label='qseries.' + state)
1860 self.ui.write('\n')
1864 self.ui.write('\n')
1861
1865
1862 applied = set([p.name for p in self.applied])
1866 applied = set([p.name for p in self.applied])
1863 if length is None:
1867 if length is None:
1864 length = len(self.series) - start
1868 length = len(self.series) - start
1865 if not missing:
1869 if not missing:
1866 if self.ui.verbose:
1870 if self.ui.verbose:
1867 idxwidth = len(str(start + length - 1))
1871 idxwidth = len(str(start + length - 1))
1868 for i in xrange(start, start + length):
1872 for i in xrange(start, start + length):
1869 patch = self.series[i]
1873 patch = self.series[i]
1870 if patch in applied:
1874 if patch in applied:
1871 char, state = 'A', 'applied'
1875 char, state = 'A', 'applied'
1872 elif self.pushable(i)[0]:
1876 elif self.pushable(i)[0]:
1873 char, state = 'U', 'unapplied'
1877 char, state = 'U', 'unapplied'
1874 else:
1878 else:
1875 char, state = 'G', 'guarded'
1879 char, state = 'G', 'guarded'
1876 pfx = ''
1880 pfx = ''
1877 if self.ui.verbose:
1881 if self.ui.verbose:
1878 pfx = '%*d %s ' % (idxwidth, i, char)
1882 pfx = '%*d %s ' % (idxwidth, i, char)
1879 elif status and status != char:
1883 elif status and status != char:
1880 continue
1884 continue
1881 displayname(pfx, patch, state)
1885 displayname(pfx, patch, state)
1882 else:
1886 else:
1883 msng_list = []
1887 msng_list = []
1884 for root, dirs, files in os.walk(self.path):
1888 for root, dirs, files in os.walk(self.path):
1885 d = root[len(self.path) + 1:]
1889 d = root[len(self.path) + 1:]
1886 for f in files:
1890 for f in files:
1887 fl = os.path.join(d, f)
1891 fl = os.path.join(d, f)
1888 if (fl not in self.series and
1892 if (fl not in self.series and
1889 fl not in (self.statuspath, self.seriespath,
1893 fl not in (self.statuspath, self.seriespath,
1890 self.guardspath)
1894 self.guardspath)
1891 and not fl.startswith('.')):
1895 and not fl.startswith('.')):
1892 msng_list.append(fl)
1896 msng_list.append(fl)
1893 for x in sorted(msng_list):
1897 for x in sorted(msng_list):
1894 pfx = self.ui.verbose and ('D ') or ''
1898 pfx = self.ui.verbose and ('D ') or ''
1895 displayname(pfx, x, 'missing')
1899 displayname(pfx, x, 'missing')
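# Hypothetical "hg qseries -v" output produced by the code above
# (invented patch names):
#   0 A first.patch
#   1 U second.patch
#   2 G third.patch
# When the 'missing' branch below runs instead, stray files found in
# the patch directory but absent from the series file are listed as
# e.g. "D stray.patch" (the "D " prefix only with -v).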
1896
1900
1897 def issaveline(self, l):
1901 def issaveline(self, l):
1898 if l.name == '.hg.patches.save.line':
1902 if l.name == '.hg.patches.save.line':
1899 return True
1903 return True
1900
1904
1901 def qrepo(self, create=False):
1905 def qrepo(self, create=False):
1902 ui = self.baseui.copy()
1906 ui = self.baseui.copy()
1903 if create or os.path.isdir(self.join(".hg")):
1907 if create or os.path.isdir(self.join(".hg")):
1904 return hg.repository(ui, path=self.path, create=create)
1908 return hg.repository(ui, path=self.path, create=create)
1905
1909
1906 def restore(self, repo, rev, delete=None, qupdate=None):
1910 def restore(self, repo, rev, delete=None, qupdate=None):
1907 desc = repo[rev].description().strip()
1911 desc = repo[rev].description().strip()
1908 lines = desc.splitlines()
1912 lines = desc.splitlines()
1909 i = 0
1913 i = 0
1910 datastart = None
1914 datastart = None
1911 series = []
1915 series = []
1912 applied = []
1916 applied = []
1913 qpp = None
1917 qpp = None
1914 for i, line in enumerate(lines):
1918 for i, line in enumerate(lines):
1915 if line == 'Patch Data:':
1919 if line == 'Patch Data:':
1916 datastart = i + 1
1920 datastart = i + 1
1917 elif line.startswith('Dirstate:'):
1921 elif line.startswith('Dirstate:'):
1918 l = line.rstrip()
1922 l = line.rstrip()
1919 l = l[10:].split(' ')
1923 l = l[10:].split(' ')
1920 qpp = [bin(x) for x in l]
1924 qpp = [bin(x) for x in l]
1921 elif datastart is not None:
1925 elif datastart is not None:
1922 l = line.rstrip()
1926 l = line.rstrip()
1923 n, name = l.split(':', 1)
1927 n, name = l.split(':', 1)
1924 if n:
1928 if n:
1925 applied.append(statusentry(bin(n), name))
1929 applied.append(statusentry(bin(n), name))
1926 else:
1930 else:
1927 series.append(l)
1931 series.append(l)
1928 if datastart is None:
1932 if datastart is None:
1929 self.ui.warn(_("no saved patch data found\n"))
1933 self.ui.warn(_("no saved patch data found\n"))
1930 return 1
1934 return 1
1931 self.ui.warn(_("restoring status: %s\n") % lines[0])
1935 self.ui.warn(_("restoring status: %s\n") % lines[0])
1932 self.fullseries = series
1936 self.fullseries = series
1933 self.applied = applied
1937 self.applied = applied
1934 self.parseseries()
1938 self.parseseries()
1935 self.seriesdirty = True
1939 self.seriesdirty = True
1936 self.applieddirty = True
1940 self.applieddirty = True
1937 heads = repo.changelog.heads()
1941 heads = repo.changelog.heads()
1938 if delete:
1942 if delete:
1939 if rev not in heads:
1943 if rev not in heads:
1940 self.ui.warn(_("save entry has children, leaving it alone\n"))
1944 self.ui.warn(_("save entry has children, leaving it alone\n"))
1941 else:
1945 else:
1942 self.ui.warn(_("removing save entry %s\n") % short(rev))
1946 self.ui.warn(_("removing save entry %s\n") % short(rev))
1943 pp = repo.dirstate.parents()
1947 pp = repo.dirstate.parents()
1944 if rev in pp:
1948 if rev in pp:
1945 update = True
1949 update = True
1946 else:
1950 else:
1947 update = False
1951 update = False
1948 strip(self.ui, repo, [rev], update=update, backup=False)
1952 strip(self.ui, repo, [rev], update=update, backup=False)
1949 if qpp:
1953 if qpp:
1950 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1954 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1951 (short(qpp[0]), short(qpp[1])))
1955 (short(qpp[0]), short(qpp[1])))
1952 if qupdate:
1956 if qupdate:
1953 self.ui.status(_("updating queue directory\n"))
1957 self.ui.status(_("updating queue directory\n"))
1954 r = self.qrepo()
1958 r = self.qrepo()
1955 if not r:
1959 if not r:
1956 self.ui.warn(_("unable to load queue repository\n"))
1960 self.ui.warn(_("unable to load queue repository\n"))
1957 return 1
1961 return 1
1958 hg.clean(r, qpp[0])
1962 hg.clean(r, qpp[0])
1959
1963
1960 def save(self, repo, msg=None):
1964 def save(self, repo, msg=None):
1961 if not self.applied:
1965 if not self.applied:
1962 self.ui.warn(_("save: no patches applied, exiting\n"))
1966 self.ui.warn(_("save: no patches applied, exiting\n"))
1963 return 1
1967 return 1
1964 if self.issaveline(self.applied[-1]):
1968 if self.issaveline(self.applied[-1]):
1965 self.ui.warn(_("status is already saved\n"))
1969 self.ui.warn(_("status is already saved\n"))
1966 return 1
1970 return 1
1967
1971
1968 if not msg:
1972 if not msg:
1969 msg = _("hg patches saved state")
1973 msg = _("hg patches saved state")
1970 else:
1974 else:
1971 msg = "hg patches: " + msg.rstrip('\r\n')
1975 msg = "hg patches: " + msg.rstrip('\r\n')
1972 r = self.qrepo()
1976 r = self.qrepo()
1973 if r:
1977 if r:
1974 pp = r.dirstate.parents()
1978 pp = r.dirstate.parents()
1975 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1979 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1976 msg += "\n\nPatch Data:\n"
1980 msg += "\n\nPatch Data:\n"
1977 msg += ''.join('%s\n' % x for x in self.applied)
1981 msg += ''.join('%s\n' % x for x in self.applied)
1978 msg += ''.join(':%s\n' % x for x in self.fullseries)
1982 msg += ''.join(':%s\n' % x for x in self.fullseries)
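# A sketch of the resulting description (hypothetical, abbreviated
# hashes), which is the format restore() above parses back:
#   hg patches saved state
#   Dirstate: 1a2b3c... 0a1b2c...
#
#   Patch Data:
#   4d5e6f...:first.patch
#   :first.patch
#   :second.patch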
1979 n = repo.commit(msg, force=True)
1983 n = repo.commit(msg, force=True)
1980 if not n:
1984 if not n:
1981 self.ui.warn(_("repo commit failed\n"))
1985 self.ui.warn(_("repo commit failed\n"))
1982 return 1
1986 return 1
1983 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1987 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1984 self.applieddirty = True
1988 self.applieddirty = True
1985 self.removeundo(repo)
1989 self.removeundo(repo)
1986
1990
1987 def fullseriesend(self):
1991 def fullseriesend(self):
1988 if self.applied:
1992 if self.applied:
1989 p = self.applied[-1].name
1993 p = self.applied[-1].name
1990 end = self.findseries(p)
1994 end = self.findseries(p)
1991 if end is None:
1995 if end is None:
1992 return len(self.fullseries)
1996 return len(self.fullseries)
1993 return end + 1
1997 return end + 1
1994 return 0
1998 return 0
1995
1999
1996 def seriesend(self, all_patches=False):
2000 def seriesend(self, all_patches=False):
1997 """If all_patches is False, return the index of the next pushable patch
2001 """If all_patches is False, return the index of the next pushable patch
1998 in the series, or the series length. If all_patches is True, return the
2002 in the series, or the series length. If all_patches is True, return the
1999 index of the first patch past the last applied one.
2003 index of the first patch past the last applied one.
2000 """
2004 """
2001 end = 0
2005 end = 0
2002 def nextpatch(start):
2006 def nextpatch(start):
2003 if all_patches or start >= len(self.series):
2007 if all_patches or start >= len(self.series):
2004 return start
2008 return start
2005 for i in xrange(start, len(self.series)):
2009 for i in xrange(start, len(self.series)):
2006 p, reason = self.pushable(i)
2010 p, reason = self.pushable(i)
2007 if p:
2011 if p:
2008 return i
2012 return i
2009 self.explainpushable(i)
2013 self.explainpushable(i)
2010 return len(self.series)
2014 return len(self.series)
2011 if self.applied:
2015 if self.applied:
2012 p = self.applied[-1].name
2016 p = self.applied[-1].name
2013 try:
2017 try:
2014 end = self.series.index(p)
2018 end = self.series.index(p)
2015 except ValueError:
2019 except ValueError:
2016 return 0
2020 return 0
2017 return nextpatch(end + 1)
2021 return nextpatch(end + 1)
2018 return nextpatch(end)
2022 return nextpatch(end)
2019
2023
2020 def appliedname(self, index):
2024 def appliedname(self, index):
2021 pname = self.applied[index].name
2025 pname = self.applied[index].name
2022 if not self.ui.verbose:
2026 if not self.ui.verbose:
2023 p = pname
2027 p = pname
2024 else:
2028 else:
2025 p = str(self.series.index(pname)) + " " + pname
2029 p = str(self.series.index(pname)) + " " + pname
2026 return p
2030 return p
2027
2031
2028 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2032 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2029 force=None, git=False):
2033 force=None, git=False):
2030 def checkseries(patchname):
2034 def checkseries(patchname):
2031 if patchname in self.series:
2035 if patchname in self.series:
2032 raise util.Abort(_('patch %s is already in the series file')
2036 raise util.Abort(_('patch %s is already in the series file')
2033 % patchname)
2037 % patchname)
2034
2038
2035 if rev:
2039 if rev:
2036 if files:
2040 if files:
2037 raise util.Abort(_('option "-r" not valid when importing '
2041 raise util.Abort(_('option "-r" not valid when importing '
2038 'files'))
2042 'files'))
2039 rev = scmutil.revrange(repo, rev)
2043 rev = scmutil.revrange(repo, rev)
2040 rev.sort(reverse=True)
2044 rev.sort(reverse=True)
2041 elif not files:
2045 elif not files:
2042 raise util.Abort(_('no files or revisions specified'))
2046 raise util.Abort(_('no files or revisions specified'))
2043 if (len(files) > 1 or len(rev) > 1) and patchname:
2047 if (len(files) > 1 or len(rev) > 1) and patchname:
2044 raise util.Abort(_('option "-n" not valid when importing multiple '
2048 raise util.Abort(_('option "-n" not valid when importing multiple '
2045 'patches'))
2049 'patches'))
2046 imported = []
2050 imported = []
2047 if rev:
2051 if rev:
2048 # If mq patches are applied, we can only import revisions
2052 # If mq patches are applied, we can only import revisions
2049 # that form a linear path to qbase.
2053 # that form a linear path to qbase.
2050 # Otherwise, they should form a linear path to a head.
2054 # Otherwise, they should form a linear path to a head.
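# (For example, with hypothetical revision numbers: if qbase sits on
# revision 10, "hg qimport -r 8:9" is accepted only when 9 is the
# parent of 10 and 8 is the parent of 9.)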
2051 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2055 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2052 if len(heads) > 1:
2056 if len(heads) > 1:
2053 raise util.Abort(_('revision %d is the root of more than one '
2057 raise util.Abort(_('revision %d is the root of more than one '
2054 'branch') % rev.last())
2058 'branch') % rev.last())
2055 if self.applied:
2059 if self.applied:
2056 base = repo.changelog.node(rev.first())
2060 base = repo.changelog.node(rev.first())
2057 if base in [n.node for n in self.applied]:
2061 if base in [n.node for n in self.applied]:
2058 raise util.Abort(_('revision %d is already managed')
2062 raise util.Abort(_('revision %d is already managed')
2059 % rev.first())
2063 % rev.first())
2060 if heads != [self.applied[-1].node]:
2064 if heads != [self.applied[-1].node]:
2061 raise util.Abort(_('revision %d is not the parent of '
2065 raise util.Abort(_('revision %d is not the parent of '
2062 'the queue') % rev.first())
2066 'the queue') % rev.first())
2063 base = repo.changelog.rev(self.applied[0].node)
2067 base = repo.changelog.rev(self.applied[0].node)
2064 lastparent = repo.changelog.parentrevs(base)[0]
2068 lastparent = repo.changelog.parentrevs(base)[0]
2065 else:
2069 else:
2066 if heads != [repo.changelog.node(rev.first())]:
2070 if heads != [repo.changelog.node(rev.first())]:
2067 raise util.Abort(_('revision %d has unmanaged children')
2071 raise util.Abort(_('revision %d has unmanaged children')
2068 % rev.first())
2072 % rev.first())
2069 lastparent = None
2073 lastparent = None
2070
2074
2071 diffopts = self.diffopts({'git': git})
2075 diffopts = self.diffopts({'git': git})
2072 tr = repo.transaction('qimport')
2076 tr = repo.transaction('qimport')
2073 try:
2077 try:
2074 for r in rev:
2078 for r in rev:
2075 if not repo[r].mutable():
2079 if not repo[r].mutable():
2076 raise util.Abort(_('revision %d is not mutable') % r,
2080 raise util.Abort(_('revision %d is not mutable') % r,
2077 hint=_('see "hg help phases" '
2081 hint=_('see "hg help phases" '
2078 'for details'))
2082 'for details'))
2079 p1, p2 = repo.changelog.parentrevs(r)
2083 p1, p2 = repo.changelog.parentrevs(r)
2080 n = repo.changelog.node(r)
2084 n = repo.changelog.node(r)
2081 if p2 != nullrev:
2085 if p2 != nullrev:
2082 raise util.Abort(_('cannot import merge revision %d')
2086 raise util.Abort(_('cannot import merge revision %d')
2083 % r)
2087 % r)
2084 if lastparent and lastparent != r:
2088 if lastparent and lastparent != r:
2085 raise util.Abort(_('revision %d is not the parent of '
2089 raise util.Abort(_('revision %d is not the parent of '
2086 '%d')
2090 '%d')
2087 % (r, lastparent))
2091 % (r, lastparent))
2088 lastparent = p1
2092 lastparent = p1
2089
2093
2090 if not patchname:
2094 if not patchname:
2091 patchname = normname('%d.diff' % r)
2095 patchname = normname('%d.diff' % r)
2092 checkseries(patchname)
2096 checkseries(patchname)
2093 self.checkpatchname(patchname, force)
2097 self.checkpatchname(patchname, force)
2094 self.fullseries.insert(0, patchname)
2098 self.fullseries.insert(0, patchname)
2095
2099
2096 patchf = self.opener(patchname, "w")
2100 patchf = self.opener(patchname, "w")
2097 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2101 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2098 patchf.close()
2102 patchf.close()
2099
2103
2100 se = statusentry(n, patchname)
2104 se = statusentry(n, patchname)
2101 self.applied.insert(0, se)
2105 self.applied.insert(0, se)
2102
2106
2103 self.added.append(patchname)
2107 self.added.append(patchname)
2104 imported.append(patchname)
2108 imported.append(patchname)
2105 patchname = None
2109 patchname = None
2106 if rev and repo.ui.configbool('mq', 'secret', False):
2110 if rev and repo.ui.configbool('mq', 'secret', False):
2107 # if we added anything with --rev, move the secret root
2111 # if we added anything with --rev, move the secret root
2108 phases.retractboundary(repo, tr, phases.secret, [n])
2112 phases.retractboundary(repo, tr, phases.secret, [n])
2109 self.parseseries()
2113 self.parseseries()
2110 self.applieddirty = True
2114 self.applieddirty = True
2111 self.seriesdirty = True
2115 self.seriesdirty = True
2112 tr.close()
2116 tr.close()
2113 finally:
2117 finally:
2114 tr.release()
2118 tr.release()
2115
2119
2116 for i, filename in enumerate(files):
2120 for i, filename in enumerate(files):
2117 if existing:
2121 if existing:
2118 if filename == '-':
2122 if filename == '-':
2119 raise util.Abort(_('-e is incompatible with import from -'))
2123 raise util.Abort(_('-e is incompatible with import from -'))
2120 filename = normname(filename)
2124 filename = normname(filename)
2121 self.checkreservedname(filename)
2125 self.checkreservedname(filename)
2122 if util.url(filename).islocal():
2126 if util.url(filename).islocal():
2123 originpath = self.join(filename)
2127 originpath = self.join(filename)
2124 if not os.path.isfile(originpath):
2128 if not os.path.isfile(originpath):
2125 raise util.Abort(
2129 raise util.Abort(
2126 _("patch %s does not exist") % filename)
2130 _("patch %s does not exist") % filename)
2127
2131
2128 if patchname:
2132 if patchname:
2129 self.checkpatchname(patchname, force)
2133 self.checkpatchname(patchname, force)
2130
2134
2131 self.ui.write(_('renaming %s to %s\n')
2135 self.ui.write(_('renaming %s to %s\n')
2132 % (filename, patchname))
2136 % (filename, patchname))
2133 util.rename(originpath, self.join(patchname))
2137 util.rename(originpath, self.join(patchname))
2134 else:
2138 else:
2135 patchname = filename
2139 patchname = filename
2136
2140
2137 else:
2141 else:
2138 if filename == '-' and not patchname:
2142 if filename == '-' and not patchname:
2139 raise util.Abort(_('need --name to import a patch from -'))
2143 raise util.Abort(_('need --name to import a patch from -'))
2140 elif not patchname:
2144 elif not patchname:
2141 patchname = normname(os.path.basename(filename.rstrip('/')))
2145 patchname = normname(os.path.basename(filename.rstrip('/')))
2142 self.checkpatchname(patchname, force)
2146 self.checkpatchname(patchname, force)
2143 try:
2147 try:
2144 if filename == '-':
2148 if filename == '-':
2145 text = self.ui.fin.read()
2149 text = self.ui.fin.read()
2146 else:
2150 else:
2147 fp = hg.openpath(self.ui, filename)
2151 fp = hg.openpath(self.ui, filename)
2148 text = fp.read()
2152 text = fp.read()
2149 fp.close()
2153 fp.close()
2150 except (OSError, IOError):
2154 except (OSError, IOError):
2151 raise util.Abort(_("unable to read file %s") % filename)
2155 raise util.Abort(_("unable to read file %s") % filename)
2152 patchf = self.opener(patchname, "w")
2156 patchf = self.opener(patchname, "w")
2153 patchf.write(text)
2157 patchf.write(text)
2154 patchf.close()
2158 patchf.close()
2155 if not force:
2159 if not force:
2156 checkseries(patchname)
2160 checkseries(patchname)
2157 if patchname not in self.series:
2161 if patchname not in self.series:
2158 index = self.fullseriesend() + i
2162 index = self.fullseriesend() + i
2159 self.fullseries[index:index] = [patchname]
2163 self.fullseries[index:index] = [patchname]
2160 self.parseseries()
2164 self.parseseries()
2161 self.seriesdirty = True
2165 self.seriesdirty = True
2162 self.ui.warn(_("adding %s to series file\n") % patchname)
2166 self.ui.warn(_("adding %s to series file\n") % patchname)
2163 self.added.append(patchname)
2167 self.added.append(patchname)
2164 imported.append(patchname)
2168 imported.append(patchname)
2165 patchname = None
2169 patchname = None
2166
2170
2167 self.removeundo(repo)
2171 self.removeundo(repo)
2168 return imported
2172 return imported
2169
2173
2170 def fixkeepchangesopts(ui, opts):
2174 def fixkeepchangesopts(ui, opts):
2171 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2175 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2172 or opts.get('exact')):
2176 or opts.get('exact')):
2173 return opts
2177 return opts
2174 opts = dict(opts)
2178 opts = dict(opts)
2175 opts['keep_changes'] = True
2179 opts['keep_changes'] = True
2176 return opts
2180 return opts
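# Sketch of the effect, assuming mq.keepchanges is enabled in the user's
# configuration: a call such as fixkeepchangesopts(ui, {'force': None,
# 'exact': None}) returns a copy of the dict with 'keep_changes' set to
# True, so qpush/qpop/qgoto behave as if --keep-changes had been given;
# passing --force or --exact leaves the options untouched.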
2177
2181
2178 @command("qdelete|qremove|qrm",
2182 @command("qdelete|qremove|qrm",
2179 [('k', 'keep', None, _('keep patch file')),
2183 [('k', 'keep', None, _('keep patch file')),
2180 ('r', 'rev', [],
2184 ('r', 'rev', [],
2181 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2185 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2182 _('hg qdelete [-k] [PATCH]...'))
2186 _('hg qdelete [-k] [PATCH]...'))
2183 def delete(ui, repo, *patches, **opts):
2187 def delete(ui, repo, *patches, **opts):
2184 """remove patches from queue
2188 """remove patches from queue
2185
2189
2186 The patches must not be applied, and at least one patch is required. Exact
2190 The patches must not be applied, and at least one patch is required. Exact
2187 patch identifiers must be given. With -k/--keep, the patch files are
2191 patch identifiers must be given. With -k/--keep, the patch files are
2188 preserved in the patch directory.
2192 preserved in the patch directory.
2189
2193
2190 To stop managing a patch and move it into permanent history,
2194 To stop managing a patch and move it into permanent history,
2191 use the :hg:`qfinish` command."""
2195 use the :hg:`qfinish` command."""
2192 q = repo.mq
2196 q = repo.mq
2193 q.delete(repo, patches, opts)
2197 q.delete(repo, patches, opts)
2194 q.savedirty()
2198 q.savedirty()
2195 return 0
2199 return 0
2196
2200
2197 @command("qapplied",
2201 @command("qapplied",
2198 [('1', 'last', None, _('show only the preceding applied patch'))
2202 [('1', 'last', None, _('show only the preceding applied patch'))
2199 ] + seriesopts,
2203 ] + seriesopts,
2200 _('hg qapplied [-1] [-s] [PATCH]'))
2204 _('hg qapplied [-1] [-s] [PATCH]'))
2201 def applied(ui, repo, patch=None, **opts):
2205 def applied(ui, repo, patch=None, **opts):
2202 """print the patches already applied
2206 """print the patches already applied
2203
2207
2204 Returns 0 on success."""
2208 Returns 0 on success."""
2205
2209
2206 q = repo.mq
2210 q = repo.mq
2207
2211
2208 if patch:
2212 if patch:
2209 if patch not in q.series:
2213 if patch not in q.series:
2210 raise util.Abort(_("patch %s is not in series file") % patch)
2214 raise util.Abort(_("patch %s is not in series file") % patch)
2211 end = q.series.index(patch) + 1
2215 end = q.series.index(patch) + 1
2212 else:
2216 else:
2213 end = q.seriesend(True)
2217 end = q.seriesend(True)
2214
2218
2215 if opts.get('last') and not end:
2219 if opts.get('last') and not end:
2216 ui.write(_("no patches applied\n"))
2220 ui.write(_("no patches applied\n"))
2217 return 1
2221 return 1
2218 elif opts.get('last') and end == 1:
2222 elif opts.get('last') and end == 1:
2219 ui.write(_("only one patch applied\n"))
2223 ui.write(_("only one patch applied\n"))
2220 return 1
2224 return 1
2221 elif opts.get('last'):
2225 elif opts.get('last'):
2222 start = end - 2
2226 start = end - 2
2223 end = 1
2227 end = 1
2224 else:
2228 else:
2225 start = 0
2229 start = 0
2226
2230
2227 q.qseries(repo, length=end, start=start, status='A',
2231 q.qseries(repo, length=end, start=start, status='A',
2228 summary=opts.get('summary'))
2232 summary=opts.get('summary'))
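# Illustrative behaviour for -1/--last, assuming three applied patches at
# the start of the series: q.seriesend(True) yields end == 3, so start
# becomes end - 2 == 1 and end is reset to 1, and qseries() prints a single
# entry, the patch applied just before the current top.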
2229
2233
2230
2234
2231 @command("qunapplied",
2235 @command("qunapplied",
2232 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2236 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2233 _('hg qunapplied [-1] [-s] [PATCH]'))
2237 _('hg qunapplied [-1] [-s] [PATCH]'))
2234 def unapplied(ui, repo, patch=None, **opts):
2238 def unapplied(ui, repo, patch=None, **opts):
2235 """print the patches not yet applied
2239 """print the patches not yet applied
2236
2240
2237 Returns 0 on success."""
2241 Returns 0 on success."""
2238
2242
2239 q = repo.mq
2243 q = repo.mq
2240 if patch:
2244 if patch:
2241 if patch not in q.series:
2245 if patch not in q.series:
2242 raise util.Abort(_("patch %s is not in series file") % patch)
2246 raise util.Abort(_("patch %s is not in series file") % patch)
2243 start = q.series.index(patch) + 1
2247 start = q.series.index(patch) + 1
2244 else:
2248 else:
2245 start = q.seriesend(True)
2249 start = q.seriesend(True)
2246
2250
2247 if start == len(q.series) and opts.get('first'):
2251 if start == len(q.series) and opts.get('first'):
2248 ui.write(_("all patches applied\n"))
2252 ui.write(_("all patches applied\n"))
2249 return 1
2253 return 1
2250
2254
2251 if opts.get('first'):
2255 if opts.get('first'):
2252 length = 1
2256 length = 1
2253 else:
2257 else:
2254 length = None
2258 length = None
2255 q.qseries(repo, start=start, length=length, status='U',
2259 q.qseries(repo, start=start, length=length, status='U',
2256 summary=opts.get('summary'))
2260 summary=opts.get('summary'))
2257
2261
2258 @command("qimport",
2262 @command("qimport",
2259 [('e', 'existing', None, _('import file in patch directory')),
2263 [('e', 'existing', None, _('import file in patch directory')),
2260 ('n', 'name', '',
2264 ('n', 'name', '',
2261 _('name of patch file'), _('NAME')),
2265 _('name of patch file'), _('NAME')),
2262 ('f', 'force', None, _('overwrite existing files')),
2266 ('f', 'force', None, _('overwrite existing files')),
2263 ('r', 'rev', [],
2267 ('r', 'rev', [],
2264 _('place existing revisions under mq control'), _('REV')),
2268 _('place existing revisions under mq control'), _('REV')),
2265 ('g', 'git', None, _('use git extended diff format')),
2269 ('g', 'git', None, _('use git extended diff format')),
2266 ('P', 'push', None, _('qpush after importing'))],
2270 ('P', 'push', None, _('qpush after importing'))],
2267 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2271 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2268 def qimport(ui, repo, *filename, **opts):
2272 def qimport(ui, repo, *filename, **opts):
2269 """import a patch or existing changeset
2273 """import a patch or existing changeset
2270
2274
2271 The patch is inserted into the series after the last applied
2275 The patch is inserted into the series after the last applied
2272 patch. If no patches have been applied, qimport prepends the patch
2276 patch. If no patches have been applied, qimport prepends the patch
2273 to the series.
2277 to the series.
2274
2278
2275 The patch will have the same name as its source file unless you
2279 The patch will have the same name as its source file unless you
2276 give it a new one with -n/--name.
2280 give it a new one with -n/--name.
2277
2281
2278 You can register an existing patch inside the patch directory with
2282 You can register an existing patch inside the patch directory with
2279 the -e/--existing flag.
2283 the -e/--existing flag.
2280
2284
2281 With -f/--force, an existing patch of the same name will be
2285 With -f/--force, an existing patch of the same name will be
2282 overwritten.
2286 overwritten.
2283
2287
2284 An existing changeset may be placed under mq control with -r/--rev
2288 An existing changeset may be placed under mq control with -r/--rev
2285 (e.g. qimport --rev . -n patch will place the current revision
2289 (e.g. qimport --rev . -n patch will place the current revision
2286 under mq control). With -g/--git, patches imported with --rev will
2290 under mq control). With -g/--git, patches imported with --rev will
2287 use the git diff format. See the diffs help topic for information
2291 use the git diff format. See the diffs help topic for information
2288 on why this is important for preserving rename/copy information
2292 on why this is important for preserving rename/copy information
2289 and permission changes. Use :hg:`qfinish` to remove changesets
2293 and permission changes. Use :hg:`qfinish` to remove changesets
2290 from mq control.
2294 from mq control.
2291
2295
2292 To import a patch from standard input, pass - as the patch file.
2296 To import a patch from standard input, pass - as the patch file.
2293 When importing from standard input, a patch name must be specified
2297 When importing from standard input, a patch name must be specified
2294 using the --name flag.
2298 using the --name flag.
2295
2299
2296 To import an existing patch while renaming it::
2300 To import an existing patch while renaming it::
2297
2301
2298 hg qimport -e existing-patch -n new-name
2302 hg qimport -e existing-patch -n new-name
2299
2303
2300 Returns 0 if import succeeded.
2304 Returns 0 if import succeeded.
2301 """
2305 """
2302 lock = repo.lock() # because this may move phases
2306 lock = repo.lock() # because this may move phases
2303 try:
2307 try:
2304 q = repo.mq
2308 q = repo.mq
2305 try:
2309 try:
2306 imported = q.qimport(
2310 imported = q.qimport(
2307 repo, filename, patchname=opts.get('name'),
2311 repo, filename, patchname=opts.get('name'),
2308 existing=opts.get('existing'), force=opts.get('force'),
2312 existing=opts.get('existing'), force=opts.get('force'),
2309 rev=opts.get('rev'), git=opts.get('git'))
2313 rev=opts.get('rev'), git=opts.get('git'))
2310 finally:
2314 finally:
2311 q.savedirty()
2315 q.savedirty()
2312 finally:
2316 finally:
2313 lock.release()
2317 lock.release()
2314
2318
2315 if imported and opts.get('push') and not opts.get('rev'):
2319 if imported and opts.get('push') and not opts.get('rev'):
2316 return q.push(repo, imported[-1])
2320 return q.push(repo, imported[-1])
2317 return 0
2321 return 0
2318
2322
2319 def qinit(ui, repo, create):
2323 def qinit(ui, repo, create):
2320 """initialize a new queue repository
2324 """initialize a new queue repository
2321
2325
2322 This command also creates a series file for ordering patches, and
2326 This command also creates a series file for ordering patches, and
2323 an mq-specific .hgignore file in the queue repository, to exclude
2327 an mq-specific .hgignore file in the queue repository, to exclude
2324 the status and guards files (these contain mostly transient state).
2328 the status and guards files (these contain mostly transient state).
2325
2329
2326 Returns 0 if initialization succeeded."""
2330 Returns 0 if initialization succeeded."""
2327 q = repo.mq
2331 q = repo.mq
2328 r = q.init(repo, create)
2332 r = q.init(repo, create)
2329 q.savedirty()
2333 q.savedirty()
2330 if r:
2334 if r:
2331 if not os.path.exists(r.wjoin('.hgignore')):
2335 if not os.path.exists(r.wjoin('.hgignore')):
2332 fp = r.wvfs('.hgignore', 'w')
2336 fp = r.wvfs('.hgignore', 'w')
2333 fp.write('^\\.hg\n')
2337 fp.write('^\\.hg\n')
2334 fp.write('^\\.mq\n')
2338 fp.write('^\\.mq\n')
2335 fp.write('syntax: glob\n')
2339 fp.write('syntax: glob\n')
2336 fp.write('status\n')
2340 fp.write('status\n')
2337 fp.write('guards\n')
2341 fp.write('guards\n')
2338 fp.close()
2342 fp.close()
2339 if not os.path.exists(r.wjoin('series')):
2343 if not os.path.exists(r.wjoin('series')):
2340 r.wvfs('series', 'w').close()
2344 r.wvfs('series', 'w').close()
2341 r[None].add(['.hgignore', 'series'])
2345 r[None].add(['.hgignore', 'series'])
2342 commands.add(ui, r)
2346 commands.add(ui, r)
2343 return 0
2347 return 0
2344
2348
2345 @command("^qinit",
2349 @command("^qinit",
2346 [('c', 'create-repo', None, _('create queue repository'))],
2350 [('c', 'create-repo', None, _('create queue repository'))],
2347 _('hg qinit [-c]'))
2351 _('hg qinit [-c]'))
2348 def init(ui, repo, **opts):
2352 def init(ui, repo, **opts):
2349 """init a new queue repository (DEPRECATED)
2353 """init a new queue repository (DEPRECATED)
2350
2354
2351 The queue repository is unversioned by default. If
2355 The queue repository is unversioned by default. If
2352 -c/--create-repo is specified, qinit will create a separate nested
2356 -c/--create-repo is specified, qinit will create a separate nested
2353 repository for patches (qinit -c may also be run later to convert
2357 repository for patches (qinit -c may also be run later to convert
2354 an unversioned patch repository into a versioned one). You can use
2358 an unversioned patch repository into a versioned one). You can use
2355 qcommit to commit changes to this queue repository.
2359 qcommit to commit changes to this queue repository.
2356
2360
2357 This command is deprecated. Without -c, it's implied by other relevant
2361 This command is deprecated. Without -c, it's implied by other relevant
2358 commands. With -c, use :hg:`init --mq` instead."""
2362 commands. With -c, use :hg:`init --mq` instead."""
2359 return qinit(ui, repo, create=opts.get('create_repo'))
2363 return qinit(ui, repo, create=opts.get('create_repo'))
2360
2364
2361 @command("qclone",
2365 @command("qclone",
2362 [('', 'pull', None, _('use pull protocol to copy metadata')),
2366 [('', 'pull', None, _('use pull protocol to copy metadata')),
2363 ('U', 'noupdate', None,
2367 ('U', 'noupdate', None,
2364 _('do not update the new working directories')),
2368 _('do not update the new working directories')),
2365 ('', 'uncompressed', None,
2369 ('', 'uncompressed', None,
2366 _('use uncompressed transfer (fast over LAN)')),
2370 _('use uncompressed transfer (fast over LAN)')),
2367 ('p', 'patches', '',
2371 ('p', 'patches', '',
2368 _('location of source patch repository'), _('REPO')),
2372 _('location of source patch repository'), _('REPO')),
2369 ] + commands.remoteopts,
2373 ] + commands.remoteopts,
2370 _('hg qclone [OPTION]... SOURCE [DEST]'),
2374 _('hg qclone [OPTION]... SOURCE [DEST]'),
2371 norepo=True)
2375 norepo=True)
2372 def clone(ui, source, dest=None, **opts):
2376 def clone(ui, source, dest=None, **opts):
2373 '''clone main and patch repository at same time
2377 '''clone main and patch repository at same time
2374
2378
2375 If source is local, destination will have no patches applied. If
2379 If source is local, destination will have no patches applied. If
2376 source is remote, this command cannot check whether patches are
2380 source is remote, this command cannot check whether patches are
2377 applied in the source, so it cannot guarantee that no patches are
2381 applied in the source, so it cannot guarantee that no patches are
2378 applied in the destination. If you clone a remote repository, make
2382 applied in the destination. If you clone a remote repository, make
2379 sure beforehand that it has no patches applied.
2383 sure beforehand that it has no patches applied.
2380
2384
2381 The source patch repository is looked for in <src>/.hg/patches by
2385 The source patch repository is looked for in <src>/.hg/patches by
2382 default. Use -p <url> to change it.
2386 default. Use -p <url> to change it.
2383
2387
2384 The patch directory must be a nested Mercurial repository, as
2388 The patch directory must be a nested Mercurial repository, as
2385 would be created by :hg:`init --mq`.
2389 would be created by :hg:`init --mq`.
2386
2390
2387 Return 0 on success.
2391 Return 0 on success.
2388 '''
2392 '''
2389 def patchdir(repo):
2393 def patchdir(repo):
2390 """compute a patch repo url from a repo object"""
2394 """compute a patch repo url from a repo object"""
2391 url = repo.url()
2395 url = repo.url()
2392 if url.endswith('/'):
2396 if url.endswith('/'):
2393 url = url[:-1]
2397 url = url[:-1]
2394 return url + '/.hg/patches'
2398 return url + '/.hg/patches'
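# Illustrative example with a hypothetical URL: patchdir() maps
# 'http://example.com/repo/' to 'http://example.com/repo/.hg/patches',
# the default location of the versioned patch queue.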
2395
2399
2396 # main repo (destination and sources)
2400 # main repo (destination and sources)
2397 if dest is None:
2401 if dest is None:
2398 dest = hg.defaultdest(source)
2402 dest = hg.defaultdest(source)
2399 sr = hg.peer(ui, opts, ui.expandpath(source))
2403 sr = hg.peer(ui, opts, ui.expandpath(source))
2400
2404
2401 # patches repo (source only)
2405 # patches repo (source only)
2402 if opts.get('patches'):
2406 if opts.get('patches'):
2403 patchespath = ui.expandpath(opts.get('patches'))
2407 patchespath = ui.expandpath(opts.get('patches'))
2404 else:
2408 else:
2405 patchespath = patchdir(sr)
2409 patchespath = patchdir(sr)
2406 try:
2410 try:
2407 hg.peer(ui, opts, patchespath)
2411 hg.peer(ui, opts, patchespath)
2408 except error.RepoError:
2412 except error.RepoError:
2409 raise util.Abort(_('versioned patch repository not found'
2413 raise util.Abort(_('versioned patch repository not found'
2410 ' (see init --mq)'))
2414 ' (see init --mq)'))
2411 qbase, destrev = None, None
2415 qbase, destrev = None, None
2412 if sr.local():
2416 if sr.local():
2413 repo = sr.local()
2417 repo = sr.local()
2414 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2418 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2415 qbase = repo.mq.applied[0].node
2419 qbase = repo.mq.applied[0].node
2416 if not hg.islocal(dest):
2420 if not hg.islocal(dest):
2417 heads = set(repo.heads())
2421 heads = set(repo.heads())
2418 destrev = list(heads.difference(repo.heads(qbase)))
2422 destrev = list(heads.difference(repo.heads(qbase)))
2419 destrev.append(repo.changelog.parents(qbase)[0])
2423 destrev.append(repo.changelog.parents(qbase)[0])
2420 elif sr.capable('lookup'):
2424 elif sr.capable('lookup'):
2421 try:
2425 try:
2422 qbase = sr.lookup('qbase')
2426 qbase = sr.lookup('qbase')
2423 except error.RepoError:
2427 except error.RepoError:
2424 pass
2428 pass
2425
2429
2426 ui.note(_('cloning main repository\n'))
2430 ui.note(_('cloning main repository\n'))
2427 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2431 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2428 pull=opts.get('pull'),
2432 pull=opts.get('pull'),
2429 rev=destrev,
2433 rev=destrev,
2430 update=False,
2434 update=False,
2431 stream=opts.get('uncompressed'))
2435 stream=opts.get('uncompressed'))
2432
2436
2433 ui.note(_('cloning patch repository\n'))
2437 ui.note(_('cloning patch repository\n'))
2434 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2438 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2435 pull=opts.get('pull'), update=not opts.get('noupdate'),
2439 pull=opts.get('pull'), update=not opts.get('noupdate'),
2436 stream=opts.get('uncompressed'))
2440 stream=opts.get('uncompressed'))
2437
2441
2438 if dr.local():
2442 if dr.local():
2439 repo = dr.local()
2443 repo = dr.local()
2440 if qbase:
2444 if qbase:
2441 ui.note(_('stripping applied patches from destination '
2445 ui.note(_('stripping applied patches from destination '
2442 'repository\n'))
2446 'repository\n'))
2443 strip(ui, repo, [qbase], update=False, backup=None)
2447 strip(ui, repo, [qbase], update=False, backup=None)
2444 if not opts.get('noupdate'):
2448 if not opts.get('noupdate'):
2445 ui.note(_('updating destination repository\n'))
2449 ui.note(_('updating destination repository\n'))
2446 hg.update(repo, repo.changelog.tip())
2450 hg.update(repo, repo.changelog.tip())
2447
2451
2448 @command("qcommit|qci",
2452 @command("qcommit|qci",
2449 commands.table["^commit|ci"][1],
2453 commands.table["^commit|ci"][1],
2450 _('hg qcommit [OPTION]... [FILE]...'),
2454 _('hg qcommit [OPTION]... [FILE]...'),
2451 inferrepo=True)
2455 inferrepo=True)
2452 def commit(ui, repo, *pats, **opts):
2456 def commit(ui, repo, *pats, **opts):
2453 """commit changes in the queue repository (DEPRECATED)
2457 """commit changes in the queue repository (DEPRECATED)
2454
2458
2455 This command is deprecated; use :hg:`commit --mq` instead."""
2459 This command is deprecated; use :hg:`commit --mq` instead."""
2456 q = repo.mq
2460 q = repo.mq
2457 r = q.qrepo()
2461 r = q.qrepo()
2458 if not r:
2462 if not r:
2459 raise util.Abort('no queue repository')
2463 raise util.Abort('no queue repository')
2460 commands.commit(r.ui, r, *pats, **opts)
2464 commands.commit(r.ui, r, *pats, **opts)
2461
2465
2462 @command("qseries",
2466 @command("qseries",
2463 [('m', 'missing', None, _('print patches not in series')),
2467 [('m', 'missing', None, _('print patches not in series')),
2464 ] + seriesopts,
2468 ] + seriesopts,
2465 _('hg qseries [-ms]'))
2469 _('hg qseries [-ms]'))
2466 def series(ui, repo, **opts):
2470 def series(ui, repo, **opts):
2467 """print the entire series file
2471 """print the entire series file
2468
2472
2469 Returns 0 on success."""
2473 Returns 0 on success."""
2470 repo.mq.qseries(repo, missing=opts.get('missing'),
2474 repo.mq.qseries(repo, missing=opts.get('missing'),
2471 summary=opts.get('summary'))
2475 summary=opts.get('summary'))
2472 return 0
2476 return 0
2473
2477
2474 @command("qtop", seriesopts, _('hg qtop [-s]'))
2478 @command("qtop", seriesopts, _('hg qtop [-s]'))
2475 def top(ui, repo, **opts):
2479 def top(ui, repo, **opts):
2476 """print the name of the current patch
2480 """print the name of the current patch
2477
2481
2478 Returns 0 on success."""
2482 Returns 0 on success."""
2479 q = repo.mq
2483 q = repo.mq
2480 if q.applied:
2484 if q.applied:
2481 t = q.seriesend(True)
2485 t = q.seriesend(True)
2482 else:
2486 else:
2483 t = 0
2487 t = 0
2484
2488
2485 if t:
2489 if t:
2486 q.qseries(repo, start=t - 1, length=1, status='A',
2490 q.qseries(repo, start=t - 1, length=1, status='A',
2487 summary=opts.get('summary'))
2491 summary=opts.get('summary'))
2488 else:
2492 else:
2489 ui.write(_("no patches applied\n"))
2493 ui.write(_("no patches applied\n"))
2490 return 1
2494 return 1
2491
2495
2492 @command("qnext", seriesopts, _('hg qnext [-s]'))
2496 @command("qnext", seriesopts, _('hg qnext [-s]'))
2493 def next(ui, repo, **opts):
2497 def next(ui, repo, **opts):
2494 """print the name of the next pushable patch
2498 """print the name of the next pushable patch
2495
2499
2496 Returns 0 on success."""
2500 Returns 0 on success."""
2497 q = repo.mq
2501 q = repo.mq
2498 end = q.seriesend()
2502 end = q.seriesend()
2499 if end == len(q.series):
2503 if end == len(q.series):
2500 ui.write(_("all patches applied\n"))
2504 ui.write(_("all patches applied\n"))
2501 return 1
2505 return 1
2502 q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2506 q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2503
2507
2504 @command("qprev", seriesopts, _('hg qprev [-s]'))
2508 @command("qprev", seriesopts, _('hg qprev [-s]'))
2505 def prev(ui, repo, **opts):
2509 def prev(ui, repo, **opts):
2506 """print the name of the preceding applied patch
2510 """print the name of the preceding applied patch
2507
2511
2508 Returns 0 on success."""
2512 Returns 0 on success."""
2509 q = repo.mq
2513 q = repo.mq
2510 l = len(q.applied)
2514 l = len(q.applied)
2511 if l == 1:
2515 if l == 1:
2512 ui.write(_("only one patch applied\n"))
2516 ui.write(_("only one patch applied\n"))
2513 return 1
2517 return 1
2514 if not l:
2518 if not l:
2515 ui.write(_("no patches applied\n"))
2519 ui.write(_("no patches applied\n"))
2516 return 1
2520 return 1
2517 idx = q.series.index(q.applied[-2].name)
2521 idx = q.series.index(q.applied[-2].name)
2518 q.qseries(repo, start=idx, length=1, status='A',
2522 q.qseries(repo, start=idx, length=1, status='A',
2519 summary=opts.get('summary'))
2523 summary=opts.get('summary'))
2520
2524
2521 def setupheaderopts(ui, opts):
2525 def setupheaderopts(ui, opts):
2522 if not opts.get('user') and opts.get('currentuser'):
2526 if not opts.get('user') and opts.get('currentuser'):
2523 opts['user'] = ui.username()
2527 opts['user'] = ui.username()
2524 if not opts.get('date') and opts.get('currentdate'):
2528 if not opts.get('date') and opts.get('currentdate'):
2525 opts['date'] = "%d %d" % util.makedate()
2529 opts['date'] = "%d %d" % util.makedate()
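# Sketch of the effect: with -U/--currentuser and no explicit -u,
# opts['user'] is filled in from ui.username(); with -D/--currentdate and
# no explicit -d, opts['date'] becomes the '<unixtime> <offset>' string
# built from util.makedate(), e.g. '1431000000 0' (hypothetical values).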
2526
2530
2527 @command("^qnew",
2531 @command("^qnew",
2528 [('e', 'edit', None, _('invoke editor on commit messages')),
2532 [('e', 'edit', None, _('invoke editor on commit messages')),
2529 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2533 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2530 ('g', 'git', None, _('use git extended diff format')),
2534 ('g', 'git', None, _('use git extended diff format')),
2531 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2535 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2532 ('u', 'user', '',
2536 ('u', 'user', '',
2533 _('add "From: <USER>" to patch'), _('USER')),
2537 _('add "From: <USER>" to patch'), _('USER')),
2534 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2538 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2535 ('d', 'date', '',
2539 ('d', 'date', '',
2536 _('add "Date: <DATE>" to patch'), _('DATE'))
2540 _('add "Date: <DATE>" to patch'), _('DATE'))
2537 ] + commands.walkopts + commands.commitopts,
2541 ] + commands.walkopts + commands.commitopts,
2538 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2542 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2539 inferrepo=True)
2543 inferrepo=True)
2540 def new(ui, repo, patch, *args, **opts):
2544 def new(ui, repo, patch, *args, **opts):
2541 """create a new patch
2545 """create a new patch
2542
2546
2543 qnew creates a new patch on top of the currently-applied patch (if
2547 qnew creates a new patch on top of the currently-applied patch (if
2544 any). The patch will be initialized with any outstanding changes
2548 any). The patch will be initialized with any outstanding changes
2545 in the working directory. You may also use -I/--include,
2549 in the working directory. You may also use -I/--include,
2546 -X/--exclude, and/or a list of files after the patch name to add
2550 -X/--exclude, and/or a list of files after the patch name to add
2547 only changes to matching files to the new patch, leaving the rest
2551 only changes to matching files to the new patch, leaving the rest
2548 as uncommitted modifications.
2552 as uncommitted modifications.
2549
2553
2550 -u/--user and -d/--date can be used to set the (given) user and
2554 -u/--user and -d/--date can be used to set the (given) user and
2551 date, respectively. -U/--currentuser and -D/--currentdate set user
2555 date, respectively. -U/--currentuser and -D/--currentdate set user
2552 to current user and date to current date.
2556 to current user and date to current date.
2553
2557
2554 -e/--edit, -m/--message or -l/--logfile set the patch header as
2558 -e/--edit, -m/--message or -l/--logfile set the patch header as
2555 well as the commit message. If none is specified, the header is
2559 well as the commit message. If none is specified, the header is
2556 empty and the commit message is '[mq]: PATCH'.
2560 empty and the commit message is '[mq]: PATCH'.
2557
2561
2558 Use the -g/--git option to keep the patch in the git extended diff
2562 Use the -g/--git option to keep the patch in the git extended diff
2559 format. Read the diffs help topic for more information on why this
2563 format. Read the diffs help topic for more information on why this
2560 is important for preserving permission changes and copy/rename
2564 is important for preserving permission changes and copy/rename
2561 information.
2565 information.
2562
2566
2563 Returns 0 on successful creation of a new patch.
2567 Returns 0 on successful creation of a new patch.
2564 """
2568 """
2565 msg = cmdutil.logmessage(ui, opts)
2569 msg = cmdutil.logmessage(ui, opts)
2566 q = repo.mq
2570 q = repo.mq
2567 opts['msg'] = msg
2571 opts['msg'] = msg
2568 setupheaderopts(ui, opts)
2572 setupheaderopts(ui, opts)
2569 q.new(repo, patch, *args, **opts)
2573 q.new(repo, patch, *args, **opts)
2570 q.savedirty()
2574 q.savedirty()
2571 return 0
2575 return 0
2572
2576
2573 @command("^qrefresh",
2577 @command("^qrefresh",
2574 [('e', 'edit', None, _('invoke editor on commit messages')),
2578 [('e', 'edit', None, _('invoke editor on commit messages')),
2575 ('g', 'git', None, _('use git extended diff format')),
2579 ('g', 'git', None, _('use git extended diff format')),
2576 ('s', 'short', None,
2580 ('s', 'short', None,
2577 _('refresh only files already in the patch and specified files')),
2581 _('refresh only files already in the patch and specified files')),
2578 ('U', 'currentuser', None,
2582 ('U', 'currentuser', None,
2579 _('add/update author field in patch with current user')),
2583 _('add/update author field in patch with current user')),
2580 ('u', 'user', '',
2584 ('u', 'user', '',
2581 _('add/update author field in patch with given user'), _('USER')),
2585 _('add/update author field in patch with given user'), _('USER')),
2582 ('D', 'currentdate', None,
2586 ('D', 'currentdate', None,
2583 _('add/update date field in patch with current date')),
2587 _('add/update date field in patch with current date')),
2584 ('d', 'date', '',
2588 ('d', 'date', '',
2585 _('add/update date field in patch with given date'), _('DATE'))
2589 _('add/update date field in patch with given date'), _('DATE'))
2586 ] + commands.walkopts + commands.commitopts,
2590 ] + commands.walkopts + commands.commitopts,
2587 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2588 inferrepo=True)
2592 inferrepo=True)
2589 def refresh(ui, repo, *pats, **opts):
2593 def refresh(ui, repo, *pats, **opts):
2590 """update the current patch
2594 """update the current patch
2591
2595
2592 If any file patterns are provided, the refreshed patch will
2596 If any file patterns are provided, the refreshed patch will
2593 contain only the modifications that match those patterns; the
2597 contain only the modifications that match those patterns; the
2594 remaining modifications will remain in the working directory.
2598 remaining modifications will remain in the working directory.
2595
2599
2596 If -s/--short is specified, files currently included in the patch
2600 If -s/--short is specified, files currently included in the patch
2597 will be refreshed just like matched files and remain in the patch.
2601 will be refreshed just like matched files and remain in the patch.
2598
2602
2599 If -e/--edit is specified, Mercurial will start your configured editor for
2603 If -e/--edit is specified, Mercurial will start your configured editor for
2600 you to enter a message. In case qrefresh fails, you will find a backup of
2604 you to enter a message. In case qrefresh fails, you will find a backup of
2601 your message in ``.hg/last-message.txt``.
2605 your message in ``.hg/last-message.txt``.
2602
2606
2603 hg add/remove/copy/rename work as usual, though you might want to
2607 hg add/remove/copy/rename work as usual, though you might want to
2604 use git-style patches (-g/--git or [diff] git=1) to track copies
2608 use git-style patches (-g/--git or [diff] git=1) to track copies
2605 and renames. See the diffs help topic for more information on the
2609 and renames. See the diffs help topic for more information on the
2606 git diff format.
2610 git diff format.
2607
2611
2608 Returns 0 on success.
2612 Returns 0 on success.
2609 """
2613 """
2610 q = repo.mq
2614 q = repo.mq
2611 message = cmdutil.logmessage(ui, opts)
2615 message = cmdutil.logmessage(ui, opts)
2612 setupheaderopts(ui, opts)
2616 setupheaderopts(ui, opts)
2613 wlock = repo.wlock()
2617 wlock = repo.wlock()
2614 try:
2618 try:
2615 ret = q.refresh(repo, pats, msg=message, **opts)
2619 ret = q.refresh(repo, pats, msg=message, **opts)
2616 q.savedirty()
2620 q.savedirty()
2617 return ret
2621 return ret
2618 finally:
2622 finally:
2619 wlock.release()
2623 wlock.release()
2620
2624
2621 @command("^qdiff",
2625 @command("^qdiff",
2622 commands.diffopts + commands.diffopts2 + commands.walkopts,
2626 commands.diffopts + commands.diffopts2 + commands.walkopts,
2623 _('hg qdiff [OPTION]... [FILE]...'),
2627 _('hg qdiff [OPTION]... [FILE]...'),
2624 inferrepo=True)
2628 inferrepo=True)
2625 def diff(ui, repo, *pats, **opts):
2629 def diff(ui, repo, *pats, **opts):
2626 """diff of the current patch and subsequent modifications
2630 """diff of the current patch and subsequent modifications
2627
2631
2628 Shows a diff which includes the current patch as well as any
2632 Shows a diff which includes the current patch as well as any
2629 changes which have been made in the working directory since the
2633 changes which have been made in the working directory since the
2630 last refresh (thus showing what the current patch would become
2634 last refresh (thus showing what the current patch would become
2631 after a qrefresh).
2635 after a qrefresh).
2632
2636
2633 Use :hg:`diff` if you only want to see the changes made since the
2637 Use :hg:`diff` if you only want to see the changes made since the
2634 last qrefresh, or :hg:`export qtip` if you want to see changes
2638 last qrefresh, or :hg:`export qtip` if you want to see changes
2635 made by the current patch without including changes made since the
2639 made by the current patch without including changes made since the
2636 qrefresh.
2640 qrefresh.
2637
2641
2638 Returns 0 on success.
2642 Returns 0 on success.
2639 """
2643 """
2640 repo.mq.diff(repo, pats, opts)
2644 repo.mq.diff(repo, pats, opts)
2641 return 0
2645 return 0
2642
2646
2643 @command('qfold',
2647 @command('qfold',
2644 [('e', 'edit', None, _('invoke editor on commit messages')),
2648 [('e', 'edit', None, _('invoke editor on commit messages')),
2645 ('k', 'keep', None, _('keep folded patch files')),
2649 ('k', 'keep', None, _('keep folded patch files')),
2646 ] + commands.commitopts,
2650 ] + commands.commitopts,
2647 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2651 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2648 def fold(ui, repo, *files, **opts):
2652 def fold(ui, repo, *files, **opts):
2649 """fold the named patches into the current patch
2653 """fold the named patches into the current patch
2650
2654
2651 Patches must not yet be applied. Each patch will be successively
2655 Patches must not yet be applied. Each patch will be successively
2652 applied to the current patch in the order given. If all the
2656 applied to the current patch in the order given. If all the
2653 patches apply successfully, the current patch will be refreshed
2657 patches apply successfully, the current patch will be refreshed
2654 with the new cumulative patch, and the folded patches will be
2658 with the new cumulative patch, and the folded patches will be
2655 deleted. With -k/--keep, the folded patch files will not be
2659 deleted. With -k/--keep, the folded patch files will not be
2656 removed afterwards.
2660 removed afterwards.
2657
2661
2658 The header for each folded patch will be concatenated with the
2662 The header for each folded patch will be concatenated with the
2659 current patch header, separated by a line of ``* * *``.
2663 current patch header, separated by a line of ``* * *``.
2660
2664
2661 Returns 0 on success."""
2665 Returns 0 on success."""
2662 q = repo.mq
2666 q = repo.mq
2663 if not files:
2667 if not files:
2664 raise util.Abort(_('qfold requires at least one patch name'))
2668 raise util.Abort(_('qfold requires at least one patch name'))
2665 if not q.checktoppatch(repo)[0]:
2669 if not q.checktoppatch(repo)[0]:
2666 raise util.Abort(_('no patches applied'))
2670 raise util.Abort(_('no patches applied'))
2667 q.checklocalchanges(repo)
2671 q.checklocalchanges(repo)
2668
2672
2669 message = cmdutil.logmessage(ui, opts)
2673 message = cmdutil.logmessage(ui, opts)
2670
2674
2671 parent = q.lookup('qtip')
2675 parent = q.lookup('qtip')
2672 patches = []
2676 patches = []
2673 messages = []
2677 messages = []
2674 for f in files:
2678 for f in files:
2675 p = q.lookup(f)
2679 p = q.lookup(f)
2676 if p in patches or p == parent:
2680 if p in patches or p == parent:
2677 ui.warn(_('skipping already folded patch %s\n') % p)
2681 ui.warn(_('skipping already folded patch %s\n') % p)
2678 if q.isapplied(p):
2682 if q.isapplied(p):
2679 raise util.Abort(_('qfold cannot fold already applied patch %s')
2683 raise util.Abort(_('qfold cannot fold already applied patch %s')
2680 % p)
2684 % p)
2681 patches.append(p)
2685 patches.append(p)
2682
2686
2683 for p in patches:
2687 for p in patches:
2684 if not message:
2688 if not message:
2685 ph = patchheader(q.join(p), q.plainmode)
2689 ph = patchheader(q.join(p), q.plainmode)
2686 if ph.message:
2690 if ph.message:
2687 messages.append(ph.message)
2691 messages.append(ph.message)
2688 pf = q.join(p)
2692 pf = q.join(p)
2689 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2693 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2690 if not patchsuccess:
2694 if not patchsuccess:
2691 raise util.Abort(_('error folding patch %s') % p)
2695 raise util.Abort(_('error folding patch %s') % p)
2692
2696
2693 if not message:
2697 if not message:
2694 ph = patchheader(q.join(parent), q.plainmode)
2698 ph = patchheader(q.join(parent), q.plainmode)
2695 message = ph.message
2699 message = ph.message
2696 for msg in messages:
2700 for msg in messages:
2697 if msg:
2701 if msg:
2698 if message:
2702 if message:
2699 message.append('* * *')
2703 message.append('* * *')
2700 message.extend(msg)
2704 message.extend(msg)
2701 message = '\n'.join(message)
2705 message = '\n'.join(message)
2702
2706
2703 diffopts = q.patchopts(q.diffopts(), *patches)
2707 diffopts = q.patchopts(q.diffopts(), *patches)
2704 wlock = repo.wlock()
2708 wlock = repo.wlock()
2705 try:
2709 try:
2706 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2710 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2707 editform='mq.qfold')
2711 editform='mq.qfold')
2708 q.delete(repo, patches, opts)
2712 q.delete(repo, patches, opts)
2709 q.savedirty()
2713 q.savedirty()
2710 finally:
2714 finally:
2711 wlock.release()
2715 wlock.release()
2712
2716
2713 @command("qgoto",
2717 @command("qgoto",
2714 [('', 'keep-changes', None,
2718 [('', 'keep-changes', None,
2715 _('tolerate non-conflicting local changes')),
2719 _('tolerate non-conflicting local changes')),
2716 ('f', 'force', None, _('overwrite any local changes')),
2720 ('f', 'force', None, _('overwrite any local changes')),
2717 ('', 'no-backup', None, _('do not save backup copies of files'))],
2721 ('', 'no-backup', None, _('do not save backup copies of files'))],
2718 _('hg qgoto [OPTION]... PATCH'))
2722 _('hg qgoto [OPTION]... PATCH'))
2719 def goto(ui, repo, patch, **opts):
2723 def goto(ui, repo, patch, **opts):
2720 '''push or pop patches until named patch is at top of stack
2724 '''push or pop patches until named patch is at top of stack
2721
2725
2722 Returns 0 on success.'''
2726 Returns 0 on success.'''
2723 opts = fixkeepchangesopts(ui, opts)
2727 opts = fixkeepchangesopts(ui, opts)
2724 q = repo.mq
2728 q = repo.mq
2725 patch = q.lookup(patch)
2729 patch = q.lookup(patch)
2726 nobackup = opts.get('no_backup')
2730 nobackup = opts.get('no_backup')
2727 keepchanges = opts.get('keep_changes')
2731 keepchanges = opts.get('keep_changes')
2728 if q.isapplied(patch):
2732 if q.isapplied(patch):
2729 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2733 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2730 keepchanges=keepchanges)
2734 keepchanges=keepchanges)
2731 else:
2735 else:
2732 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2736 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2733 keepchanges=keepchanges)
2737 keepchanges=keepchanges)
2734 q.savedirty()
2738 q.savedirty()
2735 return ret
2739 return ret
2736
2740
2737 @command("qguard",
2741 @command("qguard",
2738 [('l', 'list', None, _('list all patches and guards')),
2742 [('l', 'list', None, _('list all patches and guards')),
2739 ('n', 'none', None, _('drop all guards'))],
2743 ('n', 'none', None, _('drop all guards'))],
2740 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2744 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2741 def guard(ui, repo, *args, **opts):
2745 def guard(ui, repo, *args, **opts):
2742 '''set or print guards for a patch
2746 '''set or print guards for a patch
2743
2747
2744 Guards control whether a patch can be pushed. A patch with no
2748 Guards control whether a patch can be pushed. A patch with no
2745 guards is always pushed. A patch with a positive guard ("+foo") is
2749 guards is always pushed. A patch with a positive guard ("+foo") is
2746 pushed only if the :hg:`qselect` command has activated it. A patch with
2750 pushed only if the :hg:`qselect` command has activated it. A patch with
2747 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2751 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2748 has activated it.
2752 has activated it.
2749
2753
2750 With no arguments, print the currently active guards.
2754 With no arguments, print the currently active guards.
2751 With arguments, set guards for the named patch.
2755 With arguments, set guards for the named patch.
2752
2756
2753 .. note::
2757 .. note::
2754
2758
2755 Specifying negative guards now requires '--'.
2759 Specifying negative guards now requires '--'.
2756
2760
2757 To set guards on another patch::
2761 To set guards on another patch::
2758
2762
2759 hg qguard other.patch -- +2.6.17 -stable
2763 hg qguard other.patch -- +2.6.17 -stable
2760
2764
2761 Returns 0 on success.
2765 Returns 0 on success.
2762 '''
2766 '''
2763 def status(idx):
2767 def status(idx):
2764 guards = q.seriesguards[idx] or ['unguarded']
2768 guards = q.seriesguards[idx] or ['unguarded']
2765 if q.series[idx] in applied:
2769 if q.series[idx] in applied:
2766 state = 'applied'
2770 state = 'applied'
2767 elif q.pushable(idx)[0]:
2771 elif q.pushable(idx)[0]:
2768 state = 'unapplied'
2772 state = 'unapplied'
2769 else:
2773 else:
2770 state = 'guarded'
2774 state = 'guarded'
2771 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2775 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2772 ui.write('%s: ' % ui.label(q.series[idx], label))
2776 ui.write('%s: ' % ui.label(q.series[idx], label))
2773
2777
2774 for i, guard in enumerate(guards):
2778 for i, guard in enumerate(guards):
2775 if guard.startswith('+'):
2779 if guard.startswith('+'):
2776 ui.write(guard, label='qguard.positive')
2780 ui.write(guard, label='qguard.positive')
2777 elif guard.startswith('-'):
2781 elif guard.startswith('-'):
2778 ui.write(guard, label='qguard.negative')
2782 ui.write(guard, label='qguard.negative')
2779 else:
2783 else:
2780 ui.write(guard, label='qguard.unguarded')
2784 ui.write(guard, label='qguard.unguarded')
2781 if i != len(guards) - 1:
2785 if i != len(guards) - 1:
2782 ui.write(' ')
2786 ui.write(' ')
2783 ui.write('\n')
2787 ui.write('\n')
2784 q = repo.mq
2788 q = repo.mq
2785 applied = set(p.name for p in q.applied)
2789 applied = set(p.name for p in q.applied)
2786 patch = None
2790 patch = None
2787 args = list(args)
2791 args = list(args)
2788 if opts.get('list'):
2792 if opts.get('list'):
2789 if args or opts.get('none'):
2793 if args or opts.get('none'):
2790 raise util.Abort(_('cannot mix -l/--list with options or '
2794 raise util.Abort(_('cannot mix -l/--list with options or '
2791 'arguments'))
2795 'arguments'))
2792 for i in xrange(len(q.series)):
2796 for i in xrange(len(q.series)):
2793 status(i)
2797 status(i)
2794 return
2798 return
2795 if not args or args[0][0:1] in '-+':
2799 if not args or args[0][0:1] in '-+':
2796 if not q.applied:
2800 if not q.applied:
2797 raise util.Abort(_('no patches applied'))
2801 raise util.Abort(_('no patches applied'))
2798 patch = q.applied[-1].name
2802 patch = q.applied[-1].name
2799 if patch is None and args[0][0:1] not in '-+':
2803 if patch is None and args[0][0:1] not in '-+':
2800 patch = args.pop(0)
2804 patch = args.pop(0)
2801 if patch is None:
2805 if patch is None:
2802 raise util.Abort(_('no patch to work with'))
2806 raise util.Abort(_('no patch to work with'))
2803 if args or opts.get('none'):
2807 if args or opts.get('none'):
2804 idx = q.findseries(patch)
2808 idx = q.findseries(patch)
2805 if idx is None:
2809 if idx is None:
2806 raise util.Abort(_('no patch named %s') % patch)
2810 raise util.Abort(_('no patch named %s') % patch)
2807 q.setguards(idx, args)
2811 q.setguards(idx, args)
2808 q.savedirty()
2812 q.savedirty()
2809 else:
2813 else:
2810 status(q.series.index(q.lookup(patch)))
2814 status(q.series.index(q.lookup(patch)))
2811
2815
2812 @command("qheader", [], _('hg qheader [PATCH]'))
2816 @command("qheader", [], _('hg qheader [PATCH]'))
2813 def header(ui, repo, patch=None):
2817 def header(ui, repo, patch=None):
2814 """print the header of the topmost or specified patch
2818 """print the header of the topmost or specified patch
2815
2819
2816 Returns 0 on success."""
2820 Returns 0 on success."""
2817 q = repo.mq
2821 q = repo.mq
2818
2822
2819 if patch:
2823 if patch:
2820 patch = q.lookup(patch)
2824 patch = q.lookup(patch)
2821 else:
2825 else:
2822 if not q.applied:
2826 if not q.applied:
2823 ui.write(_('no patches applied\n'))
2827 ui.write(_('no patches applied\n'))
2824 return 1
2828 return 1
2825 patch = q.lookup('qtip')
2829 patch = q.lookup('qtip')
2826 ph = patchheader(q.join(patch), q.plainmode)
2830 ph = patchheader(q.join(patch), q.plainmode)
2827
2831
2828 ui.write('\n'.join(ph.message) + '\n')
2832 ui.write('\n'.join(ph.message) + '\n')
2829
2833
2830 def lastsavename(path):
2834 def lastsavename(path):
2831 (directory, base) = os.path.split(path)
2835 (directory, base) = os.path.split(path)
2832 names = os.listdir(directory)
2836 names = os.listdir(directory)
2833 namere = re.compile("%s.([0-9]+)" % base)
2837 namere = re.compile("%s.([0-9]+)" % base)
2834 maxindex = None
2838 maxindex = None
2835 maxname = None
2839 maxname = None
2836 for f in names:
2840 for f in names:
2837 m = namere.match(f)
2841 m = namere.match(f)
2838 if m:
2842 if m:
2839 index = int(m.group(1))
2843 index = int(m.group(1))
2840 if maxindex is None or index > maxindex:
2844 if maxindex is None or index > maxindex:
2841 maxindex = index
2845 maxindex = index
2842 maxname = f
2846 maxname = f
2843 if maxname:
2847 if maxname:
2844 return (os.path.join(directory, maxname), maxindex)
2848 return (os.path.join(directory, maxname), maxindex)
2845 return (None, None)
2849 return (None, None)
2846
2850
2847 def savename(path):
2851 def savename(path):
2848 (last, index) = lastsavename(path)
2852 (last, index) = lastsavename(path)
2849 if last is None:
2853 if last is None:
2850 index = 0
2854 index = 0
2851 newpath = path + ".%d" % (index + 1)
2855 newpath = path + ".%d" % (index + 1)
2852 return newpath
2856 return newpath
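# Illustrative example with hypothetical file names: if /repo/.hg already
# contains 'patches.1' and 'patches.2', lastsavename('/repo/.hg/patches')
# returns ('/repo/.hg/patches.2', 2) and savename('/repo/.hg/patches')
# returns '/repo/.hg/patches.3'; with no saved queues, lastsavename()
# returns (None, None) and savename() simply appends '.1'.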
2853
2857
2854 @command("^qpush",
2858 @command("^qpush",
2855 [('', 'keep-changes', None,
2859 [('', 'keep-changes', None,
2856 _('tolerate non-conflicting local changes')),
2860 _('tolerate non-conflicting local changes')),
2857 ('f', 'force', None, _('apply on top of local changes')),
2861 ('f', 'force', None, _('apply on top of local changes')),
2858 ('e', 'exact', None,
2862 ('e', 'exact', None,
2859 _('apply the target patch to its recorded parent')),
2863 _('apply the target patch to its recorded parent')),
2860 ('l', 'list', None, _('list patch name in commit text')),
2864 ('l', 'list', None, _('list patch name in commit text')),
2861 ('a', 'all', None, _('apply all patches')),
2865 ('a', 'all', None, _('apply all patches')),
2862 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2866 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2863 ('n', 'name', '',
2867 ('n', 'name', '',
2864 _('merge queue name (DEPRECATED)'), _('NAME')),
2868 _('merge queue name (DEPRECATED)'), _('NAME')),
2865 ('', 'move', None,
2869 ('', 'move', None,
2866 _('reorder patch series and apply only the patch')),
2870 _('reorder patch series and apply only the patch')),
2867 ('', 'no-backup', None, _('do not save backup copies of files'))],
2871 ('', 'no-backup', None, _('do not save backup copies of files'))],
2868 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2872 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2869 def push(ui, repo, patch=None, **opts):
2873 def push(ui, repo, patch=None, **opts):
2870 """push the next patch onto the stack
2874 """push the next patch onto the stack
2871
2875
2872 By default, abort if the working directory contains uncommitted
2876 By default, abort if the working directory contains uncommitted
2873 changes. With --keep-changes, abort only if the uncommitted files
2877 changes. With --keep-changes, abort only if the uncommitted files
2874 overlap with patched files. With -f/--force, backup and patch over
2878 overlap with patched files. With -f/--force, backup and patch over
2875 uncommitted changes.
2879 uncommitted changes.
2876
2880
2877 Return 0 on success.
2881 Return 0 on success.
2878 """
2882 """
2879 q = repo.mq
2883 q = repo.mq
2880 mergeq = None
2884 mergeq = None
2881
2885
2882 opts = fixkeepchangesopts(ui, opts)
2886 opts = fixkeepchangesopts(ui, opts)
2883 if opts.get('merge'):
2887 if opts.get('merge'):
2884 if opts.get('name'):
2888 if opts.get('name'):
2885 newpath = repo.join(opts.get('name'))
2889 newpath = repo.join(opts.get('name'))
2886 else:
2890 else:
2887 newpath, i = lastsavename(q.path)
2891 newpath, i = lastsavename(q.path)
2888 if not newpath:
2892 if not newpath:
2889 ui.warn(_("no saved queues found, please use -n\n"))
2893 ui.warn(_("no saved queues found, please use -n\n"))
2890 return 1
2894 return 1
2891 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2895 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2892 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2896 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2893 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2897 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2894 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2898 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2895 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2899 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2896 keepchanges=opts.get('keep_changes'))
2900 keepchanges=opts.get('keep_changes'))
2897 return ret
2901 return ret
2898
2902
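# Illustrative qpush invocations using only the options registered above
# ('bugfix' is a hypothetical patch name):
#   hg qpush                 # apply the next patch in the series
#   hg qpush --all           # apply every remaining patch
#   hg qpush --keep-changes  # tolerate local changes that do not overlap
#   hg qpush --move bugfix   # reorder the series so 'bugfix' is applied next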
2899 @command("^qpop",
2903 @command("^qpop",
2900 [('a', 'all', None, _('pop all patches')),
2904 [('a', 'all', None, _('pop all patches')),
2901 ('n', 'name', '',
2905 ('n', 'name', '',
2902 _('queue name to pop (DEPRECATED)'), _('NAME')),
2906 _('queue name to pop (DEPRECATED)'), _('NAME')),
2903 ('', 'keep-changes', None,
2907 ('', 'keep-changes', None,
2904 _('tolerate non-conflicting local changes')),
2908 _('tolerate non-conflicting local changes')),
2905 ('f', 'force', None, _('forget any local changes to patched files')),
2909 ('f', 'force', None, _('forget any local changes to patched files')),
2906 ('', 'no-backup', None, _('do not save backup copies of files'))],
2910 ('', 'no-backup', None, _('do not save backup copies of files'))],
2907 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2911 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2908 def pop(ui, repo, patch=None, **opts):
2912 def pop(ui, repo, patch=None, **opts):
2909 """pop the current patch off the stack
2913 """pop the current patch off the stack
2910
2914
2911 Without argument, pops off the top of the patch stack. If given a
2915 Without argument, pops off the top of the patch stack. If given a
2912 patch name, keeps popping off patches until the named patch is at
2916 patch name, keeps popping off patches until the named patch is at
2913 the top of the stack.
2917 the top of the stack.
2914
2918
2915 By default, abort if the working directory contains uncommitted
2919 By default, abort if the working directory contains uncommitted
2916 changes. With --keep-changes, abort only if the uncommitted files
2920 changes. With --keep-changes, abort only if the uncommitted files
2917 overlap with patched files. With -f/--force, backup and discard
2921 overlap with patched files. With -f/--force, backup and discard
2918 changes made to such files.
2922 changes made to such files.
2919
2923
2920 Return 0 on success.
2924 Return 0 on success.
2921 """
2925 """
2922 opts = fixkeepchangesopts(ui, opts)
2926 opts = fixkeepchangesopts(ui, opts)
2923 localupdate = True
2927 localupdate = True
2924 if opts.get('name'):
2928 if opts.get('name'):
2925 q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
2929 q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
2926 ui.warn(_('using patch queue: %s\n') % q.path)
2930 ui.warn(_('using patch queue: %s\n') % q.path)
2927 localupdate = False
2931 localupdate = False
2928 else:
2932 else:
2929 q = repo.mq
2933 q = repo.mq
2930 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2934 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2931 all=opts.get('all'), nobackup=opts.get('no_backup'),
2935 all=opts.get('all'), nobackup=opts.get('no_backup'),
2932 keepchanges=opts.get('keep_changes'))
2936 keepchanges=opts.get('keep_changes'))
2933 q.savedirty()
2937 q.savedirty()
2934 return ret
2938 return ret
2935
2939
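# Illustrative qpop invocations (patch names are hypothetical):
#   hg qpop                  # pop the top-most applied patch
#   hg qpop --all            # pop every applied patch
#   hg qpop some-patch       # pop until 'some-patch' is at the top of the stack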
2936 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2940 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2937 def rename(ui, repo, patch, name=None, **opts):
2941 def rename(ui, repo, patch, name=None, **opts):
2938 """rename a patch
2942 """rename a patch
2939
2943
2940 With one argument, renames the current patch to PATCH1.
2944 With one argument, renames the current patch to PATCH1.
2941 With two arguments, renames PATCH1 to PATCH2.
2945 With two arguments, renames PATCH1 to PATCH2.
2942
2946
2943 Returns 0 on success."""
2947 Returns 0 on success."""
2944 q = repo.mq
2948 q = repo.mq
2945 if not name:
2949 if not name:
2946 name = patch
2950 name = patch
2947 patch = None
2951 patch = None
2948
2952
2949 if patch:
2953 if patch:
2950 patch = q.lookup(patch)
2954 patch = q.lookup(patch)
2951 else:
2955 else:
2952 if not q.applied:
2956 if not q.applied:
2953 ui.write(_('no patches applied\n'))
2957 ui.write(_('no patches applied\n'))
2954 return
2958 return
2955 patch = q.lookup('qtip')
2959 patch = q.lookup('qtip')
2956 absdest = q.join(name)
2960 absdest = q.join(name)
2957 if os.path.isdir(absdest):
2961 if os.path.isdir(absdest):
2958 name = normname(os.path.join(name, os.path.basename(patch)))
2962 name = normname(os.path.join(name, os.path.basename(patch)))
2959 absdest = q.join(name)
2963 absdest = q.join(name)
2960 q.checkpatchname(name)
2964 q.checkpatchname(name)
2961
2965
2962 ui.note(_('renaming %s to %s\n') % (patch, name))
2966 ui.note(_('renaming %s to %s\n') % (patch, name))
2963 i = q.findseries(patch)
2967 i = q.findseries(patch)
2964 guards = q.guard_re.findall(q.fullseries[i])
2968 guards = q.guard_re.findall(q.fullseries[i])
2965 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
2969 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
2966 q.parseseries()
2970 q.parseseries()
2967 q.seriesdirty = True
2971 q.seriesdirty = True
2968
2972
2969 info = q.isapplied(patch)
2973 info = q.isapplied(patch)
2970 if info:
2974 if info:
2971 q.applied[info[0]] = statusentry(info[1], name)
2975 q.applied[info[0]] = statusentry(info[1], name)
2972 q.applieddirty = True
2976 q.applieddirty = True
2973
2977
2974 destdir = os.path.dirname(absdest)
2978 destdir = os.path.dirname(absdest)
2975 if not os.path.isdir(destdir):
2979 if not os.path.isdir(destdir):
2976 os.makedirs(destdir)
2980 os.makedirs(destdir)
2977 util.rename(q.join(patch), absdest)
2981 util.rename(q.join(patch), absdest)
2978 r = q.qrepo()
2982 r = q.qrepo()
2979 if r and patch in r.dirstate:
2983 if r and patch in r.dirstate:
2980 wctx = r[None]
2984 wctx = r[None]
2981 wlock = r.wlock()
2985 wlock = r.wlock()
2982 try:
2986 try:
2983 if r.dirstate[patch] == 'a':
2987 if r.dirstate[patch] == 'a':
2984 r.dirstate.drop(patch)
2988 r.dirstate.drop(patch)
2985 r.dirstate.add(name)
2989 r.dirstate.add(name)
2986 else:
2990 else:
2987 wctx.copy(patch, name)
2991 wctx.copy(patch, name)
2988 wctx.forget([patch])
2992 wctx.forget([patch])
2989 finally:
2993 finally:
2990 wlock.release()
2994 wlock.release()
2991
2995
2992 q.savedirty()
2996 q.savedirty()
2993
2997
2994 @command("qrestore",
2998 @command("qrestore",
2995 [('d', 'delete', None, _('delete save entry')),
2999 [('d', 'delete', None, _('delete save entry')),
2996 ('u', 'update', None, _('update queue working directory'))],
3000 ('u', 'update', None, _('update queue working directory'))],
2997 _('hg qrestore [-d] [-u] REV'))
3001 _('hg qrestore [-d] [-u] REV'))
2998 def restore(ui, repo, rev, **opts):
3002 def restore(ui, repo, rev, **opts):
2999 """restore the queue state saved by a revision (DEPRECATED)
3003 """restore the queue state saved by a revision (DEPRECATED)
3000
3004
3001 This command is deprecated, use :hg:`rebase` instead."""
3005 This command is deprecated, use :hg:`rebase` instead."""
3002 rev = repo.lookup(rev)
3006 rev = repo.lookup(rev)
3003 q = repo.mq
3007 q = repo.mq
3004 q.restore(repo, rev, delete=opts.get('delete'),
3008 q.restore(repo, rev, delete=opts.get('delete'),
3005 qupdate=opts.get('update'))
3009 qupdate=opts.get('update'))
3006 q.savedirty()
3010 q.savedirty()
3007 return 0
3011 return 0
3008
3012
3009 @command("qsave",
3013 @command("qsave",
3010 [('c', 'copy', None, _('copy patch directory')),
3014 [('c', 'copy', None, _('copy patch directory')),
3011 ('n', 'name', '',
3015 ('n', 'name', '',
3012 _('copy directory name'), _('NAME')),
3016 _('copy directory name'), _('NAME')),
3013 ('e', 'empty', None, _('clear queue status file')),
3017 ('e', 'empty', None, _('clear queue status file')),
3014 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3018 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3015 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3019 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3016 def save(ui, repo, **opts):
3020 def save(ui, repo, **opts):
3017 """save current queue state (DEPRECATED)
3021 """save current queue state (DEPRECATED)
3018
3022
3019 This command is deprecated, use :hg:`rebase` instead."""
3023 This command is deprecated, use :hg:`rebase` instead."""
3020 q = repo.mq
3024 q = repo.mq
3021 message = cmdutil.logmessage(ui, opts)
3025 message = cmdutil.logmessage(ui, opts)
3022 ret = q.save(repo, msg=message)
3026 ret = q.save(repo, msg=message)
3023 if ret:
3027 if ret:
3024 return ret
3028 return ret
3025 q.savedirty() # save to .hg/patches before copying
3029 q.savedirty() # save to .hg/patches before copying
3026 if opts.get('copy'):
3030 if opts.get('copy'):
3027 path = q.path
3031 path = q.path
3028 if opts.get('name'):
3032 if opts.get('name'):
3029 newpath = os.path.join(q.basepath, opts.get('name'))
3033 newpath = os.path.join(q.basepath, opts.get('name'))
3030 if os.path.exists(newpath):
3034 if os.path.exists(newpath):
3031 if not os.path.isdir(newpath):
3035 if not os.path.isdir(newpath):
3032 raise util.Abort(_('destination %s exists and is not '
3036 raise util.Abort(_('destination %s exists and is not '
3033 'a directory') % newpath)
3037 'a directory') % newpath)
3034 if not opts.get('force'):
3038 if not opts.get('force'):
3035 raise util.Abort(_('destination %s exists, '
3039 raise util.Abort(_('destination %s exists, '
3036 'use -f to force') % newpath)
3040 'use -f to force') % newpath)
3037 else:
3041 else:
3038 newpath = savename(path)
3042 newpath = savename(path)
3039 ui.warn(_("copy %s to %s\n") % (path, newpath))
3043 ui.warn(_("copy %s to %s\n") % (path, newpath))
3040 util.copyfiles(path, newpath)
3044 util.copyfiles(path, newpath)
3041 if opts.get('empty'):
3045 if opts.get('empty'):
3042 del q.applied[:]
3046 del q.applied[:]
3043 q.applieddirty = True
3047 q.applieddirty = True
3044 q.savedirty()
3048 q.savedirty()
3045 return 0
3049 return 0
3046
3050
3047
3051
3048 @command("qselect",
3052 @command("qselect",
3049 [('n', 'none', None, _('disable all guards')),
3053 [('n', 'none', None, _('disable all guards')),
3050 ('s', 'series', None, _('list all guards in series file')),
3054 ('s', 'series', None, _('list all guards in series file')),
3051 ('', 'pop', None, _('pop to before first guarded applied patch')),
3055 ('', 'pop', None, _('pop to before first guarded applied patch')),
3052 ('', 'reapply', None, _('pop, then reapply patches'))],
3056 ('', 'reapply', None, _('pop, then reapply patches'))],
3053 _('hg qselect [OPTION]... [GUARD]...'))
3057 _('hg qselect [OPTION]... [GUARD]...'))
3054 def select(ui, repo, *args, **opts):
3058 def select(ui, repo, *args, **opts):
3055 '''set or print guarded patches to push
3059 '''set or print guarded patches to push
3056
3060
3057 Use the :hg:`qguard` command to set or print guards on a patch, then use
3061 Use the :hg:`qguard` command to set or print guards on a patch, then use
3058 qselect to tell mq which guards to use. A patch will be pushed if
3062 qselect to tell mq which guards to use. A patch will be pushed if
3059 it has no guards or any positive guards match the currently
3063 it has no guards or any positive guards match the currently
3060 selected guard, but will not be pushed if any negative guards
3064 selected guard, but will not be pushed if any negative guards
3061 match the current guard. For example::
3065 match the current guard. For example::
3062
3066
3063 qguard foo.patch -- -stable (negative guard)
3067 qguard foo.patch -- -stable (negative guard)
3064 qguard bar.patch +stable (positive guard)
3068 qguard bar.patch +stable (positive guard)
3065 qselect stable
3069 qselect stable
3066
3070
3067 This activates the "stable" guard. mq will skip foo.patch (because
3071 This activates the "stable" guard. mq will skip foo.patch (because
3068 it has a negative match) but push bar.patch (because it has a
3072 it has a negative match) but push bar.patch (because it has a
3069 positive match).
3073 positive match).
3070
3074
3071 With no arguments, prints the currently active guards.
3075 With no arguments, prints the currently active guards.
3072 With one argument, sets the active guard.
3076 With one argument, sets the active guard.
3073
3077
3074 Use -n/--none to deactivate guards (no other arguments needed).
3078 Use -n/--none to deactivate guards (no other arguments needed).
3075 When no guards are active, patches with positive guards are
3079 When no guards are active, patches with positive guards are
3076 skipped and patches with negative guards are pushed.
3080 skipped and patches with negative guards are pushed.
3077
3081
3078 qselect can change the guards on applied patches. It does not pop
3082 qselect can change the guards on applied patches. It does not pop
3079 guarded patches by default. Use --pop to pop back to the last
3083 guarded patches by default. Use --pop to pop back to the last
3080 applied patch that is not guarded. Use --reapply (which implies
3084 applied patch that is not guarded. Use --reapply (which implies
3081 --pop) to push back to the current patch afterwards, but skip
3085 --pop) to push back to the current patch afterwards, but skip
3082 guarded patches.
3086 guarded patches.
3083
3087
3084 Use -s/--series to print a list of all guards in the series file
3088 Use -s/--series to print a list of all guards in the series file
3085 (no other arguments needed). Use -v for more information.
3089 (no other arguments needed). Use -v for more information.
3086
3090
3087 Returns 0 on success.'''
3091 Returns 0 on success.'''
3088
3092
3089 q = repo.mq
3093 q = repo.mq
3090 guards = q.active()
3094 guards = q.active()
3091 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3095 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3092 if args or opts.get('none'):
3096 if args or opts.get('none'):
3093 old_unapplied = q.unapplied(repo)
3097 old_unapplied = q.unapplied(repo)
3094 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3098 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3095 q.setactive(args)
3099 q.setactive(args)
3096 q.savedirty()
3100 q.savedirty()
3097 if not args:
3101 if not args:
3098 ui.status(_('guards deactivated\n'))
3102 ui.status(_('guards deactivated\n'))
3099 if not opts.get('pop') and not opts.get('reapply'):
3103 if not opts.get('pop') and not opts.get('reapply'):
3100 unapplied = q.unapplied(repo)
3104 unapplied = q.unapplied(repo)
3101 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3105 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3102 if len(unapplied) != len(old_unapplied):
3106 if len(unapplied) != len(old_unapplied):
3103 ui.status(_('number of unguarded, unapplied patches has '
3107 ui.status(_('number of unguarded, unapplied patches has '
3104 'changed from %d to %d\n') %
3108 'changed from %d to %d\n') %
3105 (len(old_unapplied), len(unapplied)))
3109 (len(old_unapplied), len(unapplied)))
3106 if len(guarded) != len(old_guarded):
3110 if len(guarded) != len(old_guarded):
3107 ui.status(_('number of guarded, applied patches has changed '
3111 ui.status(_('number of guarded, applied patches has changed '
3108 'from %d to %d\n') %
3112 'from %d to %d\n') %
3109 (len(old_guarded), len(guarded)))
3113 (len(old_guarded), len(guarded)))
3110 elif opts.get('series'):
3114 elif opts.get('series'):
3111 guards = {}
3115 guards = {}
3112 noguards = 0
3116 noguards = 0
3113 for gs in q.seriesguards:
3117 for gs in q.seriesguards:
3114 if not gs:
3118 if not gs:
3115 noguards += 1
3119 noguards += 1
3116 for g in gs:
3120 for g in gs:
3117 guards.setdefault(g, 0)
3121 guards.setdefault(g, 0)
3118 guards[g] += 1
3122 guards[g] += 1
3119 if ui.verbose:
3123 if ui.verbose:
3120 guards['NONE'] = noguards
3124 guards['NONE'] = noguards
3121 guards = guards.items()
3125 guards = guards.items()
3122 guards.sort(key=lambda x: x[0][1:])
3126 guards.sort(key=lambda x: x[0][1:])
3123 if guards:
3127 if guards:
3124 ui.note(_('guards in series file:\n'))
3128 ui.note(_('guards in series file:\n'))
3125 for guard, count in guards:
3129 for guard, count in guards:
3126 ui.note('%2d ' % count)
3130 ui.note('%2d ' % count)
3127 ui.write(guard, '\n')
3131 ui.write(guard, '\n')
3128 else:
3132 else:
3129 ui.note(_('no guards in series file\n'))
3133 ui.note(_('no guards in series file\n'))
3130 else:
3134 else:
3131 if guards:
3135 if guards:
3132 ui.note(_('active guards:\n'))
3136 ui.note(_('active guards:\n'))
3133 for g in guards:
3137 for g in guards:
3134 ui.write(g, '\n')
3138 ui.write(g, '\n')
3135 else:
3139 else:
3136 ui.write(_('no active guards\n'))
3140 ui.write(_('no active guards\n'))
3137 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3141 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3138 popped = False
3142 popped = False
3139 if opts.get('pop') or opts.get('reapply'):
3143 if opts.get('pop') or opts.get('reapply'):
3140 for i in xrange(len(q.applied)):
3144 for i in xrange(len(q.applied)):
3141 if not pushable(i):
3145 if not pushable(i):
3142 ui.status(_('popping guarded patches\n'))
3146 ui.status(_('popping guarded patches\n'))
3143 popped = True
3147 popped = True
3144 if i == 0:
3148 if i == 0:
3145 q.pop(repo, all=True)
3149 q.pop(repo, all=True)
3146 else:
3150 else:
3147 q.pop(repo, q.applied[i - 1].name)
3151 q.pop(repo, q.applied[i - 1].name)
3148 break
3152 break
3149 if popped:
3153 if popped:
3150 try:
3154 try:
3151 if reapply:
3155 if reapply:
3152 ui.status(_('reapplying unguarded patches\n'))
3156 ui.status(_('reapplying unguarded patches\n'))
3153 q.push(repo, reapply)
3157 q.push(repo, reapply)
3154 finally:
3158 finally:
3155 q.savedirty()
3159 q.savedirty()
3156
3160
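# The guard rule described in the qselect docstring, reduced to a minimal
# standalone sketch. The helper name is invented for this illustration; the
# real logic, including series parsing, lives in queue.pushable().
def _sketch_pushable(patch_guards, active):
    '''Return True if a patch guarded by e.g. ['+stable', '-dev'] would be
    pushed while the guard names in active are selected.'''
    if not patch_guards:
        return True
    positive = [g[1:] for g in patch_guards if g.startswith('+')]
    negative = [g[1:] for g in patch_guards if g.startswith('-')]
    if any(g in active for g in negative):
        return False
    if positive:
        return any(g in active for g in positive)
    return True

# Matches the docstring example: with "stable" selected, foo.patch (-stable)
# is skipped while bar.patch (+stable) is pushed.
assert _sketch_pushable(['-stable'], ['stable']) is False
assert _sketch_pushable(['+stable'], ['stable']) is True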
3157 @command("qfinish",
3161 @command("qfinish",
3158 [('a', 'applied', None, _('finish all applied changesets'))],
3162 [('a', 'applied', None, _('finish all applied changesets'))],
3159 _('hg qfinish [-a] [REV]...'))
3163 _('hg qfinish [-a] [REV]...'))
3160 def finish(ui, repo, *revrange, **opts):
3164 def finish(ui, repo, *revrange, **opts):
3161 """move applied patches into repository history
3165 """move applied patches into repository history
3162
3166
3163 Finishes the specified revisions (corresponding to applied
3167 Finishes the specified revisions (corresponding to applied
3164 patches) by moving them out of mq control into regular repository
3168 patches) by moving them out of mq control into regular repository
3165 history.
3169 history.
3166
3170
3167 Accepts a revision range or the -a/--applied option. If --applied
3171 Accepts a revision range or the -a/--applied option. If --applied
3168 is specified, all applied mq revisions are removed from mq
3172 is specified, all applied mq revisions are removed from mq
3169 control. Otherwise, the given revisions must be at the base of the
3173 control. Otherwise, the given revisions must be at the base of the
3170 stack of applied patches.
3174 stack of applied patches.
3171
3175
3172 This can be especially useful if your changes have been applied to
3176 This can be especially useful if your changes have been applied to
3173 an upstream repository, or if you are about to push your changes
3177 an upstream repository, or if you are about to push your changes
3174 to upstream.
3178 to upstream.
3175
3179
3176 Returns 0 on success.
3180 Returns 0 on success.
3177 """
3181 """
3178 if not opts.get('applied') and not revrange:
3182 if not opts.get('applied') and not revrange:
3179 raise util.Abort(_('no revisions specified'))
3183 raise util.Abort(_('no revisions specified'))
3180 elif opts.get('applied'):
3184 elif opts.get('applied'):
3181 revrange = ('qbase::qtip',) + revrange
3185 revrange = ('qbase::qtip',) + revrange
3182
3186
3183 q = repo.mq
3187 q = repo.mq
3184 if not q.applied:
3188 if not q.applied:
3185 ui.status(_('no patches applied\n'))
3189 ui.status(_('no patches applied\n'))
3186 return 0
3190 return 0
3187
3191
3188 revs = scmutil.revrange(repo, revrange)
3192 revs = scmutil.revrange(repo, revrange)
3189 if repo['.'].rev() in revs and repo[None].files():
3193 if repo['.'].rev() in revs and repo[None].files():
3190 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3194 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3191 # queue.finish may change phases but leaves the responsibility to lock the
3195 # queue.finish may change phases but leaves the responsibility to lock the
3192 # repo to the caller to avoid deadlock with wlock. This command code is
3196 # repo to the caller to avoid deadlock with wlock. This command code is
3193 # responsible for this locking.
3197 # responsible for this locking.
3194 lock = repo.lock()
3198 lock = repo.lock()
3195 try:
3199 try:
3196 q.finish(repo, revs)
3200 q.finish(repo, revs)
3197 q.savedirty()
3201 q.savedirty()
3198 finally:
3202 finally:
3199 lock.release()
3203 lock.release()
3200 return 0
3204 return 0
3201
3205
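# Illustrative use of qfinish once the applied patches have been accepted
# upstream:
#   hg qfinish --applied     # turn every applied patch into regular history
#   hg qfinish qbase         # finish only the bottom-most applied patch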
3202 @command("qqueue",
3206 @command("qqueue",
3203 [('l', 'list', False, _('list all available queues')),
3207 [('l', 'list', False, _('list all available queues')),
3204 ('', 'active', False, _('print name of active queue')),
3208 ('', 'active', False, _('print name of active queue')),
3205 ('c', 'create', False, _('create new queue')),
3209 ('c', 'create', False, _('create new queue')),
3206 ('', 'rename', False, _('rename active queue')),
3210 ('', 'rename', False, _('rename active queue')),
3207 ('', 'delete', False, _('delete reference to queue')),
3211 ('', 'delete', False, _('delete reference to queue')),
3208 ('', 'purge', False, _('delete queue, and remove patch dir')),
3212 ('', 'purge', False, _('delete queue, and remove patch dir')),
3209 ],
3213 ],
3210 _('[OPTION] [QUEUE]'))
3214 _('[OPTION] [QUEUE]'))
3211 def qqueue(ui, repo, name=None, **opts):
3215 def qqueue(ui, repo, name=None, **opts):
3212 '''manage multiple patch queues
3216 '''manage multiple patch queues
3213
3217
3214 Supports switching between different patch queues, as well as creating
3218 Supports switching between different patch queues, as well as creating
3215 new patch queues and deleting existing ones.
3219 new patch queues and deleting existing ones.
3216
3220
3217 Omitting a queue name or specifying -l/--list will show you the registered
3221 Omitting a queue name or specifying -l/--list will show you the registered
3218 queues - by default the "normal" patches queue is registered. The currently
3222 queues - by default the "normal" patches queue is registered. The currently
3219 active queue will be marked with "(active)". Specifying --active will print
3223 active queue will be marked with "(active)". Specifying --active will print
3220 only the name of the active queue.
3224 only the name of the active queue.
3221
3225
3222 To create a new queue, use -c/--create. The queue is automatically made
3226 To create a new queue, use -c/--create. The queue is automatically made
3223 active, except in the case where there are applied patches from the
3227 active, except in the case where there are applied patches from the
3224 currently active queue in the repository. In that case the queue will
3228 currently active queue in the repository. In that case the queue will
3225 only be created, and switching to it will fail.
3229 only be created, and switching to it will fail.
3226
3230
3227 To delete an existing queue, use --delete. You cannot delete the currently
3231 To delete an existing queue, use --delete. You cannot delete the currently
3228 active queue.
3232 active queue.
3229
3233
3230 Returns 0 on success.
3234 Returns 0 on success.
3231 '''
3235 '''
3232 q = repo.mq
3236 q = repo.mq
3233 _defaultqueue = 'patches'
3237 _defaultqueue = 'patches'
3234 _allqueues = 'patches.queues'
3238 _allqueues = 'patches.queues'
3235 _activequeue = 'patches.queue'
3239 _activequeue = 'patches.queue'
3236
3240
3237 def _getcurrent():
3241 def _getcurrent():
3238 cur = os.path.basename(q.path)
3242 cur = os.path.basename(q.path)
3239 if cur.startswith('patches-'):
3243 if cur.startswith('patches-'):
3240 cur = cur[8:]
3244 cur = cur[8:]
3241 return cur
3245 return cur
3242
3246
3243 def _noqueues():
3247 def _noqueues():
3244 try:
3248 try:
3245 fh = repo.vfs(_allqueues, 'r')
3249 fh = repo.vfs(_allqueues, 'r')
3246 fh.close()
3250 fh.close()
3247 except IOError:
3251 except IOError:
3248 return True
3252 return True
3249
3253
3250 return False
3254 return False
3251
3255
3252 def _getqueues():
3256 def _getqueues():
3253 current = _getcurrent()
3257 current = _getcurrent()
3254
3258
3255 try:
3259 try:
3256 fh = repo.vfs(_allqueues, 'r')
3260 fh = repo.vfs(_allqueues, 'r')
3257 queues = [queue.strip() for queue in fh if queue.strip()]
3261 queues = [queue.strip() for queue in fh if queue.strip()]
3258 fh.close()
3262 fh.close()
3259 if current not in queues:
3263 if current not in queues:
3260 queues.append(current)
3264 queues.append(current)
3261 except IOError:
3265 except IOError:
3262 queues = [_defaultqueue]
3266 queues = [_defaultqueue]
3263
3267
3264 return sorted(queues)
3268 return sorted(queues)
3265
3269
3266 def _setactive(name):
3270 def _setactive(name):
3267 if q.applied:
3271 if q.applied:
3268 raise util.Abort(_('new queue created, but cannot make active '
3272 raise util.Abort(_('new queue created, but cannot make active '
3269 'as patches are applied'))
3273 'as patches are applied'))
3270 _setactivenocheck(name)
3274 _setactivenocheck(name)
3271
3275
3272 def _setactivenocheck(name):
3276 def _setactivenocheck(name):
3273 fh = repo.vfs(_activequeue, 'w')
3277 fh = repo.vfs(_activequeue, 'w')
3274 if name != 'patches':
3278 if name != 'patches':
3275 fh.write(name)
3279 fh.write(name)
3276 fh.close()
3280 fh.close()
3277
3281
3278 def _addqueue(name):
3282 def _addqueue(name):
3279 fh = repo.vfs(_allqueues, 'a')
3283 fh = repo.vfs(_allqueues, 'a')
3280 fh.write('%s\n' % (name,))
3284 fh.write('%s\n' % (name,))
3281 fh.close()
3285 fh.close()
3282
3286
3283 def _queuedir(name):
3287 def _queuedir(name):
3284 if name == 'patches':
3288 if name == 'patches':
3285 return repo.join('patches')
3289 return repo.join('patches')
3286 else:
3290 else:
3287 return repo.join('patches-' + name)
3291 return repo.join('patches-' + name)
3288
3292
3289 def _validname(name):
3293 def _validname(name):
3290 for n in name:
3294 for n in name:
3291 if n in ':\\/.':
3295 if n in ':\\/.':
3292 return False
3296 return False
3293 return True
3297 return True
3294
3298
3295 def _delete(name):
3299 def _delete(name):
3296 if name not in existing:
3300 if name not in existing:
3297 raise util.Abort(_('cannot delete queue that does not exist'))
3301 raise util.Abort(_('cannot delete queue that does not exist'))
3298
3302
3299 current = _getcurrent()
3303 current = _getcurrent()
3300
3304
3301 if name == current:
3305 if name == current:
3302 raise util.Abort(_('cannot delete currently active queue'))
3306 raise util.Abort(_('cannot delete currently active queue'))
3303
3307
3304 fh = repo.vfs('patches.queues.new', 'w')
3308 fh = repo.vfs('patches.queues.new', 'w')
3305 for queue in existing:
3309 for queue in existing:
3306 if queue == name:
3310 if queue == name:
3307 continue
3311 continue
3308 fh.write('%s\n' % (queue,))
3312 fh.write('%s\n' % (queue,))
3309 fh.close()
3313 fh.close()
3310 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3314 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3311
3315
3312 if not name or opts.get('list') or opts.get('active'):
3316 if not name or opts.get('list') or opts.get('active'):
3313 current = _getcurrent()
3317 current = _getcurrent()
3314 if opts.get('active'):
3318 if opts.get('active'):
3315 ui.write('%s\n' % (current,))
3319 ui.write('%s\n' % (current,))
3316 return
3320 return
3317 for queue in _getqueues():
3321 for queue in _getqueues():
3318 ui.write('%s' % (queue,))
3322 ui.write('%s' % (queue,))
3319 if queue == current and not ui.quiet:
3323 if queue == current and not ui.quiet:
3320 ui.write(_(' (active)\n'))
3324 ui.write(_(' (active)\n'))
3321 else:
3325 else:
3322 ui.write('\n')
3326 ui.write('\n')
3323 return
3327 return
3324
3328
3325 if not _validname(name):
3329 if not _validname(name):
3326 raise util.Abort(
3330 raise util.Abort(
3327 _('invalid queue name, may not contain the characters ":\\/."'))
3331 _('invalid queue name, may not contain the characters ":\\/."'))
3328
3332
3329 existing = _getqueues()
3333 existing = _getqueues()
3330
3334
3331 if opts.get('create'):
3335 if opts.get('create'):
3332 if name in existing:
3336 if name in existing:
3333 raise util.Abort(_('queue "%s" already exists') % name)
3337 raise util.Abort(_('queue "%s" already exists') % name)
3334 if _noqueues():
3338 if _noqueues():
3335 _addqueue(_defaultqueue)
3339 _addqueue(_defaultqueue)
3336 _addqueue(name)
3340 _addqueue(name)
3337 _setactive(name)
3341 _setactive(name)
3338 elif opts.get('rename'):
3342 elif opts.get('rename'):
3339 current = _getcurrent()
3343 current = _getcurrent()
3340 if name == current:
3344 if name == current:
3341 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3345 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3342 if name in existing:
3346 if name in existing:
3343 raise util.Abort(_('queue "%s" already exists') % name)
3347 raise util.Abort(_('queue "%s" already exists') % name)
3344
3348
3345 olddir = _queuedir(current)
3349 olddir = _queuedir(current)
3346 newdir = _queuedir(name)
3350 newdir = _queuedir(name)
3347
3351
3348 if os.path.exists(newdir):
3352 if os.path.exists(newdir):
3349 raise util.Abort(_('non-queue directory "%s" already exists') %
3353 raise util.Abort(_('non-queue directory "%s" already exists') %
3350 newdir)
3354 newdir)
3351
3355
3352 fh = repo.vfs('patches.queues.new', 'w')
3356 fh = repo.vfs('patches.queues.new', 'w')
3353 for queue in existing:
3357 for queue in existing:
3354 if queue == current:
3358 if queue == current:
3355 fh.write('%s\n' % (name,))
3359 fh.write('%s\n' % (name,))
3356 if os.path.exists(olddir):
3360 if os.path.exists(olddir):
3357 util.rename(olddir, newdir)
3361 util.rename(olddir, newdir)
3358 else:
3362 else:
3359 fh.write('%s\n' % (queue,))
3363 fh.write('%s\n' % (queue,))
3360 fh.close()
3364 fh.close()
3361 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3365 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3362 _setactivenocheck(name)
3366 _setactivenocheck(name)
3363 elif opts.get('delete'):
3367 elif opts.get('delete'):
3364 _delete(name)
3368 _delete(name)
3365 elif opts.get('purge'):
3369 elif opts.get('purge'):
3366 if name in existing:
3370 if name in existing:
3367 _delete(name)
3371 _delete(name)
3368 qdir = _queuedir(name)
3372 qdir = _queuedir(name)
3369 if os.path.exists(qdir):
3373 if os.path.exists(qdir):
3370 shutil.rmtree(qdir)
3374 shutil.rmtree(qdir)
3371 else:
3375 else:
3372 if name not in existing:
3376 if name not in existing:
3373 raise util.Abort(_('use --create to create a new queue'))
3377 raise util.Abort(_('use --create to create a new queue'))
3374 _setactive(name)
3378 _setactive(name)
3375
3379
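# Illustrative qqueue session ('featurework' is a hypothetical queue name):
#   hg qqueue --create featurework   # create the queue and switch to it
#   hg qqueue --list                 # list queues; the active one is marked
#   hg qqueue patches                # switch back to the default queue
#   hg qqueue --purge featurework    # drop the queue and its patch directory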
3376 def mqphasedefaults(repo, roots):
3380 def mqphasedefaults(repo, roots):
3377 """callback used to set mq changeset as secret when no phase data exists"""
3381 """callback used to set mq changeset as secret when no phase data exists"""
3378 if repo.mq.applied:
3382 if repo.mq.applied:
3379 if repo.ui.configbool('mq', 'secret', False):
3383 if repo.ui.configbool('mq', 'secret', False):
3380 mqphase = phases.secret
3384 mqphase = phases.secret
3381 else:
3385 else:
3382 mqphase = phases.draft
3386 mqphase = phases.draft
3383 qbase = repo[repo.mq.applied[0].node]
3387 qbase = repo[repo.mq.applied[0].node]
3384 roots[mqphase].add(qbase.node())
3388 roots[mqphase].add(qbase.node())
3385 return roots
3389 return roots
3386
3390
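# Illustrative configuration for the mq.secret option read above; when it is
# enabled, mq changesets default to the secret phase and are therefore not
# exchanged by push or pull:
#   [mq]
#   secret = True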
3387 def reposetup(ui, repo):
3391 def reposetup(ui, repo):
3388 class mqrepo(repo.__class__):
3392 class mqrepo(repo.__class__):
3389 @localrepo.unfilteredpropertycache
3393 @localrepo.unfilteredpropertycache
3390 def mq(self):
3394 def mq(self):
3391 return queue(self.ui, self.baseui, self.path)
3395 return queue(self.ui, self.baseui, self.path)
3392
3396
3393 def invalidateall(self):
3397 def invalidateall(self):
3394 super(mqrepo, self).invalidateall()
3398 super(mqrepo, self).invalidateall()
3395 if localrepo.hasunfilteredcache(self, 'mq'):
3399 if localrepo.hasunfilteredcache(self, 'mq'):
3396 # recreate mq in case queue path was changed
3400 # recreate mq in case queue path was changed
3397 delattr(self.unfiltered(), 'mq')
3401 delattr(self.unfiltered(), 'mq')
3398
3402
3399 def abortifwdirpatched(self, errmsg, force=False):
3403 def abortifwdirpatched(self, errmsg, force=False):
3400 if self.mq.applied and self.mq.checkapplied and not force:
3404 if self.mq.applied and self.mq.checkapplied and not force:
3401 parents = self.dirstate.parents()
3405 parents = self.dirstate.parents()
3402 patches = [s.node for s in self.mq.applied]
3406 patches = [s.node for s in self.mq.applied]
3403 if parents[0] in patches or parents[1] in patches:
3407 if parents[0] in patches or parents[1] in patches:
3404 raise util.Abort(errmsg)
3408 raise util.Abort(errmsg)
3405
3409
3406 def commit(self, text="", user=None, date=None, match=None,
3410 def commit(self, text="", user=None, date=None, match=None,
3407 force=False, editor=False, extra={}):
3411 force=False, editor=False, extra={}):
3408 self.abortifwdirpatched(
3412 self.abortifwdirpatched(
3409 _('cannot commit over an applied mq patch'),
3413 _('cannot commit over an applied mq patch'),
3410 force)
3414 force)
3411
3415
3412 return super(mqrepo, self).commit(text, user, date, match, force,
3416 return super(mqrepo, self).commit(text, user, date, match, force,
3413 editor, extra)
3417 editor, extra)
3414
3418
3415 def checkpush(self, pushop):
3419 def checkpush(self, pushop):
3416 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3420 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3417 outapplied = [e.node for e in self.mq.applied]
3421 outapplied = [e.node for e in self.mq.applied]
3418 if pushop.revs:
3422 if pushop.revs:
3419 # Assume applied patches have no non-patch descendants and
3423 # Assume applied patches have no non-patch descendants and
3420 # are not on the remote already. Filter out any changeset that is
3424 # are not on the remote already. Filter out any changeset that is
3421 # not being pushed.
3425 # not being pushed.
3422 heads = set(pushop.revs)
3426 heads = set(pushop.revs)
3423 for node in reversed(outapplied):
3427 for node in reversed(outapplied):
3424 if node in heads:
3428 if node in heads:
3425 break
3429 break
3426 else:
3430 else:
3427 outapplied.pop()
3431 outapplied.pop()
3428 # look for pushed and shared changesets
3432 # look for pushed and shared changesets
3429 for node in outapplied:
3433 for node in outapplied:
3430 if self[node].phase() < phases.secret:
3434 if self[node].phase() < phases.secret:
3431 raise util.Abort(_('source has mq patches applied'))
3435 raise util.Abort(_('source has mq patches applied'))
3432 # no non-secret patches pushed
3436 # no non-secret patches pushed
3433 super(mqrepo, self).checkpush(pushop)
3437 super(mqrepo, self).checkpush(pushop)
3434
3438
3435 def _findtags(self):
3439 def _findtags(self):
3436 '''augment tags from base class with patch tags'''
3440 '''augment tags from base class with patch tags'''
3437 result = super(mqrepo, self)._findtags()
3441 result = super(mqrepo, self)._findtags()
3438
3442
3439 q = self.mq
3443 q = self.mq
3440 if not q.applied:
3444 if not q.applied:
3441 return result
3445 return result
3442
3446
3443 mqtags = [(patch.node, patch.name) for patch in q.applied]
3447 mqtags = [(patch.node, patch.name) for patch in q.applied]
3444
3448
3445 try:
3449 try:
3446 # for now ignore filtering business
3450 # for now ignore filtering business
3447 self.unfiltered().changelog.rev(mqtags[-1][0])
3451 self.unfiltered().changelog.rev(mqtags[-1][0])
3448 except error.LookupError:
3452 except error.LookupError:
3449 self.ui.warn(_('mq status file refers to unknown node %s\n')
3453 self.ui.warn(_('mq status file refers to unknown node %s\n')
3450 % short(mqtags[-1][0]))
3454 % short(mqtags[-1][0]))
3451 return result
3455 return result
3452
3456
3453 # do not add fake tags for filtered revisions
3457 # do not add fake tags for filtered revisions
3454 included = self.changelog.hasnode
3458 included = self.changelog.hasnode
3455 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3459 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3456 if not mqtags:
3460 if not mqtags:
3457 return result
3461 return result
3458
3462
3459 mqtags.append((mqtags[-1][0], 'qtip'))
3463 mqtags.append((mqtags[-1][0], 'qtip'))
3460 mqtags.append((mqtags[0][0], 'qbase'))
3464 mqtags.append((mqtags[0][0], 'qbase'))
3461 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3465 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3462 tags = result[0]
3466 tags = result[0]
3463 for patch in mqtags:
3467 for patch in mqtags:
3464 if patch[1] in tags:
3468 if patch[1] in tags:
3465 self.ui.warn(_('tag %s overrides mq patch of the same '
3469 self.ui.warn(_('tag %s overrides mq patch of the same '
3466 'name\n') % patch[1])
3470 'name\n') % patch[1])
3467 else:
3471 else:
3468 tags[patch[1]] = patch[0]
3472 tags[patch[1]] = patch[0]
3469
3473
3470 return result
3474 return result
3471
3475
3472 if repo.local():
3476 if repo.local():
3473 repo.__class__ = mqrepo
3477 repo.__class__ = mqrepo
3474
3478
3475 repo._phasedefaults.append(mqphasedefaults)
3479 repo._phasedefaults.append(mqphasedefaults)
3476
3480
3477 def mqimport(orig, ui, repo, *args, **kwargs):
3481 def mqimport(orig, ui, repo, *args, **kwargs):
3478 if (util.safehasattr(repo, 'abortifwdirpatched')
3482 if (util.safehasattr(repo, 'abortifwdirpatched')
3479 and not kwargs.get('no_commit', False)):
3483 and not kwargs.get('no_commit', False)):
3480 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3484 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3481 kwargs.get('force'))
3485 kwargs.get('force'))
3482 return orig(ui, repo, *args, **kwargs)
3486 return orig(ui, repo, *args, **kwargs)
3483
3487
3484 def mqinit(orig, ui, *args, **kwargs):
3488 def mqinit(orig, ui, *args, **kwargs):
3485 mq = kwargs.pop('mq', None)
3489 mq = kwargs.pop('mq', None)
3486
3490
3487 if not mq:
3491 if not mq:
3488 return orig(ui, *args, **kwargs)
3492 return orig(ui, *args, **kwargs)
3489
3493
3490 if args:
3494 if args:
3491 repopath = args[0]
3495 repopath = args[0]
3492 if not hg.islocal(repopath):
3496 if not hg.islocal(repopath):
3493 raise util.Abort(_('only a local queue repository '
3497 raise util.Abort(_('only a local queue repository '
3494 'may be initialized'))
3498 'may be initialized'))
3495 else:
3499 else:
3496 repopath = cmdutil.findrepo(os.getcwd())
3500 repopath = cmdutil.findrepo(os.getcwd())
3497 if not repopath:
3501 if not repopath:
3498 raise util.Abort(_('there is no Mercurial repository here '
3502 raise util.Abort(_('there is no Mercurial repository here '
3499 '(.hg not found)'))
3503 '(.hg not found)'))
3500 repo = hg.repository(ui, repopath)
3504 repo = hg.repository(ui, repopath)
3501 return qinit(ui, repo, True)
3505 return qinit(ui, repo, True)
3502
3506
3503 def mqcommand(orig, ui, repo, *args, **kwargs):
3507 def mqcommand(orig, ui, repo, *args, **kwargs):
3504 """Add --mq option to operate on patch repository instead of main"""
3508 """Add --mq option to operate on patch repository instead of main"""
3505
3509
3506 # some commands do not like getting unknown options
3510 # some commands do not like getting unknown options
3507 mq = kwargs.pop('mq', None)
3511 mq = kwargs.pop('mq', None)
3508
3512
3509 if not mq:
3513 if not mq:
3510 return orig(ui, repo, *args, **kwargs)
3514 return orig(ui, repo, *args, **kwargs)
3511
3515
3512 q = repo.mq
3516 q = repo.mq
3513 r = q.qrepo()
3517 r = q.qrepo()
3514 if not r:
3518 if not r:
3515 raise util.Abort(_('no queue repository'))
3519 raise util.Abort(_('no queue repository'))
3516 return orig(r.ui, r, *args, **kwargs)
3520 return orig(r.ui, r, *args, **kwargs)
3517
3521
3518 def summaryhook(ui, repo):
3522 def summaryhook(ui, repo):
3519 q = repo.mq
3523 q = repo.mq
3520 m = []
3524 m = []
3521 a, u = len(q.applied), len(q.unapplied(repo))
3525 a, u = len(q.applied), len(q.unapplied(repo))
3522 if a:
3526 if a:
3523 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3527 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3524 if u:
3528 if u:
3525 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3529 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3526 if m:
3530 if m:
3527 # i18n: column positioning for "hg summary"
3531 # i18n: column positioning for "hg summary"
3528 ui.write(_("mq: %s\n") % ', '.join(m))
3532 ui.write(_("mq: %s\n") % ', '.join(m))
3529 else:
3533 else:
3530 # i18n: column positioning for "hg summary"
3534 # i18n: column positioning for "hg summary"
3531 ui.note(_("mq: (empty queue)\n"))
3535 ui.note(_("mq: (empty queue)\n"))
3532
3536
3533 def revsetmq(repo, subset, x):
3537 def revsetmq(repo, subset, x):
3534 """``mq()``
3538 """``mq()``
3535 Changesets managed by MQ.
3539 Changesets managed by MQ.
3536 """
3540 """
3537 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3541 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3538 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3542 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3539 return revset.baseset([r for r in subset if r in applied])
3543 return revset.baseset([r for r in subset if r in applied])
3540
3544
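# Illustrative use of the mq() revset defined above:
#   hg log -r "mq()"         # show only changesets currently managed by mq
#   hg log -r "not mq()"     # hide applied patches from the log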
3541 # tell hggettext to extract docstrings from these functions:
3545 # tell hggettext to extract docstrings from these functions:
3542 i18nfunctions = [revsetmq]
3546 i18nfunctions = [revsetmq]
3543
3547
3544 def extsetup(ui):
3548 def extsetup(ui):
3549 # Ensure mq wrappers are called first, regardless of extension load order,
3553 # Ensure mq wrappers are called first, regardless of extension load order,
3550 # by NOT wrapping in uisetup() and instead deferring to init stage two here.
3554 # by NOT wrapping in uisetup() and instead deferring to init stage two here.
3547 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3551 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3548
3552
3549 extensions.wrapcommand(commands.table, 'import', mqimport)
3553 extensions.wrapcommand(commands.table, 'import', mqimport)
3550 cmdutil.summaryhooks.add('mq', summaryhook)
3554 cmdutil.summaryhooks.add('mq', summaryhook)
3551
3555
3552 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3556 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3553 entry[1].extend(mqopt)
3557 entry[1].extend(mqopt)
3554
3558
3555 nowrap = set(commands.norepo.split(" "))
3559 nowrap = set(commands.norepo.split(" "))
3556
3560
3557 def dotable(cmdtable):
3561 def dotable(cmdtable):
3558 for cmd in cmdtable.keys():
3562 for cmd in cmdtable.keys():
3559 cmd = cmdutil.parsealiases(cmd)[0]
3563 cmd = cmdutil.parsealiases(cmd)[0]
3560 if cmd in nowrap:
3564 if cmd in nowrap:
3561 continue
3565 continue
3562 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3566 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3563 entry[1].extend(mqopt)
3567 entry[1].extend(mqopt)
3564
3568
3565 dotable(commands.table)
3569 dotable(commands.table)
3566
3570
3567 for extname, extmodule in extensions.extensions():
3571 for extname, extmodule in extensions.extensions():
3568 if extmodule.__file__ != __file__:
3572 if extmodule.__file__ != __file__:
3569 dotable(getattr(extmodule, 'cmdtable', {}))
3573 dotable(getattr(extmodule, 'cmdtable', {}))
3570
3574
3571 revset.symbols['mq'] = revsetmq
3575 revset.symbols['mq'] = revsetmq
3572
3576
3573 colortable = {'qguard.negative': 'red',
3577 colortable = {'qguard.negative': 'red',
3574 'qguard.positive': 'yellow',
3578 'qguard.positive': 'yellow',
3575 'qguard.unguarded': 'green',
3579 'qguard.unguarded': 'green',
3576 'qseries.applied': 'blue bold underline',
3580 'qseries.applied': 'blue bold underline',
3577 'qseries.guarded': 'black bold',
3581 'qseries.guarded': 'black bold',
3578 'qseries.missing': 'red bold',
3582 'qseries.missing': 'red bold',
3579 'qseries.unapplied': 'black bold'}
3583 'qseries.unapplied': 'black bold'}
@@ -1,415 +1,419 @@
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''hooks for sending email push notifications
8 '''hooks for sending email push notifications
9
9
10 This extension implements hooks to send email notifications when
10 This extension implements hooks to send email notifications when
11 changesets are sent from or received by the local repository.
11 changesets are sent from or received by the local repository.
12
12
13 First, enable the extension as explained in :hg:`help extensions`, and
13 First, enable the extension as explained in :hg:`help extensions`, and
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 are run when changesets are received, while ``outgoing`` hooks are for
15 are run when changesets are received, while ``outgoing`` hooks are for
16 changesets sent to another repository::
16 changesets sent to another repository::
17
17
18 [hooks]
18 [hooks]
19 # one email for each incoming changeset
19 # one email for each incoming changeset
20 incoming.notify = python:hgext.notify.hook
20 incoming.notify = python:hgext.notify.hook
21 # one email for all incoming changesets
21 # one email for all incoming changesets
22 changegroup.notify = python:hgext.notify.hook
22 changegroup.notify = python:hgext.notify.hook
23
23
24 # one email for all outgoing changesets
24 # one email for all outgoing changesets
25 outgoing.notify = python:hgext.notify.hook
25 outgoing.notify = python:hgext.notify.hook
26
26
27 This registers the hooks. To enable notification, subscribers must
27 This registers the hooks. To enable notification, subscribers must
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
29 repositories to a given recipient. The ``[reposubs]`` section maps
29 repositories to a given recipient. The ``[reposubs]`` section maps
30 multiple recipients to a single repository::
30 multiple recipients to a single repository::
31
31
32 [usersubs]
32 [usersubs]
33 # key is subscriber email, value is a comma-separated list of repo patterns
33 # key is subscriber email, value is a comma-separated list of repo patterns
34 user@host = pattern
34 user@host = pattern
35
35
36 [reposubs]
36 [reposubs]
37 # key is repo pattern, value is a comma-separated list of subscriber emails
37 # key is repo pattern, value is a comma-separated list of subscriber emails
38 pattern = user@host
38 pattern = user@host
39
39
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
41 optionally combined with a revset expression. A revset expression, if
41 optionally combined with a revset expression. A revset expression, if
42 present, is separated from the glob by a hash. Example::
42 present, is separated from the glob by a hash. Example::
43
43
44 [reposubs]
44 [reposubs]
45 */widgets#branch(release) = qa-team@example.com
45 */widgets#branch(release) = qa-team@example.com
46
46
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
48 branch triggers a notification in any repository ending in ``widgets``.
48 branch triggers a notification in any repository ending in ``widgets``.
49
49
50 In order to place them under direct user management, ``[usersubs]`` and
50 In order to place them under direct user management, ``[usersubs]`` and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
52 incorporated by reference::
52 incorporated by reference::
53
53
54 [notify]
54 [notify]
55 config = /path/to/subscriptionsfile
55 config = /path/to/subscriptionsfile
56
56
57 Notifications will not be sent until the ``notify.test`` value is set
57 Notifications will not be sent until the ``notify.test`` value is set
58 to ``False``; see below.
58 to ``False``; see below.
59
59
60 Notification content can be tweaked with the following configuration entries:
60 Notification content can be tweaked with the following configuration entries:
61
61
62 notify.test
62 notify.test
63 If ``True``, print messages to stdout instead of sending them. Default: True.
63 If ``True``, print messages to stdout instead of sending them. Default: True.
64
64
65 notify.sources
65 notify.sources
66 Space-separated list of change sources. Notifications are activated only
66 Space-separated list of change sources. Notifications are activated only
67 when a changeset's source is in this list. Sources may be:
67 when a changeset's source is in this list. Sources may be:
68
68
69 :``serve``: changesets received via http or ssh
69 :``serve``: changesets received via http or ssh
70 :``pull``: changesets received via ``hg pull``
70 :``pull``: changesets received via ``hg pull``
71 :``unbundle``: changesets received via ``hg unbundle``
71 :``unbundle``: changesets received via ``hg unbundle``
72 :``push``: changesets sent or received via ``hg push``
72 :``push``: changesets sent or received via ``hg push``
73 :``bundle``: changesets sent via ``hg unbundle``
73 :``bundle``: changesets sent via ``hg unbundle``
74
74
75 Default: serve.
75 Default: serve.
76
76
77 notify.strip
77 notify.strip
78 Number of leading slashes to strip from url paths. By default, notifications
78 Number of leading slashes to strip from url paths. By default, notifications
79 reference repositories with their absolute path. ``notify.strip`` lets you
79 reference repositories with their absolute path. ``notify.strip`` lets you
80 turn them into relative paths. For example, ``notify.strip=3`` will change
80 turn them into relative paths. For example, ``notify.strip=3`` will change
81 ``/long/path/repository`` into ``repository``. Default: 0.
81 ``/long/path/repository`` into ``repository``. Default: 0.
82
82
83 notify.domain
83 notify.domain
84 Default email domain for sender or recipients with no explicit domain.
84 Default email domain for sender or recipients with no explicit domain.
85
85
86 notify.style
86 notify.style
87 Style file to use when formatting emails.
87 Style file to use when formatting emails.
88
88
89 notify.template
89 notify.template
90 Template to use when formatting emails.
90 Template to use when formatting emails.
91
91
92 notify.incoming
92 notify.incoming
93 Template to use when run as an incoming hook, overriding ``notify.template``.
93 Template to use when run as an incoming hook, overriding ``notify.template``.
94
94
95 notify.outgoing
95 notify.outgoing
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
97
97
98 notify.changegroup
98 notify.changegroup
99 Template to use when running as a changegroup hook, overriding
99 Template to use when running as a changegroup hook, overriding
100 ``notify.template``.
100 ``notify.template``.
101
101
102 notify.maxdiff
102 notify.maxdiff
103 Maximum number of diff lines to include in notification email. Set to 0
103 Maximum number of diff lines to include in notification email. Set to 0
104 to disable the diff, or -1 to include all of it. Default: 300.
104 to disable the diff, or -1 to include all of it. Default: 300.
105
105
106 notify.maxsubject
106 notify.maxsubject
107 Maximum number of characters in email's subject line. Default: 67.
107 Maximum number of characters in email's subject line. Default: 67.
108
108
109 notify.diffstat
109 notify.diffstat
110 Set to True to include a diffstat before diff content. Default: True.
110 Set to True to include a diffstat before diff content. Default: True.
111
111
112 notify.merge
112 notify.merge
113 If True, send notifications for merge changesets. Default: True.
113 If True, send notifications for merge changesets. Default: True.
114
114
115 notify.mbox
115 notify.mbox
116 If set, append mails to this mbox file instead of sending. Default: None.
116 If set, append mails to this mbox file instead of sending. Default: None.
117
117
118 notify.fromauthor
118 notify.fromauthor
119 If set, use the committer of the first changeset in a changegroup for
119 If set, use the committer of the first changeset in a changegroup for
120 the "From" field of the notification mail. If not set, take the user
120 the "From" field of the notification mail. If not set, take the user
121 from the pushing repo. Default: False.
121 from the pushing repo. Default: False.
122
122
123 If set, the following entries will also be used to customize the
123 If set, the following entries will also be used to customize the
124 notifications:
124 notifications:
125
125
126 email.from
126 email.from
127 Email ``From`` address to use if none can be found in the generated
127 Email ``From`` address to use if none can be found in the generated
128 email content.
128 email content.
129
129
130 web.baseurl
130 web.baseurl
131 Root repository URL to combine with repository paths when making
131 Root repository URL to combine with repository paths when making
132 references. See also ``notify.strip``.
132 references. See also ``notify.strip``.
133
133
134 '''
134 '''
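# A minimal end-to-end configuration combining the entries documented above
# (illustrative values only; the recipient address is hypothetical):
#
#   [extensions]
#   notify =
#
#   [hooks]
#   changegroup.notify = python:hgext.notify.hook
#
#   [notify]
#   test = False
#   sources = serve push
#   strip = 3
#
#   [reposubs]
#   * = dev-commits@example.com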
135
135
136 import email, socket, time
136 import email, socket, time
137 # On Python 2.4 you have to import this by name or it fails to
137 # On Python 2.4 you have to import this by name or it fails to
138 # load. This was not a problem on Python 2.7.
138 # load. This was not a problem on Python 2.7.
139 import email.Parser
139 import email.Parser
140 from mercurial.i18n import _
140 from mercurial.i18n import _
141 from mercurial import patch, cmdutil, util, mail
141 from mercurial import patch, cmdutil, util, mail
142 import fnmatch
142 import fnmatch
143
143
144 # Note for extension authors: ONLY specify testedwith = 'internal' for
145 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
146 # be specifying the version(s) of Mercurial they are tested with, or
147 # leave the attribute unspecified.
144 testedwith = 'internal'
148 testedwith = 'internal'
145
149
146 # template for a single changeset can include email headers.
150 # template for a single changeset can include email headers.
147 single_template = '''
151 single_template = '''
148 Subject: changeset in {webroot}: {desc|firstline|strip}
152 Subject: changeset in {webroot}: {desc|firstline|strip}
149 From: {author}
153 From: {author}
150
154
151 changeset {node|short} in {root}
155 changeset {node|short} in {root}
152 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
156 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
153 description:
157 description:
154 \t{desc|tabindent|strip}
158 \t{desc|tabindent|strip}
155 '''.lstrip()
159 '''.lstrip()
156
160
157 # template for multiple changesets should not contain email headers,
161 # template for multiple changesets should not contain email headers,
158 # because only the first set of headers will be used and the result will
162 # because only the first set of headers will be used and the result will
159 # look strange.
163 # look strange.
160 multiple_template = '''
164 multiple_template = '''
161 changeset {node|short} in {root}
165 changeset {node|short} in {root}
162 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
166 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
163 summary: {desc|firstline}
167 summary: {desc|firstline}
164 '''
168 '''
165
169
166 deftemplates = {
170 deftemplates = {
167 'changegroup': multiple_template,
171 'changegroup': multiple_template,
168 }
172 }
169
173
170 class notifier(object):
174 class notifier(object):
171 '''email notification class.'''
175 '''email notification class.'''
172
176
173 def __init__(self, ui, repo, hooktype):
177 def __init__(self, ui, repo, hooktype):
174 self.ui = ui
178 self.ui = ui
175 cfg = self.ui.config('notify', 'config')
179 cfg = self.ui.config('notify', 'config')
176 if cfg:
180 if cfg:
177 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
181 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
178 self.repo = repo
182 self.repo = repo
179 self.stripcount = int(self.ui.config('notify', 'strip', 0))
183 self.stripcount = int(self.ui.config('notify', 'strip', 0))
180 self.root = self.strip(self.repo.root)
184 self.root = self.strip(self.repo.root)
181 self.domain = self.ui.config('notify', 'domain')
185 self.domain = self.ui.config('notify', 'domain')
182 self.mbox = self.ui.config('notify', 'mbox')
186 self.mbox = self.ui.config('notify', 'mbox')
183 self.test = self.ui.configbool('notify', 'test', True)
187 self.test = self.ui.configbool('notify', 'test', True)
184 self.charsets = mail._charsets(self.ui)
188 self.charsets = mail._charsets(self.ui)
185 self.subs = self.subscribers()
189 self.subs = self.subscribers()
186 self.merge = self.ui.configbool('notify', 'merge', True)
190 self.merge = self.ui.configbool('notify', 'merge', True)
187
191
188 mapfile = self.ui.config('notify', 'style')
192 mapfile = self.ui.config('notify', 'style')
189 template = (self.ui.config('notify', hooktype) or
193 template = (self.ui.config('notify', hooktype) or
190 self.ui.config('notify', 'template'))
194 self.ui.config('notify', 'template'))
191 if not mapfile and not template:
195 if not mapfile and not template:
192 template = deftemplates.get(hooktype) or single_template
196 template = deftemplates.get(hooktype) or single_template
193 self.t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
197 self.t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
194 template, mapfile, False)
198 template, mapfile, False)
195
199
196 def strip(self, path):
200 def strip(self, path):
197 '''strip leading slashes from local path, turn into web-safe path.'''
201 '''strip leading slashes from local path, turn into web-safe path.'''
198
202
199 path = util.pconvert(path)
203 path = util.pconvert(path)
200 count = self.stripcount
204 count = self.stripcount
201 while count > 0:
205 while count > 0:
202 c = path.find('/')
206 c = path.find('/')
203 if c == -1:
207 if c == -1:
204 break
208 break
205 path = path[c + 1:]
209 path = path[c + 1:]
206 count -= 1
210 count -= 1
207 return path
211 return path
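A standalone restatement of the ``notify.strip`` handling above, with an invented path; ``util.pconvert`` is approximated by a plain backslash replacement::

    def strip_components(path, count):
        """Drop `count` leading path components, as notify.strip does."""
        path = path.replace('\\', '/')   # rough stand-in for util.pconvert
        while count > 0 and '/' in path:
            path = path.split('/', 1)[1]
            count -= 1
        return path

    # with notify.strip = 3, a server-side root such as
    # '/srv/hg/project-main' is reported as just 'project-main'
    assert strip_components('/srv/hg/project-main', 3) == 'project-main'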
208
212
209 def fixmail(self, addr):
213 def fixmail(self, addr):
210 '''try to clean up email addresses.'''
214 '''try to clean up email addresses.'''
211
215
212 addr = util.email(addr.strip())
216 addr = util.email(addr.strip())
213 if self.domain:
217 if self.domain:
214 a = addr.find('@localhost')
218 a = addr.find('@localhost')
215 if a != -1:
219 if a != -1:
216 addr = addr[:a]
220 addr = addr[:a]
217 if '@' not in addr:
221 if '@' not in addr:
218 return addr + '@' + self.domain
222 return addr + '@' + self.domain
219 return addr
223 return addr
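The same clean-up with concrete, invented addresses; this sketch skips the ``util.email`` extraction of ``Name <addr>`` forms and only shows the domain handling::

    def clean_address(addr, domain='example.com'):
        """Simplified restatement of notifier.fixmail for a fixed domain."""
        addr = addr.strip()
        if domain:
            at = addr.find('@localhost')
            if at != -1:
                addr = addr[:at]
            if '@' not in addr:
                return addr + '@' + domain
        return addr

    assert clean_address('alice') == 'alice@example.com'
    assert clean_address('bob@localhost') == 'bob@example.com'
    assert clean_address('carol@elsewhere.org') == 'carol@elsewhere.org'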
220
224
221 def subscribers(self):
225 def subscribers(self):
222 '''return list of email addresses of subscribers to this repo.'''
226 '''return list of email addresses of subscribers to this repo.'''
223 subs = set()
227 subs = set()
224 for user, pats in self.ui.configitems('usersubs'):
228 for user, pats in self.ui.configitems('usersubs'):
225 for pat in pats.split(','):
229 for pat in pats.split(','):
226 if '#' in pat:
230 if '#' in pat:
227 pat, revs = pat.split('#', 1)
231 pat, revs = pat.split('#', 1)
228 else:
232 else:
229 revs = None
233 revs = None
230 if fnmatch.fnmatch(self.repo.root, pat.strip()):
234 if fnmatch.fnmatch(self.repo.root, pat.strip()):
231 subs.add((self.fixmail(user), revs))
235 subs.add((self.fixmail(user), revs))
232 for pat, users in self.ui.configitems('reposubs'):
236 for pat, users in self.ui.configitems('reposubs'):
233 if '#' in pat:
237 if '#' in pat:
234 pat, revs = pat.split('#', 1)
238 pat, revs = pat.split('#', 1)
235 else:
239 else:
236 revs = None
240 revs = None
237 if fnmatch.fnmatch(self.repo.root, pat):
241 if fnmatch.fnmatch(self.repo.root, pat):
238 for user in users.split(','):
242 for user in users.split(','):
239 subs.add((self.fixmail(user), revs))
243 subs.add((self.fixmail(user), revs))
240 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
244 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
241 for s, r in sorted(subs)]
245 for s, r in sorted(subs)]
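Both ``[usersubs]`` and ``[reposubs]`` entries end up as ``(subscriber, revset-or-None)`` pairs: the glob is matched against the repository root and an optional revset may follow a ``#``. A minimal standalone sketch of that matching, with invented paths and addresses::

    import fnmatch

    def matching_subscribers(repo_root, usersubs, reposubs):
        """Collect (subscriber, revset or None) pairs as subscribers() does."""
        subs = set()
        for user, pats in usersubs.items():
            for pat in pats.split(','):
                pat, _sep, revs = pat.strip().partition('#')
                if fnmatch.fnmatch(repo_root, pat.strip()):
                    subs.add((user, revs or None))
        for pat, users in reposubs.items():
            pat, _sep, revs = pat.partition('#')
            if fnmatch.fnmatch(repo_root, pat):
                for user in users.split(','):
                    subs.add((user.strip(), revs or None))
        return sorted(subs)

    print(matching_subscribers(
        '/srv/hg/project-main',
        {'alice@example.com': '/srv/hg/project-*, /srv/hg/docs#branch(stable)'},
        {'/srv/hg/project-main': 'bob@example.com, carol@example.com'}))
    # -> [('alice@example.com', None), ('bob@example.com', None),
    #     ('carol@example.com', None)]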
242
246
243 def node(self, ctx, **props):
247 def node(self, ctx, **props):
244 '''format one changeset, unless it is a suppressed merge.'''
248 '''format one changeset, unless it is a suppressed merge.'''
245 if not self.merge and len(ctx.parents()) > 1:
249 if not self.merge and len(ctx.parents()) > 1:
246 return False
250 return False
247 self.t.show(ctx, changes=ctx.changeset(),
251 self.t.show(ctx, changes=ctx.changeset(),
248 baseurl=self.ui.config('web', 'baseurl'),
252 baseurl=self.ui.config('web', 'baseurl'),
249 root=self.repo.root, webroot=self.root, **props)
253 root=self.repo.root, webroot=self.root, **props)
250 return True
254 return True
251
255
252 def skipsource(self, source):
256 def skipsource(self, source):
253 '''true if incoming changes from this source should be skipped.'''
257 '''true if incoming changes from this source should be skipped.'''
254 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
258 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
255 return source not in ok_sources
259 return source not in ok_sources
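``notify.sources`` is a whitespace-separated whitelist, so anything not listed is skipped; restated in isolation::

    def skipsource(source, sources='serve'):
        """True if changes from `source` should not trigger a notification."""
        return source not in sources.split()

    assert skipsource('push') is True       # only 'serve' is allowed by default
    assert skipsource('serve') is False
    assert skipsource('push', 'serve push') is False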
256
260
257 def send(self, ctx, count, data):
261 def send(self, ctx, count, data):
258 '''send message.'''
262 '''send message.'''
259
263
260 # Select subscribers by revset
264 # Select subscribers by revset
261 subs = set()
265 subs = set()
262 for sub, spec in self.subs:
266 for sub, spec in self.subs:
263 if spec is None:
267 if spec is None:
264 subs.add(sub)
268 subs.add(sub)
265 continue
269 continue
266 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
270 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
267 if len(revs):
271 if len(revs):
268 subs.add(sub)
272 subs.add(sub)
269 continue
273 continue
270 if len(subs) == 0:
274 if len(subs) == 0:
271 self.ui.debug('notify: no subscribers to selected repo '
275 self.ui.debug('notify: no subscribers to selected repo '
272 'and revset\n')
276 'and revset\n')
273 return
277 return
274
278
275 p = email.Parser.Parser()
279 p = email.Parser.Parser()
276 try:
280 try:
277 msg = p.parsestr(data)
281 msg = p.parsestr(data)
278 except email.Errors.MessageParseError, inst:
282 except email.Errors.MessageParseError, inst:
279 raise util.Abort(inst)
283 raise util.Abort(inst)
280
284
281 # store sender and subject
285 # store sender and subject
282 sender, subject = msg['From'], msg['Subject']
286 sender, subject = msg['From'], msg['Subject']
283 del msg['From'], msg['Subject']
287 del msg['From'], msg['Subject']
284
288
285 if not msg.is_multipart():
289 if not msg.is_multipart():
286 # create fresh mime message from scratch
290 # create fresh mime message from scratch
287 # (multipart templates must take care of this themselves)
291 # (multipart templates must take care of this themselves)
288 headers = msg.items()
292 headers = msg.items()
289 payload = msg.get_payload()
293 payload = msg.get_payload()
290 # for notification prefer readability over data precision
294 # for notification prefer readability over data precision
291 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
295 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
292 # reinstate custom headers
296 # reinstate custom headers
293 for k, v in headers:
297 for k, v in headers:
294 msg[k] = v
298 msg[k] = v
295
299
296 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
300 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
297
301
298 # try to make subject line exist and be useful
302 # try to make subject line exist and be useful
299 if not subject:
303 if not subject:
300 if count > 1:
304 if count > 1:
301 subject = _('%s: %d new changesets') % (self.root, count)
305 subject = _('%s: %d new changesets') % (self.root, count)
302 else:
306 else:
303 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
307 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
304 subject = '%s: %s' % (self.root, s)
308 subject = '%s: %s' % (self.root, s)
305 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
309 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
306 if maxsubject:
310 if maxsubject:
307 subject = util.ellipsis(subject, maxsubject)
311 subject = util.ellipsis(subject, maxsubject)
308 msg['Subject'] = mail.headencode(self.ui, subject,
312 msg['Subject'] = mail.headencode(self.ui, subject,
309 self.charsets, self.test)
313 self.charsets, self.test)
310
314
311 # try to make message have proper sender
315 # try to make message have proper sender
312 if not sender:
316 if not sender:
313 sender = self.ui.config('email', 'from') or self.ui.username()
317 sender = self.ui.config('email', 'from') or self.ui.username()
314 if '@' not in sender or '@localhost' in sender:
318 if '@' not in sender or '@localhost' in sender:
315 sender = self.fixmail(sender)
319 sender = self.fixmail(sender)
316 msg['From'] = mail.addressencode(self.ui, sender,
320 msg['From'] = mail.addressencode(self.ui, sender,
317 self.charsets, self.test)
321 self.charsets, self.test)
318
322
319 msg['X-Hg-Notification'] = 'changeset %s' % ctx
323 msg['X-Hg-Notification'] = 'changeset %s' % ctx
320 if not msg['Message-Id']:
324 if not msg['Message-Id']:
321 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
325 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
322 (ctx, int(time.time()),
326 (ctx, int(time.time()),
323 hash(self.repo.root), socket.getfqdn()))
327 hash(self.repo.root), socket.getfqdn()))
324 msg['To'] = ', '.join(sorted(subs))
328 msg['To'] = ', '.join(sorted(subs))
325
329
326 msgtext = msg.as_string()
330 msgtext = msg.as_string()
327 if self.test:
331 if self.test:
328 self.ui.write(msgtext)
332 self.ui.write(msgtext)
329 if not msgtext.endswith('\n'):
333 if not msgtext.endswith('\n'):
330 self.ui.write('\n')
334 self.ui.write('\n')
331 else:
335 else:
332 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
336 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
333 (len(subs), count))
337 (len(subs), count))
334 mail.sendmail(self.ui, util.email(msg['From']),
338 mail.sendmail(self.ui, util.email(msg['From']),
335 subs, msgtext, mbox=self.mbox)
339 subs, msgtext, mbox=self.mbox)
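The subject fallback and ``notify.maxsubject`` handling above, restated without the Mercurial helpers; note that ``util.ellipsis`` truncates on word boundaries, whereas this sketch simply cuts the string::

    def build_subject(root, description, count, maxsubject=67):
        """Sketch of the subject fallback used in notifier.send()."""
        if count > 1:
            subject = '%s: %d new changesets' % (root, count)
        else:
            firstline = description.lstrip().split('\n', 1)[0].rstrip()
            subject = '%s: %s' % (root, firstline)
        if maxsubject and len(subject) > maxsubject:
            subject = subject[:maxsubject - 3] + '...'
        return subject

    print(build_subject('project-main', 'fix the frobnicator\n\ndetails', 1))
    # -> project-main: fix the frobnicator
    print(build_subject('project-main', 'anything', 4))
    # -> project-main: 4 new changesets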
336
340
337 def diff(self, ctx, ref=None):
341 def diff(self, ctx, ref=None):
338
342
339 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
343 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
340 prev = ctx.p1().node()
344 prev = ctx.p1().node()
341 if ref:
345 if ref:
342 ref = ref.node()
346 ref = ref.node()
343 else:
347 else:
344 ref = ctx.node()
348 ref = ctx.node()
345 chunks = patch.diff(self.repo, prev, ref,
349 chunks = patch.diff(self.repo, prev, ref,
346 opts=patch.diffallopts(self.ui))
350 opts=patch.diffallopts(self.ui))
347 difflines = ''.join(chunks).splitlines()
351 difflines = ''.join(chunks).splitlines()
348
352
349 if self.ui.configbool('notify', 'diffstat', True):
353 if self.ui.configbool('notify', 'diffstat', True):
350 s = patch.diffstat(difflines)
354 s = patch.diffstat(difflines)
351 # s may be empty, don't include the diffstat header if it is
355 # s may be empty, don't include the diffstat header if it is
352 if s:
356 if s:
353 self.ui.write('\ndiffstat:\n\n%s' % s)
357 self.ui.write('\ndiffstat:\n\n%s' % s)
354
358
355 if maxdiff == 0:
359 if maxdiff == 0:
356 return
360 return
357 elif maxdiff > 0 and len(difflines) > maxdiff:
361 elif maxdiff > 0 and len(difflines) > maxdiff:
358 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
362 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
359 self.ui.write(msg % (len(difflines), maxdiff))
363 self.ui.write(msg % (len(difflines), maxdiff))
360 difflines = difflines[:maxdiff]
364 difflines = difflines[:maxdiff]
361 elif difflines:
365 elif difflines:
362 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
366 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
363
367
364 self.ui.write("\n".join(difflines))
368 self.ui.write("\n".join(difflines))
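``notify.maxdiff`` has three cases: ``0`` suppresses the diff entirely, a positive value truncates, and a negative value means no limit. Restated in isolation::

    def clip_diff(difflines, maxdiff=300):
        """Return the diff lines that notify would actually include."""
        if maxdiff == 0:
            return []                       # diff suppressed entirely
        if maxdiff > 0 and len(difflines) > maxdiff:
            return difflines[:maxdiff]      # truncated, with a note in the mail
        return difflines                    # negative limit means "no limit"

    assert clip_diff(['a', 'b', 'c'], maxdiff=2) == ['a', 'b']
    assert clip_diff(['a', 'b', 'c'], maxdiff=0) == []
    assert clip_diff(['a', 'b', 'c'], maxdiff=-1) == ['a', 'b', 'c']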
365
369
366 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
370 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
367 '''send email notifications to interested subscribers.
371 '''send email notifications to interested subscribers.
368
372
369 if used as a changegroup hook, send one email for all changesets in
373 if used as a changegroup hook, send one email for all changesets in
370 the changegroup; otherwise send one email per changeset.'''
374 the changegroup; otherwise send one email per changeset.'''
371
375
372 n = notifier(ui, repo, hooktype)
376 n = notifier(ui, repo, hooktype)
373 ctx = repo[node]
377 ctx = repo[node]
374
378
375 if not n.subs:
379 if not n.subs:
376 ui.debug('notify: no subscribers to repository %s\n' % n.root)
380 ui.debug('notify: no subscribers to repository %s\n' % n.root)
377 return
381 return
378 if n.skipsource(source):
382 if n.skipsource(source):
379 ui.debug('notify: changes have source "%s" - skipping\n' % source)
383 ui.debug('notify: changes have source "%s" - skipping\n' % source)
380 return
384 return
381
385
382 ui.pushbuffer()
386 ui.pushbuffer()
383 data = ''
387 data = ''
384 count = 0
388 count = 0
385 author = ''
389 author = ''
386 if hooktype == 'changegroup' or hooktype == 'outgoing':
390 if hooktype == 'changegroup' or hooktype == 'outgoing':
387 start, end = ctx.rev(), len(repo)
391 start, end = ctx.rev(), len(repo)
388 for rev in xrange(start, end):
392 for rev in xrange(start, end):
389 if n.node(repo[rev]):
393 if n.node(repo[rev]):
390 count += 1
394 count += 1
391 if not author:
395 if not author:
392 author = repo[rev].user()
396 author = repo[rev].user()
393 else:
397 else:
394 data += ui.popbuffer()
398 data += ui.popbuffer()
395 ui.note(_('notify: suppressing notification for merge %d:%s\n')
399 ui.note(_('notify: suppressing notification for merge %d:%s\n')
396 % (rev, repo[rev].hex()[:12]))
400 % (rev, repo[rev].hex()[:12]))
397 ui.pushbuffer()
401 ui.pushbuffer()
398 if count:
402 if count:
399 n.diff(ctx, repo['tip'])
403 n.diff(ctx, repo['tip'])
400 else:
404 else:
401 if not n.node(ctx):
405 if not n.node(ctx):
402 ui.popbuffer()
406 ui.popbuffer()
403 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
407 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
404 (ctx.rev(), ctx.hex()[:12]))
408 (ctx.rev(), ctx.hex()[:12]))
405 return
409 return
406 count += 1
410 count += 1
407 n.diff(ctx)
411 n.diff(ctx)
408
412
409 data += ui.popbuffer()
413 data += ui.popbuffer()
410 fromauthor = ui.config('notify', 'fromauthor')
414 fromauthor = ui.config('notify', 'fromauthor')
411 if author and fromauthor:
415 if author and fromauthor:
412 data = '\n'.join(['From: %s' % author, data])
416 data = '\n'.join(['From: %s' % author, data])
413
417
414 if count:
418 if count:
415 n.send(ctx, count, data)
419 n.send(ctx, count, data)
@@ -1,175 +1,179 @@
1 # pager.py - display output using a pager
1 # pager.py - display output using a pager
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # To load the extension, add it to your configuration file:
8 # To load the extension, add it to your configuration file:
9 #
9 #
10 # [extensions]
10 # [extensions]
11 # pager =
11 # pager =
12 #
12 #
13 # Run "hg help pager" to get info on configuration.
13 # Run "hg help pager" to get info on configuration.
14
14
15 '''browse command output with an external pager
15 '''browse command output with an external pager
16
16
17 To set the pager that should be used, set the pager.pager configuration option::
17 To set the pager that should be used, set the pager.pager configuration option::
18
18
19 [pager]
19 [pager]
20 pager = less -FRX
20 pager = less -FRX
21
21
22 If no pager is set, the pager extension uses the environment variable
22 If no pager is set, the pager extension uses the environment variable
23 $PAGER. If neither pager.pager nor $PAGER is set, no pager is used.
23 $PAGER. If neither pager.pager nor $PAGER is set, no pager is used.
24
24
25 You can disable the pager for certain commands by adding them to the
25 You can disable the pager for certain commands by adding them to the
26 pager.ignore list::
26 pager.ignore list::
27
27
28 [pager]
28 [pager]
29 ignore = version, help, update
29 ignore = version, help, update
30
30
31 You can also enable the pager only for certain commands using
31 You can also enable the pager only for certain commands using
32 pager.attend. Below is the default list of commands to be paged::
32 pager.attend. Below is the default list of commands to be paged::
33
33
34 [pager]
34 [pager]
35 attend = annotate, cat, diff, export, glog, log, qdiff
35 attend = annotate, cat, diff, export, glog, log, qdiff
36
36
37 Setting pager.attend to an empty value will cause all commands to be
37 Setting pager.attend to an empty value will cause all commands to be
38 paged.
38 paged.
39
39
40 If pager.attend is present, pager.ignore will be ignored.
40 If pager.attend is present, pager.ignore will be ignored.
41
41
42 Lastly, you can enable and disable paging for individual commands with
42 Lastly, you can enable and disable paging for individual commands with
43 the attend-<command> option. This setting takes precedence over
43 the attend-<command> option. This setting takes precedence over
44 existing attend and ignore options and defaults::
44 existing attend and ignore options and defaults::
45
45
46 [pager]
46 [pager]
47 attend-cat = false
47 attend-cat = false
48
48
49 To ignore global commands like :hg:`version` or :hg:`help`, you have
49 To ignore global commands like :hg:`version` or :hg:`help`, you have
50 to specify them in your user configuration file.
50 to specify them in your user configuration file.
51
51
52 The --pager=... option can also be used to control when the pager is
52 The --pager=... option can also be used to control when the pager is
53 used. Use a boolean value like yes, no, on, off, or use auto for
53 used. Use a boolean value like yes, no, on, off, or use auto for
54 normal behavior.
54 normal behavior.
55
55
56 '''
56 '''
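The precedence between ``attend-<command>``, ``attend`` and ``ignore`` described above boils down to a small decision function; this is a sketch of the logic in ``pagecmd`` further down, leaving out the ``--pager`` command line flag::

    def should_page(cmd, attend, ignore, overrides):
        """Decide paging for `cmd`; `overrides` maps cmd -> attend-<cmd> value."""
        if cmd in overrides:         # attend-<cmd> wins outright
            return overrides[cmd]
        if attend:                   # a non-empty attend list acts as a whitelist
            return cmd in attend
        return cmd not in ignore     # otherwise ignore acts as a blacklist

    default_attend = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
    assert should_page('log', default_attend, [], {}) is True
    assert should_page('status', default_attend, [], {}) is False
    assert should_page('cat', default_attend, [], {'cat': False}) is False
    assert should_page('version', [], ['version', 'help'], {}) is False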
57
57
58 import atexit, sys, os, signal, subprocess, errno, shlex
58 import atexit, sys, os, signal, subprocess, errno, shlex
59 from mercurial import commands, dispatch, util, extensions, cmdutil
59 from mercurial import commands, dispatch, util, extensions, cmdutil
60 from mercurial.i18n import _
60 from mercurial.i18n import _
61
61
62 # Note for extension authors: ONLY specify testedwith = 'internal' for
63 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
64 # be specifying the version(s) of Mercurial they are tested with, or
65 # leave the attribute unspecified.
62 testedwith = 'internal'
66 testedwith = 'internal'
63
67
64 def _pagerfork(ui, p):
68 def _pagerfork(ui, p):
65 if not util.safehasattr(os, 'fork'):
69 if not util.safehasattr(os, 'fork'):
66 sys.stdout = util.popen(p, 'wb')
70 sys.stdout = util.popen(p, 'wb')
67 if ui._isatty(sys.stderr):
71 if ui._isatty(sys.stderr):
68 sys.stderr = sys.stdout
72 sys.stderr = sys.stdout
69 return
73 return
70 fdin, fdout = os.pipe()
74 fdin, fdout = os.pipe()
71 pid = os.fork()
75 pid = os.fork()
72 if pid == 0:
76 if pid == 0:
73 os.close(fdin)
77 os.close(fdin)
74 os.dup2(fdout, sys.stdout.fileno())
78 os.dup2(fdout, sys.stdout.fileno())
75 if ui._isatty(sys.stderr):
79 if ui._isatty(sys.stderr):
76 os.dup2(fdout, sys.stderr.fileno())
80 os.dup2(fdout, sys.stderr.fileno())
77 os.close(fdout)
81 os.close(fdout)
78 return
82 return
79 os.dup2(fdin, sys.stdin.fileno())
83 os.dup2(fdin, sys.stdin.fileno())
80 os.close(fdin)
84 os.close(fdin)
81 os.close(fdout)
85 os.close(fdout)
82 try:
86 try:
83 os.execvp('/bin/sh', ['/bin/sh', '-c', p])
87 os.execvp('/bin/sh', ['/bin/sh', '-c', p])
84 except OSError, e:
88 except OSError, e:
85 if e.errno == errno.ENOENT:
89 if e.errno == errno.ENOENT:
86 # no /bin/sh, try executing the pager directly
90 # no /bin/sh, try executing the pager directly
87 args = shlex.split(p)
91 args = shlex.split(p)
88 os.execvp(args[0], args)
92 os.execvp(args[0], args)
89 else:
93 else:
90 raise
94 raise
91
95
92 def _pagersubprocess(ui, p):
96 def _pagersubprocess(ui, p):
93 pager = subprocess.Popen(p, shell=True, bufsize=-1,
97 pager = subprocess.Popen(p, shell=True, bufsize=-1,
94 close_fds=util.closefds, stdin=subprocess.PIPE,
98 close_fds=util.closefds, stdin=subprocess.PIPE,
95 stdout=sys.stdout, stderr=sys.stderr)
99 stdout=sys.stdout, stderr=sys.stderr)
96
100
97 stdout = os.dup(sys.stdout.fileno())
101 stdout = os.dup(sys.stdout.fileno())
98 stderr = os.dup(sys.stderr.fileno())
102 stderr = os.dup(sys.stderr.fileno())
99 os.dup2(pager.stdin.fileno(), sys.stdout.fileno())
103 os.dup2(pager.stdin.fileno(), sys.stdout.fileno())
100 if ui._isatty(sys.stderr):
104 if ui._isatty(sys.stderr):
101 os.dup2(pager.stdin.fileno(), sys.stderr.fileno())
105 os.dup2(pager.stdin.fileno(), sys.stderr.fileno())
102
106
103 @atexit.register
107 @atexit.register
104 def killpager():
108 def killpager():
105 if util.safehasattr(signal, "SIGINT"):
109 if util.safehasattr(signal, "SIGINT"):
106 signal.signal(signal.SIGINT, signal.SIG_IGN)
110 signal.signal(signal.SIGINT, signal.SIG_IGN)
107 pager.stdin.close()
111 pager.stdin.close()
108 os.dup2(stdout, sys.stdout.fileno())
112 os.dup2(stdout, sys.stdout.fileno())
109 os.dup2(stderr, sys.stderr.fileno())
113 os.dup2(stderr, sys.stderr.fileno())
110 pager.wait()
114 pager.wait()
111
115
112 def _runpager(ui, p):
116 def _runpager(ui, p):
113 # The subprocess module shipped with Python <= 2.4 is buggy (issue3533).
117 # The subprocess module shipped with Python <= 2.4 is buggy (issue3533).
114 # The compat version is buggy on Windows (issue3225), but has been shipping
118 # The compat version is buggy on Windows (issue3225), but has been shipping
115 # with hg for a long time. Preserve existing functionality.
119 # with hg for a long time. Preserve existing functionality.
116 if sys.version_info >= (2, 5):
120 if sys.version_info >= (2, 5):
117 _pagersubprocess(ui, p)
121 _pagersubprocess(ui, p)
118 else:
122 else:
119 _pagerfork(ui, p)
123 _pagerfork(ui, p)
120
124
121 def uisetup(ui):
125 def uisetup(ui):
122 if '--debugger' in sys.argv or not ui.formatted():
126 if '--debugger' in sys.argv or not ui.formatted():
123 return
127 return
124
128
125 def pagecmd(orig, ui, options, cmd, cmdfunc):
129 def pagecmd(orig, ui, options, cmd, cmdfunc):
126 p = ui.config("pager", "pager", os.environ.get("PAGER"))
130 p = ui.config("pager", "pager", os.environ.get("PAGER"))
127 usepager = False
131 usepager = False
128 always = util.parsebool(options['pager'])
132 always = util.parsebool(options['pager'])
129 auto = options['pager'] == 'auto'
133 auto = options['pager'] == 'auto'
130
134
131 if not p:
135 if not p:
132 pass
136 pass
133 elif always:
137 elif always:
134 usepager = True
138 usepager = True
135 elif not auto:
139 elif not auto:
136 usepager = False
140 usepager = False
137 else:
141 else:
138 attend = ui.configlist('pager', 'attend', attended)
142 attend = ui.configlist('pager', 'attend', attended)
139 ignore = ui.configlist('pager', 'ignore')
143 ignore = ui.configlist('pager', 'ignore')
140 cmds, _ = cmdutil.findcmd(cmd, commands.table)
144 cmds, _ = cmdutil.findcmd(cmd, commands.table)
141
145
142 for cmd in cmds:
146 for cmd in cmds:
143 var = 'attend-%s' % cmd
147 var = 'attend-%s' % cmd
144 if ui.config('pager', var):
148 if ui.config('pager', var):
145 usepager = ui.configbool('pager', var)
149 usepager = ui.configbool('pager', var)
146 break
150 break
147 if (cmd in attend or
151 if (cmd in attend or
148 (cmd not in ignore and not attend)):
152 (cmd not in ignore and not attend)):
149 usepager = True
153 usepager = True
150 break
154 break
151
155
152 setattr(ui, 'pageractive', usepager)
156 setattr(ui, 'pageractive', usepager)
153
157
154 if usepager:
158 if usepager:
155 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
159 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
156 ui.setconfig('ui', 'interactive', False, 'pager')
160 ui.setconfig('ui', 'interactive', False, 'pager')
157 if util.safehasattr(signal, "SIGPIPE"):
161 if util.safehasattr(signal, "SIGPIPE"):
158 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
162 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
159 _runpager(ui, p)
163 _runpager(ui, p)
160 return orig(ui, options, cmd, cmdfunc)
164 return orig(ui, options, cmd, cmdfunc)
161
165
162 # Wrap dispatch._runcommand after color is loaded so color can see
166 # Wrap dispatch._runcommand after color is loaded so color can see
163 # ui.pageractive. Otherwise, if we loaded first, color's wrapped
167 # ui.pageractive. Otherwise, if we loaded first, color's wrapped
164 # dispatch._runcommand would run without having access to ui.pageractive.
168 # dispatch._runcommand would run without having access to ui.pageractive.
165 def afterloaded(loaded):
169 def afterloaded(loaded):
166 extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
170 extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
167 extensions.afterloaded('color', afterloaded)
171 extensions.afterloaded('color', afterloaded)
168
172
169 def extsetup(ui):
173 def extsetup(ui):
170 commands.globalopts.append(
174 commands.globalopts.append(
171 ('', 'pager', 'auto',
175 ('', 'pager', 'auto',
172 _("when to paginate (boolean, always, auto, or never)"),
176 _("when to paginate (boolean, always, auto, or never)"),
173 _('TYPE')))
177 _('TYPE')))
174
178
175 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
179 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
@@ -1,656 +1,660 @@
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail-compatible mailer or fill out the [smtp] section so
42 to be a sendmail-compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the command line. See the [email] and [smtp] sections in
44 directly from the command line. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 You can control the default inclusion of an introduction message with the
47 You can control the default inclusion of an introduction message with the
48 ``patchbomb.intro`` configuration option. The configuration is always
48 ``patchbomb.intro`` configuration option. The configuration is always
49 overridden by command line flags like --intro and --desc::
49 overridden by command line flags like --intro and --desc::
50
50
51 [patchbomb]
51 [patchbomb]
52 intro=auto # include introduction message if more than 1 patch (default)
52 intro=auto # include introduction message if more than 1 patch (default)
53 intro=never # never include an introduction message
53 intro=never # never include an introduction message
54 intro=always # always include an introduction message
54 intro=always # always include an introduction message
55
55
56 You can set patchbomb to always ask for confirmation by setting
56 You can set patchbomb to always ask for confirmation by setting
57 ``patchbomb.confirm`` to true.
57 ``patchbomb.confirm`` to true.
58 '''
58 '''
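The ``[PATCH M of N]`` prefixes described above are zero-padded to the width of N, with index 0 reserved for the introduction (see ``makepatch`` and ``_makeintro`` below). In isolation::

    def series_prefix(idx, total, flag=''):
        """Build the '[PATCH M of N]' prefix; idx 0 is the introduction."""
        flag = ' ' + flag if flag else ''
        width = len(str(total))
        return '[PATCH %0*d of %d%s]' % (width, idx, total, flag)

    assert series_prefix(0, 12) == '[PATCH 00 of 12]'
    assert series_prefix(3, 12, 'STABLE') == '[PATCH 03 of 12 STABLE]'
    assert series_prefix(1, 1) == '[PATCH 1 of 1]'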
59
59
60 import os, errno, socket, tempfile, cStringIO
60 import os, errno, socket, tempfile, cStringIO
61 import email
61 import email
62 # On python2.4 you have to import these by name or they fail to
62 # On python2.4 you have to import these by name or they fail to
63 # load. This was not a problem on Python 2.7.
63 # load. This was not a problem on Python 2.7.
64 import email.Generator
64 import email.Generator
65 import email.MIMEMultipart
65 import email.MIMEMultipart
66
66
67 from mercurial import cmdutil, commands, hg, mail, patch, util
67 from mercurial import cmdutil, commands, hg, mail, patch, util
68 from mercurial import scmutil
68 from mercurial import scmutil
69 from mercurial.i18n import _
69 from mercurial.i18n import _
70 from mercurial.node import bin
70 from mercurial.node import bin
71
71
72 cmdtable = {}
72 cmdtable = {}
73 command = cmdutil.command(cmdtable)
73 command = cmdutil.command(cmdtable)
74 # Note for extension authors: ONLY specify testedwith = 'internal' for
75 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
76 # be specifying the version(s) of Mercurial they are tested with, or
77 # leave the attribute unspecified.
74 testedwith = 'internal'
78 testedwith = 'internal'
75
79
76 def prompt(ui, prompt, default=None, rest=':'):
80 def prompt(ui, prompt, default=None, rest=':'):
77 if default:
81 if default:
78 prompt += ' [%s]' % default
82 prompt += ' [%s]' % default
79 return ui.prompt(prompt + rest, default)
83 return ui.prompt(prompt + rest, default)
80
84
81 def introwanted(ui, opts, number):
85 def introwanted(ui, opts, number):
82 '''is an introductory message apparently wanted?'''
86 '''is an introductory message apparently wanted?'''
83 introconfig = ui.config('patchbomb', 'intro', 'auto')
87 introconfig = ui.config('patchbomb', 'intro', 'auto')
84 if opts.get('intro') or opts.get('desc'):
88 if opts.get('intro') or opts.get('desc'):
85 intro = True
89 intro = True
86 elif introconfig == 'always':
90 elif introconfig == 'always':
87 intro = True
91 intro = True
88 elif introconfig == 'never':
92 elif introconfig == 'never':
89 intro = False
93 intro = False
90 elif introconfig == 'auto':
94 elif introconfig == 'auto':
91 intro = 1 < number
95 intro = 1 < number
92 else:
96 else:
93 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
97 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
94 % introconfig)
98 % introconfig)
95 ui.write_err(_('(should be one of always, never, auto)\n'))
99 ui.write_err(_('(should be one of always, never, auto)\n'))
96 intro = 1 < number
100 intro = 1 < number
97 return intro
101 return intro
98
102
99 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
103 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
100 patchname=None):
104 patchname=None):
101
105
102 desc = []
106 desc = []
103 node = None
107 node = None
104 body = ''
108 body = ''
105
109
106 for line in patchlines:
110 for line in patchlines:
107 if line.startswith('#'):
111 if line.startswith('#'):
108 if line.startswith('# Node ID'):
112 if line.startswith('# Node ID'):
109 node = line.split()[-1]
113 node = line.split()[-1]
110 continue
114 continue
111 if line.startswith('diff -r') or line.startswith('diff --git'):
115 if line.startswith('diff -r') or line.startswith('diff --git'):
112 break
116 break
113 desc.append(line)
117 desc.append(line)
114
118
115 if not patchname and not node:
119 if not patchname and not node:
116 raise ValueError
120 raise ValueError
117
121
118 if opts.get('attach') and not opts.get('body'):
122 if opts.get('attach') and not opts.get('body'):
119 body = ('\n'.join(desc[1:]).strip() or
123 body = ('\n'.join(desc[1:]).strip() or
120 'Patch subject is complete summary.')
124 'Patch subject is complete summary.')
121 body += '\n\n\n'
125 body += '\n\n\n'
122
126
123 if opts.get('plain'):
127 if opts.get('plain'):
124 while patchlines and patchlines[0].startswith('# '):
128 while patchlines and patchlines[0].startswith('# '):
125 patchlines.pop(0)
129 patchlines.pop(0)
126 if patchlines:
130 if patchlines:
127 patchlines.pop(0)
131 patchlines.pop(0)
128 while patchlines and not patchlines[0].strip():
132 while patchlines and not patchlines[0].strip():
129 patchlines.pop(0)
133 patchlines.pop(0)
130
134
131 ds = patch.diffstat(patchlines, git=opts.get('git'))
135 ds = patch.diffstat(patchlines, git=opts.get('git'))
132 if opts.get('diffstat'):
136 if opts.get('diffstat'):
133 body += ds + '\n\n'
137 body += ds + '\n\n'
134
138
135 addattachment = opts.get('attach') or opts.get('inline')
139 addattachment = opts.get('attach') or opts.get('inline')
136 if not addattachment or opts.get('body'):
140 if not addattachment or opts.get('body'):
137 body += '\n'.join(patchlines)
141 body += '\n'.join(patchlines)
138
142
139 if addattachment:
143 if addattachment:
140 msg = email.MIMEMultipart.MIMEMultipart()
144 msg = email.MIMEMultipart.MIMEMultipart()
141 if body:
145 if body:
142 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
146 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
143 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
147 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
144 opts.get('test'))
148 opts.get('test'))
145 binnode = bin(node)
149 binnode = bin(node)
146 # if node is mq patch, it will have the patch file's name as a tag
150 # if node is mq patch, it will have the patch file's name as a tag
147 if not patchname:
151 if not patchname:
148 patchtags = [t for t in repo.nodetags(binnode)
152 patchtags = [t for t in repo.nodetags(binnode)
149 if t.endswith('.patch') or t.endswith('.diff')]
153 if t.endswith('.patch') or t.endswith('.diff')]
150 if patchtags:
154 if patchtags:
151 patchname = patchtags[0]
155 patchname = patchtags[0]
152 elif total > 1:
156 elif total > 1:
153 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
157 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
154 binnode, seqno=idx,
158 binnode, seqno=idx,
155 total=total)
159 total=total)
156 else:
160 else:
157 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
161 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
158 disposition = 'inline'
162 disposition = 'inline'
159 if opts.get('attach'):
163 if opts.get('attach'):
160 disposition = 'attachment'
164 disposition = 'attachment'
161 p['Content-Disposition'] = disposition + '; filename=' + patchname
165 p['Content-Disposition'] = disposition + '; filename=' + patchname
162 msg.attach(p)
166 msg.attach(p)
163 else:
167 else:
164 msg = mail.mimetextpatch(body, display=opts.get('test'))
168 msg = mail.mimetextpatch(body, display=opts.get('test'))
165
169
166 flag = ' '.join(opts.get('flag'))
170 flag = ' '.join(opts.get('flag'))
167 if flag:
171 if flag:
168 flag = ' ' + flag
172 flag = ' ' + flag
169
173
170 subj = desc[0].strip().rstrip('. ')
174 subj = desc[0].strip().rstrip('. ')
171 if not numbered:
175 if not numbered:
172 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
176 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
173 else:
177 else:
174 tlen = len(str(total))
178 tlen = len(str(total))
175 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
179 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
176 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
180 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
177 msg['X-Mercurial-Node'] = node
181 msg['X-Mercurial-Node'] = node
178 msg['X-Mercurial-Series-Index'] = '%i' % idx
182 msg['X-Mercurial-Series-Index'] = '%i' % idx
179 msg['X-Mercurial-Series-Total'] = '%i' % total
183 msg['X-Mercurial-Series-Total'] = '%i' % total
180 return msg, subj, ds
184 return msg, subj, ds
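The loop at the top of ``makepatch`` peels the node hash and the description out of :hg:`export` output before any diff line. A standalone restatement with a fabricated patch header::

    def split_header(patchlines):
        """Return (node, description lines) from `hg export`-style output."""
        node, desc = None, []
        for line in patchlines:
            if line.startswith('#'):
                if line.startswith('# Node ID'):
                    node = line.split()[-1]
                continue
            if line.startswith('diff -r') or line.startswith('diff --git'):
                break
            desc.append(line)
        return node, desc

    example = [
        '# HG changeset patch',
        '# User Alice <alice@example.com>',
        '# Node ID 0123456789abcdef0123456789abcdef01234567',
        'fix the frobnicator',
        '',
        'diff -r 000000000000 -r 0123456789ab frob.py',
    ]
    node, desc = split_header(example)
    assert node == '0123456789abcdef0123456789abcdef01234567'
    assert desc == ['fix the frobnicator', '']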
181
185
182 def _getpatches(repo, revs, **opts):
186 def _getpatches(repo, revs, **opts):
183 """return a list of patches for a list of revisions
187 """return a list of patches for a list of revisions
184
188
185 Each patch in the list is itself a list of lines.
189 Each patch in the list is itself a list of lines.
186 """
190 """
187 ui = repo.ui
191 ui = repo.ui
188 prev = repo['.'].rev()
192 prev = repo['.'].rev()
189 for r in revs:
193 for r in revs:
190 if r == prev and (repo[None].files() or repo[None].deleted()):
194 if r == prev and (repo[None].files() or repo[None].deleted()):
191 ui.warn(_('warning: working directory has '
195 ui.warn(_('warning: working directory has '
192 'uncommitted changes\n'))
196 'uncommitted changes\n'))
193 output = cStringIO.StringIO()
197 output = cStringIO.StringIO()
194 cmdutil.export(repo, [r], fp=output,
198 cmdutil.export(repo, [r], fp=output,
195 opts=patch.difffeatureopts(ui, opts, git=True))
199 opts=patch.difffeatureopts(ui, opts, git=True))
196 yield output.getvalue().split('\n')
200 yield output.getvalue().split('\n')
197 def _getbundle(repo, dest, **opts):
201 def _getbundle(repo, dest, **opts):
198 """return a bundle containing changesets missing in "dest"
202 """return a bundle containing changesets missing in "dest"
199
203
200 The `opts` keyword arguments are the same as the ones accepted by the
204 The `opts` keyword arguments are the same as the ones accepted by the
201 `bundle` command.
205 `bundle` command.
202
206
203 The bundle is returned as a single in-memory binary blob.
207 The bundle is returned as a single in-memory binary blob.
204 """
208 """
205 ui = repo.ui
209 ui = repo.ui
206 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
210 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
207 tmpfn = os.path.join(tmpdir, 'bundle')
211 tmpfn = os.path.join(tmpdir, 'bundle')
208 try:
212 try:
209 commands.bundle(ui, repo, tmpfn, dest, **opts)
213 commands.bundle(ui, repo, tmpfn, dest, **opts)
210 fp = open(tmpfn, 'rb')
214 fp = open(tmpfn, 'rb')
211 data = fp.read()
215 data = fp.read()
212 fp.close()
216 fp.close()
213 return data
217 return data
214 finally:
218 finally:
215 try:
219 try:
216 os.unlink(tmpfn)
220 os.unlink(tmpfn)
217 except OSError:
221 except OSError:
218 pass
222 pass
219 os.rmdir(tmpdir)
223 os.rmdir(tmpdir)
220
224
221 def _getdescription(repo, defaultbody, sender, **opts):
225 def _getdescription(repo, defaultbody, sender, **opts):
222 """obtain the body of the introduction message and return it
226 """obtain the body of the introduction message and return it
223
227
224 This is also used for the body of email with an attached bundle.
228 This is also used for the body of email with an attached bundle.
225
229
226 The body can be obtained either from the command line option or entered by
230 The body can be obtained either from the command line option or entered by
227 the user through the editor.
231 the user through the editor.
228 """
232 """
229 ui = repo.ui
233 ui = repo.ui
230 if opts.get('desc'):
234 if opts.get('desc'):
231 body = open(opts.get('desc')).read()
235 body = open(opts.get('desc')).read()
232 else:
236 else:
233 ui.write(_('\nWrite the introductory message for the '
237 ui.write(_('\nWrite the introductory message for the '
234 'patch series.\n\n'))
238 'patch series.\n\n'))
235 body = ui.edit(defaultbody, sender)
239 body = ui.edit(defaultbody, sender)
236 # Save series description in case sendmail fails
240 # Save series description in case sendmail fails
237 msgfile = repo.vfs('last-email.txt', 'wb')
241 msgfile = repo.vfs('last-email.txt', 'wb')
238 msgfile.write(body)
242 msgfile.write(body)
239 msgfile.close()
243 msgfile.close()
240 return body
244 return body
241
245
242 def _getbundlemsgs(repo, sender, bundle, **opts):
246 def _getbundlemsgs(repo, sender, bundle, **opts):
243 """Get the full email for sending a given bundle
247 """Get the full email for sending a given bundle
244
248
245 This function returns a list of "email" tuples (subject, content, None).
249 This function returns a list of "email" tuples (subject, content, None).
246 The list is always one message long in that case.
250 The list is always one message long in that case.
247 """
251 """
248 ui = repo.ui
252 ui = repo.ui
249 _charsets = mail._charsets(ui)
253 _charsets = mail._charsets(ui)
250 subj = (opts.get('subject')
254 subj = (opts.get('subject')
251 or prompt(ui, 'Subject:', 'A bundle for your repository'))
255 or prompt(ui, 'Subject:', 'A bundle for your repository'))
252
256
253 body = _getdescription(repo, '', sender, **opts)
257 body = _getdescription(repo, '', sender, **opts)
254 msg = email.MIMEMultipart.MIMEMultipart()
258 msg = email.MIMEMultipart.MIMEMultipart()
255 if body:
259 if body:
256 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
260 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
257 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
261 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
258 datapart.set_payload(bundle)
262 datapart.set_payload(bundle)
259 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
263 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
260 datapart.add_header('Content-Disposition', 'attachment',
264 datapart.add_header('Content-Disposition', 'attachment',
261 filename=bundlename)
265 filename=bundlename)
262 email.Encoders.encode_base64(datapart)
266 email.Encoders.encode_base64(datapart)
263 msg.attach(datapart)
267 msg.attach(datapart)
264 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
268 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
265 return [(msg, subj, None)]
269 return [(msg, subj, None)]
266
270
267 def _makeintro(repo, sender, patches, **opts):
271 def _makeintro(repo, sender, patches, **opts):
268 """make an introduction email, asking the user for content if needed
272 """make an introduction email, asking the user for content if needed
269
273
270 email is returned as (subject, body, cumulative-diffstat)"""
274 email is returned as (subject, body, cumulative-diffstat)"""
271 ui = repo.ui
275 ui = repo.ui
272 _charsets = mail._charsets(ui)
276 _charsets = mail._charsets(ui)
273 tlen = len(str(len(patches)))
277 tlen = len(str(len(patches)))
274
278
275 flag = opts.get('flag') or ''
279 flag = opts.get('flag') or ''
276 if flag:
280 if flag:
277 flag = ' ' + ' '.join(flag)
281 flag = ' ' + ' '.join(flag)
278 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
282 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
279
283
280 subj = (opts.get('subject') or
284 subj = (opts.get('subject') or
281 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
285 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
282 if not subj:
286 if not subj:
283 return None # skip intro if the user doesn't bother
287 return None # skip intro if the user doesn't bother
284
288
285 subj = prefix + ' ' + subj
289 subj = prefix + ' ' + subj
286
290
287 body = ''
291 body = ''
288 if opts.get('diffstat'):
292 if opts.get('diffstat'):
289 # generate a cumulative diffstat of the whole patch series
293 # generate a cumulative diffstat of the whole patch series
290 diffstat = patch.diffstat(sum(patches, []))
294 diffstat = patch.diffstat(sum(patches, []))
291 body = '\n' + diffstat
295 body = '\n' + diffstat
292 else:
296 else:
293 diffstat = None
297 diffstat = None
294
298
295 body = _getdescription(repo, body, sender, **opts)
299 body = _getdescription(repo, body, sender, **opts)
296 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
300 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
297 msg['Subject'] = mail.headencode(ui, subj, _charsets,
301 msg['Subject'] = mail.headencode(ui, subj, _charsets,
298 opts.get('test'))
302 opts.get('test'))
299 return (msg, subj, diffstat)
303 return (msg, subj, diffstat)
300
304
301 def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
305 def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
302 """return a list of emails from a list of patches
306 """return a list of emails from a list of patches
303
307
304 This involves introduction message creation if necessary.
308 This involves introduction message creation if necessary.
305
309
306 This function returns a list of "email" tuples (subject, content, None).
310 This function returns a list of "email" tuples (subject, content, None).
307 """
311 """
308 ui = repo.ui
312 ui = repo.ui
309 _charsets = mail._charsets(ui)
313 _charsets = mail._charsets(ui)
310 msgs = []
314 msgs = []
311
315
312 ui.write(_('this patch series consists of %d patches.\n\n')
316 ui.write(_('this patch series consists of %d patches.\n\n')
313 % len(patches))
317 % len(patches))
314
318
315 # build the intro message, or skip it if the user declines
319 # build the intro message, or skip it if the user declines
316 if introwanted(ui, opts, len(patches)):
320 if introwanted(ui, opts, len(patches)):
317 msg = _makeintro(repo, sender, patches, **opts)
321 msg = _makeintro(repo, sender, patches, **opts)
318 if msg:
322 if msg:
319 msgs.append(msg)
323 msgs.append(msg)
320
324
321 # are we going to send more than one message?
325 # are we going to send more than one message?
322 numbered = len(msgs) + len(patches) > 1
326 numbered = len(msgs) + len(patches) > 1
323
327
324 # now generate the actual patch messages
328 # now generate the actual patch messages
325 name = None
329 name = None
326 for i, p in enumerate(patches):
330 for i, p in enumerate(patches):
327 if patchnames:
331 if patchnames:
328 name = patchnames[i]
332 name = patchnames[i]
329 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
333 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
330 len(patches), numbered, name)
334 len(patches), numbered, name)
331 msgs.append(msg)
335 msgs.append(msg)
332
336
333 return msgs
337 return msgs
334
338
335 def _getoutgoing(repo, dest, revs):
339 def _getoutgoing(repo, dest, revs):
336 '''Return the revisions present locally but not in dest'''
340 '''Return the revisions present locally but not in dest'''
337 ui = repo.ui
341 ui = repo.ui
338 url = ui.expandpath(dest or 'default-push', dest or 'default')
342 url = ui.expandpath(dest or 'default-push', dest or 'default')
339 url = hg.parseurl(url)[0]
343 url = hg.parseurl(url)[0]
340 ui.status(_('comparing with %s\n') % util.hidepassword(url))
344 ui.status(_('comparing with %s\n') % util.hidepassword(url))
341
345
342 revs = [r for r in revs if r >= 0]
346 revs = [r for r in revs if r >= 0]
343 if not revs:
347 if not revs:
344 revs = [len(repo) - 1]
348 revs = [len(repo) - 1]
345 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
349 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
346 if not revs:
350 if not revs:
347 ui.status(_("no changes found\n"))
351 ui.status(_("no changes found\n"))
348 return revs
352 return revs
349
353
350 emailopts = [
354 emailopts = [
351 ('', 'body', None, _('send patches as inline message text (default)')),
355 ('', 'body', None, _('send patches as inline message text (default)')),
352 ('a', 'attach', None, _('send patches as attachments')),
356 ('a', 'attach', None, _('send patches as attachments')),
353 ('i', 'inline', None, _('send patches as inline attachments')),
357 ('i', 'inline', None, _('send patches as inline attachments')),
354 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
358 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
355 ('c', 'cc', [], _('email addresses of copy recipients')),
359 ('c', 'cc', [], _('email addresses of copy recipients')),
356 ('', 'confirm', None, _('ask for confirmation before sending')),
360 ('', 'confirm', None, _('ask for confirmation before sending')),
357 ('d', 'diffstat', None, _('add diffstat output to messages')),
361 ('d', 'diffstat', None, _('add diffstat output to messages')),
358 ('', 'date', '', _('use the given date as the sending date')),
362 ('', 'date', '', _('use the given date as the sending date')),
359 ('', 'desc', '', _('use the given file as the series description')),
363 ('', 'desc', '', _('use the given file as the series description')),
360 ('f', 'from', '', _('email address of sender')),
364 ('f', 'from', '', _('email address of sender')),
361 ('n', 'test', None, _('print messages that would be sent')),
365 ('n', 'test', None, _('print messages that would be sent')),
362 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
366 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
363 ('', 'reply-to', [], _('email addresses replies should be sent to')),
367 ('', 'reply-to', [], _('email addresses replies should be sent to')),
364 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
368 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
365 ('', 'in-reply-to', '', _('message identifier to reply to')),
369 ('', 'in-reply-to', '', _('message identifier to reply to')),
366 ('', 'flag', [], _('flags to add in subject prefixes')),
370 ('', 'flag', [], _('flags to add in subject prefixes')),
367 ('t', 'to', [], _('email addresses of recipients'))]
371 ('t', 'to', [], _('email addresses of recipients'))]
368
372
369 @command('email',
373 @command('email',
370 [('g', 'git', None, _('use git extended diff format')),
374 [('g', 'git', None, _('use git extended diff format')),
371 ('', 'plain', None, _('omit hg patch header')),
375 ('', 'plain', None, _('omit hg patch header')),
372 ('o', 'outgoing', None,
376 ('o', 'outgoing', None,
373 _('send changes not found in the target repository')),
377 _('send changes not found in the target repository')),
374 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
378 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
375 ('', 'bundlename', 'bundle',
379 ('', 'bundlename', 'bundle',
376 _('name of the bundle attachment file'), _('NAME')),
380 _('name of the bundle attachment file'), _('NAME')),
377 ('r', 'rev', [], _('a revision to send'), _('REV')),
381 ('r', 'rev', [], _('a revision to send'), _('REV')),
378 ('', 'force', None, _('run even when remote repository is unrelated '
382 ('', 'force', None, _('run even when remote repository is unrelated '
379 '(with -b/--bundle)')),
383 '(with -b/--bundle)')),
380 ('', 'base', [], _('a base changeset to specify instead of a destination '
384 ('', 'base', [], _('a base changeset to specify instead of a destination '
381 '(with -b/--bundle)'), _('REV')),
385 '(with -b/--bundle)'), _('REV')),
382 ('', 'intro', None, _('send an introduction email for a single patch')),
386 ('', 'intro', None, _('send an introduction email for a single patch')),
383 ] + emailopts + commands.remoteopts,
387 ] + emailopts + commands.remoteopts,
384 _('hg email [OPTION]... [DEST]...'))
388 _('hg email [OPTION]... [DEST]...'))
385 def patchbomb(ui, repo, *revs, **opts):
389 def patchbomb(ui, repo, *revs, **opts):
386 '''send changesets by email
390 '''send changesets by email
387
391
388 By default, diffs are sent in the format generated by
392 By default, diffs are sent in the format generated by
389 :hg:`export`, one per message. The series starts with a "[PATCH 0
393 :hg:`export`, one per message. The series starts with a "[PATCH 0
390 of N]" introduction, which describes the series as a whole.
394 of N]" introduction, which describes the series as a whole.
391
395
392 Each patch email has a Subject line of "[PATCH M of N] ...", using
396 Each patch email has a Subject line of "[PATCH M of N] ...", using
393 the first line of the changeset description as the subject text.
397 the first line of the changeset description as the subject text.
394 The message contains two or three parts. First, the changeset
398 The message contains two or three parts. First, the changeset
395 description.
399 description.
396
400
397 With the -d/--diffstat option, if the diffstat program is
401 With the -d/--diffstat option, if the diffstat program is
398 installed, the result of running diffstat on the patch is inserted.
402 installed, the result of running diffstat on the patch is inserted.
399
403
400 Finally, the patch itself, as generated by :hg:`export`.
404 Finally, the patch itself, as generated by :hg:`export`.
401
405
402 With the -d/--diffstat or --confirm options, you will be presented
406 With the -d/--diffstat or --confirm options, you will be presented
403 with a final summary of all messages and asked for confirmation before
407 with a final summary of all messages and asked for confirmation before
404 the messages are sent.
408 the messages are sent.
405
409
406 By default the patch is included as text in the email body for
410 By default the patch is included as text in the email body for
407 easy reviewing. Using the -a/--attach option will instead create
411 easy reviewing. Using the -a/--attach option will instead create
408 an attachment for the patch. With -i/--inline an inline attachment
412 an attachment for the patch. With -i/--inline an inline attachment
409 will be created. You can include a patch both as text in the email
413 will be created. You can include a patch both as text in the email
410 body and as a regular or an inline attachment by combining the
414 body and as a regular or an inline attachment by combining the
411 -a/--attach or -i/--inline with the --body option.
415 -a/--attach or -i/--inline with the --body option.
412
416
413 With -o/--outgoing, emails will be generated for patches not found
417 With -o/--outgoing, emails will be generated for patches not found
414 in the destination repository (or only those which are ancestors
418 in the destination repository (or only those which are ancestors
415 of the specified revisions if any are provided)
419 of the specified revisions if any are provided)
416
420
417 With -b/--bundle, changesets are selected as for --outgoing, but a
421 With -b/--bundle, changesets are selected as for --outgoing, but a
418 single email containing a binary Mercurial bundle as an attachment
422 single email containing a binary Mercurial bundle as an attachment
419 will be sent.
423 will be sent.
420
424
421 With -m/--mbox, instead of previewing each patchbomb message in a
425 With -m/--mbox, instead of previewing each patchbomb message in a
422 pager or sending the messages directly, it will create a UNIX
426 pager or sending the messages directly, it will create a UNIX
423 mailbox file with the patch emails. This mailbox file can be
427 mailbox file with the patch emails. This mailbox file can be
424 previewed with any mail user agent which supports UNIX mbox
428 previewed with any mail user agent which supports UNIX mbox
425 files.
429 files.
426
430
427 With -n/--test, all steps will run, but mail will not be sent.
431 With -n/--test, all steps will run, but mail will not be sent.
428 You will be prompted for an email recipient address, a subject and
432 You will be prompted for an email recipient address, a subject and
429 an introductory message describing the patches of your patchbomb.
433 an introductory message describing the patches of your patchbomb.
430 Then when all is done, patchbomb messages are displayed. If the
434 Then when all is done, patchbomb messages are displayed. If the
431 PAGER environment variable is set, your pager will be fired up once
435 PAGER environment variable is set, your pager will be fired up once
432 for each patchbomb message, so you can verify everything is alright.
436 for each patchbomb message, so you can verify everything is alright.
433
437
434 In case email sending fails, you will find a backup of your series
438 In case email sending fails, you will find a backup of your series
435 introductory message in ``.hg/last-email.txt``.
439 introductory message in ``.hg/last-email.txt``.
436
440
437 The default behavior of this command can be customized through
441 The default behavior of this command can be customized through
438 configuration. (See :hg:`help patchbomb` for details)
442 configuration. (See :hg:`help patchbomb` for details)
439
443
440 Examples::
444 Examples::
441
445
442 hg email -r 3000 # send patch 3000 only
446 hg email -r 3000 # send patch 3000 only
443 hg email -r 3000 -r 3001 # send patches 3000 and 3001
447 hg email -r 3000 -r 3001 # send patches 3000 and 3001
444 hg email -r 3000:3005 # send patches 3000 through 3005
448 hg email -r 3000:3005 # send patches 3000 through 3005
445 hg email 3000 # send patch 3000 (deprecated)
449 hg email 3000 # send patch 3000 (deprecated)
446
450
447 hg email -o # send all patches not in default
451 hg email -o # send all patches not in default
448 hg email -o DEST # send all patches not in DEST
452 hg email -o DEST # send all patches not in DEST
449 hg email -o -r 3000 # send all ancestors of 3000 not in default
453 hg email -o -r 3000 # send all ancestors of 3000 not in default
450 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
454 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
451
455
452 hg email -b # send bundle of all patches not in default
456 hg email -b # send bundle of all patches not in default
453 hg email -b DEST # send bundle of all patches not in DEST
457 hg email -b DEST # send bundle of all patches not in DEST
454 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
458 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
455 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
459 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
456
460
457 hg email -o -m mbox && # generate an mbox file...
461 hg email -o -m mbox && # generate an mbox file...
458 mutt -R -f mbox # ... and view it with mutt
462 mutt -R -f mbox # ... and view it with mutt
459 hg email -o -m mbox && # generate an mbox file ...
463 hg email -o -m mbox && # generate an mbox file ...
460 formail -s sendmail \\ # ... and use formail to send from the mbox
464 formail -s sendmail \\ # ... and use formail to send from the mbox
461 -bm -t < mbox # ... using sendmail
465 -bm -t < mbox # ... using sendmail
462
466
463 Before using this command, you will need to enable email in your
467 Before using this command, you will need to enable email in your
464 hgrc. See the [email] section in hgrc(5) for details.
468 hgrc. See the [email] section in hgrc(5) for details.
465 '''
469 '''
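# (Editor's illustration, not part of this change: the [email]/[smtp] setup the
#  docstring above refers to might look like this in an hgrc; all values here
#  are made up.
#
#      [email]
#      from = Jane Doe <jane@example.com>
#      method = smtp
#
#      [smtp]
#      host = smtp.example.com
#      port = 587
#      tls = starttls
#      username = jane
# )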
466
470
467 _charsets = mail._charsets(ui)
471 _charsets = mail._charsets(ui)
468
472
469 bundle = opts.get('bundle')
473 bundle = opts.get('bundle')
470 date = opts.get('date')
474 date = opts.get('date')
471 mbox = opts.get('mbox')
475 mbox = opts.get('mbox')
472 outgoing = opts.get('outgoing')
476 outgoing = opts.get('outgoing')
473 rev = opts.get('rev')
477 rev = opts.get('rev')
474 # internal option used by pbranches
478 # internal option used by pbranches
475 patches = opts.get('patches')
479 patches = opts.get('patches')
476
480
477 if not (opts.get('test') or mbox):
481 if not (opts.get('test') or mbox):
478 # really sending
482 # really sending
479 mail.validateconfig(ui)
483 mail.validateconfig(ui)
480
484
481 if not (revs or rev or outgoing or bundle or patches):
485 if not (revs or rev or outgoing or bundle or patches):
482 raise util.Abort(_('specify at least one changeset with -r or -o'))
486 raise util.Abort(_('specify at least one changeset with -r or -o'))
483
487
484 if outgoing and bundle:
488 if outgoing and bundle:
485 raise util.Abort(_("--outgoing mode always on with --bundle;"
489 raise util.Abort(_("--outgoing mode always on with --bundle;"
486 " do not re-specify --outgoing"))
490 " do not re-specify --outgoing"))
487
491
488 if outgoing or bundle:
492 if outgoing or bundle:
489 if len(revs) > 1:
493 if len(revs) > 1:
490 raise util.Abort(_("too many destinations"))
494 raise util.Abort(_("too many destinations"))
491 if revs:
495 if revs:
492 dest = revs[0]
496 dest = revs[0]
493 else:
497 else:
494 dest = None
498 dest = None
495 revs = []
499 revs = []
496
500
497 if rev:
501 if rev:
498 if revs:
502 if revs:
499 raise util.Abort(_('use only one form to specify the revision'))
503 raise util.Abort(_('use only one form to specify the revision'))
500 revs = rev
504 revs = rev
501
505
502 revs = scmutil.revrange(repo, revs)
506 revs = scmutil.revrange(repo, revs)
503 if outgoing:
507 if outgoing:
504 revs = _getoutgoing(repo, dest, revs)
508 revs = _getoutgoing(repo, dest, revs)
505 if bundle:
509 if bundle:
506 opts['revs'] = [str(r) for r in revs]
510 opts['revs'] = [str(r) for r in revs]
507
511
508 # start
512 # start
509 if date:
513 if date:
510 start_time = util.parsedate(date)
514 start_time = util.parsedate(date)
511 else:
515 else:
512 start_time = util.makedate()
516 start_time = util.makedate()
513
517
514 def genmsgid(id):
518 def genmsgid(id):
515 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
519 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
516
520
517 sender = (opts.get('from') or ui.config('email', 'from') or
521 sender = (opts.get('from') or ui.config('email', 'from') or
518 ui.config('patchbomb', 'from') or
522 ui.config('patchbomb', 'from') or
519 prompt(ui, 'From', ui.username()))
523 prompt(ui, 'From', ui.username()))
520
524
521 if patches:
525 if patches:
522 msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
526 msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
523 **opts)
527 **opts)
524 elif bundle:
528 elif bundle:
525 bundledata = _getbundle(repo, dest, **opts)
529 bundledata = _getbundle(repo, dest, **opts)
526 bundleopts = opts.copy()
530 bundleopts = opts.copy()
527 bundleopts.pop('bundle', None) # already processed
531 bundleopts.pop('bundle', None) # already processed
528 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
532 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
529 else:
533 else:
530 _patches = list(_getpatches(repo, revs, **opts))
534 _patches = list(_getpatches(repo, revs, **opts))
531 msgs = _getpatchmsgs(repo, sender, _patches, **opts)
535 msgs = _getpatchmsgs(repo, sender, _patches, **opts)
532
536
533 showaddrs = []
537 showaddrs = []
534
538
535 def getaddrs(header, ask=False, default=None):
539 def getaddrs(header, ask=False, default=None):
536 configkey = header.lower()
540 configkey = header.lower()
537 opt = header.replace('-', '_').lower()
541 opt = header.replace('-', '_').lower()
538 addrs = opts.get(opt)
542 addrs = opts.get(opt)
539 if addrs:
543 if addrs:
540 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
544 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
541 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
545 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
542
546
543 # not on the command line: fallback to config and then maybe ask
547 # not on the command line: fallback to config and then maybe ask
544 addr = (ui.config('email', configkey) or
548 addr = (ui.config('email', configkey) or
545 ui.config('patchbomb', configkey) or
549 ui.config('patchbomb', configkey) or
546 '')
550 '')
547 if not addr and ask:
551 if not addr and ask:
548 addr = prompt(ui, header, default=default)
552 addr = prompt(ui, header, default=default)
549 if addr:
553 if addr:
550 showaddrs.append('%s: %s' % (header, addr))
554 showaddrs.append('%s: %s' % (header, addr))
551 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
555 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
552 else:
556 else:
553 return default
557 return default
554
558
555 to = getaddrs('To', ask=True)
559 to = getaddrs('To', ask=True)
556 if not to:
560 if not to:
557 # we can get here in non-interactive mode
561 # we can get here in non-interactive mode
558 raise util.Abort(_('no recipient addresses provided'))
562 raise util.Abort(_('no recipient addresses provided'))
559 cc = getaddrs('Cc', ask=True, default='') or []
563 cc = getaddrs('Cc', ask=True, default='') or []
560 bcc = getaddrs('Bcc') or []
564 bcc = getaddrs('Bcc') or []
561 replyto = getaddrs('Reply-To')
565 replyto = getaddrs('Reply-To')
562
566
563 confirm = ui.configbool('patchbomb', 'confirm')
567 confirm = ui.configbool('patchbomb', 'confirm')
564 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
568 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
565
569
566 if confirm:
570 if confirm:
567 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
571 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
568 ui.write(('From: %s\n' % sender), label='patchbomb.from')
572 ui.write(('From: %s\n' % sender), label='patchbomb.from')
569 for addr in showaddrs:
573 for addr in showaddrs:
570 ui.write('%s\n' % addr, label='patchbomb.to')
574 ui.write('%s\n' % addr, label='patchbomb.to')
571 for m, subj, ds in msgs:
575 for m, subj, ds in msgs:
572 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
576 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
573 if ds:
577 if ds:
574 ui.write(ds, label='patchbomb.diffstats')
578 ui.write(ds, label='patchbomb.diffstats')
575 ui.write('\n')
579 ui.write('\n')
576 if ui.promptchoice(_('are you sure you want to send (yn)?'
580 if ui.promptchoice(_('are you sure you want to send (yn)?'
577 '$$ &Yes $$ &No')):
581 '$$ &Yes $$ &No')):
578 raise util.Abort(_('patchbomb canceled'))
582 raise util.Abort(_('patchbomb canceled'))
579
583
580 ui.write('\n')
584 ui.write('\n')
581
585
582 parent = opts.get('in_reply_to') or None
586 parent = opts.get('in_reply_to') or None
583 # angle brackets may be omitted, they're not semantically part of the msg-id
587 # angle brackets may be omitted, they're not semantically part of the msg-id
584 if parent is not None:
588 if parent is not None:
585 if not parent.startswith('<'):
589 if not parent.startswith('<'):
586 parent = '<' + parent
590 parent = '<' + parent
587 if not parent.endswith('>'):
591 if not parent.endswith('>'):
588 parent += '>'
592 parent += '>'
589
593
590 sender_addr = email.Utils.parseaddr(sender)[1]
594 sender_addr = email.Utils.parseaddr(sender)[1]
591 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
595 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
592 sendmail = None
596 sendmail = None
593 firstpatch = None
597 firstpatch = None
594 for i, (m, subj, ds) in enumerate(msgs):
598 for i, (m, subj, ds) in enumerate(msgs):
595 try:
599 try:
596 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
600 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
597 if not firstpatch:
601 if not firstpatch:
598 firstpatch = m['Message-Id']
602 firstpatch = m['Message-Id']
599 m['X-Mercurial-Series-Id'] = firstpatch
603 m['X-Mercurial-Series-Id'] = firstpatch
600 except TypeError:
604 except TypeError:
601 m['Message-Id'] = genmsgid('patchbomb')
605 m['Message-Id'] = genmsgid('patchbomb')
602 if parent:
606 if parent:
603 m['In-Reply-To'] = parent
607 m['In-Reply-To'] = parent
604 m['References'] = parent
608 m['References'] = parent
605 if not parent or 'X-Mercurial-Node' not in m:
609 if not parent or 'X-Mercurial-Node' not in m:
606 parent = m['Message-Id']
610 parent = m['Message-Id']
607
611
608 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
612 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
609 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
613 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
610
614
611 start_time = (start_time[0] + 1, start_time[1])
615 start_time = (start_time[0] + 1, start_time[1])
612 m['From'] = sender
616 m['From'] = sender
613 m['To'] = ', '.join(to)
617 m['To'] = ', '.join(to)
614 if cc:
618 if cc:
615 m['Cc'] = ', '.join(cc)
619 m['Cc'] = ', '.join(cc)
616 if bcc:
620 if bcc:
617 m['Bcc'] = ', '.join(bcc)
621 m['Bcc'] = ', '.join(bcc)
618 if replyto:
622 if replyto:
619 m['Reply-To'] = ', '.join(replyto)
623 m['Reply-To'] = ', '.join(replyto)
620 if opts.get('test'):
624 if opts.get('test'):
621 ui.status(_('displaying '), subj, ' ...\n')
625 ui.status(_('displaying '), subj, ' ...\n')
622 ui.flush()
626 ui.flush()
623 if 'PAGER' in os.environ and not ui.plain():
627 if 'PAGER' in os.environ and not ui.plain():
624 fp = util.popen(os.environ['PAGER'], 'w')
628 fp = util.popen(os.environ['PAGER'], 'w')
625 else:
629 else:
626 fp = ui
630 fp = ui
627 generator = email.Generator.Generator(fp, mangle_from_=False)
631 generator = email.Generator.Generator(fp, mangle_from_=False)
628 try:
632 try:
629 generator.flatten(m, 0)
633 generator.flatten(m, 0)
630 fp.write('\n')
634 fp.write('\n')
631 except IOError, inst:
635 except IOError, inst:
632 if inst.errno != errno.EPIPE:
636 if inst.errno != errno.EPIPE:
633 raise
637 raise
634 if fp is not ui:
638 if fp is not ui:
635 fp.close()
639 fp.close()
636 else:
640 else:
637 if not sendmail:
641 if not sendmail:
638 verifycert = ui.config('smtp', 'verifycert')
642 verifycert = ui.config('smtp', 'verifycert')
639 if opts.get('insecure'):
643 if opts.get('insecure'):
640 ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
644 ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
641 try:
645 try:
642 sendmail = mail.connect(ui, mbox=mbox)
646 sendmail = mail.connect(ui, mbox=mbox)
643 finally:
647 finally:
644 ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
648 ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
645 ui.status(_('sending '), subj, ' ...\n')
649 ui.status(_('sending '), subj, ' ...\n')
646 ui.progress(_('sending'), i, item=subj, total=len(msgs))
650 ui.progress(_('sending'), i, item=subj, total=len(msgs))
647 if not mbox:
651 if not mbox:
648 # Exim does not remove the Bcc field
652 # Exim does not remove the Bcc field
649 del m['Bcc']
653 del m['Bcc']
650 fp = cStringIO.StringIO()
654 fp = cStringIO.StringIO()
651 generator = email.Generator.Generator(fp, mangle_from_=False)
655 generator = email.Generator.Generator(fp, mangle_from_=False)
652 generator.flatten(m, 0)
656 generator.flatten(m, 0)
653 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
657 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
654
658
655 ui.progress(_('writing'), None)
659 ui.progress(_('writing'), None)
656 ui.progress(_('sending'), None)
660 ui.progress(_('sending'), None)
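The send loop above threads the series by hand: the first message's Message-Id is reused as the X-Mercurial-Series-Id of every mail and as the In-Reply-To/References parent of the follow-ups. A minimal sketch of that header-chaining scheme using only the stdlib email package (the helper name make_series, the subjects and the addresses are invented for illustration):

from email.mime.text import MIMEText
from email.utils import formatdate, make_msgid

def make_series(patches, sender, to):
    '''Return a list of messages threaded the way patchbomb threads them.'''
    msgs = []
    parent = None                         # Message-Id of the first mail
    for i, (subject, body) in enumerate(patches):
        m = MIMEText(body)
        m['Subject'] = '[PATCH %d of %d] %s' % (i + 1, len(patches), subject)
        m['From'] = sender
        m['To'] = ', '.join(to)
        m['Date'] = formatdate(localtime=True)
        mid = make_msgid()
        m['Message-Id'] = mid
        if parent is None:
            parent = mid                  # the first mail anchors the thread
        else:
            m['In-Reply-To'] = parent     # later mails reply to it
            m['References'] = parent
        m['X-Mercurial-Series-Id'] = parent
        msgs.append(m)
    return msgs

series = make_series([('fix frobnicator', 'diff --git a/f b/f ...'),
                      ('add tests', 'diff --git a/t b/t ...')],
                     'me@example.com', ['hg-list@example.com'])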
@@ -1,320 +1,324 @@
1 # progress.py show progress bars for some actions
1 # progress.py show progress bars for some actions
2 #
2 #
3 # Copyright (C) 2010 Augie Fackler <durin42@gmail.com>
3 # Copyright (C) 2010 Augie Fackler <durin42@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """show progress bars for some actions
8 """show progress bars for some actions
9
9
10 This extension uses the progress information logged by hg commands
10 This extension uses the progress information logged by hg commands
11 to draw progress bars that are as informative as possible. Some progress
11 to draw progress bars that are as informative as possible. Some progress
12 bars only offer indeterminate information, while others have a definite
12 bars only offer indeterminate information, while others have a definite
13 end point.
13 end point.
14
14
15 The following settings are available::
15 The following settings are available::
16
16
17 [progress]
17 [progress]
18 delay = 3 # number of seconds (float) before showing the progress bar
18 delay = 3 # number of seconds (float) before showing the progress bar
19 changedelay = 1 # changedelay: minimum delay before showing a new topic.
19 changedelay = 1 # changedelay: minimum delay before showing a new topic.
20 # If set to less than 3 * refresh, that value will
20 # If set to less than 3 * refresh, that value will
21 # be used instead.
21 # be used instead.
22 refresh = 0.1 # time in seconds between refreshes of the progress bar
22 refresh = 0.1 # time in seconds between refreshes of the progress bar
23 format = topic bar number estimate # format of the progress bar
23 format = topic bar number estimate # format of the progress bar
24 width = <none> # if set, the maximum width of the progress information
24 width = <none> # if set, the maximum width of the progress information
25 # (that is, min(width, term width) will be used)
25 # (that is, min(width, term width) will be used)
26 clear-complete = True # clear the progress bar after it's done
26 clear-complete = True # clear the progress bar after it's done
27 disable = False # if true, don't show a progress bar
27 disable = False # if true, don't show a progress bar
28 assume-tty = False # if true, ALWAYS show a progress bar, unless
28 assume-tty = False # if true, ALWAYS show a progress bar, unless
29 # disable is given
29 # disable is given
30
30
31 Valid entries for the format field are topic, bar, number, unit,
31 Valid entries for the format field are topic, bar, number, unit,
32 estimate, speed, and item. item defaults to the last 20 characters of
32 estimate, speed, and item. item defaults to the last 20 characters of
33 the item, but this can be changed by adding either ``-<num>`` which
33 the item, but this can be changed by adding either ``-<num>`` which
34 would take the last num characters, or ``+<num>`` for the first num
34 would take the last num characters, or ``+<num>`` for the first num
35 characters.
35 characters.
36 """
36 """
37
37
38 import sys
38 import sys
39 import time
39 import time
40 import threading
40 import threading
41
41
42 from mercurial.i18n import _
42 from mercurial.i18n import _
43 # Note for extension authors: ONLY specify testedwith = 'internal' for
44 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
45 # be specifying the version(s) of Mercurial they are tested with, or
46 # leave the attribute unspecified.
43 testedwith = 'internal'
47 testedwith = 'internal'
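# (Editor's illustration, not part of this change: a third-party extension would
#  instead list the Mercurial releases it was actually tested with, e.g.
#      testedwith = '3.3 3.3.3 3.4'
#  or simply leave the attribute out, as the note above says.)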
44
48
45 from mercurial import encoding
49 from mercurial import encoding
46
50
47 def spacejoin(*args):
51 def spacejoin(*args):
48 return ' '.join(s for s in args if s)
52 return ' '.join(s for s in args if s)
49
53
50 def shouldprint(ui):
54 def shouldprint(ui):
51 return not ui.plain() and (ui._isatty(sys.stderr) or
55 return not ui.plain() and (ui._isatty(sys.stderr) or
52 ui.configbool('progress', 'assume-tty'))
56 ui.configbool('progress', 'assume-tty'))
53
57
54 def fmtremaining(seconds):
58 def fmtremaining(seconds):
55 if seconds < 60:
59 if seconds < 60:
56 # i18n: format XX seconds as "XXs"
60 # i18n: format XX seconds as "XXs"
57 return _("%02ds") % (seconds)
61 return _("%02ds") % (seconds)
58 minutes = seconds // 60
62 minutes = seconds // 60
59 if minutes < 60:
63 if minutes < 60:
60 seconds -= minutes * 60
64 seconds -= minutes * 60
61 # i18n: format X minutes and YY seconds as "XmYYs"
65 # i18n: format X minutes and YY seconds as "XmYYs"
62 return _("%dm%02ds") % (minutes, seconds)
66 return _("%dm%02ds") % (minutes, seconds)
63 # we're going to ignore seconds in this case
67 # we're going to ignore seconds in this case
64 minutes += 1
68 minutes += 1
65 hours = minutes // 60
69 hours = minutes // 60
66 minutes -= hours * 60
70 minutes -= hours * 60
67 if hours < 30:
71 if hours < 30:
68 # i18n: format X hours and YY minutes as "XhYYm"
72 # i18n: format X hours and YY minutes as "XhYYm"
69 return _("%dh%02dm") % (hours, minutes)
73 return _("%dh%02dm") % (hours, minutes)
70 # we're going to ignore minutes in this case
74 # we're going to ignore minutes in this case
71 hours += 1
75 hours += 1
72 days = hours // 24
76 days = hours // 24
73 hours -= days * 24
77 hours -= days * 24
74 if days < 15:
78 if days < 15:
75 # i18n: format X days and YY hours as "XdYYh"
79 # i18n: format X days and YY hours as "XdYYh"
76 return _("%dd%02dh") % (days, hours)
80 return _("%dd%02dh") % (days, hours)
77 # we're going to ignore hours in this case
81 # we're going to ignore hours in this case
78 days += 1
82 days += 1
79 weeks = days // 7
83 weeks = days // 7
80 days -= weeks * 7
84 days -= weeks * 7
81 if weeks < 55:
85 if weeks < 55:
82 # i18n: format X weeks and YY days as "XwYYd"
86 # i18n: format X weeks and YY days as "XwYYd"
83 return _("%dw%02dd") % (weeks, days)
87 return _("%dw%02dd") % (weeks, days)
84 # we're going to ignore days and treat a year as 52 weeks
88 # we're going to ignore days and treat a year as 52 weeks
85 weeks += 1
89 weeks += 1
86 years = weeks // 52
90 years = weeks // 52
87 weeks -= years * 52
91 weeks -= years * 52
88 # i18n: format X years and YY weeks as "XyYYw"
92 # i18n: format X years and YY weeks as "XyYYw"
89 return _("%dy%02dw") % (years, weeks)
93 return _("%dy%02dw") % (years, weeks)
90
94
91 class progbar(object):
95 class progbar(object):
92 def __init__(self, ui):
96 def __init__(self, ui):
93 self.ui = ui
97 self.ui = ui
94 self._refreshlock = threading.Lock()
98 self._refreshlock = threading.Lock()
95 self.resetstate()
99 self.resetstate()
96
100
97 def resetstate(self):
101 def resetstate(self):
98 self.topics = []
102 self.topics = []
99 self.topicstates = {}
103 self.topicstates = {}
100 self.starttimes = {}
104 self.starttimes = {}
101 self.startvals = {}
105 self.startvals = {}
102 self.printed = False
106 self.printed = False
103 self.lastprint = time.time() + float(self.ui.config(
107 self.lastprint = time.time() + float(self.ui.config(
104 'progress', 'delay', default=3))
108 'progress', 'delay', default=3))
105 self.curtopic = None
109 self.curtopic = None
106 self.lasttopic = None
110 self.lasttopic = None
107 self.indetcount = 0
111 self.indetcount = 0
108 self.refresh = float(self.ui.config(
112 self.refresh = float(self.ui.config(
109 'progress', 'refresh', default=0.1))
113 'progress', 'refresh', default=0.1))
110 self.changedelay = max(3 * self.refresh,
114 self.changedelay = max(3 * self.refresh,
111 float(self.ui.config(
115 float(self.ui.config(
112 'progress', 'changedelay', default=1)))
116 'progress', 'changedelay', default=1)))
113 self.order = self.ui.configlist(
117 self.order = self.ui.configlist(
114 'progress', 'format',
118 'progress', 'format',
115 default=['topic', 'bar', 'number', 'estimate'])
119 default=['topic', 'bar', 'number', 'estimate'])
116
120
117 def show(self, now, topic, pos, item, unit, total):
121 def show(self, now, topic, pos, item, unit, total):
118 if not shouldprint(self.ui):
122 if not shouldprint(self.ui):
119 return
123 return
120 termwidth = self.width()
124 termwidth = self.width()
121 self.printed = True
125 self.printed = True
122 head = ''
126 head = ''
123 needprogress = False
127 needprogress = False
124 tail = ''
128 tail = ''
125 for indicator in self.order:
129 for indicator in self.order:
126 add = ''
130 add = ''
127 if indicator == 'topic':
131 if indicator == 'topic':
128 add = topic
132 add = topic
129 elif indicator == 'number':
133 elif indicator == 'number':
130 if total:
134 if total:
131 add = ('% ' + str(len(str(total))) +
135 add = ('% ' + str(len(str(total))) +
132 's/%s') % (pos, total)
136 's/%s') % (pos, total)
133 else:
137 else:
134 add = str(pos)
138 add = str(pos)
135 elif indicator.startswith('item') and item:
139 elif indicator.startswith('item') and item:
136 slice = 'end'
140 slice = 'end'
137 if '-' in indicator:
141 if '-' in indicator:
138 wid = int(indicator.split('-')[1])
142 wid = int(indicator.split('-')[1])
139 elif '+' in indicator:
143 elif '+' in indicator:
140 slice = 'beginning'
144 slice = 'beginning'
141 wid = int(indicator.split('+')[1])
145 wid = int(indicator.split('+')[1])
142 else:
146 else:
143 wid = 20
147 wid = 20
144 if slice == 'end':
148 if slice == 'end':
145 add = encoding.trim(item, wid, leftside=True)
149 add = encoding.trim(item, wid, leftside=True)
146 else:
150 else:
147 add = encoding.trim(item, wid)
151 add = encoding.trim(item, wid)
148 add += (wid - encoding.colwidth(add)) * ' '
152 add += (wid - encoding.colwidth(add)) * ' '
149 elif indicator == 'bar':
153 elif indicator == 'bar':
150 add = ''
154 add = ''
151 needprogress = True
155 needprogress = True
152 elif indicator == 'unit' and unit:
156 elif indicator == 'unit' and unit:
153 add = unit
157 add = unit
154 elif indicator == 'estimate':
158 elif indicator == 'estimate':
155 add = self.estimate(topic, pos, total, now)
159 add = self.estimate(topic, pos, total, now)
156 elif indicator == 'speed':
160 elif indicator == 'speed':
157 add = self.speed(topic, pos, unit, now)
161 add = self.speed(topic, pos, unit, now)
158 if not needprogress:
162 if not needprogress:
159 head = spacejoin(head, add)
163 head = spacejoin(head, add)
160 else:
164 else:
161 tail = spacejoin(tail, add)
165 tail = spacejoin(tail, add)
162 if needprogress:
166 if needprogress:
163 used = 0
167 used = 0
164 if head:
168 if head:
165 used += encoding.colwidth(head) + 1
169 used += encoding.colwidth(head) + 1
166 if tail:
170 if tail:
167 used += encoding.colwidth(tail) + 1
171 used += encoding.colwidth(tail) + 1
168 progwidth = termwidth - used - 3
172 progwidth = termwidth - used - 3
169 if total and pos <= total:
173 if total and pos <= total:
170 amt = pos * progwidth // total
174 amt = pos * progwidth // total
171 bar = '=' * (amt - 1)
175 bar = '=' * (amt - 1)
172 if amt > 0:
176 if amt > 0:
173 bar += '>'
177 bar += '>'
174 bar += ' ' * (progwidth - amt)
178 bar += ' ' * (progwidth - amt)
175 else:
179 else:
176 progwidth -= 3
180 progwidth -= 3
177 self.indetcount += 1
181 self.indetcount += 1
178 # mod the count by twice the width so we can make the
182 # mod the count by twice the width so we can make the
179 # cursor bounce between the right and left sides
183 # cursor bounce between the right and left sides
180 amt = self.indetcount % (2 * progwidth)
184 amt = self.indetcount % (2 * progwidth)
181 amt -= progwidth
185 amt -= progwidth
182 bar = (' ' * int(progwidth - abs(amt)) + '<=>' +
186 bar = (' ' * int(progwidth - abs(amt)) + '<=>' +
183 ' ' * int(abs(amt)))
187 ' ' * int(abs(amt)))
184 prog = ''.join(('[', bar , ']'))
188 prog = ''.join(('[', bar , ']'))
185 out = spacejoin(head, prog, tail)
189 out = spacejoin(head, prog, tail)
186 else:
190 else:
187 out = spacejoin(head, tail)
191 out = spacejoin(head, tail)
188 sys.stderr.write('\r' + encoding.trim(out, termwidth))
192 sys.stderr.write('\r' + encoding.trim(out, termwidth))
189 self.lasttopic = topic
193 self.lasttopic = topic
190 sys.stderr.flush()
194 sys.stderr.flush()
191
195
192 def clear(self):
196 def clear(self):
193 if not shouldprint(self.ui):
197 if not shouldprint(self.ui):
194 return
198 return
195 sys.stderr.write('\r%s\r' % (' ' * self.width()))
199 sys.stderr.write('\r%s\r' % (' ' * self.width()))
196
200
197 def complete(self):
201 def complete(self):
198 if not shouldprint(self.ui):
202 if not shouldprint(self.ui):
199 return
203 return
200 if self.ui.configbool('progress', 'clear-complete', default=True):
204 if self.ui.configbool('progress', 'clear-complete', default=True):
201 self.clear()
205 self.clear()
202 else:
206 else:
203 sys.stderr.write('\n')
207 sys.stderr.write('\n')
204 sys.stderr.flush()
208 sys.stderr.flush()
205
209
206 def width(self):
210 def width(self):
207 tw = self.ui.termwidth()
211 tw = self.ui.termwidth()
208 return min(int(self.ui.config('progress', 'width', default=tw)), tw)
212 return min(int(self.ui.config('progress', 'width', default=tw)), tw)
209
213
210 def estimate(self, topic, pos, total, now):
214 def estimate(self, topic, pos, total, now):
211 if total is None:
215 if total is None:
212 return ''
216 return ''
213 initialpos = self.startvals[topic]
217 initialpos = self.startvals[topic]
214 target = total - initialpos
218 target = total - initialpos
215 delta = pos - initialpos
219 delta = pos - initialpos
216 if delta > 0:
220 if delta > 0:
217 elapsed = now - self.starttimes[topic]
221 elapsed = now - self.starttimes[topic]
218 if elapsed > float(
222 if elapsed > float(
219 self.ui.config('progress', 'estimate', default=2)):
223 self.ui.config('progress', 'estimate', default=2)):
220 seconds = (elapsed * (target - delta)) // delta + 1
224 seconds = (elapsed * (target - delta)) // delta + 1
221 return fmtremaining(seconds)
225 return fmtremaining(seconds)
222 return ''
226 return ''
223
227
224 def speed(self, topic, pos, unit, now):
228 def speed(self, topic, pos, unit, now):
225 initialpos = self.startvals[topic]
229 initialpos = self.startvals[topic]
226 delta = pos - initialpos
230 delta = pos - initialpos
227 elapsed = now - self.starttimes[topic]
231 elapsed = now - self.starttimes[topic]
228 if elapsed > float(
232 if elapsed > float(
229 self.ui.config('progress', 'estimate', default=2)):
233 self.ui.config('progress', 'estimate', default=2)):
230 return _('%d %s/sec') % (delta / elapsed, unit)
234 return _('%d %s/sec') % (delta / elapsed, unit)
231 return ''
235 return ''
232
236
233 def _oktoprint(self, now):
237 def _oktoprint(self, now):
234 '''Check if conditions are met to print - e.g. changedelay elapsed'''
238 '''Check if conditions are met to print - e.g. changedelay elapsed'''
235 if (self.lasttopic is None # first time we printed
239 if (self.lasttopic is None # first time we printed
236 # not a topic change
240 # not a topic change
237 or self.curtopic == self.lasttopic
241 or self.curtopic == self.lasttopic
238 # it's been long enough we should print anyway
242 # it's been long enough we should print anyway
239 or now - self.lastprint >= self.changedelay):
243 or now - self.lastprint >= self.changedelay):
240 return True
244 return True
241 else:
245 else:
242 return False
246 return False
243
247
244 def progress(self, topic, pos, item='', unit='', total=None):
248 def progress(self, topic, pos, item='', unit='', total=None):
245 now = time.time()
249 now = time.time()
246 self._refreshlock.acquire()
250 self._refreshlock.acquire()
247 try:
251 try:
248 if pos is None:
252 if pos is None:
249 self.starttimes.pop(topic, None)
253 self.starttimes.pop(topic, None)
250 self.startvals.pop(topic, None)
254 self.startvals.pop(topic, None)
251 self.topicstates.pop(topic, None)
255 self.topicstates.pop(topic, None)
252 # reset the progress bar if this is the outermost topic
256 # reset the progress bar if this is the outermost topic
253 if self.topics and self.topics[0] == topic and self.printed:
257 if self.topics and self.topics[0] == topic and self.printed:
254 self.complete()
258 self.complete()
255 self.resetstate()
259 self.resetstate()
256 # truncate the list of topics assuming all topics within
260 # truncate the list of topics assuming all topics within
257 # this one are also closed
261 # this one are also closed
258 if topic in self.topics:
262 if topic in self.topics:
259 self.topics = self.topics[:self.topics.index(topic)]
263 self.topics = self.topics[:self.topics.index(topic)]
260 # reset the last topic to the one we just unwound to,
264 # reset the last topic to the one we just unwound to,
261 # so that higher-level topics will be stickier than
265 # so that higher-level topics will be stickier than
262 # lower-level topics
266 # lower-level topics
263 if self.topics:
267 if self.topics:
264 self.lasttopic = self.topics[-1]
268 self.lasttopic = self.topics[-1]
265 else:
269 else:
266 self.lasttopic = None
270 self.lasttopic = None
267 else:
271 else:
268 if topic not in self.topics:
272 if topic not in self.topics:
269 self.starttimes[topic] = now
273 self.starttimes[topic] = now
270 self.startvals[topic] = pos
274 self.startvals[topic] = pos
271 self.topics.append(topic)
275 self.topics.append(topic)
272 self.topicstates[topic] = pos, item, unit, total
276 self.topicstates[topic] = pos, item, unit, total
273 self.curtopic = topic
277 self.curtopic = topic
274 if now - self.lastprint >= self.refresh and self.topics:
278 if now - self.lastprint >= self.refresh and self.topics:
275 if self._oktoprint(now):
279 if self._oktoprint(now):
276 self.lastprint = now
280 self.lastprint = now
277 self.show(now, topic, *self.topicstates[topic])
281 self.show(now, topic, *self.topicstates[topic])
278 finally:
282 finally:
279 self._refreshlock.release()
283 self._refreshlock.release()
280
284
281 _singleton = None
285 _singleton = None
282
286
283 def uisetup(ui):
287 def uisetup(ui):
284 global _singleton
288 global _singleton
285 class progressui(ui.__class__):
289 class progressui(ui.__class__):
286 _progbar = None
290 _progbar = None
287
291
288 def _quiet(self):
292 def _quiet(self):
289 return self.debugflag or self.quiet
293 return self.debugflag or self.quiet
290
294
291 def progress(self, *args, **opts):
295 def progress(self, *args, **opts):
292 if not self._quiet():
296 if not self._quiet():
293 self._progbar.progress(*args, **opts)
297 self._progbar.progress(*args, **opts)
294 return super(progressui, self).progress(*args, **opts)
298 return super(progressui, self).progress(*args, **opts)
295
299
296 def write(self, *args, **opts):
300 def write(self, *args, **opts):
297 if not self._quiet() and self._progbar.printed:
301 if not self._quiet() and self._progbar.printed:
298 self._progbar.clear()
302 self._progbar.clear()
299 return super(progressui, self).write(*args, **opts)
303 return super(progressui, self).write(*args, **opts)
300
304
301 def write_err(self, *args, **opts):
305 def write_err(self, *args, **opts):
302 if not self._quiet() and self._progbar.printed:
306 if not self._quiet() and self._progbar.printed:
303 self._progbar.clear()
307 self._progbar.clear()
304 return super(progressui, self).write_err(*args, **opts)
308 return super(progressui, self).write_err(*args, **opts)
305
309
306 # Apps that derive a class from ui.ui() can use
310 # Apps that derive a class from ui.ui() can use
307 # setconfig('progress', 'disable', 'True') to disable this extension
311 # setconfig('progress', 'disable', 'True') to disable this extension
308 if ui.configbool('progress', 'disable'):
312 if ui.configbool('progress', 'disable'):
309 return
313 return
310 if shouldprint(ui) and not ui.debugflag and not ui.quiet:
314 if shouldprint(ui) and not ui.debugflag and not ui.quiet:
311 ui.__class__ = progressui
315 ui.__class__ = progressui
312 # we instantiate one globally shared progress bar to avoid
316 # we instantiate one globally shared progress bar to avoid
313 # competing progress bars when multiple UI objects get created
317 # competing progress bars when multiple UI objects get created
314 if not progressui._progbar:
318 if not progressui._progbar:
315 if _singleton is None:
319 if _singleton is None:
316 _singleton = progbar(ui)
320 _singleton = progbar(ui)
317 progressui._progbar = _singleton
321 progressui._progbar = _singleton
318
322
319 def reposetup(ui, repo):
323 def reposetup(ui, repo):
320 uisetup(repo.ui)
324 uisetup(repo.ui)
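Because the extension wraps ui.progress(), any code that reports positions the way the patchbomb loop above does gets a bar for free. A minimal sketch of the calling convention (the function and the topic name are invented; ui is the Mercurial ui object handed to every command):

def copyfiles(ui, files):
    # topic, position, item=, unit=, total= -- the same arguments patchbomb passes
    for i, name in enumerate(files):
        ui.progress('copying', i, item=name, unit='files', total=len(files))
        # ... do the real work for `name` here ...
    ui.progress('copying', None)          # pos=None closes the topic and clears the bar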
@@ -1,115 +1,119 @@
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (http://mercurial.selenic.com/)
3 # This is a small extension for Mercurial (http://mercurial.selenic.com/)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS
6 # This program was inspired by the "cvspurge" script contained in CVS
7 # utilities (http://www.red-bean.com/cvsutils/).
7 # utilities (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # For help on the usage of "hg purge" use:
9 # For help on the usage of "hg purge" use:
10 # hg help purge
10 # hg help purge
11 #
11 #
12 # This program is free software; you can redistribute it and/or modify
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
15 # (at your option) any later version.
16 #
16 #
17 # This program is distributed in the hope that it will be useful,
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
20 # GNU General Public License for more details.
21 #
21 #
22 # You should have received a copy of the GNU General Public License
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
24
24
25 '''command to delete untracked files from the working directory'''
25 '''command to delete untracked files from the working directory'''
26
26
27 from mercurial import util, commands, cmdutil, scmutil
27 from mercurial import util, commands, cmdutil, scmutil
28 from mercurial.i18n import _
28 from mercurial.i18n import _
29 import os
29 import os
30
30
31 cmdtable = {}
31 cmdtable = {}
32 command = cmdutil.command(cmdtable)
32 command = cmdutil.command(cmdtable)
33 # Note for extension authors: ONLY specify testedwith = 'internal' for
34 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
35 # be specifying the version(s) of Mercurial they are tested with, or
36 # leave the attribute unspecified.
33 testedwith = 'internal'
37 testedwith = 'internal'
34
38
35 @command('purge|clean',
39 @command('purge|clean',
36 [('a', 'abort-on-err', None, _('abort if an error occurs')),
40 [('a', 'abort-on-err', None, _('abort if an error occurs')),
37 ('', 'all', None, _('purge ignored files too')),
41 ('', 'all', None, _('purge ignored files too')),
38 ('', 'dirs', None, _('purge empty directories')),
42 ('', 'dirs', None, _('purge empty directories')),
39 ('', 'files', None, _('purge files')),
43 ('', 'files', None, _('purge files')),
40 ('p', 'print', None, _('print filenames instead of deleting them')),
44 ('p', 'print', None, _('print filenames instead of deleting them')),
41 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
45 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
42 ' (implies -p/--print)')),
46 ' (implies -p/--print)')),
43 ] + commands.walkopts,
47 ] + commands.walkopts,
44 _('hg purge [OPTION]... [DIR]...'))
48 _('hg purge [OPTION]... [DIR]...'))
45 def purge(ui, repo, *dirs, **opts):
49 def purge(ui, repo, *dirs, **opts):
46 '''removes files not tracked by Mercurial
50 '''removes files not tracked by Mercurial
47
51
48 Delete files not known to Mercurial. This is useful to test local
52 Delete files not known to Mercurial. This is useful to test local
49 and uncommitted changes in an otherwise-clean source tree.
53 and uncommitted changes in an otherwise-clean source tree.
50
54
51 This means that purge will delete the following by default:
55 This means that purge will delete the following by default:
52
56
53 - Unknown files: files marked with "?" by :hg:`status`
57 - Unknown files: files marked with "?" by :hg:`status`
54 - Empty directories: in fact Mercurial ignores directories unless
58 - Empty directories: in fact Mercurial ignores directories unless
55 they contain files under source control management
59 they contain files under source control management
56
60
57 But it will leave untouched:
61 But it will leave untouched:
58
62
59 - Modified and unmodified tracked files
63 - Modified and unmodified tracked files
60 - Ignored files (unless --all is specified)
64 - Ignored files (unless --all is specified)
61 - New files added to the repository (with :hg:`add`)
65 - New files added to the repository (with :hg:`add`)
62
66
63 The --files and --dirs options can be used to direct purge to delete
67 The --files and --dirs options can be used to direct purge to delete
64 only files, only directories, or both. If neither option is given,
68 only files, only directories, or both. If neither option is given,
65 both will be deleted.
69 both will be deleted.
66
70
67 If directories are given on the command line, only files in these
71 If directories are given on the command line, only files in these
68 directories are considered.
72 directories are considered.
69
73
70 Be careful with purge, as you could irreversibly delete some files
74 Be careful with purge, as you could irreversibly delete some files
71 you forgot to add to the repository. If you only want to print the
75 you forgot to add to the repository. If you only want to print the
72 list of files that this program would delete, use the --print
76 list of files that this program would delete, use the --print
73 option.
77 option.
74 '''
78 '''
75 act = not opts['print']
79 act = not opts['print']
76 eol = '\n'
80 eol = '\n'
77 if opts['print0']:
81 if opts['print0']:
78 eol = '\0'
82 eol = '\0'
79 act = False # --print0 implies --print
83 act = False # --print0 implies --print
80 removefiles = opts['files']
84 removefiles = opts['files']
81 removedirs = opts['dirs']
85 removedirs = opts['dirs']
82 if not removefiles and not removedirs:
86 if not removefiles and not removedirs:
83 removefiles = True
87 removefiles = True
84 removedirs = True
88 removedirs = True
85
89
86 def remove(remove_func, name):
90 def remove(remove_func, name):
87 if act:
91 if act:
88 try:
92 try:
89 remove_func(repo.wjoin(name))
93 remove_func(repo.wjoin(name))
90 except OSError:
94 except OSError:
91 m = _('%s cannot be removed') % name
95 m = _('%s cannot be removed') % name
92 if opts['abort_on_err']:
96 if opts['abort_on_err']:
93 raise util.Abort(m)
97 raise util.Abort(m)
94 ui.warn(_('warning: %s\n') % m)
98 ui.warn(_('warning: %s\n') % m)
95 else:
99 else:
96 ui.write('%s%s' % (name, eol))
100 ui.write('%s%s' % (name, eol))
97
101
98 match = scmutil.match(repo[None], dirs, opts)
102 match = scmutil.match(repo[None], dirs, opts)
99 if removedirs:
103 if removedirs:
100 directories = []
104 directories = []
101 match.explicitdir = match.traversedir = directories.append
105 match.explicitdir = match.traversedir = directories.append
102 status = repo.status(match=match, ignored=opts['all'], unknown=True)
106 status = repo.status(match=match, ignored=opts['all'], unknown=True)
103
107
104 if removefiles:
108 if removefiles:
105 for f in sorted(status.unknown + status.ignored):
109 for f in sorted(status.unknown + status.ignored):
106 if act:
110 if act:
107 ui.note(_('removing file %s\n') % f)
111 ui.note(_('removing file %s\n') % f)
108 remove(util.unlink, f)
112 remove(util.unlink, f)
109
113
110 if removedirs:
114 if removedirs:
111 for f in sorted(directories, reverse=True):
115 for f in sorted(directories, reverse=True):
112 if match(f) and not os.listdir(repo.wjoin(f)):
116 if match(f) and not os.listdir(repo.wjoin(f)):
113 if act:
117 if act:
114 ui.note(_('removing directory %s\n') % f)
118 ui.note(_('removing directory %s\n') % f)
115 remove(os.rmdir, f)
119 remove(os.rmdir, f)
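The --print/--print0 handling above boils down to one decision made up front: either act on each name or write it out with the chosen record separator. A standalone sketch of that pattern in plain Python (not the Mercurial API; the paths are made up):

import os, sys

def purge_like(paths, act=False, print0=False):
    eol = '\0' if print0 else '\n'
    # remove deepest paths first, mirroring sorted(..., reverse=True) above
    for name in sorted(paths, reverse=True):
        if act:
            (os.rmdir if os.path.isdir(name) else os.unlink)(name)
        else:
            sys.stdout.write(name + eol)

purge_like(['build/tmp.o', 'build/'])                   # dry run: just print
# purge_like(['build/tmp.o', 'build/'], act=True)       # actually delete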
The remaining modified files in this changeset are too big and their content was truncated.