##// END OF EJS Templates
util: extract all date-related utils in utils/dateutil module...
Boris Feld -
r36625:c6061cad default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -1,516 +1,516 b''
1 # synthrepo.py - repo synthesis
1 # synthrepo.py - repo synthesis
2 #
2 #
3 # Copyright 2012 Facebook
3 # Copyright 2012 Facebook
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''synthesize structurally interesting change history
8 '''synthesize structurally interesting change history
9
9
10 This extension is useful for creating a repository with properties
10 This extension is useful for creating a repository with properties
11 that are statistically similar to an existing repository. During
11 that are statistically similar to an existing repository. During
12 analysis, a simple probability table is constructed from the history
12 analysis, a simple probability table is constructed from the history
13 of an existing repository. During synthesis, these properties are
13 of an existing repository. During synthesis, these properties are
14 reconstructed.
14 reconstructed.
15
15
16 Properties that are analyzed and synthesized include the following:
16 Properties that are analyzed and synthesized include the following:
17
17
18 - Lines added or removed when an existing file is modified
18 - Lines added or removed when an existing file is modified
19 - Number and sizes of files added
19 - Number and sizes of files added
20 - Number of files removed
20 - Number of files removed
21 - Line lengths
21 - Line lengths
22 - Topological distance to parent changeset(s)
22 - Topological distance to parent changeset(s)
23 - Probability of a commit being a merge
23 - Probability of a commit being a merge
24 - Probability of a newly added file being added to a new directory
24 - Probability of a newly added file being added to a new directory
25 - Interarrival time, and time zone, of commits
25 - Interarrival time, and time zone, of commits
26 - Number of files in each directory
26 - Number of files in each directory
27
27
28 A few obvious properties that are not currently handled realistically:
28 A few obvious properties that are not currently handled realistically:
29
29
30 - Merges are treated as regular commits with two parents, which is not
30 - Merges are treated as regular commits with two parents, which is not
31 realistic
31 realistic
32 - Modifications are not treated as operations on hunks of lines, but
32 - Modifications are not treated as operations on hunks of lines, but
33 as insertions and deletions of randomly chosen single lines
33 as insertions and deletions of randomly chosen single lines
34 - Committer ID (always random)
34 - Committer ID (always random)
35 - Executability of files
35 - Executability of files
36 - Symlinks and binary files are ignored
36 - Symlinks and binary files are ignored
37 '''
37 '''
38
38
39 from __future__ import absolute_import
39 from __future__ import absolute_import
40 import bisect
40 import bisect
41 import collections
41 import collections
42 import itertools
42 import itertools
43 import json
43 import json
44 import os
44 import os
45 import random
45 import random
46 import sys
46 import sys
47 import time
47 import time
48
48
49 from mercurial.i18n import _
49 from mercurial.i18n import _
50 from mercurial.node import (
50 from mercurial.node import (
51 nullid,
51 nullid,
52 nullrev,
52 nullrev,
53 short,
53 short,
54 )
54 )
55 from mercurial import (
55 from mercurial import (
56 context,
56 context,
57 error,
57 error,
58 hg,
58 hg,
59 patch,
59 patch,
60 registrar,
60 registrar,
61 scmutil,
61 scmutil,
62 util,
63 )
62 )
63 from mercurial.utils import dateutil
64
64
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
67 # be specifying the version(s) of Mercurial they are tested with, or
67 # be specifying the version(s) of Mercurial they are tested with, or
68 # leave the attribute unspecified.
68 # leave the attribute unspecified.
69 testedwith = 'ships-with-hg-core'
69 testedwith = 'ships-with-hg-core'
70
70
71 cmdtable = {}
71 cmdtable = {}
72 command = registrar.command(cmdtable)
72 command = registrar.command(cmdtable)
73
73
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
75
75
76 def zerodict():
76 def zerodict():
77 return collections.defaultdict(lambda: 0)
77 return collections.defaultdict(lambda: 0)
78
78
79 def roundto(x, k):
79 def roundto(x, k):
80 if x > k * 2:
80 if x > k * 2:
81 return int(round(x / float(k)) * k)
81 return int(round(x / float(k)) * k)
82 return int(round(x))
82 return int(round(x))
83
83
84 def parsegitdiff(lines):
84 def parsegitdiff(lines):
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
86 binary = False
86 binary = False
87 for line in lines:
87 for line in lines:
88 start = line[:6]
88 start = line[:6]
89 if start == 'diff -':
89 if start == 'diff -':
90 if filename:
90 if filename:
91 yield filename, mar, lineadd, lineremove, binary
91 yield filename, mar, lineadd, lineremove, binary
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
93 filename = patch.gitre.match(line).group(1)
93 filename = patch.gitre.match(line).group(1)
94 elif start in newfile:
94 elif start in newfile:
95 mar = 'a'
95 mar = 'a'
96 elif start == 'GIT bi':
96 elif start == 'GIT bi':
97 binary = True
97 binary = True
98 elif start == 'delete':
98 elif start == 'delete':
99 mar = 'r'
99 mar = 'r'
100 elif start:
100 elif start:
101 s = start[0]
101 s = start[0]
102 if s == '-' and not line.startswith('--- '):
102 if s == '-' and not line.startswith('--- '):
103 lineremove += 1
103 lineremove += 1
104 elif s == '+' and not line.startswith('+++ '):
104 elif s == '+' and not line.startswith('+++ '):
105 lineadd[roundto(len(line) - 1, 5)] += 1
105 lineadd[roundto(len(line) - 1, 5)] += 1
106 if filename:
106 if filename:
107 yield filename, mar, lineadd, lineremove, binary
107 yield filename, mar, lineadd, lineremove, binary
108
108
109 @command('analyze',
109 @command('analyze',
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
112 _('hg analyze'), optionalrepo=True)
112 _('hg analyze'), optionalrepo=True)
113 def analyze(ui, repo, *revs, **opts):
113 def analyze(ui, repo, *revs, **opts):
114 '''create a simple model of a repository to use for later synthesis
114 '''create a simple model of a repository to use for later synthesis
115
115
116 This command examines every changeset in the given range (or all
116 This command examines every changeset in the given range (or all
117 of history if none are specified) and creates a simple statistical
117 of history if none are specified) and creates a simple statistical
118 model of the history of the repository. It also measures the directory
118 model of the history of the repository. It also measures the directory
119 structure of the repository as checked out.
119 structure of the repository as checked out.
120
120
121 The model is written out to a JSON file, and can be used by
121 The model is written out to a JSON file, and can be used by
122 :hg:`synthesize` to create or augment a repository with synthetic
122 :hg:`synthesize` to create or augment a repository with synthetic
123 commits that have a structure that is statistically similar to the
123 commits that have a structure that is statistically similar to the
124 analyzed repository.
124 analyzed repository.
125 '''
125 '''
126 root = repo.root
126 root = repo.root
127 if not root.endswith(os.path.sep):
127 if not root.endswith(os.path.sep):
128 root += os.path.sep
128 root += os.path.sep
129
129
130 revs = list(revs)
130 revs = list(revs)
131 revs.extend(opts['rev'])
131 revs.extend(opts['rev'])
132 if not revs:
132 if not revs:
133 revs = [':']
133 revs = [':']
134
134
135 output = opts['output']
135 output = opts['output']
136 if not output:
136 if not output:
137 output = os.path.basename(root) + '.json'
137 output = os.path.basename(root) + '.json'
138
138
139 if output == '-':
139 if output == '-':
140 fp = sys.stdout
140 fp = sys.stdout
141 else:
141 else:
142 fp = open(output, 'w')
142 fp = open(output, 'w')
143
143
144 # Always obtain file counts of each directory in the given root directory.
144 # Always obtain file counts of each directory in the given root directory.
145 def onerror(e):
145 def onerror(e):
146 ui.warn(_('error walking directory structure: %s\n') % e)
146 ui.warn(_('error walking directory structure: %s\n') % e)
147
147
148 dirs = {}
148 dirs = {}
149 rootprefixlen = len(root)
149 rootprefixlen = len(root)
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
151 dirpathfromroot = dirpath[rootprefixlen:]
151 dirpathfromroot = dirpath[rootprefixlen:]
152 dirs[dirpathfromroot] = len(filenames)
152 dirs[dirpathfromroot] = len(filenames)
153 if '.hg' in dirnames:
153 if '.hg' in dirnames:
154 dirnames.remove('.hg')
154 dirnames.remove('.hg')
155
155
156 lineschanged = zerodict()
156 lineschanged = zerodict()
157 children = zerodict()
157 children = zerodict()
158 p1distance = zerodict()
158 p1distance = zerodict()
159 p2distance = zerodict()
159 p2distance = zerodict()
160 linesinfilesadded = zerodict()
160 linesinfilesadded = zerodict()
161 fileschanged = zerodict()
161 fileschanged = zerodict()
162 filesadded = zerodict()
162 filesadded = zerodict()
163 filesremoved = zerodict()
163 filesremoved = zerodict()
164 linelengths = zerodict()
164 linelengths = zerodict()
165 interarrival = zerodict()
165 interarrival = zerodict()
166 parents = zerodict()
166 parents = zerodict()
167 dirsadded = zerodict()
167 dirsadded = zerodict()
168 tzoffset = zerodict()
168 tzoffset = zerodict()
169
169
170 # If a mercurial repo is available, also model the commit history.
170 # If a mercurial repo is available, also model the commit history.
171 if repo:
171 if repo:
172 revs = scmutil.revrange(repo, revs)
172 revs = scmutil.revrange(repo, revs)
173 revs.sort()
173 revs.sort()
174
174
175 progress = ui.progress
175 progress = ui.progress
176 _analyzing = _('analyzing')
176 _analyzing = _('analyzing')
177 _changesets = _('changesets')
177 _changesets = _('changesets')
178 _total = len(revs)
178 _total = len(revs)
179
179
180 for i, rev in enumerate(revs):
180 for i, rev in enumerate(revs):
181 progress(_analyzing, i, unit=_changesets, total=_total)
181 progress(_analyzing, i, unit=_changesets, total=_total)
182 ctx = repo[rev]
182 ctx = repo[rev]
183 pl = ctx.parents()
183 pl = ctx.parents()
184 pctx = pl[0]
184 pctx = pl[0]
185 prev = pctx.rev()
185 prev = pctx.rev()
186 children[prev] += 1
186 children[prev] += 1
187 p1distance[rev - prev] += 1
187 p1distance[rev - prev] += 1
188 parents[len(pl)] += 1
188 parents[len(pl)] += 1
189 tzoffset[ctx.date()[1]] += 1
189 tzoffset[ctx.date()[1]] += 1
190 if len(pl) > 1:
190 if len(pl) > 1:
191 p2distance[rev - pl[1].rev()] += 1
191 p2distance[rev - pl[1].rev()] += 1
192 if prev == rev - 1:
192 if prev == rev - 1:
193 lastctx = pctx
193 lastctx = pctx
194 else:
194 else:
195 lastctx = repo[rev - 1]
195 lastctx = repo[rev - 1]
196 if lastctx.rev() != nullrev:
196 if lastctx.rev() != nullrev:
197 timedelta = ctx.date()[0] - lastctx.date()[0]
197 timedelta = ctx.date()[0] - lastctx.date()[0]
198 interarrival[roundto(timedelta, 300)] += 1
198 interarrival[roundto(timedelta, 300)] += 1
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
202 if isbin:
202 if isbin:
203 continue
203 continue
204 added = sum(lineadd.itervalues(), 0)
204 added = sum(lineadd.itervalues(), 0)
205 if mar == 'm':
205 if mar == 'm':
206 if added and lineremove:
206 if added and lineremove:
207 lineschanged[roundto(added, 5),
207 lineschanged[roundto(added, 5),
208 roundto(lineremove, 5)] += 1
208 roundto(lineremove, 5)] += 1
209 filechanges += 1
209 filechanges += 1
210 elif mar == 'a':
210 elif mar == 'a':
211 fileadds += 1
211 fileadds += 1
212 if '/' in filename:
212 if '/' in filename:
213 filedir = filename.rsplit('/', 1)[0]
213 filedir = filename.rsplit('/', 1)[0]
214 if filedir not in pctx.dirs():
214 if filedir not in pctx.dirs():
215 diradds += 1
215 diradds += 1
216 linesinfilesadded[roundto(added, 5)] += 1
216 linesinfilesadded[roundto(added, 5)] += 1
217 elif mar == 'r':
217 elif mar == 'r':
218 fileremoves += 1
218 fileremoves += 1
219 for length, count in lineadd.iteritems():
219 for length, count in lineadd.iteritems():
220 linelengths[length] += count
220 linelengths[length] += count
221 fileschanged[filechanges] += 1
221 fileschanged[filechanges] += 1
222 filesadded[fileadds] += 1
222 filesadded[fileadds] += 1
223 dirsadded[diradds] += 1
223 dirsadded[diradds] += 1
224 filesremoved[fileremoves] += 1
224 filesremoved[fileremoves] += 1
225
225
226 invchildren = zerodict()
226 invchildren = zerodict()
227
227
228 for rev, count in children.iteritems():
228 for rev, count in children.iteritems():
229 invchildren[count] += 1
229 invchildren[count] += 1
230
230
231 if output != '-':
231 if output != '-':
232 ui.status(_('writing output to %s\n') % output)
232 ui.status(_('writing output to %s\n') % output)
233
233
234 def pronk(d):
234 def pronk(d):
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
236
236
237 json.dump({'revs': len(revs),
237 json.dump({'revs': len(revs),
238 'initdirs': pronk(dirs),
238 'initdirs': pronk(dirs),
239 'lineschanged': pronk(lineschanged),
239 'lineschanged': pronk(lineschanged),
240 'children': pronk(invchildren),
240 'children': pronk(invchildren),
241 'fileschanged': pronk(fileschanged),
241 'fileschanged': pronk(fileschanged),
242 'filesadded': pronk(filesadded),
242 'filesadded': pronk(filesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
244 'dirsadded': pronk(dirsadded),
244 'dirsadded': pronk(dirsadded),
245 'filesremoved': pronk(filesremoved),
245 'filesremoved': pronk(filesremoved),
246 'linelengths': pronk(linelengths),
246 'linelengths': pronk(linelengths),
247 'parents': pronk(parents),
247 'parents': pronk(parents),
248 'p1distance': pronk(p1distance),
248 'p1distance': pronk(p1distance),
249 'p2distance': pronk(p2distance),
249 'p2distance': pronk(p2distance),
250 'interarrival': pronk(interarrival),
250 'interarrival': pronk(interarrival),
251 'tzoffset': pronk(tzoffset),
251 'tzoffset': pronk(tzoffset),
252 },
252 },
253 fp)
253 fp)
254 fp.close()
254 fp.close()
255
255
256 @command('synthesize',
256 @command('synthesize',
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
260 _('hg synthesize [OPTION].. DESCFILE'))
260 _('hg synthesize [OPTION].. DESCFILE'))
261 def synthesize(ui, repo, descpath, **opts):
261 def synthesize(ui, repo, descpath, **opts):
262 '''synthesize commits based on a model of an existing repository
262 '''synthesize commits based on a model of an existing repository
263
263
264 The model must have been generated by :hg:`analyze`. Commits will
264 The model must have been generated by :hg:`analyze`. Commits will
265 be generated randomly according to the probabilities described in
265 be generated randomly according to the probabilities described in
266 the model. If --initfiles is set, the repository will be seeded with
266 the model. If --initfiles is set, the repository will be seeded with
267 the given number files following the modeled repository's directory
267 the given number files following the modeled repository's directory
268 structure.
268 structure.
269
269
270 When synthesizing new content, commit descriptions, and user
270 When synthesizing new content, commit descriptions, and user
271 names, words will be chosen randomly from a dictionary that is
271 names, words will be chosen randomly from a dictionary that is
272 presumed to contain one word per line. Use --dict to specify the
272 presumed to contain one word per line. Use --dict to specify the
273 path to an alternate dictionary to use.
273 path to an alternate dictionary to use.
274 '''
274 '''
275 try:
275 try:
276 fp = hg.openpath(ui, descpath)
276 fp = hg.openpath(ui, descpath)
277 except Exception as err:
277 except Exception as err:
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
279 desc = json.load(fp)
279 desc = json.load(fp)
280 fp.close()
280 fp.close()
281
281
282 def cdf(l):
282 def cdf(l):
283 if not l:
283 if not l:
284 return [], []
284 return [], []
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
286 t = float(sum(probs, 0))
286 t = float(sum(probs, 0))
287 s, cdfs = 0, []
287 s, cdfs = 0, []
288 for v in probs:
288 for v in probs:
289 s += v
289 s += v
290 cdfs.append(s / t)
290 cdfs.append(s / t)
291 return vals, cdfs
291 return vals, cdfs
292
292
293 lineschanged = cdf(desc['lineschanged'])
293 lineschanged = cdf(desc['lineschanged'])
294 fileschanged = cdf(desc['fileschanged'])
294 fileschanged = cdf(desc['fileschanged'])
295 filesadded = cdf(desc['filesadded'])
295 filesadded = cdf(desc['filesadded'])
296 dirsadded = cdf(desc['dirsadded'])
296 dirsadded = cdf(desc['dirsadded'])
297 filesremoved = cdf(desc['filesremoved'])
297 filesremoved = cdf(desc['filesremoved'])
298 linelengths = cdf(desc['linelengths'])
298 linelengths = cdf(desc['linelengths'])
299 parents = cdf(desc['parents'])
299 parents = cdf(desc['parents'])
300 p1distance = cdf(desc['p1distance'])
300 p1distance = cdf(desc['p1distance'])
301 p2distance = cdf(desc['p2distance'])
301 p2distance = cdf(desc['p2distance'])
302 interarrival = cdf(desc['interarrival'])
302 interarrival = cdf(desc['interarrival'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
304 tzoffset = cdf(desc['tzoffset'])
304 tzoffset = cdf(desc['tzoffset'])
305
305
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
307 try:
307 try:
308 fp = open(dictfile, 'rU')
308 fp = open(dictfile, 'rU')
309 except IOError as err:
309 except IOError as err:
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
311 words = fp.read().splitlines()
311 words = fp.read().splitlines()
312 fp.close()
312 fp.close()
313
313
314 initdirs = {}
314 initdirs = {}
315 if desc['initdirs']:
315 if desc['initdirs']:
316 for k, v in desc['initdirs']:
316 for k, v in desc['initdirs']:
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
318 initdirs = renamedirs(initdirs, words)
318 initdirs = renamedirs(initdirs, words)
319 initdirscdf = cdf(initdirs)
319 initdirscdf = cdf(initdirs)
320
320
321 def pick(cdf):
321 def pick(cdf):
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
323
323
324 def pickpath():
324 def pickpath():
325 return os.path.join(pick(initdirscdf), random.choice(words))
325 return os.path.join(pick(initdirscdf), random.choice(words))
326
326
327 def makeline(minimum=0):
327 def makeline(minimum=0):
328 total = max(minimum, pick(linelengths))
328 total = max(minimum, pick(linelengths))
329 c, l = 0, []
329 c, l = 0, []
330 while c < total:
330 while c < total:
331 w = random.choice(words)
331 w = random.choice(words)
332 c += len(w) + 1
332 c += len(w) + 1
333 l.append(w)
333 l.append(w)
334 return ' '.join(l)
334 return ' '.join(l)
335
335
336 wlock = repo.wlock()
336 wlock = repo.wlock()
337 lock = repo.lock()
337 lock = repo.lock()
338
338
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
340
340
341 progress = ui.progress
341 progress = ui.progress
342 _synthesizing = _('synthesizing')
342 _synthesizing = _('synthesizing')
343 _files = _('initial files')
343 _files = _('initial files')
344 _changesets = _('changesets')
344 _changesets = _('changesets')
345
345
346 # Synthesize a single initial revision adding files to the repo according
346 # Synthesize a single initial revision adding files to the repo according
347 # to the modeled directory structure.
347 # to the modeled directory structure.
348 initcount = int(opts['initfiles'])
348 initcount = int(opts['initfiles'])
349 if initcount and initdirs:
349 if initcount and initdirs:
350 pctx = repo[None].parents()[0]
350 pctx = repo[None].parents()[0]
351 dirs = set(pctx.dirs())
351 dirs = set(pctx.dirs())
352 files = {}
352 files = {}
353
353
354 def validpath(path):
354 def validpath(path):
355 # Don't pick filenames which are already directory names.
355 # Don't pick filenames which are already directory names.
356 if path in dirs:
356 if path in dirs:
357 return False
357 return False
358 # Don't pick directories which were used as file names.
358 # Don't pick directories which were used as file names.
359 while path:
359 while path:
360 if path in files:
360 if path in files:
361 return False
361 return False
362 path = os.path.dirname(path)
362 path = os.path.dirname(path)
363 return True
363 return True
364
364
365 for i in xrange(0, initcount):
365 for i in xrange(0, initcount):
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
367
367
368 path = pickpath()
368 path = pickpath()
369 while not validpath(path):
369 while not validpath(path):
370 path = pickpath()
370 path = pickpath()
371 data = '%s contents\n' % path
371 data = '%s contents\n' % path
372 files[path] = data
372 files[path] = data
373 dir = os.path.dirname(path)
373 dir = os.path.dirname(path)
374 while dir and dir not in dirs:
374 while dir and dir not in dirs:
375 dirs.add(dir)
375 dirs.add(dir)
376 dir = os.path.dirname(dir)
376 dir = os.path.dirname(dir)
377
377
378 def filectxfn(repo, memctx, path):
378 def filectxfn(repo, memctx, path):
379 return context.memfilectx(repo, memctx, path, files[path])
379 return context.memfilectx(repo, memctx, path, files[path])
380
380
381 ui.progress(_synthesizing, None)
381 ui.progress(_synthesizing, None)
382 message = 'synthesized wide repo with %d files' % (len(files),)
382 message = 'synthesized wide repo with %d files' % (len(files),)
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
384 files, filectxfn, ui.username(),
384 files, filectxfn, ui.username(),
385 '%d %d' % util.makedate())
385 '%d %d' % dateutil.makedate())
386 initnode = mc.commit()
386 initnode = mc.commit()
387 if ui.debugflag:
387 if ui.debugflag:
388 hexfn = hex
388 hexfn = hex
389 else:
389 else:
390 hexfn = short
390 hexfn = short
391 ui.status(_('added commit %s with %d files\n')
391 ui.status(_('added commit %s with %d files\n')
392 % (hexfn(initnode), len(files)))
392 % (hexfn(initnode), len(files)))
393
393
394 # Synthesize incremental revisions to the repository, adding repo depth.
394 # Synthesize incremental revisions to the repository, adding repo depth.
395 count = int(opts['count'])
395 count = int(opts['count'])
396 heads = set(map(repo.changelog.rev, repo.heads()))
396 heads = set(map(repo.changelog.rev, repo.heads()))
397 for i in xrange(count):
397 for i in xrange(count):
398 progress(_synthesizing, i, unit=_changesets, total=count)
398 progress(_synthesizing, i, unit=_changesets, total=count)
399
399
400 node = repo.changelog.node
400 node = repo.changelog.node
401 revs = len(repo)
401 revs = len(repo)
402
402
403 def pickhead(heads, distance):
403 def pickhead(heads, distance):
404 if heads:
404 if heads:
405 lheads = sorted(heads)
405 lheads = sorted(heads)
406 rev = revs - min(pick(distance), revs)
406 rev = revs - min(pick(distance), revs)
407 if rev < lheads[-1]:
407 if rev < lheads[-1]:
408 rev = lheads[bisect.bisect_left(lheads, rev)]
408 rev = lheads[bisect.bisect_left(lheads, rev)]
409 else:
409 else:
410 rev = lheads[-1]
410 rev = lheads[-1]
411 return rev, node(rev)
411 return rev, node(rev)
412 return nullrev, nullid
412 return nullrev, nullid
413
413
414 r1 = revs - min(pick(p1distance), revs)
414 r1 = revs - min(pick(p1distance), revs)
415 p1 = node(r1)
415 p1 = node(r1)
416
416
417 # the number of heads will grow without bound if we use a pure
417 # the number of heads will grow without bound if we use a pure
418 # model, so artificially constrain their proliferation
418 # model, so artificially constrain their proliferation
419 toomanyheads = len(heads) > random.randint(1, 20)
419 toomanyheads = len(heads) > random.randint(1, 20)
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
422 else:
422 else:
423 r2, p2 = nullrev, nullid
423 r2, p2 = nullrev, nullid
424
424
425 pl = [p1, p2]
425 pl = [p1, p2]
426 pctx = repo[r1]
426 pctx = repo[r1]
427 mf = pctx.manifest()
427 mf = pctx.manifest()
428 mfk = mf.keys()
428 mfk = mf.keys()
429 changes = {}
429 changes = {}
430 if mfk:
430 if mfk:
431 for __ in xrange(pick(fileschanged)):
431 for __ in xrange(pick(fileschanged)):
432 for __ in xrange(10):
432 for __ in xrange(10):
433 fctx = pctx.filectx(random.choice(mfk))
433 fctx = pctx.filectx(random.choice(mfk))
434 path = fctx.path()
434 path = fctx.path()
435 if not (path in nevertouch or fctx.isbinary() or
435 if not (path in nevertouch or fctx.isbinary() or
436 'l' in fctx.flags()):
436 'l' in fctx.flags()):
437 break
437 break
438 lines = fctx.data().splitlines()
438 lines = fctx.data().splitlines()
439 add, remove = pick(lineschanged)
439 add, remove = pick(lineschanged)
440 for __ in xrange(remove):
440 for __ in xrange(remove):
441 if not lines:
441 if not lines:
442 break
442 break
443 del lines[random.randrange(0, len(lines))]
443 del lines[random.randrange(0, len(lines))]
444 for __ in xrange(add):
444 for __ in xrange(add):
445 lines.insert(random.randint(0, len(lines)), makeline())
445 lines.insert(random.randint(0, len(lines)), makeline())
446 path = fctx.path()
446 path = fctx.path()
447 changes[path] = '\n'.join(lines) + '\n'
447 changes[path] = '\n'.join(lines) + '\n'
448 for __ in xrange(pick(filesremoved)):
448 for __ in xrange(pick(filesremoved)):
449 path = random.choice(mfk)
449 path = random.choice(mfk)
450 for __ in xrange(10):
450 for __ in xrange(10):
451 path = random.choice(mfk)
451 path = random.choice(mfk)
452 if path not in changes:
452 if path not in changes:
453 break
453 break
454 if filesadded:
454 if filesadded:
455 dirs = list(pctx.dirs())
455 dirs = list(pctx.dirs())
456 dirs.insert(0, '')
456 dirs.insert(0, '')
457 for __ in xrange(pick(filesadded)):
457 for __ in xrange(pick(filesadded)):
458 pathstr = ''
458 pathstr = ''
459 while pathstr in dirs:
459 while pathstr in dirs:
460 path = [random.choice(dirs)]
460 path = [random.choice(dirs)]
461 if pick(dirsadded):
461 if pick(dirsadded):
462 path.append(random.choice(words))
462 path.append(random.choice(words))
463 path.append(random.choice(words))
463 path.append(random.choice(words))
464 pathstr = '/'.join(filter(None, path))
464 pathstr = '/'.join(filter(None, path))
465 data = '\n'.join(makeline()
465 data = '\n'.join(makeline()
466 for __ in xrange(pick(linesinfilesadded))) + '\n'
466 for __ in xrange(pick(linesinfilesadded))) + '\n'
467 changes[pathstr] = data
467 changes[pathstr] = data
468 def filectxfn(repo, memctx, path):
468 def filectxfn(repo, memctx, path):
469 if path not in changes:
469 if path not in changes:
470 return None
470 return None
471 return context.memfilectx(repo, memctx, path, changes[path])
471 return context.memfilectx(repo, memctx, path, changes[path])
472 if not changes:
472 if not changes:
473 continue
473 continue
474 if revs:
474 if revs:
475 date = repo['tip'].date()[0] + pick(interarrival)
475 date = repo['tip'].date()[0] + pick(interarrival)
476 else:
476 else:
477 date = time.time() - (86400 * count)
477 date = time.time() - (86400 * count)
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
479 date = min(0x7fffffff, max(0, date))
479 date = min(0x7fffffff, max(0, date))
480 user = random.choice(words) + '@' + random.choice(words)
480 user = random.choice(words) + '@' + random.choice(words)
481 mc = context.memctx(repo, pl, makeline(minimum=2),
481 mc = context.memctx(repo, pl, makeline(minimum=2),
482 sorted(changes),
482 sorted(changes),
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
484 newnode = mc.commit()
484 newnode = mc.commit()
485 heads.add(repo.changelog.rev(newnode))
485 heads.add(repo.changelog.rev(newnode))
486 heads.discard(r1)
486 heads.discard(r1)
487 heads.discard(r2)
487 heads.discard(r2)
488
488
489 lock.release()
489 lock.release()
490 wlock.release()
490 wlock.release()
491
491
492 def renamedirs(dirs, words):
492 def renamedirs(dirs, words):
493 '''Randomly rename the directory names in the per-dir file count dict.'''
493 '''Randomly rename the directory names in the per-dir file count dict.'''
494 wordgen = itertools.cycle(words)
494 wordgen = itertools.cycle(words)
495 replacements = {'': ''}
495 replacements = {'': ''}
496 def rename(dirpath):
496 def rename(dirpath):
497 '''Recursively rename the directory and all path prefixes.
497 '''Recursively rename the directory and all path prefixes.
498
498
499 The mapping from path to renamed path is stored for all path prefixes
499 The mapping from path to renamed path is stored for all path prefixes
500 as in dynamic programming, ensuring linear runtime and consistent
500 as in dynamic programming, ensuring linear runtime and consistent
501 renaming regardless of iteration order through the model.
501 renaming regardless of iteration order through the model.
502 '''
502 '''
503 if dirpath in replacements:
503 if dirpath in replacements:
504 return replacements[dirpath]
504 return replacements[dirpath]
505 head, _ = os.path.split(dirpath)
505 head, _ = os.path.split(dirpath)
506 if head:
506 if head:
507 head = rename(head)
507 head = rename(head)
508 else:
508 else:
509 head = ''
509 head = ''
510 renamed = os.path.join(head, next(wordgen))
510 renamed = os.path.join(head, next(wordgen))
511 replacements[dirpath] = renamed
511 replacements[dirpath] = renamed
512 return renamed
512 return renamed
513 result = []
513 result = []
514 for dirpath, count in dirs.iteritems():
514 for dirpath, count in dirs.iteritems():
515 result.append([rename(dirpath.lstrip(os.sep)), count])
515 result.append([rename(dirpath.lstrip(os.sep)), count])
516 return result
516 return result
@@ -1,250 +1,251 b''
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track config key.
12 The events that get logged can be configured via the blackbox.track config key.
13
13
14 Examples::
14 Examples::
15
15
16 [blackbox]
16 [blackbox]
17 track = *
17 track = *
18 # dirty is *EXPENSIVE* (slow);
18 # dirty is *EXPENSIVE* (slow);
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 dirty = True
20 dirty = True
21 # record the source of log messages
21 # record the source of log messages
22 logsource = True
22 logsource = True
23
23
24 [blackbox]
24 [blackbox]
25 track = command, commandfinish, commandexception, exthook, pythonhook
25 track = command, commandfinish, commandexception, exthook, pythonhook
26
26
27 [blackbox]
27 [blackbox]
28 track = incoming
28 track = incoming
29
29
30 [blackbox]
30 [blackbox]
31 # limit the size of a log file
31 # limit the size of a log file
32 maxsize = 1.5 MB
32 maxsize = 1.5 MB
33 # rotate up to N log files when the current one gets too big
33 # rotate up to N log files when the current one gets too big
34 maxfiles = 3
34 maxfiles = 3
35
35
36 """
36 """
37
37
38 from __future__ import absolute_import
38 from __future__ import absolute_import
39
39
40 import errno
40 import errno
41 import re
41 import re
42
42
43 from mercurial.i18n import _
43 from mercurial.i18n import _
44 from mercurial.node import hex
44 from mercurial.node import hex
45
45
46 from mercurial import (
46 from mercurial import (
47 encoding,
47 encoding,
48 registrar,
48 registrar,
49 ui as uimod,
49 ui as uimod,
50 util,
50 util,
51 )
51 )
52 from mercurial.utils import dateutil
52
53
53 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
54 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
54 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
55 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
55 # be specifying the version(s) of Mercurial they are tested with, or
56 # be specifying the version(s) of Mercurial they are tested with, or
56 # leave the attribute unspecified.
57 # leave the attribute unspecified.
57 testedwith = 'ships-with-hg-core'
58 testedwith = 'ships-with-hg-core'
58
59
59 cmdtable = {}
60 cmdtable = {}
60 command = registrar.command(cmdtable)
61 command = registrar.command(cmdtable)
61
62
62 configtable = {}
63 configtable = {}
63 configitem = registrar.configitem(configtable)
64 configitem = registrar.configitem(configtable)
64
65
65 configitem('blackbox', 'dirty',
66 configitem('blackbox', 'dirty',
66 default=False,
67 default=False,
67 )
68 )
68 configitem('blackbox', 'maxsize',
69 configitem('blackbox', 'maxsize',
69 default='1 MB',
70 default='1 MB',
70 )
71 )
71 configitem('blackbox', 'logsource',
72 configitem('blackbox', 'logsource',
72 default=False,
73 default=False,
73 )
74 )
74 configitem('blackbox', 'maxfiles',
75 configitem('blackbox', 'maxfiles',
75 default=7,
76 default=7,
76 )
77 )
77 configitem('blackbox', 'track',
78 configitem('blackbox', 'track',
78 default=lambda: ['*'],
79 default=lambda: ['*'],
79 )
80 )
80
81
81 lastui = None
82 lastui = None
82
83
83 def _openlogfile(ui, vfs):
84 def _openlogfile(ui, vfs):
84 def rotate(oldpath, newpath):
85 def rotate(oldpath, newpath):
85 try:
86 try:
86 vfs.unlink(newpath)
87 vfs.unlink(newpath)
87 except OSError as err:
88 except OSError as err:
88 if err.errno != errno.ENOENT:
89 if err.errno != errno.ENOENT:
89 ui.debug("warning: cannot remove '%s': %s\n" %
90 ui.debug("warning: cannot remove '%s': %s\n" %
90 (newpath, err.strerror))
91 (newpath, err.strerror))
91 try:
92 try:
92 if newpath:
93 if newpath:
93 vfs.rename(oldpath, newpath)
94 vfs.rename(oldpath, newpath)
94 except OSError as err:
95 except OSError as err:
95 if err.errno != errno.ENOENT:
96 if err.errno != errno.ENOENT:
96 ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
97 ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
97 (newpath, oldpath, err.strerror))
98 (newpath, oldpath, err.strerror))
98
99
99 maxsize = ui.configbytes('blackbox', 'maxsize')
100 maxsize = ui.configbytes('blackbox', 'maxsize')
100 name = 'blackbox.log'
101 name = 'blackbox.log'
101 if maxsize > 0:
102 if maxsize > 0:
102 try:
103 try:
103 st = vfs.stat(name)
104 st = vfs.stat(name)
104 except OSError:
105 except OSError:
105 pass
106 pass
106 else:
107 else:
107 if st.st_size >= maxsize:
108 if st.st_size >= maxsize:
108 path = vfs.join(name)
109 path = vfs.join(name)
109 maxfiles = ui.configint('blackbox', 'maxfiles')
110 maxfiles = ui.configint('blackbox', 'maxfiles')
110 for i in xrange(maxfiles - 1, 1, -1):
111 for i in xrange(maxfiles - 1, 1, -1):
111 rotate(oldpath='%s.%d' % (path, i - 1),
112 rotate(oldpath='%s.%d' % (path, i - 1),
112 newpath='%s.%d' % (path, i))
113 newpath='%s.%d' % (path, i))
113 rotate(oldpath=path,
114 rotate(oldpath=path,
114 newpath=maxfiles > 0 and path + '.1')
115 newpath=maxfiles > 0 and path + '.1')
115 return vfs(name, 'a')
116 return vfs(name, 'a')
116
117
117 def wrapui(ui):
118 def wrapui(ui):
118 class blackboxui(ui.__class__):
119 class blackboxui(ui.__class__):
119 @property
120 @property
120 def _bbvfs(self):
121 def _bbvfs(self):
121 vfs = None
122 vfs = None
122 repo = getattr(self, '_bbrepo', None)
123 repo = getattr(self, '_bbrepo', None)
123 if repo:
124 if repo:
124 vfs = repo.vfs
125 vfs = repo.vfs
125 if not vfs.isdir('.'):
126 if not vfs.isdir('.'):
126 vfs = None
127 vfs = None
127 return vfs
128 return vfs
128
129
129 @util.propertycache
130 @util.propertycache
130 def track(self):
131 def track(self):
131 return self.configlist('blackbox', 'track')
132 return self.configlist('blackbox', 'track')
132
133
133 def debug(self, *msg, **opts):
134 def debug(self, *msg, **opts):
134 super(blackboxui, self).debug(*msg, **opts)
135 super(blackboxui, self).debug(*msg, **opts)
135 if self.debugflag:
136 if self.debugflag:
136 self.log('debug', '%s', ''.join(msg))
137 self.log('debug', '%s', ''.join(msg))
137
138
138 def log(self, event, *msg, **opts):
139 def log(self, event, *msg, **opts):
139 global lastui
140 global lastui
140 super(blackboxui, self).log(event, *msg, **opts)
141 super(blackboxui, self).log(event, *msg, **opts)
141
142
142 if not '*' in self.track and not event in self.track:
143 if not '*' in self.track and not event in self.track:
143 return
144 return
144
145
145 if self._bbvfs:
146 if self._bbvfs:
146 ui = self
147 ui = self
147 else:
148 else:
148 # certain ui instances exist outside the context of
149 # certain ui instances exist outside the context of
149 # a repo, so just default to the last blackbox that
150 # a repo, so just default to the last blackbox that
150 # was seen.
151 # was seen.
151 ui = lastui
152 ui = lastui
152
153
153 if not ui:
154 if not ui:
154 return
155 return
155 vfs = ui._bbvfs
156 vfs = ui._bbvfs
156 if not vfs:
157 if not vfs:
157 return
158 return
158
159
159 repo = getattr(ui, '_bbrepo', None)
160 repo = getattr(ui, '_bbrepo', None)
160 if not lastui or repo:
161 if not lastui or repo:
161 lastui = ui
162 lastui = ui
162 if getattr(ui, '_bbinlog', False):
163 if getattr(ui, '_bbinlog', False):
163 # recursion and failure guard
164 # recursion and failure guard
164 return
165 return
165 ui._bbinlog = True
166 ui._bbinlog = True
166 default = self.configdate('devel', 'default-date')
167 default = self.configdate('devel', 'default-date')
167 date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
168 date = dateutil.datestr(default, '%Y/%m/%d %H:%M:%S')
168 user = util.getuser()
169 user = util.getuser()
169 pid = '%d' % util.getpid()
170 pid = '%d' % util.getpid()
170 formattedmsg = msg[0] % msg[1:]
171 formattedmsg = msg[0] % msg[1:]
171 rev = '(unknown)'
172 rev = '(unknown)'
172 changed = ''
173 changed = ''
173 if repo:
174 if repo:
174 ctx = repo[None]
175 ctx = repo[None]
175 parents = ctx.parents()
176 parents = ctx.parents()
176 rev = ('+'.join([hex(p.node()) for p in parents]))
177 rev = ('+'.join([hex(p.node()) for p in parents]))
177 if (ui.configbool('blackbox', 'dirty') and
178 if (ui.configbool('blackbox', 'dirty') and
178 ctx.dirty(missing=True, merge=False, branch=False)):
179 ctx.dirty(missing=True, merge=False, branch=False)):
179 changed = '+'
180 changed = '+'
180 if ui.configbool('blackbox', 'logsource'):
181 if ui.configbool('blackbox', 'logsource'):
181 src = ' [%s]' % event
182 src = ' [%s]' % event
182 else:
183 else:
183 src = ''
184 src = ''
184 try:
185 try:
185 fmt = '%s %s @%s%s (%s)%s> %s'
186 fmt = '%s %s @%s%s (%s)%s> %s'
186 args = (date, user, rev, changed, pid, src, formattedmsg)
187 args = (date, user, rev, changed, pid, src, formattedmsg)
187 with _openlogfile(ui, vfs) as fp:
188 with _openlogfile(ui, vfs) as fp:
188 fp.write(fmt % args)
189 fp.write(fmt % args)
189 except (IOError, OSError) as err:
190 except (IOError, OSError) as err:
190 self.debug('warning: cannot write to blackbox.log: %s\n' %
191 self.debug('warning: cannot write to blackbox.log: %s\n' %
191 encoding.strtolocal(err.strerror))
192 encoding.strtolocal(err.strerror))
192 # do not restore _bbinlog intentionally to avoid failed
193 # do not restore _bbinlog intentionally to avoid failed
193 # logging again
194 # logging again
194 else:
195 else:
195 ui._bbinlog = False
196 ui._bbinlog = False
196
197
197 def setrepo(self, repo):
198 def setrepo(self, repo):
198 self._bbrepo = repo
199 self._bbrepo = repo
199
200
200 ui.__class__ = blackboxui
201 ui.__class__ = blackboxui
201 uimod.ui = blackboxui
202 uimod.ui = blackboxui
202
203
203 def uisetup(ui):
204 def uisetup(ui):
204 wrapui(ui)
205 wrapui(ui)
205
206
206 def reposetup(ui, repo):
207 def reposetup(ui, repo):
207 # During 'hg pull' a httppeer repo is created to represent the remote repo.
208 # During 'hg pull' a httppeer repo is created to represent the remote repo.
208 # It doesn't have a .hg directory to put a blackbox in, so we don't do
209 # It doesn't have a .hg directory to put a blackbox in, so we don't do
209 # the blackbox setup for it.
210 # the blackbox setup for it.
210 if not repo.local():
211 if not repo.local():
211 return
212 return
212
213
213 if util.safehasattr(ui, 'setrepo'):
214 if util.safehasattr(ui, 'setrepo'):
214 ui.setrepo(repo)
215 ui.setrepo(repo)
215
216
216 # Set lastui even if ui.log is not called. This gives blackbox a
217 # Set lastui even if ui.log is not called. This gives blackbox a
217 # fallback place to log.
218 # fallback place to log.
218 global lastui
219 global lastui
219 if lastui is None:
220 if lastui is None:
220 lastui = ui
221 lastui = ui
221
222
222 repo._wlockfreeprefix.add('blackbox.log')
223 repo._wlockfreeprefix.add('blackbox.log')
223
224
224 @command('^blackbox',
225 @command('^blackbox',
225 [('l', 'limit', 10, _('the number of events to show')),
226 [('l', 'limit', 10, _('the number of events to show')),
226 ],
227 ],
227 _('hg blackbox [OPTION]...'))
228 _('hg blackbox [OPTION]...'))
228 def blackbox(ui, repo, *revs, **opts):
229 def blackbox(ui, repo, *revs, **opts):
229 '''view the recent repository events
230 '''view the recent repository events
230 '''
231 '''
231
232
232 if not repo.vfs.exists('blackbox.log'):
233 if not repo.vfs.exists('blackbox.log'):
233 return
234 return
234
235
235 limit = opts.get(r'limit')
236 limit = opts.get(r'limit')
236 fp = repo.vfs('blackbox.log', 'r')
237 fp = repo.vfs('blackbox.log', 'r')
237 lines = fp.read().split('\n')
238 lines = fp.read().split('\n')
238
239
239 count = 0
240 count = 0
240 output = []
241 output = []
241 for line in reversed(lines):
242 for line in reversed(lines):
242 if count >= limit:
243 if count >= limit:
243 break
244 break
244
245
245 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
246 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
246 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
247 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
247 count += 1
248 count += 1
248 output.append(line)
249 output.append(line)
249
250
250 ui.status('\n'.join(reversed(output)))
251 ui.status('\n'.join(reversed(output)))
@@ -1,211 +1,211 b''
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 import datetime
13 import datetime
14 import os
14 import os
15 import time
15 import time
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 cmdutil,
19 cmdutil,
20 encoding,
20 encoding,
21 logcmdutil,
21 logcmdutil,
22 patch,
22 patch,
23 pycompat,
23 pycompat,
24 registrar,
24 registrar,
25 scmutil,
25 scmutil,
26 util,
27 )
26 )
27 from mercurial.utils import dateutil
28
28
29 cmdtable = {}
29 cmdtable = {}
30 command = registrar.command(cmdtable)
30 command = registrar.command(cmdtable)
31 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
32 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
33 # be specifying the version(s) of Mercurial they are tested with, or
33 # be specifying the version(s) of Mercurial they are tested with, or
34 # leave the attribute unspecified.
34 # leave the attribute unspecified.
35 testedwith = 'ships-with-hg-core'
35 testedwith = 'ships-with-hg-core'
36
36
37 def changedlines(ui, repo, ctx1, ctx2, fns):
37 def changedlines(ui, repo, ctx1, ctx2, fns):
38 added, removed = 0, 0
38 added, removed = 0, 0
39 fmatch = scmutil.matchfiles(repo, fns)
39 fmatch = scmutil.matchfiles(repo, fns)
40 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
40 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
41 for l in diff.split('\n'):
41 for l in diff.split('\n'):
42 if l.startswith("+") and not l.startswith("+++ "):
42 if l.startswith("+") and not l.startswith("+++ "):
43 added += 1
43 added += 1
44 elif l.startswith("-") and not l.startswith("--- "):
44 elif l.startswith("-") and not l.startswith("--- "):
45 removed += 1
45 removed += 1
46 return (added, removed)
46 return (added, removed)
47
47
48 def countrate(ui, repo, amap, *pats, **opts):
48 def countrate(ui, repo, amap, *pats, **opts):
49 """Calculate stats"""
49 """Calculate stats"""
50 opts = pycompat.byteskwargs(opts)
50 opts = pycompat.byteskwargs(opts)
51 if opts.get('dateformat'):
51 if opts.get('dateformat'):
52 def getkey(ctx):
52 def getkey(ctx):
53 t, tz = ctx.date()
53 t, tz = ctx.date()
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
55 return date.strftime(opts['dateformat'])
55 return date.strftime(opts['dateformat'])
56 else:
56 else:
57 tmpl = opts.get('oldtemplate') or opts.get('template')
57 tmpl = opts.get('oldtemplate') or opts.get('template')
58 tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
58 tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
59 def getkey(ctx):
59 def getkey(ctx):
60 ui.pushbuffer()
60 ui.pushbuffer()
61 tmpl.show(ctx)
61 tmpl.show(ctx)
62 return ui.popbuffer()
62 return ui.popbuffer()
63
63
64 state = {'count': 0}
64 state = {'count': 0}
65 rate = {}
65 rate = {}
66 df = False
66 df = False
67 if opts.get('date'):
67 if opts.get('date'):
68 df = util.matchdate(opts['date'])
68 df = dateutil.matchdate(opts['date'])
69
69
70 m = scmutil.match(repo[None], pats, opts)
70 m = scmutil.match(repo[None], pats, opts)
71 def prep(ctx, fns):
71 def prep(ctx, fns):
72 rev = ctx.rev()
72 rev = ctx.rev()
73 if df and not df(ctx.date()[0]): # doesn't match date format
73 if df and not df(ctx.date()[0]): # doesn't match date format
74 return
74 return
75
75
76 key = getkey(ctx).strip()
76 key = getkey(ctx).strip()
77 key = amap.get(key, key) # alias remap
77 key = amap.get(key, key) # alias remap
78 if opts.get('changesets'):
78 if opts.get('changesets'):
79 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
79 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
80 else:
80 else:
81 parents = ctx.parents()
81 parents = ctx.parents()
82 if len(parents) > 1:
82 if len(parents) > 1:
83 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
83 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
84 return
84 return
85
85
86 ctx1 = parents[0]
86 ctx1 = parents[0]
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89
89
90 state['count'] += 1
90 state['count'] += 1
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
92 unit=_('revisions'))
92 unit=_('revisions'))
93
93
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
95 continue
95 continue
96
96
97 ui.progress(_('analyzing'), None)
97 ui.progress(_('analyzing'), None)
98
98
99 return rate
99 return rate
100
100
101
101
102 @command('churn',
102 @command('churn',
103 [('r', 'rev', [],
103 [('r', 'rev', [],
104 _('count rate for the specified revision or revset'), _('REV')),
104 _('count rate for the specified revision or revset'), _('REV')),
105 ('d', 'date', '',
105 ('d', 'date', '',
106 _('count rate for revisions matching date spec'), _('DATE')),
106 _('count rate for revisions matching date spec'), _('DATE')),
107 ('t', 'oldtemplate', '',
107 ('t', 'oldtemplate', '',
108 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
108 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
109 ('T', 'template', '{author|email}',
109 ('T', 'template', '{author|email}',
110 _('template to group changesets'), _('TEMPLATE')),
110 _('template to group changesets'), _('TEMPLATE')),
111 ('f', 'dateformat', '',
111 ('f', 'dateformat', '',
112 _('strftime-compatible format for grouping by date'), _('FORMAT')),
112 _('strftime-compatible format for grouping by date'), _('FORMAT')),
113 ('c', 'changesets', False, _('count rate by number of changesets')),
113 ('c', 'changesets', False, _('count rate by number of changesets')),
114 ('s', 'sort', False, _('sort by key (default: sort by count)')),
114 ('s', 'sort', False, _('sort by key (default: sort by count)')),
115 ('', 'diffstat', False, _('display added/removed lines separately')),
115 ('', 'diffstat', False, _('display added/removed lines separately')),
116 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
116 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
117 ] + cmdutil.walkopts,
117 ] + cmdutil.walkopts,
118 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
118 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
119 inferrepo=True)
119 inferrepo=True)
120 def churn(ui, repo, *pats, **opts):
120 def churn(ui, repo, *pats, **opts):
121 '''histogram of changes to the repository
121 '''histogram of changes to the repository
122
122
123 This command will display a histogram representing the number
123 This command will display a histogram representing the number
124 of changed lines or revisions, grouped according to the given
124 of changed lines or revisions, grouped according to the given
125 template. The default template will group changes by author.
125 template. The default template will group changes by author.
126 The --dateformat option may be used to group the results by
126 The --dateformat option may be used to group the results by
127 date instead.
127 date instead.
128
128
129 Statistics are based on the number of changed lines, or
129 Statistics are based on the number of changed lines, or
130 alternatively the number of matching revisions if the
130 alternatively the number of matching revisions if the
131 --changesets option is specified.
131 --changesets option is specified.
132
132
133 Examples::
133 Examples::
134
134
135 # display count of changed lines for every committer
135 # display count of changed lines for every committer
136 hg churn -T "{author|email}"
136 hg churn -T "{author|email}"
137
137
138 # display daily activity graph
138 # display daily activity graph
139 hg churn -f "%H" -s -c
139 hg churn -f "%H" -s -c
140
140
141 # display activity of developers by month
141 # display activity of developers by month
142 hg churn -f "%Y-%m" -s -c
142 hg churn -f "%Y-%m" -s -c
143
143
144 # display count of lines changed in every year
144 # display count of lines changed in every year
145 hg churn -f "%Y" -s
145 hg churn -f "%Y" -s
146
146
147 It is possible to map alternate email addresses to a main address
147 It is possible to map alternate email addresses to a main address
148 by providing a file using the following format::
148 by providing a file using the following format::
149
149
150 <alias email> = <actual email>
150 <alias email> = <actual email>
151
151
152 Such a file may be specified with the --aliases option, otherwise
152 Such a file may be specified with the --aliases option, otherwise
153 a .hgchurn file will be looked for in the working directory root.
153 a .hgchurn file will be looked for in the working directory root.
154 Aliases will be split from the rightmost "=".
154 Aliases will be split from the rightmost "=".
155 '''
155 '''
156 def pad(s, l):
156 def pad(s, l):
157 return s + " " * (l - encoding.colwidth(s))
157 return s + " " * (l - encoding.colwidth(s))
158
158
159 amap = {}
159 amap = {}
160 aliases = opts.get(r'aliases')
160 aliases = opts.get(r'aliases')
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
162 aliases = repo.wjoin('.hgchurn')
162 aliases = repo.wjoin('.hgchurn')
163 if aliases:
163 if aliases:
164 for l in open(aliases, "r"):
164 for l in open(aliases, "r"):
165 try:
165 try:
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
167 amap[alias.strip()] = actual.strip()
167 amap[alias.strip()] = actual.strip()
168 except ValueError:
168 except ValueError:
169 l = l.strip()
169 l = l.strip()
170 if l:
170 if l:
171 ui.warn(_("skipping malformed alias: %s\n") % l)
171 ui.warn(_("skipping malformed alias: %s\n") % l)
172 continue
172 continue
173
173
174 rate = list(countrate(ui, repo, amap, *pats, **opts).items())
174 rate = list(countrate(ui, repo, amap, *pats, **opts).items())
175 if not rate:
175 if not rate:
176 return
176 return
177
177
178 if opts.get(r'sort'):
178 if opts.get(r'sort'):
179 rate.sort()
179 rate.sort()
180 else:
180 else:
181 rate.sort(key=lambda x: (-sum(x[1]), x))
181 rate.sort(key=lambda x: (-sum(x[1]), x))
182
182
183 # Be careful not to have a zero maxcount (issue833)
183 # Be careful not to have a zero maxcount (issue833)
184 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
184 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
185 maxname = max(len(k) for k, v in rate)
185 maxname = max(len(k) for k, v in rate)
186
186
187 ttywidth = ui.termwidth()
187 ttywidth = ui.termwidth()
188 ui.debug("assuming %i character terminal\n" % ttywidth)
188 ui.debug("assuming %i character terminal\n" % ttywidth)
189 width = ttywidth - maxname - 2 - 2 - 2
189 width = ttywidth - maxname - 2 - 2 - 2
190
190
191 if opts.get(r'diffstat'):
191 if opts.get(r'diffstat'):
192 width -= 15
192 width -= 15
193 def format(name, diffstat):
193 def format(name, diffstat):
194 added, removed = diffstat
194 added, removed = diffstat
195 return "%s %15s %s%s\n" % (pad(name, maxname),
195 return "%s %15s %s%s\n" % (pad(name, maxname),
196 '+%d/-%d' % (added, removed),
196 '+%d/-%d' % (added, removed),
197 ui.label('+' * charnum(added),
197 ui.label('+' * charnum(added),
198 'diffstat.inserted'),
198 'diffstat.inserted'),
199 ui.label('-' * charnum(removed),
199 ui.label('-' * charnum(removed),
200 'diffstat.deleted'))
200 'diffstat.deleted'))
201 else:
201 else:
202 width -= 6
202 width -= 6
203 def format(name, count):
203 def format(name, count):
204 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
204 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
205 '*' * charnum(sum(count)))
205 '*' * charnum(sum(count)))
206
206
207 def charnum(count):
207 def charnum(count):
208 return int(round(count * width / maxcount))
208 return int(round(count * width / maxcount))
209
209
210 for name, count in rate:
210 for name, count in rate:
211 ui.write(format(name, count))
211 ui.write(format(name, count))
@@ -1,548 +1,548 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import base64
9 import base64
10 import datetime
10 import datetime
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import shlex
14 import shlex
15 import subprocess
15 import subprocess
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 encoding,
19 encoding,
20 error,
20 error,
21 phases,
21 phases,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 )
24 )
25
25
26 pickle = util.pickle
26 pickle = util.pickle
27 propertycache = util.propertycache
27 propertycache = util.propertycache
28
28
29 def _encodeornone(d):
29 def _encodeornone(d):
30 if d is None:
30 if d is None:
31 return
31 return
32 return d.encode('latin1')
32 return d.encode('latin1')
33
33
34 class _shlexpy3proxy(object):
34 class _shlexpy3proxy(object):
35
35
36 def __init__(self, l):
36 def __init__(self, l):
37 self._l = l
37 self._l = l
38
38
39 def __iter__(self):
39 def __iter__(self):
40 return (_encodeornone(v) for v in self._l)
40 return (_encodeornone(v) for v in self._l)
41
41
42 def get_token(self):
42 def get_token(self):
43 return _encodeornone(self._l.get_token())
43 return _encodeornone(self._l.get_token())
44
44
45 @property
45 @property
46 def infile(self):
46 def infile(self):
47 return self._l.infile or '<unknown>'
47 return self._l.infile or '<unknown>'
48
48
49 @property
49 @property
50 def lineno(self):
50 def lineno(self):
51 return self._l.lineno
51 return self._l.lineno
52
52
53 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
53 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
54 if data is None:
54 if data is None:
55 if pycompat.ispy3:
55 if pycompat.ispy3:
56 data = open(filepath, 'r', encoding=r'latin1')
56 data = open(filepath, 'r', encoding=r'latin1')
57 else:
57 else:
58 data = open(filepath, 'r')
58 data = open(filepath, 'r')
59 else:
59 else:
60 if filepath is not None:
60 if filepath is not None:
61 raise error.ProgrammingError(
61 raise error.ProgrammingError(
62 'shlexer only accepts data or filepath, not both')
62 'shlexer only accepts data or filepath, not both')
63 if pycompat.ispy3:
63 if pycompat.ispy3:
64 data = data.decode('latin1')
64 data = data.decode('latin1')
65 l = shlex.shlex(data, infile=filepath, posix=True)
65 l = shlex.shlex(data, infile=filepath, posix=True)
66 if whitespace is not None:
66 if whitespace is not None:
67 l.whitespace_split = True
67 l.whitespace_split = True
68 if pycompat.ispy3:
68 if pycompat.ispy3:
69 l.whitespace += whitespace.decode('latin1')
69 l.whitespace += whitespace.decode('latin1')
70 else:
70 else:
71 l.whitespace += whitespace
71 l.whitespace += whitespace
72 if wordchars is not None:
72 if wordchars is not None:
73 if pycompat.ispy3:
73 if pycompat.ispy3:
74 l.wordchars += wordchars.decode('latin1')
74 l.wordchars += wordchars.decode('latin1')
75 else:
75 else:
76 l.wordchars += wordchars
76 l.wordchars += wordchars
77 if pycompat.ispy3:
77 if pycompat.ispy3:
78 return _shlexpy3proxy(l)
78 return _shlexpy3proxy(l)
79 return l
79 return l
80
80
81 def encodeargs(args):
81 def encodeargs(args):
82 def encodearg(s):
82 def encodearg(s):
83 lines = base64.encodestring(s)
83 lines = base64.encodestring(s)
84 lines = [l.splitlines()[0] for l in lines]
84 lines = [l.splitlines()[0] for l in lines]
85 return ''.join(lines)
85 return ''.join(lines)
86
86
87 s = pickle.dumps(args)
87 s = pickle.dumps(args)
88 return encodearg(s)
88 return encodearg(s)
89
89
90 def decodeargs(s):
90 def decodeargs(s):
91 s = base64.decodestring(s)
91 s = base64.decodestring(s)
92 return pickle.loads(s)
92 return pickle.loads(s)
93
93
94 class MissingTool(Exception):
94 class MissingTool(Exception):
95 pass
95 pass
96
96
97 def checktool(exe, name=None, abort=True):
97 def checktool(exe, name=None, abort=True):
98 name = name or exe
98 name = name or exe
99 if not util.findexe(exe):
99 if not util.findexe(exe):
100 if abort:
100 if abort:
101 exc = error.Abort
101 exc = error.Abort
102 else:
102 else:
103 exc = MissingTool
103 exc = MissingTool
104 raise exc(_('cannot find required "%s" tool') % name)
104 raise exc(_('cannot find required "%s" tool') % name)
105
105
106 class NoRepo(Exception):
106 class NoRepo(Exception):
107 pass
107 pass
108
108
109 SKIPREV = 'SKIP'
109 SKIPREV = 'SKIP'
110
110
class commit(object):
    """Container for the metadata describing one source revision."""

    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra=None, sortkey=None, saverev=True, phase=phases.draft,
                 optparents=None):
        self.author = author if author else 'unknown'
        self.date = date if date else '0 0'
        self.desc = desc
        # will be converted and used as parents
        self.parents = parents
        # will be used if already converted
        self.optparents = optparents if optparents else []
        self.branch = branch
        self.rev = rev
        self.extra = extra if extra else {}
        self.sortkey = sortkey
        self.saverev = saverev
        self.phase = phase
126
126
class converter_source(object):
    """Conversion source interface"""

    def __init__(self, ui, repotype, path=None, revs=None):
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.revs = revs
        self.repotype = repotype

        # default character encoding assumed for source metadata;
        # subclasses may override before recode() is used
        self.encoding = 'utf-8'

    def checkhexformat(self, revstr, mapname='splicemap'):
        """Abort if revstr is not a 40-character hexadecimal string.

        Mercurial and git both use this format for their revision
        numbering.
        """
        if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
            raise error.Abort(_('%s entry %s is not a valid revision'
                                ' identifier') % (mapname, revstr))

    def before(self):
        # hook run before the conversion starts; subclasses may override
        pass

    def after(self):
        # hook run after the conversion finishes; subclasses may override
        pass

    def targetfilebelongstosource(self, targetfilename):
        """Returns true if the given targetfile belongs to the source repo. This
        is useful when only a subdirectory of the target belongs to the source
        repo."""
        # For normal full repo converts, this is always True.
        return True

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges().
        Data is None if file is missing/deleted in rev.
        """
        raise NotImplementedError

    def getchanges(self, version, full):
        """Returns a tuple of (files, copies, cleanp2).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). If full, all files in that revision is returned.
        id is the source revision id of the file.

        copies is a dictionary of dest: source

        cleanp2 is the set of files filenames that are clean against p2.
        (Files that are clean against p1 are already not in files (unless
        full). This makes it possible to handle p2 clean files similarly.)
        """
        raise NotImplementedError

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError

    def numcommits(self):
        """Return the number of commits in this source.

        If unknown, return None.
        """
        return None

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError

    def recode(self, s, encoding=None):
        """Return *s* re-encoded as UTF-8 bytes.

        *s* is decoded using *encoding* (defaulting to self.encoding),
        falling back first to latin-1 and finally to a lossy decode with
        replacement characters.
        """
        if not encoding:
            encoding = self.encoding or 'utf-8'

        # Python 2: unicode objects are already decoded text
        if isinstance(s, unicode):
            return s.encode("utf-8")
        try:
            return s.decode(encoding).encode("utf-8")
        except UnicodeError:
            try:
                # latin-1 maps every byte value, so this is the permissive
                # middle step before the lossy "replace" fallback
                return s.decode("latin-1").encode("utf-8")
            except UnicodeError:
                return s.decode(encoding, "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''

    def hasnativeorder(self):
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def hasnativeclose(self):
        """Return true if this source has ability to close branch.
        """
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}

    def checkrevformat(self, revstr, mapname='splicemap'):
        """revstr is a string that describes a revision in the given
        source control system. Return true if revstr has correct
        format.
        """
        return True
272
272
class converter_sink(object):
    """Conversion sink (target) interface"""

    def __init__(self, ui, repotype, path):
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []
        self.repotype = repotype

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent author identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source, revmap, full,
                  cleanp2):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset. 'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'. 'full' means that 'files'
        is complete and all other files should be removed.
        'cleanp2' is a set of the filenames that are unchanged from p2
        (only in the common merge case where there two parents).

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """

    def before(self):
        # hook run before the conversion starts; subclasses may override
        pass

    def after(self):
        # hook run after the conversion finishes; subclasses may override
        pass

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """

    def hascommitfrommap(self, rev):
        """Return False if a rev mentioned in a filemap is known to not be
        present."""
        raise NotImplementedError

    def hascommitforsplicemap(self, rev):
        """This method is for the special needs for splicemap handling and not
        for general use. Returns True if the sink contains rev, aborts on some
        special cases."""
        raise NotImplementedError
366
366
class commandline(object):
    """Helper for running an external command-line tool and capturing
    its output (base class for tool-backed converters)."""

    def __init__(self, ui, command):
        self.ui = ui
        self.command = command

    def prerun(self):
        # hook run before each command invocation; subclasses may override
        pass

    def postrun(self):
        # hook run after each command invocation; subclasses may override
        pass

    def _cmdline(self, cmd, *args, **kwargs):
        """Build the shell-quoted command-line string for *cmd*.

        Keyword arguments become options: single-letter keys as '-k',
        longer keys as '--long-key'; underscores map to dashes.
        """
        kwargs = pycompat.byteskwargs(kwargs)
        cmdline = [self.command, cmd] + list(args)
        for k, v in kwargs.iteritems():
            if len(k) == 1:
                cmdline.append('-' + k)
            else:
                cmdline.append('--' + k.replace('_', '-'))
            try:
                # '' + v raises TypeError for non-string values (e.g. a
                # True used as a boolean flag), which is deliberately
                # swallowed so such options carry no value
                if len(k) == 1:
                    cmdline.append('' + v)
                else:
                    cmdline[-1] += '=' + v
            except TypeError:
                pass
        cmdline = [util.shellquote(arg) for arg in cmdline]
        if not self.ui.debugflag:
            # discard the tool's stderr unless --debug is in effect
            cmdline += ['2>', pycompat.bytestr(os.devnull)]
        cmdline = ' '.join(cmdline)
        return cmdline

    def _run(self, cmd, *args, **kwargs):
        # shell=True is safe here: _cmdline() has already shell-quoted
        # every argument
        def popen(cmdline):
            p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
                                 close_fds=util.closefds,
                                 stdout=subprocess.PIPE)
            return p
        return self._dorun(popen, cmd, *args, **kwargs)

    def _run2(self, cmd, *args, **kwargs):
        # variant capturing stdin+stdout via util.popen2
        return self._dorun(util.popen2, cmd, *args, **kwargs)

    def _run3(self, cmd, *args, **kwargs):
        # variant capturing stdin+stdout+stderr via util.popen3
        return self._dorun(util.popen3, cmd, *args, **kwargs)

    def _dorun(self, openfunc, cmd, *args, **kwargs):
        """Run *cmd* via *openfunc*, bracketing it with prerun()/postrun()."""
        cmdline = self._cmdline(cmd, *args, **kwargs)
        self.ui.debug('running: %s\n' % (cmdline,))
        self.prerun()
        try:
            return openfunc(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        """Run *cmd* and return (output, returncode)."""
        p = self._run(cmd, *args, **kwargs)
        output = p.communicate()[0]
        self.ui.debug(output)
        return output, p.returncode

    def runlines(self, cmd, *args, **kwargs):
        """Run *cmd* and return (list of output lines, returncode)."""
        p = self._run(cmd, *args, **kwargs)
        output = p.stdout.readlines()
        p.wait()
        self.ui.debug(''.join(output))
        return output, p.returncode

    def checkexit(self, status, output=''):
        """Raise error.Abort (showing *output*) if *status* is non-zero."""
        if status:
            if output:
                self.ui.warn(_('%s error:\n') % self.command)
                self.ui.warn(output)
            msg = util.explainexit(status)[0]
            raise error.Abort('%s %s' % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        """Like run(), but abort on non-zero exit and return output only."""
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        """Like runlines(), but abort on non-zero exit; return the lines."""
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    @propertycache
    def argmax(self):
        # POSIX requires at least 4096 bytes for ARG_MAX
        argmax = 4096
        try:
            argmax = os.sysconf("SC_ARG_MAX")
        except (AttributeError, ValueError):
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
        return argmax // 2 - 1

    def _limit_arglist(self, arglist, cmd, *args, **kwargs):
        """Yield chunks of *arglist* that each fit within argmax together
        with the base command line (always at least one item per chunk)."""
        cmdlen = len(self._cmdline(cmd, *args, **kwargs))
        limit = self.argmax - cmdlen
        numbytes = 0
        fl = []
        for fn in arglist:
            # +3 per argument: presumably quoting plus separator overhead
            # added by _cmdline — TODO confirm
            b = len(fn) + 3
            if numbytes + b < limit or len(fl) == 0:
                fl.append(fn)
                numbytes += b
            else:
                yield fl
                fl = [fn]
                numbytes = b
        if fl:
            yield fl

    def xargs(self, arglist, cmd, *args, **kwargs):
        """Run *cmd* over *arglist*, split into command-line-sized batches
        (like xargs(1)); aborts on the first failing batch."""
        for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
            self.run0(cmd, *(list(args) + l), **kwargs)
491
491
class mapfile(dict):
    """A dict persisted as 'key value' lines in a file.

    Entries are loaded on construction and each assignment is appended
    to the backing file immediately (the file is opened lazily, in
    append mode).  self.order preserves first-seen key order.
    """

    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path
        # append-mode file handle, opened lazily by __setitem__
        self.fp = None
        # keys in first-seen order
        self.order = []
        self._read()

    def _read(self):
        """Load existing entries from self.path; missing file is not an
        error, any other IOError propagates."""
        if not self.path:
            return
        try:
            fp = open(self.path, 'rb')
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        for i, line in enumerate(util.iterfile(fp)):
            line = line.splitlines()[0].rstrip()
            if not line:
                # Ignore blank lines
                continue
            try:
                # rsplit: the value is the last space-separated token,
                # so keys themselves may contain spaces
                key, value = line.rsplit(' ', 1)
            except ValueError:
                raise error.Abort(
                    _('syntax error in %s(%d): key/value pair expected')
                    % (self.path, i + 1))
            if key not in self:
                self.order.append(key)
            super(mapfile, self).__setitem__(key, value)
        fp.close()

    def __setitem__(self, key, value):
        """Append the pair to the backing file, then update the dict."""
        if self.fp is None:
            try:
                self.fp = open(self.path, 'ab')
            except IOError as err:
                raise error.Abort(
                    _('could not open map file %r: %s') %
                    (self.path, encoding.strtolocal(err.strerror)))
        self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        # idempotent: safe to call when no write ever happened
        if self.fp:
            self.fp.close()
            self.fp = None
542
542
def makedatetimestamp(t):
    """Like dateutil.makedate() but for time t instead of current time"""
    # The difference between the naive UTC and naive local renderings of
    # the same instant is the local timezone offset, in seconds.
    naive_utc = datetime.datetime.utcfromtimestamp(t)
    naive_local = datetime.datetime.fromtimestamp(t)
    delta = naive_utc - naive_local
    offset = delta.days * 86400 + delta.seconds
    return t, offset
@@ -1,615 +1,616 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import collections
9 import collections
10 import os
10 import os
11 import shutil
11 import shutil
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial import (
14 from mercurial import (
15 encoding,
15 encoding,
16 error,
16 error,
17 hg,
17 hg,
18 pycompat,
18 pycompat,
19 scmutil,
19 scmutil,
20 util,
20 util,
21 )
21 )
22 from mercurial.utils import dateutil
22
23
23 from . import (
24 from . import (
24 bzr,
25 bzr,
25 common,
26 common,
26 cvs,
27 cvs,
27 darcs,
28 darcs,
28 filemap,
29 filemap,
29 git,
30 git,
30 gnuarch,
31 gnuarch,
31 hg as hgconvert,
32 hg as hgconvert,
32 monotone,
33 monotone,
33 p4,
34 p4,
34 subversion,
35 subversion,
35 )
36 )
36
37
37 mapfile = common.mapfile
38 mapfile = common.mapfile
38 MissingTool = common.MissingTool
39 MissingTool = common.MissingTool
39 NoRepo = common.NoRepo
40 NoRepo = common.NoRepo
40 SKIPREV = common.SKIPREV
41 SKIPREV = common.SKIPREV
41
42
42 bzr_source = bzr.bzr_source
43 bzr_source = bzr.bzr_source
43 convert_cvs = cvs.convert_cvs
44 convert_cvs = cvs.convert_cvs
44 convert_git = git.convert_git
45 convert_git = git.convert_git
45 darcs_source = darcs.darcs_source
46 darcs_source = darcs.darcs_source
46 gnuarch_source = gnuarch.gnuarch_source
47 gnuarch_source = gnuarch.gnuarch_source
47 mercurial_sink = hgconvert.mercurial_sink
48 mercurial_sink = hgconvert.mercurial_sink
48 mercurial_source = hgconvert.mercurial_source
49 mercurial_source = hgconvert.mercurial_source
49 monotone_source = monotone.monotone_source
50 monotone_source = monotone.monotone_source
50 p4_source = p4.p4_source
51 p4_source = p4.p4_source
51 svn_sink = subversion.svn_sink
52 svn_sink = subversion.svn_sink
52 svn_source = subversion.svn_source
53 svn_source = subversion.svn_source
53
54
54 orig_encoding = 'ascii'
55 orig_encoding = 'ascii'
55
56
56 def recode(s):
57 def recode(s):
57 if isinstance(s, unicode):
58 if isinstance(s, unicode):
58 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
59 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
59 else:
60 else:
60 return s.decode('utf-8').encode(
61 return s.decode('utf-8').encode(
61 pycompat.sysstr(orig_encoding), 'replace')
62 pycompat.sysstr(orig_encoding), 'replace')
62
63
63 def mapbranch(branch, branchmap):
64 def mapbranch(branch, branchmap):
64 '''
65 '''
65 >>> bmap = {b'default': b'branch1'}
66 >>> bmap = {b'default': b'branch1'}
66 >>> for i in [b'', None]:
67 >>> for i in [b'', None]:
67 ... mapbranch(i, bmap)
68 ... mapbranch(i, bmap)
68 'branch1'
69 'branch1'
69 'branch1'
70 'branch1'
70 >>> bmap = {b'None': b'branch2'}
71 >>> bmap = {b'None': b'branch2'}
71 >>> for i in [b'', None]:
72 >>> for i in [b'', None]:
72 ... mapbranch(i, bmap)
73 ... mapbranch(i, bmap)
73 'branch2'
74 'branch2'
74 'branch2'
75 'branch2'
75 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
76 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
76 >>> for i in [b'None', b'', None, b'default', b'branch5']:
77 >>> for i in [b'None', b'', None, b'default', b'branch5']:
77 ... mapbranch(i, bmap)
78 ... mapbranch(i, bmap)
78 'branch3'
79 'branch3'
79 'branch4'
80 'branch4'
80 'branch4'
81 'branch4'
81 'branch4'
82 'branch4'
82 'branch5'
83 'branch5'
83 '''
84 '''
84 # If branch is None or empty, this commit is coming from the source
85 # If branch is None or empty, this commit is coming from the source
85 # repository's default branch and destined for the default branch in the
86 # repository's default branch and destined for the default branch in the
86 # destination repository. For such commits, using a literal "default"
87 # destination repository. For such commits, using a literal "default"
87 # in branchmap below allows the user to map "default" to an alternate
88 # in branchmap below allows the user to map "default" to an alternate
88 # default branch in the destination repository.
89 # default branch in the destination repository.
89 branch = branchmap.get(branch or 'default', branch)
90 branch = branchmap.get(branch or 'default', branch)
90 # At some point we used "None" literal to denote the default branch,
91 # At some point we used "None" literal to denote the default branch,
91 # attempt to use that for backward compatibility.
92 # attempt to use that for backward compatibility.
92 if (not branch):
93 if (not branch):
93 branch = branchmap.get('None', branch)
94 branch = branchmap.get('None', branch)
94 return branch
95 return branch
95
96
96 source_converters = [
97 source_converters = [
97 ('cvs', convert_cvs, 'branchsort'),
98 ('cvs', convert_cvs, 'branchsort'),
98 ('git', convert_git, 'branchsort'),
99 ('git', convert_git, 'branchsort'),
99 ('svn', svn_source, 'branchsort'),
100 ('svn', svn_source, 'branchsort'),
100 ('hg', mercurial_source, 'sourcesort'),
101 ('hg', mercurial_source, 'sourcesort'),
101 ('darcs', darcs_source, 'branchsort'),
102 ('darcs', darcs_source, 'branchsort'),
102 ('mtn', monotone_source, 'branchsort'),
103 ('mtn', monotone_source, 'branchsort'),
103 ('gnuarch', gnuarch_source, 'branchsort'),
104 ('gnuarch', gnuarch_source, 'branchsort'),
104 ('bzr', bzr_source, 'branchsort'),
105 ('bzr', bzr_source, 'branchsort'),
105 ('p4', p4_source, 'branchsort'),
106 ('p4', p4_source, 'branchsort'),
106 ]
107 ]
107
108
108 sink_converters = [
109 sink_converters = [
109 ('hg', mercurial_sink),
110 ('hg', mercurial_sink),
110 ('svn', svn_sink),
111 ('svn', svn_sink),
111 ]
112 ]
112
113
113 def convertsource(ui, path, type, revs):
114 def convertsource(ui, path, type, revs):
114 exceptions = []
115 exceptions = []
115 if type and type not in [s[0] for s in source_converters]:
116 if type and type not in [s[0] for s in source_converters]:
116 raise error.Abort(_('%s: invalid source repository type') % type)
117 raise error.Abort(_('%s: invalid source repository type') % type)
117 for name, source, sortmode in source_converters:
118 for name, source, sortmode in source_converters:
118 try:
119 try:
119 if not type or name == type:
120 if not type or name == type:
120 return source(ui, name, path, revs), sortmode
121 return source(ui, name, path, revs), sortmode
121 except (NoRepo, MissingTool) as inst:
122 except (NoRepo, MissingTool) as inst:
122 exceptions.append(inst)
123 exceptions.append(inst)
123 if not ui.quiet:
124 if not ui.quiet:
124 for inst in exceptions:
125 for inst in exceptions:
125 ui.write("%s\n" % inst)
126 ui.write("%s\n" % inst)
126 raise error.Abort(_('%s: missing or unsupported repository') % path)
127 raise error.Abort(_('%s: missing or unsupported repository') % path)
127
128
128 def convertsink(ui, path, type):
129 def convertsink(ui, path, type):
129 if type and type not in [s[0] for s in sink_converters]:
130 if type and type not in [s[0] for s in sink_converters]:
130 raise error.Abort(_('%s: invalid destination repository type') % type)
131 raise error.Abort(_('%s: invalid destination repository type') % type)
131 for name, sink in sink_converters:
132 for name, sink in sink_converters:
132 try:
133 try:
133 if not type or name == type:
134 if not type or name == type:
134 return sink(ui, name, path)
135 return sink(ui, name, path)
135 except NoRepo as inst:
136 except NoRepo as inst:
136 ui.note(_("convert: %s\n") % inst)
137 ui.note(_("convert: %s\n") % inst)
137 except MissingTool as inst:
138 except MissingTool as inst:
138 raise error.Abort('%s\n' % inst)
139 raise error.Abort('%s\n' % inst)
139 raise error.Abort(_('%s: unknown repository type') % path)
140 raise error.Abort(_('%s: unknown repository type') % path)
140
141
141 class progresssource(object):
142 class progresssource(object):
142 def __init__(self, ui, source, filecount):
143 def __init__(self, ui, source, filecount):
143 self.ui = ui
144 self.ui = ui
144 self.source = source
145 self.source = source
145 self.filecount = filecount
146 self.filecount = filecount
146 self.retrieved = 0
147 self.retrieved = 0
147
148
148 def getfile(self, file, rev):
149 def getfile(self, file, rev):
149 self.retrieved += 1
150 self.retrieved += 1
150 self.ui.progress(_('getting files'), self.retrieved,
151 self.ui.progress(_('getting files'), self.retrieved,
151 item=file, total=self.filecount, unit=_('files'))
152 item=file, total=self.filecount, unit=_('files'))
152 return self.source.getfile(file, rev)
153 return self.source.getfile(file, rev)
153
154
154 def targetfilebelongstosource(self, targetfilename):
155 def targetfilebelongstosource(self, targetfilename):
155 return self.source.targetfilebelongstosource(targetfilename)
156 return self.source.targetfilebelongstosource(targetfilename)
156
157
157 def lookuprev(self, rev):
158 def lookuprev(self, rev):
158 return self.source.lookuprev(rev)
159 return self.source.lookuprev(rev)
159
160
160 def close(self):
161 def close(self):
161 self.ui.progress(_('getting files'), None)
162 self.ui.progress(_('getting files'), None)
162
163
163 class converter(object):
164 class converter(object):
164 def __init__(self, ui, source, dest, revmapfile, opts):
165 def __init__(self, ui, source, dest, revmapfile, opts):
165
166
166 self.source = source
167 self.source = source
167 self.dest = dest
168 self.dest = dest
168 self.ui = ui
169 self.ui = ui
169 self.opts = opts
170 self.opts = opts
170 self.commitcache = {}
171 self.commitcache = {}
171 self.authors = {}
172 self.authors = {}
172 self.authorfile = None
173 self.authorfile = None
173
174
174 # Record converted revisions persistently: maps source revision
175 # Record converted revisions persistently: maps source revision
175 # ID to target revision ID (both strings). (This is how
176 # ID to target revision ID (both strings). (This is how
176 # incremental conversions work.)
177 # incremental conversions work.)
177 self.map = mapfile(ui, revmapfile)
178 self.map = mapfile(ui, revmapfile)
178
179
179 # Read first the dst author map if any
180 # Read first the dst author map if any
180 authorfile = self.dest.authorfile()
181 authorfile = self.dest.authorfile()
181 if authorfile and os.path.exists(authorfile):
182 if authorfile and os.path.exists(authorfile):
182 self.readauthormap(authorfile)
183 self.readauthormap(authorfile)
183 # Extend/Override with new author map if necessary
184 # Extend/Override with new author map if necessary
184 if opts.get('authormap'):
185 if opts.get('authormap'):
185 self.readauthormap(opts.get('authormap'))
186 self.readauthormap(opts.get('authormap'))
186 self.authorfile = self.dest.authorfile()
187 self.authorfile = self.dest.authorfile()
187
188
188 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
189 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
189 self.branchmap = mapfile(ui, opts.get('branchmap'))
190 self.branchmap = mapfile(ui, opts.get('branchmap'))
190
191
191 def parsesplicemap(self, path):
192 def parsesplicemap(self, path):
192 """ check and validate the splicemap format and
193 """ check and validate the splicemap format and
193 return a child/parents dictionary.
194 return a child/parents dictionary.
194 Format checking has two parts.
195 Format checking has two parts.
195 1. generic format which is same across all source types
196 1. generic format which is same across all source types
196 2. specific format checking which may be different for
197 2. specific format checking which may be different for
197 different source type. This logic is implemented in
198 different source type. This logic is implemented in
198 checkrevformat function in source files like
199 checkrevformat function in source files like
199 hg.py, subversion.py etc.
200 hg.py, subversion.py etc.
200 """
201 """
201
202
202 if not path:
203 if not path:
203 return {}
204 return {}
204 m = {}
205 m = {}
205 try:
206 try:
206 fp = open(path, 'rb')
207 fp = open(path, 'rb')
207 for i, line in enumerate(util.iterfile(fp)):
208 for i, line in enumerate(util.iterfile(fp)):
208 line = line.splitlines()[0].rstrip()
209 line = line.splitlines()[0].rstrip()
209 if not line:
210 if not line:
210 # Ignore blank lines
211 # Ignore blank lines
211 continue
212 continue
212 # split line
213 # split line
213 lex = common.shlexer(data=line, whitespace=',')
214 lex = common.shlexer(data=line, whitespace=',')
214 line = list(lex)
215 line = list(lex)
215 # check number of parents
216 # check number of parents
216 if not (2 <= len(line) <= 3):
217 if not (2 <= len(line) <= 3):
217 raise error.Abort(_('syntax error in %s(%d): child parent1'
218 raise error.Abort(_('syntax error in %s(%d): child parent1'
218 '[,parent2] expected') % (path, i + 1))
219 '[,parent2] expected') % (path, i + 1))
219 for part in line:
220 for part in line:
220 self.source.checkrevformat(part)
221 self.source.checkrevformat(part)
221 child, p1, p2 = line[0], line[1:2], line[2:]
222 child, p1, p2 = line[0], line[1:2], line[2:]
222 if p1 == p2:
223 if p1 == p2:
223 m[child] = p1
224 m[child] = p1
224 else:
225 else:
225 m[child] = p1 + p2
226 m[child] = p1 + p2
226 # if file does not exist or error reading, exit
227 # if file does not exist or error reading, exit
227 except IOError:
228 except IOError:
228 raise error.Abort(_('splicemap file not found or error reading %s:')
229 raise error.Abort(_('splicemap file not found or error reading %s:')
229 % path)
230 % path)
230 return m
231 return m
231
232
232
233
233 def walktree(self, heads):
234 def walktree(self, heads):
234 '''Return a mapping that identifies the uncommitted parents of every
235 '''Return a mapping that identifies the uncommitted parents of every
235 uncommitted changeset.'''
236 uncommitted changeset.'''
236 visit = heads
237 visit = heads
237 known = set()
238 known = set()
238 parents = {}
239 parents = {}
239 numcommits = self.source.numcommits()
240 numcommits = self.source.numcommits()
240 while visit:
241 while visit:
241 n = visit.pop(0)
242 n = visit.pop(0)
242 if n in known:
243 if n in known:
243 continue
244 continue
244 if n in self.map:
245 if n in self.map:
245 m = self.map[n]
246 m = self.map[n]
246 if m == SKIPREV or self.dest.hascommitfrommap(m):
247 if m == SKIPREV or self.dest.hascommitfrommap(m):
247 continue
248 continue
248 known.add(n)
249 known.add(n)
249 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
250 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
250 total=numcommits)
251 total=numcommits)
251 commit = self.cachecommit(n)
252 commit = self.cachecommit(n)
252 parents[n] = []
253 parents[n] = []
253 for p in commit.parents:
254 for p in commit.parents:
254 parents[n].append(p)
255 parents[n].append(p)
255 visit.append(p)
256 visit.append(p)
256 self.ui.progress(_('scanning'), None)
257 self.ui.progress(_('scanning'), None)
257
258
258 return parents
259 return parents
259
260
260 def mergesplicemap(self, parents, splicemap):
261 def mergesplicemap(self, parents, splicemap):
261 """A splicemap redefines child/parent relationships. Check the
262 """A splicemap redefines child/parent relationships. Check the
262 map contains valid revision identifiers and merge the new
263 map contains valid revision identifiers and merge the new
263 links in the source graph.
264 links in the source graph.
264 """
265 """
265 for c in sorted(splicemap):
266 for c in sorted(splicemap):
266 if c not in parents:
267 if c not in parents:
267 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
268 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
268 # Could be in source but not converted during this run
269 # Could be in source but not converted during this run
269 self.ui.warn(_('splice map revision %s is not being '
270 self.ui.warn(_('splice map revision %s is not being '
270 'converted, ignoring\n') % c)
271 'converted, ignoring\n') % c)
271 continue
272 continue
272 pc = []
273 pc = []
273 for p in splicemap[c]:
274 for p in splicemap[c]:
274 # We do not have to wait for nodes already in dest.
275 # We do not have to wait for nodes already in dest.
275 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
276 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
276 continue
277 continue
277 # Parent is not in dest and not being converted, not good
278 # Parent is not in dest and not being converted, not good
278 if p not in parents:
279 if p not in parents:
279 raise error.Abort(_('unknown splice map parent: %s') % p)
280 raise error.Abort(_('unknown splice map parent: %s') % p)
280 pc.append(p)
281 pc.append(p)
281 parents[c] = pc
282 parents[c] = pc
282
283
283 def toposort(self, parents, sortmode):
284 def toposort(self, parents, sortmode):
284 '''Return an ordering such that every uncommitted changeset is
285 '''Return an ordering such that every uncommitted changeset is
285 preceded by all its uncommitted ancestors.'''
286 preceded by all its uncommitted ancestors.'''
286
287
287 def mapchildren(parents):
288 def mapchildren(parents):
288 """Return a (children, roots) tuple where 'children' maps parent
289 """Return a (children, roots) tuple where 'children' maps parent
289 revision identifiers to children ones, and 'roots' is the list of
290 revision identifiers to children ones, and 'roots' is the list of
290 revisions without parents. 'parents' must be a mapping of revision
291 revisions without parents. 'parents' must be a mapping of revision
291 identifier to its parents ones.
292 identifier to its parents ones.
292 """
293 """
293 visit = collections.deque(sorted(parents))
294 visit = collections.deque(sorted(parents))
294 seen = set()
295 seen = set()
295 children = {}
296 children = {}
296 roots = []
297 roots = []
297
298
298 while visit:
299 while visit:
299 n = visit.popleft()
300 n = visit.popleft()
300 if n in seen:
301 if n in seen:
301 continue
302 continue
302 seen.add(n)
303 seen.add(n)
303 # Ensure that nodes without parents are present in the
304 # Ensure that nodes without parents are present in the
304 # 'children' mapping.
305 # 'children' mapping.
305 children.setdefault(n, [])
306 children.setdefault(n, [])
306 hasparent = False
307 hasparent = False
307 for p in parents[n]:
308 for p in parents[n]:
308 if p not in self.map:
309 if p not in self.map:
309 visit.append(p)
310 visit.append(p)
310 hasparent = True
311 hasparent = True
311 children.setdefault(p, []).append(n)
312 children.setdefault(p, []).append(n)
312 if not hasparent:
313 if not hasparent:
313 roots.append(n)
314 roots.append(n)
314
315
315 return children, roots
316 return children, roots
316
317
317 # Sort functions are supposed to take a list of revisions which
318 # Sort functions are supposed to take a list of revisions which
318 # can be converted immediately and pick one
319 # can be converted immediately and pick one
319
320
320 def makebranchsorter():
321 def makebranchsorter():
321 """If the previously converted revision has a child in the
322 """If the previously converted revision has a child in the
322 eligible revisions list, pick it. Return the list head
323 eligible revisions list, pick it. Return the list head
323 otherwise. Branch sort attempts to minimize branch
324 otherwise. Branch sort attempts to minimize branch
324 switching, which is harmful for Mercurial backend
325 switching, which is harmful for Mercurial backend
325 compression.
326 compression.
326 """
327 """
327 prev = [None]
328 prev = [None]
328 def picknext(nodes):
329 def picknext(nodes):
329 next = nodes[0]
330 next = nodes[0]
330 for n in nodes:
331 for n in nodes:
331 if prev[0] in parents[n]:
332 if prev[0] in parents[n]:
332 next = n
333 next = n
333 break
334 break
334 prev[0] = next
335 prev[0] = next
335 return next
336 return next
336 return picknext
337 return picknext
337
338
338 def makesourcesorter():
339 def makesourcesorter():
339 """Source specific sort."""
340 """Source specific sort."""
340 keyfn = lambda n: self.commitcache[n].sortkey
341 keyfn = lambda n: self.commitcache[n].sortkey
341 def picknext(nodes):
342 def picknext(nodes):
342 return sorted(nodes, key=keyfn)[0]
343 return sorted(nodes, key=keyfn)[0]
343 return picknext
344 return picknext
344
345
345 def makeclosesorter():
346 def makeclosesorter():
346 """Close order sort."""
347 """Close order sort."""
347 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
348 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
348 self.commitcache[n].sortkey)
349 self.commitcache[n].sortkey)
349 def picknext(nodes):
350 def picknext(nodes):
350 return sorted(nodes, key=keyfn)[0]
351 return sorted(nodes, key=keyfn)[0]
351 return picknext
352 return picknext
352
353
353 def makedatesorter():
354 def makedatesorter():
354 """Sort revisions by date."""
355 """Sort revisions by date."""
355 dates = {}
356 dates = {}
356 def getdate(n):
357 def getdate(n):
357 if n not in dates:
358 if n not in dates:
358 dates[n] = util.parsedate(self.commitcache[n].date)
359 dates[n] = dateutil.parsedate(self.commitcache[n].date)
359 return dates[n]
360 return dates[n]
360
361
361 def picknext(nodes):
362 def picknext(nodes):
362 return min([(getdate(n), n) for n in nodes])[1]
363 return min([(getdate(n), n) for n in nodes])[1]
363
364
364 return picknext
365 return picknext
365
366
366 if sortmode == 'branchsort':
367 if sortmode == 'branchsort':
367 picknext = makebranchsorter()
368 picknext = makebranchsorter()
368 elif sortmode == 'datesort':
369 elif sortmode == 'datesort':
369 picknext = makedatesorter()
370 picknext = makedatesorter()
370 elif sortmode == 'sourcesort':
371 elif sortmode == 'sourcesort':
371 picknext = makesourcesorter()
372 picknext = makesourcesorter()
372 elif sortmode == 'closesort':
373 elif sortmode == 'closesort':
373 picknext = makeclosesorter()
374 picknext = makeclosesorter()
374 else:
375 else:
375 raise error.Abort(_('unknown sort mode: %s') % sortmode)
376 raise error.Abort(_('unknown sort mode: %s') % sortmode)
376
377
377 children, actives = mapchildren(parents)
378 children, actives = mapchildren(parents)
378
379
379 s = []
380 s = []
380 pendings = {}
381 pendings = {}
381 while actives:
382 while actives:
382 n = picknext(actives)
383 n = picknext(actives)
383 actives.remove(n)
384 actives.remove(n)
384 s.append(n)
385 s.append(n)
385
386
386 # Update dependents list
387 # Update dependents list
387 for c in children.get(n, []):
388 for c in children.get(n, []):
388 if c not in pendings:
389 if c not in pendings:
389 pendings[c] = [p for p in parents[c] if p not in self.map]
390 pendings[c] = [p for p in parents[c] if p not in self.map]
390 try:
391 try:
391 pendings[c].remove(n)
392 pendings[c].remove(n)
392 except ValueError:
393 except ValueError:
393 raise error.Abort(_('cycle detected between %s and %s')
394 raise error.Abort(_('cycle detected between %s and %s')
394 % (recode(c), recode(n)))
395 % (recode(c), recode(n)))
395 if not pendings[c]:
396 if not pendings[c]:
396 # Parents are converted, node is eligible
397 # Parents are converted, node is eligible
397 actives.insert(0, c)
398 actives.insert(0, c)
398 pendings[c] = None
399 pendings[c] = None
399
400
400 if len(s) != len(parents):
401 if len(s) != len(parents):
401 raise error.Abort(_("not all revisions were sorted"))
402 raise error.Abort(_("not all revisions were sorted"))
402
403
403 return s
404 return s
404
405
405 def writeauthormap(self):
406 def writeauthormap(self):
406 authorfile = self.authorfile
407 authorfile = self.authorfile
407 if authorfile:
408 if authorfile:
408 self.ui.status(_('writing author map file %s\n') % authorfile)
409 self.ui.status(_('writing author map file %s\n') % authorfile)
409 ofile = open(authorfile, 'wb+')
410 ofile = open(authorfile, 'wb+')
410 for author in self.authors:
411 for author in self.authors:
411 ofile.write(util.tonativeeol("%s=%s\n"
412 ofile.write(util.tonativeeol("%s=%s\n"
412 % (author, self.authors[author])))
413 % (author, self.authors[author])))
413 ofile.close()
414 ofile.close()
414
415
415 def readauthormap(self, authorfile):
416 def readauthormap(self, authorfile):
416 afile = open(authorfile, 'rb')
417 afile = open(authorfile, 'rb')
417 for line in afile:
418 for line in afile:
418
419
419 line = line.strip()
420 line = line.strip()
420 if not line or line.startswith('#'):
421 if not line or line.startswith('#'):
421 continue
422 continue
422
423
423 try:
424 try:
424 srcauthor, dstauthor = line.split('=', 1)
425 srcauthor, dstauthor = line.split('=', 1)
425 except ValueError:
426 except ValueError:
426 msg = _('ignoring bad line in author map file %s: %s\n')
427 msg = _('ignoring bad line in author map file %s: %s\n')
427 self.ui.warn(msg % (authorfile, line.rstrip()))
428 self.ui.warn(msg % (authorfile, line.rstrip()))
428 continue
429 continue
429
430
430 srcauthor = srcauthor.strip()
431 srcauthor = srcauthor.strip()
431 dstauthor = dstauthor.strip()
432 dstauthor = dstauthor.strip()
432 if self.authors.get(srcauthor) in (None, dstauthor):
433 if self.authors.get(srcauthor) in (None, dstauthor):
433 msg = _('mapping author %s to %s\n')
434 msg = _('mapping author %s to %s\n')
434 self.ui.debug(msg % (srcauthor, dstauthor))
435 self.ui.debug(msg % (srcauthor, dstauthor))
435 self.authors[srcauthor] = dstauthor
436 self.authors[srcauthor] = dstauthor
436 continue
437 continue
437
438
438 m = _('overriding mapping for author %s, was %s, will be %s\n')
439 m = _('overriding mapping for author %s, was %s, will be %s\n')
439 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
440 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
440
441
441 afile.close()
442 afile.close()
442
443
443 def cachecommit(self, rev):
444 def cachecommit(self, rev):
444 commit = self.source.getcommit(rev)
445 commit = self.source.getcommit(rev)
445 commit.author = self.authors.get(commit.author, commit.author)
446 commit.author = self.authors.get(commit.author, commit.author)
446 commit.branch = mapbranch(commit.branch, self.branchmap)
447 commit.branch = mapbranch(commit.branch, self.branchmap)
447 self.commitcache[rev] = commit
448 self.commitcache[rev] = commit
448 return commit
449 return commit
449
450
450 def copy(self, rev):
451 def copy(self, rev):
451 commit = self.commitcache[rev]
452 commit = self.commitcache[rev]
452 full = self.opts.get('full')
453 full = self.opts.get('full')
453 changes = self.source.getchanges(rev, full)
454 changes = self.source.getchanges(rev, full)
454 if isinstance(changes, bytes):
455 if isinstance(changes, bytes):
455 if changes == SKIPREV:
456 if changes == SKIPREV:
456 dest = SKIPREV
457 dest = SKIPREV
457 else:
458 else:
458 dest = self.map[changes]
459 dest = self.map[changes]
459 self.map[rev] = dest
460 self.map[rev] = dest
460 return
461 return
461 files, copies, cleanp2 = changes
462 files, copies, cleanp2 = changes
462 pbranches = []
463 pbranches = []
463 if commit.parents:
464 if commit.parents:
464 for prev in commit.parents:
465 for prev in commit.parents:
465 if prev not in self.commitcache:
466 if prev not in self.commitcache:
466 self.cachecommit(prev)
467 self.cachecommit(prev)
467 pbranches.append((self.map[prev],
468 pbranches.append((self.map[prev],
468 self.commitcache[prev].branch))
469 self.commitcache[prev].branch))
469 self.dest.setbranch(commit.branch, pbranches)
470 self.dest.setbranch(commit.branch, pbranches)
470 try:
471 try:
471 parents = self.splicemap[rev]
472 parents = self.splicemap[rev]
472 self.ui.status(_('spliced in %s as parents of %s\n') %
473 self.ui.status(_('spliced in %s as parents of %s\n') %
473 (_(' and ').join(parents), rev))
474 (_(' and ').join(parents), rev))
474 parents = [self.map.get(p, p) for p in parents]
475 parents = [self.map.get(p, p) for p in parents]
475 except KeyError:
476 except KeyError:
476 parents = [b[0] for b in pbranches]
477 parents = [b[0] for b in pbranches]
477 parents.extend(self.map[x]
478 parents.extend(self.map[x]
478 for x in commit.optparents
479 for x in commit.optparents
479 if x in self.map)
480 if x in self.map)
480 if len(pbranches) != 2:
481 if len(pbranches) != 2:
481 cleanp2 = set()
482 cleanp2 = set()
482 if len(parents) < 3:
483 if len(parents) < 3:
483 source = progresssource(self.ui, self.source, len(files))
484 source = progresssource(self.ui, self.source, len(files))
484 else:
485 else:
485 # For an octopus merge, we end up traversing the list of
486 # For an octopus merge, we end up traversing the list of
486 # changed files N-1 times. This tweak to the number of
487 # changed files N-1 times. This tweak to the number of
487 # files makes it so the progress bar doesn't overflow
488 # files makes it so the progress bar doesn't overflow
488 # itself.
489 # itself.
489 source = progresssource(self.ui, self.source,
490 source = progresssource(self.ui, self.source,
490 len(files) * (len(parents) - 1))
491 len(files) * (len(parents) - 1))
491 newnode = self.dest.putcommit(files, copies, parents, commit,
492 newnode = self.dest.putcommit(files, copies, parents, commit,
492 source, self.map, full, cleanp2)
493 source, self.map, full, cleanp2)
493 source.close()
494 source.close()
494 self.source.converted(rev, newnode)
495 self.source.converted(rev, newnode)
495 self.map[rev] = newnode
496 self.map[rev] = newnode
496
497
497 def convert(self, sortmode):
498 def convert(self, sortmode):
498 try:
499 try:
499 self.source.before()
500 self.source.before()
500 self.dest.before()
501 self.dest.before()
501 self.source.setrevmap(self.map)
502 self.source.setrevmap(self.map)
502 self.ui.status(_("scanning source...\n"))
503 self.ui.status(_("scanning source...\n"))
503 heads = self.source.getheads()
504 heads = self.source.getheads()
504 parents = self.walktree(heads)
505 parents = self.walktree(heads)
505 self.mergesplicemap(parents, self.splicemap)
506 self.mergesplicemap(parents, self.splicemap)
506 self.ui.status(_("sorting...\n"))
507 self.ui.status(_("sorting...\n"))
507 t = self.toposort(parents, sortmode)
508 t = self.toposort(parents, sortmode)
508 num = len(t)
509 num = len(t)
509 c = None
510 c = None
510
511
511 self.ui.status(_("converting...\n"))
512 self.ui.status(_("converting...\n"))
512 for i, c in enumerate(t):
513 for i, c in enumerate(t):
513 num -= 1
514 num -= 1
514 desc = self.commitcache[c].desc
515 desc = self.commitcache[c].desc
515 if "\n" in desc:
516 if "\n" in desc:
516 desc = desc.splitlines()[0]
517 desc = desc.splitlines()[0]
517 # convert log message to local encoding without using
518 # convert log message to local encoding without using
518 # tolocal() because the encoding.encoding convert()
519 # tolocal() because the encoding.encoding convert()
519 # uses is 'utf-8'
520 # uses is 'utf-8'
520 self.ui.status("%d %s\n" % (num, recode(desc)))
521 self.ui.status("%d %s\n" % (num, recode(desc)))
521 self.ui.note(_("source: %s\n") % recode(c))
522 self.ui.note(_("source: %s\n") % recode(c))
522 self.ui.progress(_('converting'), i, unit=_('revisions'),
523 self.ui.progress(_('converting'), i, unit=_('revisions'),
523 total=len(t))
524 total=len(t))
524 self.copy(c)
525 self.copy(c)
525 self.ui.progress(_('converting'), None)
526 self.ui.progress(_('converting'), None)
526
527
527 if not self.ui.configbool('convert', 'skiptags'):
528 if not self.ui.configbool('convert', 'skiptags'):
528 tags = self.source.gettags()
529 tags = self.source.gettags()
529 ctags = {}
530 ctags = {}
530 for k in tags:
531 for k in tags:
531 v = tags[k]
532 v = tags[k]
532 if self.map.get(v, SKIPREV) != SKIPREV:
533 if self.map.get(v, SKIPREV) != SKIPREV:
533 ctags[k] = self.map[v]
534 ctags[k] = self.map[v]
534
535
535 if c and ctags:
536 if c and ctags:
536 nrev, tagsparent = self.dest.puttags(ctags)
537 nrev, tagsparent = self.dest.puttags(ctags)
537 if nrev and tagsparent:
538 if nrev and tagsparent:
538 # write another hash correspondence to override the
539 # write another hash correspondence to override the
539 # previous one so we don't end up with extra tag heads
540 # previous one so we don't end up with extra tag heads
540 tagsparents = [e for e in self.map.iteritems()
541 tagsparents = [e for e in self.map.iteritems()
541 if e[1] == tagsparent]
542 if e[1] == tagsparent]
542 if tagsparents:
543 if tagsparents:
543 self.map[tagsparents[0][0]] = nrev
544 self.map[tagsparents[0][0]] = nrev
544
545
545 bookmarks = self.source.getbookmarks()
546 bookmarks = self.source.getbookmarks()
546 cbookmarks = {}
547 cbookmarks = {}
547 for k in bookmarks:
548 for k in bookmarks:
548 v = bookmarks[k]
549 v = bookmarks[k]
549 if self.map.get(v, SKIPREV) != SKIPREV:
550 if self.map.get(v, SKIPREV) != SKIPREV:
550 cbookmarks[k] = self.map[v]
551 cbookmarks[k] = self.map[v]
551
552
552 if c and cbookmarks:
553 if c and cbookmarks:
553 self.dest.putbookmarks(cbookmarks)
554 self.dest.putbookmarks(cbookmarks)
554
555
555 self.writeauthormap()
556 self.writeauthormap()
556 finally:
557 finally:
557 self.cleanup()
558 self.cleanup()
558
559
559 def cleanup(self):
560 def cleanup(self):
560 try:
561 try:
561 self.dest.after()
562 self.dest.after()
562 finally:
563 finally:
563 self.source.after()
564 self.source.after()
564 self.map.close()
565 self.map.close()
565
566
566 def convert(ui, src, dest=None, revmapfile=None, **opts):
567 def convert(ui, src, dest=None, revmapfile=None, **opts):
567 opts = pycompat.byteskwargs(opts)
568 opts = pycompat.byteskwargs(opts)
568 global orig_encoding
569 global orig_encoding
569 orig_encoding = encoding.encoding
570 orig_encoding = encoding.encoding
570 encoding.encoding = 'UTF-8'
571 encoding.encoding = 'UTF-8'
571
572
572 # support --authors as an alias for --authormap
573 # support --authors as an alias for --authormap
573 if not opts.get('authormap'):
574 if not opts.get('authormap'):
574 opts['authormap'] = opts.get('authors')
575 opts['authormap'] = opts.get('authors')
575
576
576 if not dest:
577 if not dest:
577 dest = hg.defaultdest(src) + "-hg"
578 dest = hg.defaultdest(src) + "-hg"
578 ui.status(_("assuming destination %s\n") % dest)
579 ui.status(_("assuming destination %s\n") % dest)
579
580
580 destc = convertsink(ui, dest, opts.get('dest_type'))
581 destc = convertsink(ui, dest, opts.get('dest_type'))
581 destc = scmutil.wrapconvertsink(destc)
582 destc = scmutil.wrapconvertsink(destc)
582
583
583 try:
584 try:
584 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
585 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
585 opts.get('rev'))
586 opts.get('rev'))
586 except Exception:
587 except Exception:
587 for path in destc.created:
588 for path in destc.created:
588 shutil.rmtree(path, True)
589 shutil.rmtree(path, True)
589 raise
590 raise
590
591
591 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
592 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
592 sortmode = [m for m in sortmodes if opts.get(m)]
593 sortmode = [m for m in sortmodes if opts.get(m)]
593 if len(sortmode) > 1:
594 if len(sortmode) > 1:
594 raise error.Abort(_('more than one sort mode specified'))
595 raise error.Abort(_('more than one sort mode specified'))
595 if sortmode:
596 if sortmode:
596 sortmode = sortmode[0]
597 sortmode = sortmode[0]
597 else:
598 else:
598 sortmode = defaultsort
599 sortmode = defaultsort
599
600
600 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
601 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
601 raise error.Abort(_('--sourcesort is not supported by this data source')
602 raise error.Abort(_('--sourcesort is not supported by this data source')
602 )
603 )
603 if sortmode == 'closesort' and not srcc.hasnativeclose():
604 if sortmode == 'closesort' and not srcc.hasnativeclose():
604 raise error.Abort(_('--closesort is not supported by this data source'))
605 raise error.Abort(_('--closesort is not supported by this data source'))
605
606
606 fmap = opts.get('filemap')
607 fmap = opts.get('filemap')
607 if fmap:
608 if fmap:
608 srcc = filemap.filemap_source(ui, srcc, fmap)
609 srcc = filemap.filemap_source(ui, srcc, fmap)
609 destc.setfilemapmode(True)
610 destc.setfilemapmode(True)
610
611
611 if not revmapfile:
612 if not revmapfile:
612 revmapfile = destc.revmapfile()
613 revmapfile = destc.revmapfile()
613
614
614 c = converter(ui, srcc, destc, revmapfile, opts)
615 c = converter(ui, srcc, destc, revmapfile, opts)
615 c.convert(sortmode)
616 c.convert(sortmode)
@@ -1,297 +1,298 b''
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import errno
9 import errno
10 import os
10 import os
11 import re
11 import re
12 import socket
12 import socket
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 encoding,
16 encoding,
17 error,
17 error,
18 pycompat,
18 pycompat,
19 util,
19 util,
20 )
20 )
21 from mercurial.utils import dateutil
21
22
22 from . import (
23 from . import (
23 common,
24 common,
24 cvsps,
25 cvsps,
25 )
26 )
26
27
27 stringio = util.stringio
28 stringio = util.stringio
28 checktool = common.checktool
29 checktool = common.checktool
29 commit = common.commit
30 commit = common.commit
30 converter_source = common.converter_source
31 converter_source = common.converter_source
31 makedatetimestamp = common.makedatetimestamp
32 makedatetimestamp = common.makedatetimestamp
32 NoRepo = common.NoRepo
33 NoRepo = common.NoRepo
33
34
34 class convert_cvs(converter_source):
35 class convert_cvs(converter_source):
35 def __init__(self, ui, repotype, path, revs=None):
36 def __init__(self, ui, repotype, path, revs=None):
36 super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
37 super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
37
38
38 cvs = os.path.join(path, "CVS")
39 cvs = os.path.join(path, "CVS")
39 if not os.path.exists(cvs):
40 if not os.path.exists(cvs):
40 raise NoRepo(_("%s does not look like a CVS checkout") % path)
41 raise NoRepo(_("%s does not look like a CVS checkout") % path)
41
42
42 checktool('cvs')
43 checktool('cvs')
43
44
44 self.changeset = None
45 self.changeset = None
45 self.files = {}
46 self.files = {}
46 self.tags = {}
47 self.tags = {}
47 self.lastbranch = {}
48 self.lastbranch = {}
48 self.socket = None
49 self.socket = None
49 self.cvsroot = open(os.path.join(cvs, "Root"), 'rb').read()[:-1]
50 self.cvsroot = open(os.path.join(cvs, "Root"), 'rb').read()[:-1]
50 self.cvsrepo = open(os.path.join(cvs, "Repository"), 'rb').read()[:-1]
51 self.cvsrepo = open(os.path.join(cvs, "Repository"), 'rb').read()[:-1]
51 self.encoding = encoding.encoding
52 self.encoding = encoding.encoding
52
53
53 self._connect()
54 self._connect()
54
55
55 def _parse(self):
56 def _parse(self):
56 if self.changeset is not None:
57 if self.changeset is not None:
57 return
58 return
58 self.changeset = {}
59 self.changeset = {}
59
60
60 maxrev = 0
61 maxrev = 0
61 if self.revs:
62 if self.revs:
62 if len(self.revs) > 1:
63 if len(self.revs) > 1:
63 raise error.Abort(_('cvs source does not support specifying '
64 raise error.Abort(_('cvs source does not support specifying '
64 'multiple revs'))
65 'multiple revs'))
65 # TODO: handle tags
66 # TODO: handle tags
66 try:
67 try:
67 # patchset number?
68 # patchset number?
68 maxrev = int(self.revs[0])
69 maxrev = int(self.revs[0])
69 except ValueError:
70 except ValueError:
70 raise error.Abort(_('revision %s is not a patchset number')
71 raise error.Abort(_('revision %s is not a patchset number')
71 % self.revs[0])
72 % self.revs[0])
72
73
73 d = pycompat.getcwd()
74 d = pycompat.getcwd()
74 try:
75 try:
75 os.chdir(self.path)
76 os.chdir(self.path)
76 id = None
77 id = None
77
78
78 cache = 'update'
79 cache = 'update'
79 if not self.ui.configbool('convert', 'cvsps.cache'):
80 if not self.ui.configbool('convert', 'cvsps.cache'):
80 cache = None
81 cache = None
81 db = cvsps.createlog(self.ui, cache=cache)
82 db = cvsps.createlog(self.ui, cache=cache)
82 db = cvsps.createchangeset(self.ui, db,
83 db = cvsps.createchangeset(self.ui, db,
83 fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
84 fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
84 mergeto=self.ui.config('convert', 'cvsps.mergeto'),
85 mergeto=self.ui.config('convert', 'cvsps.mergeto'),
85 mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))
86 mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))
86
87
87 for cs in db:
88 for cs in db:
88 if maxrev and cs.id > maxrev:
89 if maxrev and cs.id > maxrev:
89 break
90 break
90 id = str(cs.id)
91 id = str(cs.id)
91 cs.author = self.recode(cs.author)
92 cs.author = self.recode(cs.author)
92 self.lastbranch[cs.branch] = id
93 self.lastbranch[cs.branch] = id
93 cs.comment = self.recode(cs.comment)
94 cs.comment = self.recode(cs.comment)
94 if self.ui.configbool('convert', 'localtimezone'):
95 if self.ui.configbool('convert', 'localtimezone'):
95 cs.date = makedatetimestamp(cs.date[0])
96 cs.date = makedatetimestamp(cs.date[0])
96 date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
97 date = dateutil.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
97 self.tags.update(dict.fromkeys(cs.tags, id))
98 self.tags.update(dict.fromkeys(cs.tags, id))
98
99
99 files = {}
100 files = {}
100 for f in cs.entries:
101 for f in cs.entries:
101 files[f.file] = "%s%s" % ('.'.join([str(x)
102 files[f.file] = "%s%s" % ('.'.join([str(x)
102 for x in f.revision]),
103 for x in f.revision]),
103 ['', '(DEAD)'][f.dead])
104 ['', '(DEAD)'][f.dead])
104
105
105 # add current commit to set
106 # add current commit to set
106 c = commit(author=cs.author, date=date,
107 c = commit(author=cs.author, date=date,
107 parents=[str(p.id) for p in cs.parents],
108 parents=[str(p.id) for p in cs.parents],
108 desc=cs.comment, branch=cs.branch or '')
109 desc=cs.comment, branch=cs.branch or '')
109 self.changeset[id] = c
110 self.changeset[id] = c
110 self.files[id] = files
111 self.files[id] = files
111
112
112 self.heads = self.lastbranch.values()
113 self.heads = self.lastbranch.values()
113 finally:
114 finally:
114 os.chdir(d)
115 os.chdir(d)
115
116
116 def _connect(self):
117 def _connect(self):
117 root = self.cvsroot
118 root = self.cvsroot
118 conntype = None
119 conntype = None
119 user, host = None, None
120 user, host = None, None
120 cmd = ['cvs', 'server']
121 cmd = ['cvs', 'server']
121
122
122 self.ui.status(_("connecting to %s\n") % root)
123 self.ui.status(_("connecting to %s\n") % root)
123
124
124 if root.startswith(":pserver:"):
125 if root.startswith(":pserver:"):
125 root = root[9:]
126 root = root[9:]
126 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
127 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
127 root)
128 root)
128 if m:
129 if m:
129 conntype = "pserver"
130 conntype = "pserver"
130 user, passw, serv, port, root = m.groups()
131 user, passw, serv, port, root = m.groups()
131 if not user:
132 if not user:
132 user = "anonymous"
133 user = "anonymous"
133 if not port:
134 if not port:
134 port = 2401
135 port = 2401
135 else:
136 else:
136 port = int(port)
137 port = int(port)
137 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
138 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
138 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
139 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
139
140
140 if not passw:
141 if not passw:
141 passw = "A"
142 passw = "A"
142 cvspass = os.path.expanduser("~/.cvspass")
143 cvspass = os.path.expanduser("~/.cvspass")
143 try:
144 try:
144 pf = open(cvspass, 'rb')
145 pf = open(cvspass, 'rb')
145 for line in pf.read().splitlines():
146 for line in pf.read().splitlines():
146 part1, part2 = line.split(' ', 1)
147 part1, part2 = line.split(' ', 1)
147 # /1 :pserver:user@example.com:2401/cvsroot/foo
148 # /1 :pserver:user@example.com:2401/cvsroot/foo
148 # Ah<Z
149 # Ah<Z
149 if part1 == '/1':
150 if part1 == '/1':
150 part1, part2 = part2.split(' ', 1)
151 part1, part2 = part2.split(' ', 1)
151 format = format1
152 format = format1
152 # :pserver:user@example.com:/cvsroot/foo Ah<Z
153 # :pserver:user@example.com:/cvsroot/foo Ah<Z
153 else:
154 else:
154 format = format0
155 format = format0
155 if part1 == format:
156 if part1 == format:
156 passw = part2
157 passw = part2
157 break
158 break
158 pf.close()
159 pf.close()
159 except IOError as inst:
160 except IOError as inst:
160 if inst.errno != errno.ENOENT:
161 if inst.errno != errno.ENOENT:
161 if not getattr(inst, 'filename', None):
162 if not getattr(inst, 'filename', None):
162 inst.filename = cvspass
163 inst.filename = cvspass
163 raise
164 raise
164
165
165 sck = socket.socket()
166 sck = socket.socket()
166 sck.connect((serv, port))
167 sck.connect((serv, port))
167 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
168 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
168 "END AUTH REQUEST", ""]))
169 "END AUTH REQUEST", ""]))
169 if sck.recv(128) != "I LOVE YOU\n":
170 if sck.recv(128) != "I LOVE YOU\n":
170 raise error.Abort(_("CVS pserver authentication failed"))
171 raise error.Abort(_("CVS pserver authentication failed"))
171
172
172 self.writep = self.readp = sck.makefile('r+')
173 self.writep = self.readp = sck.makefile('r+')
173
174
174 if not conntype and root.startswith(":local:"):
175 if not conntype and root.startswith(":local:"):
175 conntype = "local"
176 conntype = "local"
176 root = root[7:]
177 root = root[7:]
177
178
178 if not conntype:
179 if not conntype:
179 # :ext:user@host/home/user/path/to/cvsroot
180 # :ext:user@host/home/user/path/to/cvsroot
180 if root.startswith(":ext:"):
181 if root.startswith(":ext:"):
181 root = root[5:]
182 root = root[5:]
182 m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
183 m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
183 # Do not take Windows path "c:\foo\bar" for a connection strings
184 # Do not take Windows path "c:\foo\bar" for a connection strings
184 if os.path.isdir(root) or not m:
185 if os.path.isdir(root) or not m:
185 conntype = "local"
186 conntype = "local"
186 else:
187 else:
187 conntype = "rsh"
188 conntype = "rsh"
188 user, host, root = m.group(1), m.group(2), m.group(3)
189 user, host, root = m.group(1), m.group(2), m.group(3)
189
190
190 if conntype != "pserver":
191 if conntype != "pserver":
191 if conntype == "rsh":
192 if conntype == "rsh":
192 rsh = encoding.environ.get("CVS_RSH") or "ssh"
193 rsh = encoding.environ.get("CVS_RSH") or "ssh"
193 if user:
194 if user:
194 cmd = [rsh, '-l', user, host] + cmd
195 cmd = [rsh, '-l', user, host] + cmd
195 else:
196 else:
196 cmd = [rsh, host] + cmd
197 cmd = [rsh, host] + cmd
197
198
198 # popen2 does not support argument lists under Windows
199 # popen2 does not support argument lists under Windows
199 cmd = [util.shellquote(arg) for arg in cmd]
200 cmd = [util.shellquote(arg) for arg in cmd]
200 cmd = util.quotecommand(' '.join(cmd))
201 cmd = util.quotecommand(' '.join(cmd))
201 self.writep, self.readp = util.popen2(cmd)
202 self.writep, self.readp = util.popen2(cmd)
202
203
203 self.realroot = root
204 self.realroot = root
204
205
205 self.writep.write("Root %s\n" % root)
206 self.writep.write("Root %s\n" % root)
206 self.writep.write("Valid-responses ok error Valid-requests Mode"
207 self.writep.write("Valid-responses ok error Valid-requests Mode"
207 " M Mbinary E Checked-in Created Updated"
208 " M Mbinary E Checked-in Created Updated"
208 " Merged Removed\n")
209 " Merged Removed\n")
209 self.writep.write("valid-requests\n")
210 self.writep.write("valid-requests\n")
210 self.writep.flush()
211 self.writep.flush()
211 r = self.readp.readline()
212 r = self.readp.readline()
212 if not r.startswith("Valid-requests"):
213 if not r.startswith("Valid-requests"):
213 raise error.Abort(_('unexpected response from CVS server '
214 raise error.Abort(_('unexpected response from CVS server '
214 '(expected "Valid-requests", but got %r)')
215 '(expected "Valid-requests", but got %r)')
215 % r)
216 % r)
216 if "UseUnchanged" in r:
217 if "UseUnchanged" in r:
217 self.writep.write("UseUnchanged\n")
218 self.writep.write("UseUnchanged\n")
218 self.writep.flush()
219 self.writep.flush()
219 r = self.readp.readline()
220 r = self.readp.readline()
220
221
221 def getheads(self):
222 def getheads(self):
222 self._parse()
223 self._parse()
223 return self.heads
224 return self.heads
224
225
225 def getfile(self, name, rev):
226 def getfile(self, name, rev):
226
227
227 def chunkedread(fp, count):
228 def chunkedread(fp, count):
228 # file-objects returned by socket.makefile() do not handle
229 # file-objects returned by socket.makefile() do not handle
229 # large read() requests very well.
230 # large read() requests very well.
230 chunksize = 65536
231 chunksize = 65536
231 output = stringio()
232 output = stringio()
232 while count > 0:
233 while count > 0:
233 data = fp.read(min(count, chunksize))
234 data = fp.read(min(count, chunksize))
234 if not data:
235 if not data:
235 raise error.Abort(_("%d bytes missing from remote file")
236 raise error.Abort(_("%d bytes missing from remote file")
236 % count)
237 % count)
237 count -= len(data)
238 count -= len(data)
238 output.write(data)
239 output.write(data)
239 return output.getvalue()
240 return output.getvalue()
240
241
241 self._parse()
242 self._parse()
242 if rev.endswith("(DEAD)"):
243 if rev.endswith("(DEAD)"):
243 return None, None
244 return None, None
244
245
245 args = ("-N -P -kk -r %s --" % rev).split()
246 args = ("-N -P -kk -r %s --" % rev).split()
246 args.append(self.cvsrepo + '/' + name)
247 args.append(self.cvsrepo + '/' + name)
247 for x in args:
248 for x in args:
248 self.writep.write("Argument %s\n" % x)
249 self.writep.write("Argument %s\n" % x)
249 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
250 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
250 self.writep.flush()
251 self.writep.flush()
251
252
252 data = ""
253 data = ""
253 mode = None
254 mode = None
254 while True:
255 while True:
255 line = self.readp.readline()
256 line = self.readp.readline()
256 if line.startswith("Created ") or line.startswith("Updated "):
257 if line.startswith("Created ") or line.startswith("Updated "):
257 self.readp.readline() # path
258 self.readp.readline() # path
258 self.readp.readline() # entries
259 self.readp.readline() # entries
259 mode = self.readp.readline()[:-1]
260 mode = self.readp.readline()[:-1]
260 count = int(self.readp.readline()[:-1])
261 count = int(self.readp.readline()[:-1])
261 data = chunkedread(self.readp, count)
262 data = chunkedread(self.readp, count)
262 elif line.startswith(" "):
263 elif line.startswith(" "):
263 data += line[1:]
264 data += line[1:]
264 elif line.startswith("M "):
265 elif line.startswith("M "):
265 pass
266 pass
266 elif line.startswith("Mbinary "):
267 elif line.startswith("Mbinary "):
267 count = int(self.readp.readline()[:-1])
268 count = int(self.readp.readline()[:-1])
268 data = chunkedread(self.readp, count)
269 data = chunkedread(self.readp, count)
269 else:
270 else:
270 if line == "ok\n":
271 if line == "ok\n":
271 if mode is None:
272 if mode is None:
272 raise error.Abort(_('malformed response from CVS'))
273 raise error.Abort(_('malformed response from CVS'))
273 return (data, "x" in mode and "x" or "")
274 return (data, "x" in mode and "x" or "")
274 elif line.startswith("E "):
275 elif line.startswith("E "):
275 self.ui.warn(_("cvs server: %s\n") % line[2:])
276 self.ui.warn(_("cvs server: %s\n") % line[2:])
276 elif line.startswith("Remove"):
277 elif line.startswith("Remove"):
277 self.readp.readline()
278 self.readp.readline()
278 else:
279 else:
279 raise error.Abort(_("unknown CVS response: %s") % line)
280 raise error.Abort(_("unknown CVS response: %s") % line)
280
281
281 def getchanges(self, rev, full):
282 def getchanges(self, rev, full):
282 if full:
283 if full:
283 raise error.Abort(_("convert from cvs does not support --full"))
284 raise error.Abort(_("convert from cvs does not support --full"))
284 self._parse()
285 self._parse()
285 return sorted(self.files[rev].iteritems()), {}, set()
286 return sorted(self.files[rev].iteritems()), {}, set()
286
287
287 def getcommit(self, rev):
288 def getcommit(self, rev):
288 self._parse()
289 self._parse()
289 return self.changeset[rev]
290 return self.changeset[rev]
290
291
291 def gettags(self):
292 def gettags(self):
292 self._parse()
293 self._parse()
293 return self.tags
294 return self.tags
294
295
295 def getchangedfiles(self, rev, i):
296 def getchangedfiles(self, rev, i):
296 self._parse()
297 self._parse()
297 return sorted(self.files[rev])
298 return sorted(self.files[rev])
@@ -1,952 +1,953 b''
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import os
9 import os
10 import re
10 import re
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 encoding,
14 encoding,
15 error,
15 error,
16 hook,
16 hook,
17 pycompat,
17 pycompat,
18 util,
18 util,
19 )
19 )
20 from mercurial.utils import dateutil
20
21
21 pickle = util.pickle
22 pickle = util.pickle
22
23
23 class logentry(object):
24 class logentry(object):
24 '''Class logentry has the following attributes:
25 '''Class logentry has the following attributes:
25 .author - author name as CVS knows it
26 .author - author name as CVS knows it
26 .branch - name of branch this revision is on
27 .branch - name of branch this revision is on
27 .branches - revision tuple of branches starting at this revision
28 .branches - revision tuple of branches starting at this revision
28 .comment - commit message
29 .comment - commit message
29 .commitid - CVS commitid or None
30 .commitid - CVS commitid or None
30 .date - the commit date as a (time, tz) tuple
31 .date - the commit date as a (time, tz) tuple
31 .dead - true if file revision is dead
32 .dead - true if file revision is dead
32 .file - Name of file
33 .file - Name of file
33 .lines - a tuple (+lines, -lines) or None
34 .lines - a tuple (+lines, -lines) or None
34 .parent - Previous revision of this entry
35 .parent - Previous revision of this entry
35 .rcs - name of file as returned from CVS
36 .rcs - name of file as returned from CVS
36 .revision - revision number as tuple
37 .revision - revision number as tuple
37 .tags - list of tags on the file
38 .tags - list of tags on the file
38 .synthetic - is this a synthetic "file ... added on ..." revision?
39 .synthetic - is this a synthetic "file ... added on ..." revision?
39 .mergepoint - the branch that has been merged from (if present in
40 .mergepoint - the branch that has been merged from (if present in
40 rlog output) or None
41 rlog output) or None
41 .branchpoints - the branches that start at the current entry or empty
42 .branchpoints - the branches that start at the current entry or empty
42 '''
43 '''
43 def __init__(self, **entries):
44 def __init__(self, **entries):
44 self.synthetic = False
45 self.synthetic = False
45 self.__dict__.update(entries)
46 self.__dict__.update(entries)
46
47
47 def __repr__(self):
48 def __repr__(self):
48 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
49 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
49 return "%s(%s)"%(type(self).__name__, ", ".join(items))
50 return "%s(%s)"%(type(self).__name__, ", ".join(items))
50
51
51 class logerror(Exception):
52 class logerror(Exception):
52 pass
53 pass
53
54
54 def getrepopath(cvspath):
55 def getrepopath(cvspath):
55 """Return the repository path from a CVS path.
56 """Return the repository path from a CVS path.
56
57
57 >>> getrepopath(b'/foo/bar')
58 >>> getrepopath(b'/foo/bar')
58 '/foo/bar'
59 '/foo/bar'
59 >>> getrepopath(b'c:/foo/bar')
60 >>> getrepopath(b'c:/foo/bar')
60 '/foo/bar'
61 '/foo/bar'
61 >>> getrepopath(b':pserver:10/foo/bar')
62 >>> getrepopath(b':pserver:10/foo/bar')
62 '/foo/bar'
63 '/foo/bar'
63 >>> getrepopath(b':pserver:10c:/foo/bar')
64 >>> getrepopath(b':pserver:10c:/foo/bar')
64 '/foo/bar'
65 '/foo/bar'
65 >>> getrepopath(b':pserver:/foo/bar')
66 >>> getrepopath(b':pserver:/foo/bar')
66 '/foo/bar'
67 '/foo/bar'
67 >>> getrepopath(b':pserver:c:/foo/bar')
68 >>> getrepopath(b':pserver:c:/foo/bar')
68 '/foo/bar'
69 '/foo/bar'
69 >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
70 >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
70 '/foo/bar'
71 '/foo/bar'
71 >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
72 >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
72 '/foo/bar'
73 '/foo/bar'
73 >>> getrepopath(b'user@server/path/to/repository')
74 >>> getrepopath(b'user@server/path/to/repository')
74 '/path/to/repository'
75 '/path/to/repository'
75 """
76 """
76 # According to CVS manual, CVS paths are expressed like:
77 # According to CVS manual, CVS paths are expressed like:
77 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
78 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
78 #
79 #
79 # CVSpath is splitted into parts and then position of the first occurrence
80 # CVSpath is splitted into parts and then position of the first occurrence
80 # of the '/' char after the '@' is located. The solution is the rest of the
81 # of the '/' char after the '@' is located. The solution is the rest of the
81 # string after that '/' sign including it
82 # string after that '/' sign including it
82
83
83 parts = cvspath.split(':')
84 parts = cvspath.split(':')
84 atposition = parts[-1].find('@')
85 atposition = parts[-1].find('@')
85 start = 0
86 start = 0
86
87
87 if atposition != -1:
88 if atposition != -1:
88 start = atposition
89 start = atposition
89
90
90 repopath = parts[-1][parts[-1].find('/', start):]
91 repopath = parts[-1][parts[-1].find('/', start):]
91 return repopath
92 return repopath
92
93
93 def createlog(ui, directory=None, root="", rlog=True, cache=None):
94 def createlog(ui, directory=None, root="", rlog=True, cache=None):
94 '''Collect the CVS rlog'''
95 '''Collect the CVS rlog'''
95
96
96 # Because we store many duplicate commit log messages, reusing strings
97 # Because we store many duplicate commit log messages, reusing strings
97 # saves a lot of memory and pickle storage space.
98 # saves a lot of memory and pickle storage space.
98 _scache = {}
99 _scache = {}
99 def scache(s):
100 def scache(s):
100 "return a shared version of a string"
101 "return a shared version of a string"
101 return _scache.setdefault(s, s)
102 return _scache.setdefault(s, s)
102
103
103 ui.status(_('collecting CVS rlog\n'))
104 ui.status(_('collecting CVS rlog\n'))
104
105
105 log = [] # list of logentry objects containing the CVS state
106 log = [] # list of logentry objects containing the CVS state
106
107
107 # patterns to match in CVS (r)log output, by state of use
108 # patterns to match in CVS (r)log output, by state of use
108 re_00 = re.compile('RCS file: (.+)$')
109 re_00 = re.compile('RCS file: (.+)$')
109 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
110 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
110 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
111 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
111 re_03 = re.compile("(Cannot access.+CVSROOT)|"
112 re_03 = re.compile("(Cannot access.+CVSROOT)|"
112 "(can't create temporary directory.+)$")
113 "(can't create temporary directory.+)$")
113 re_10 = re.compile('Working file: (.+)$')
114 re_10 = re.compile('Working file: (.+)$')
114 re_20 = re.compile('symbolic names:')
115 re_20 = re.compile('symbolic names:')
115 re_30 = re.compile('\t(.+): ([\\d.]+)$')
116 re_30 = re.compile('\t(.+): ([\\d.]+)$')
116 re_31 = re.compile('----------------------------$')
117 re_31 = re.compile('----------------------------$')
117 re_32 = re.compile('======================================='
118 re_32 = re.compile('======================================='
118 '======================================$')
119 '======================================$')
119 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
120 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
120 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
121 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
121 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
122 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
122 r'(\s+commitid:\s+([^;]+);)?'
123 r'(\s+commitid:\s+([^;]+);)?'
123 r'(.*mergepoint:\s+([^;]+);)?')
124 r'(.*mergepoint:\s+([^;]+);)?')
124 re_70 = re.compile('branches: (.+);$')
125 re_70 = re.compile('branches: (.+);$')
125
126
126 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
127 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
127
128
128 prefix = '' # leading path to strip of what we get from CVS
129 prefix = '' # leading path to strip of what we get from CVS
129
130
130 if directory is None:
131 if directory is None:
131 # Current working directory
132 # Current working directory
132
133
133 # Get the real directory in the repository
134 # Get the real directory in the repository
134 try:
135 try:
135 prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
136 prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
136 directory = prefix
137 directory = prefix
137 if prefix == ".":
138 if prefix == ".":
138 prefix = ""
139 prefix = ""
139 except IOError:
140 except IOError:
140 raise logerror(_('not a CVS sandbox'))
141 raise logerror(_('not a CVS sandbox'))
141
142
142 if prefix and not prefix.endswith(pycompat.ossep):
143 if prefix and not prefix.endswith(pycompat.ossep):
143 prefix += pycompat.ossep
144 prefix += pycompat.ossep
144
145
145 # Use the Root file in the sandbox, if it exists
146 # Use the Root file in the sandbox, if it exists
146 try:
147 try:
147 root = open(os.path.join('CVS','Root'), 'rb').read().strip()
148 root = open(os.path.join('CVS','Root'), 'rb').read().strip()
148 except IOError:
149 except IOError:
149 pass
150 pass
150
151
151 if not root:
152 if not root:
152 root = encoding.environ.get('CVSROOT', '')
153 root = encoding.environ.get('CVSROOT', '')
153
154
154 # read log cache if one exists
155 # read log cache if one exists
155 oldlog = []
156 oldlog = []
156 date = None
157 date = None
157
158
158 if cache:
159 if cache:
159 cachedir = os.path.expanduser('~/.hg.cvsps')
160 cachedir = os.path.expanduser('~/.hg.cvsps')
160 if not os.path.exists(cachedir):
161 if not os.path.exists(cachedir):
161 os.mkdir(cachedir)
162 os.mkdir(cachedir)
162
163
163 # The cvsps cache pickle needs a uniquified name, based on the
164 # The cvsps cache pickle needs a uniquified name, based on the
164 # repository location. The address may have all sort of nasties
165 # repository location. The address may have all sort of nasties
165 # in it, slashes, colons and such. So here we take just the
166 # in it, slashes, colons and such. So here we take just the
166 # alphanumeric characters, concatenated in a way that does not
167 # alphanumeric characters, concatenated in a way that does not
167 # mix up the various components, so that
168 # mix up the various components, so that
168 # :pserver:user@server:/path
169 # :pserver:user@server:/path
169 # and
170 # and
170 # /pserver/user/server/path
171 # /pserver/user/server/path
171 # are mapped to different cache file names.
172 # are mapped to different cache file names.
172 cachefile = root.split(":") + [directory, "cache"]
173 cachefile = root.split(":") + [directory, "cache"]
173 cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
174 cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
174 cachefile = os.path.join(cachedir,
175 cachefile = os.path.join(cachedir,
175 '.'.join([s for s in cachefile if s]))
176 '.'.join([s for s in cachefile if s]))
176
177
177 if cache == 'update':
178 if cache == 'update':
178 try:
179 try:
179 ui.note(_('reading cvs log cache %s\n') % cachefile)
180 ui.note(_('reading cvs log cache %s\n') % cachefile)
180 oldlog = pickle.load(open(cachefile, 'rb'))
181 oldlog = pickle.load(open(cachefile, 'rb'))
181 for e in oldlog:
182 for e in oldlog:
182 if not (util.safehasattr(e, 'branchpoints') and
183 if not (util.safehasattr(e, 'branchpoints') and
183 util.safehasattr(e, 'commitid') and
184 util.safehasattr(e, 'commitid') and
184 util.safehasattr(e, 'mergepoint')):
185 util.safehasattr(e, 'mergepoint')):
185 ui.status(_('ignoring old cache\n'))
186 ui.status(_('ignoring old cache\n'))
186 oldlog = []
187 oldlog = []
187 break
188 break
188
189
189 ui.note(_('cache has %d log entries\n') % len(oldlog))
190 ui.note(_('cache has %d log entries\n') % len(oldlog))
190 except Exception as e:
191 except Exception as e:
191 ui.note(_('error reading cache: %r\n') % e)
192 ui.note(_('error reading cache: %r\n') % e)
192
193
193 if oldlog:
194 if oldlog:
194 date = oldlog[-1].date # last commit date as a (time,tz) tuple
195 date = oldlog[-1].date # last commit date as a (time,tz) tuple
195 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
196 date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
196
197
197 # build the CVS commandline
198 # build the CVS commandline
198 cmd = ['cvs', '-q']
199 cmd = ['cvs', '-q']
199 if root:
200 if root:
200 cmd.append('-d%s' % root)
201 cmd.append('-d%s' % root)
201 p = util.normpath(getrepopath(root))
202 p = util.normpath(getrepopath(root))
202 if not p.endswith('/'):
203 if not p.endswith('/'):
203 p += '/'
204 p += '/'
204 if prefix:
205 if prefix:
205 # looks like normpath replaces "" by "."
206 # looks like normpath replaces "" by "."
206 prefix = p + util.normpath(prefix)
207 prefix = p + util.normpath(prefix)
207 else:
208 else:
208 prefix = p
209 prefix = p
209 cmd.append(['log', 'rlog'][rlog])
210 cmd.append(['log', 'rlog'][rlog])
210 if date:
211 if date:
211 # no space between option and date string
212 # no space between option and date string
212 cmd.append('-d>%s' % date)
213 cmd.append('-d>%s' % date)
213 cmd.append(directory)
214 cmd.append(directory)
214
215
215 # state machine begins here
216 # state machine begins here
216 tags = {} # dictionary of revisions on current file with their tags
217 tags = {} # dictionary of revisions on current file with their tags
217 branchmap = {} # mapping between branch names and revision numbers
218 branchmap = {} # mapping between branch names and revision numbers
218 rcsmap = {}
219 rcsmap = {}
219 state = 0
220 state = 0
220 store = False # set when a new record can be appended
221 store = False # set when a new record can be appended
221
222
222 cmd = [util.shellquote(arg) for arg in cmd]
223 cmd = [util.shellquote(arg) for arg in cmd]
223 ui.note(_("running %s\n") % (' '.join(cmd)))
224 ui.note(_("running %s\n") % (' '.join(cmd)))
224 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
225 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
225
226
226 pfp = util.popen(' '.join(cmd))
227 pfp = util.popen(' '.join(cmd))
227 peek = pfp.readline()
228 peek = pfp.readline()
228 while True:
229 while True:
229 line = peek
230 line = peek
230 if line == '':
231 if line == '':
231 break
232 break
232 peek = pfp.readline()
233 peek = pfp.readline()
233 if line.endswith('\n'):
234 if line.endswith('\n'):
234 line = line[:-1]
235 line = line[:-1]
235 #ui.debug('state=%d line=%r\n' % (state, line))
236 #ui.debug('state=%d line=%r\n' % (state, line))
236
237
237 if state == 0:
238 if state == 0:
238 # initial state, consume input until we see 'RCS file'
239 # initial state, consume input until we see 'RCS file'
239 match = re_00.match(line)
240 match = re_00.match(line)
240 if match:
241 if match:
241 rcs = match.group(1)
242 rcs = match.group(1)
242 tags = {}
243 tags = {}
243 if rlog:
244 if rlog:
244 filename = util.normpath(rcs[:-2])
245 filename = util.normpath(rcs[:-2])
245 if filename.startswith(prefix):
246 if filename.startswith(prefix):
246 filename = filename[len(prefix):]
247 filename = filename[len(prefix):]
247 if filename.startswith('/'):
248 if filename.startswith('/'):
248 filename = filename[1:]
249 filename = filename[1:]
249 if filename.startswith('Attic/'):
250 if filename.startswith('Attic/'):
250 filename = filename[6:]
251 filename = filename[6:]
251 else:
252 else:
252 filename = filename.replace('/Attic/', '/')
253 filename = filename.replace('/Attic/', '/')
253 state = 2
254 state = 2
254 continue
255 continue
255 state = 1
256 state = 1
256 continue
257 continue
257 match = re_01.match(line)
258 match = re_01.match(line)
258 if match:
259 if match:
259 raise logerror(match.group(1))
260 raise logerror(match.group(1))
260 match = re_02.match(line)
261 match = re_02.match(line)
261 if match:
262 if match:
262 raise logerror(match.group(2))
263 raise logerror(match.group(2))
263 if re_03.match(line):
264 if re_03.match(line):
264 raise logerror(line)
265 raise logerror(line)
265
266
266 elif state == 1:
267 elif state == 1:
267 # expect 'Working file' (only when using log instead of rlog)
268 # expect 'Working file' (only when using log instead of rlog)
268 match = re_10.match(line)
269 match = re_10.match(line)
269 assert match, _('RCS file must be followed by working file')
270 assert match, _('RCS file must be followed by working file')
270 filename = util.normpath(match.group(1))
271 filename = util.normpath(match.group(1))
271 state = 2
272 state = 2
272
273
273 elif state == 2:
274 elif state == 2:
274 # expect 'symbolic names'
275 # expect 'symbolic names'
275 if re_20.match(line):
276 if re_20.match(line):
276 branchmap = {}
277 branchmap = {}
277 state = 3
278 state = 3
278
279
279 elif state == 3:
280 elif state == 3:
280 # read the symbolic names and store as tags
281 # read the symbolic names and store as tags
281 match = re_30.match(line)
282 match = re_30.match(line)
282 if match:
283 if match:
283 rev = [int(x) for x in match.group(2).split('.')]
284 rev = [int(x) for x in match.group(2).split('.')]
284
285
285 # Convert magic branch number to an odd-numbered one
286 # Convert magic branch number to an odd-numbered one
286 revn = len(rev)
287 revn = len(rev)
287 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
288 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
288 rev = rev[:-2] + rev[-1:]
289 rev = rev[:-2] + rev[-1:]
289 rev = tuple(rev)
290 rev = tuple(rev)
290
291
291 if rev not in tags:
292 if rev not in tags:
292 tags[rev] = []
293 tags[rev] = []
293 tags[rev].append(match.group(1))
294 tags[rev].append(match.group(1))
294 branchmap[match.group(1)] = match.group(2)
295 branchmap[match.group(1)] = match.group(2)
295
296
296 elif re_31.match(line):
297 elif re_31.match(line):
297 state = 5
298 state = 5
298 elif re_32.match(line):
299 elif re_32.match(line):
299 state = 0
300 state = 0
300
301
301 elif state == 4:
302 elif state == 4:
302 # expecting '------' separator before first revision
303 # expecting '------' separator before first revision
303 if re_31.match(line):
304 if re_31.match(line):
304 state = 5
305 state = 5
305 else:
306 else:
306 assert not re_32.match(line), _('must have at least '
307 assert not re_32.match(line), _('must have at least '
307 'some revisions')
308 'some revisions')
308
309
309 elif state == 5:
310 elif state == 5:
310 # expecting revision number and possibly (ignored) lock indication
311 # expecting revision number and possibly (ignored) lock indication
311 # we create the logentry here from values stored in states 0 to 4,
312 # we create the logentry here from values stored in states 0 to 4,
312 # as this state is re-entered for subsequent revisions of a file.
313 # as this state is re-entered for subsequent revisions of a file.
313 match = re_50.match(line)
314 match = re_50.match(line)
314 assert match, _('expected revision number')
315 assert match, _('expected revision number')
315 e = logentry(rcs=scache(rcs),
316 e = logentry(rcs=scache(rcs),
316 file=scache(filename),
317 file=scache(filename),
317 revision=tuple([int(x) for x in
318 revision=tuple([int(x) for x in
318 match.group(1).split('.')]),
319 match.group(1).split('.')]),
319 branches=[],
320 branches=[],
320 parent=None,
321 parent=None,
321 commitid=None,
322 commitid=None,
322 mergepoint=None,
323 mergepoint=None,
323 branchpoints=set())
324 branchpoints=set())
324
325
325 state = 6
326 state = 6
326
327
327 elif state == 6:
328 elif state == 6:
328 # expecting date, author, state, lines changed
329 # expecting date, author, state, lines changed
329 match = re_60.match(line)
330 match = re_60.match(line)
330 assert match, _('revision must be followed by date line')
331 assert match, _('revision must be followed by date line')
331 d = match.group(1)
332 d = match.group(1)
332 if d[2] == '/':
333 if d[2] == '/':
333 # Y2K
334 # Y2K
334 d = '19' + d
335 d = '19' + d
335
336
336 if len(d.split()) != 3:
337 if len(d.split()) != 3:
337 # cvs log dates always in GMT
338 # cvs log dates always in GMT
338 d = d + ' UTC'
339 d = d + ' UTC'
339 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
340 e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
340 '%Y/%m/%d %H:%M:%S',
341 '%Y/%m/%d %H:%M:%S',
341 '%Y-%m-%d %H:%M:%S'])
342 '%Y-%m-%d %H:%M:%S'])
342 e.author = scache(match.group(2))
343 e.author = scache(match.group(2))
343 e.dead = match.group(3).lower() == 'dead'
344 e.dead = match.group(3).lower() == 'dead'
344
345
345 if match.group(5):
346 if match.group(5):
346 if match.group(6):
347 if match.group(6):
347 e.lines = (int(match.group(5)), int(match.group(6)))
348 e.lines = (int(match.group(5)), int(match.group(6)))
348 else:
349 else:
349 e.lines = (int(match.group(5)), 0)
350 e.lines = (int(match.group(5)), 0)
350 elif match.group(6):
351 elif match.group(6):
351 e.lines = (0, int(match.group(6)))
352 e.lines = (0, int(match.group(6)))
352 else:
353 else:
353 e.lines = None
354 e.lines = None
354
355
355 if match.group(7): # cvs 1.12 commitid
356 if match.group(7): # cvs 1.12 commitid
356 e.commitid = match.group(8)
357 e.commitid = match.group(8)
357
358
358 if match.group(9): # cvsnt mergepoint
359 if match.group(9): # cvsnt mergepoint
359 myrev = match.group(10).split('.')
360 myrev = match.group(10).split('.')
360 if len(myrev) == 2: # head
361 if len(myrev) == 2: # head
361 e.mergepoint = 'HEAD'
362 e.mergepoint = 'HEAD'
362 else:
363 else:
363 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
364 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
364 branches = [b for b in branchmap if branchmap[b] == myrev]
365 branches = [b for b in branchmap if branchmap[b] == myrev]
365 assert len(branches) == 1, ('unknown branch: %s'
366 assert len(branches) == 1, ('unknown branch: %s'
366 % e.mergepoint)
367 % e.mergepoint)
367 e.mergepoint = branches[0]
368 e.mergepoint = branches[0]
368
369
369 e.comment = []
370 e.comment = []
370 state = 7
371 state = 7
371
372
372 elif state == 7:
373 elif state == 7:
373 # read the revision numbers of branches that start at this revision
374 # read the revision numbers of branches that start at this revision
374 # or store the commit log message otherwise
375 # or store the commit log message otherwise
375 m = re_70.match(line)
376 m = re_70.match(line)
376 if m:
377 if m:
377 e.branches = [tuple([int(y) for y in x.strip().split('.')])
378 e.branches = [tuple([int(y) for y in x.strip().split('.')])
378 for x in m.group(1).split(';')]
379 for x in m.group(1).split(';')]
379 state = 8
380 state = 8
380 elif re_31.match(line) and re_50.match(peek):
381 elif re_31.match(line) and re_50.match(peek):
381 state = 5
382 state = 5
382 store = True
383 store = True
383 elif re_32.match(line):
384 elif re_32.match(line):
384 state = 0
385 state = 0
385 store = True
386 store = True
386 else:
387 else:
387 e.comment.append(line)
388 e.comment.append(line)
388
389
389 elif state == 8:
390 elif state == 8:
390 # store commit log message
391 # store commit log message
391 if re_31.match(line):
392 if re_31.match(line):
392 cpeek = peek
393 cpeek = peek
393 if cpeek.endswith('\n'):
394 if cpeek.endswith('\n'):
394 cpeek = cpeek[:-1]
395 cpeek = cpeek[:-1]
395 if re_50.match(cpeek):
396 if re_50.match(cpeek):
396 state = 5
397 state = 5
397 store = True
398 store = True
398 else:
399 else:
399 e.comment.append(line)
400 e.comment.append(line)
400 elif re_32.match(line):
401 elif re_32.match(line):
401 state = 0
402 state = 0
402 store = True
403 store = True
403 else:
404 else:
404 e.comment.append(line)
405 e.comment.append(line)
405
406
406 # When a file is added on a branch B1, CVS creates a synthetic
407 # When a file is added on a branch B1, CVS creates a synthetic
407 # dead trunk revision 1.1 so that the branch has a root.
408 # dead trunk revision 1.1 so that the branch has a root.
408 # Likewise, if you merge such a file to a later branch B2 (one
409 # Likewise, if you merge such a file to a later branch B2 (one
409 # that already existed when the file was added on B1), CVS
410 # that already existed when the file was added on B1), CVS
410 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
411 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
411 # these revisions now, but mark them synthetic so
412 # these revisions now, but mark them synthetic so
412 # createchangeset() can take care of them.
413 # createchangeset() can take care of them.
413 if (store and
414 if (store and
414 e.dead and
415 e.dead and
415 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
416 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
416 len(e.comment) == 1 and
417 len(e.comment) == 1 and
417 file_added_re.match(e.comment[0])):
418 file_added_re.match(e.comment[0])):
418 ui.debug('found synthetic revision in %s: %r\n'
419 ui.debug('found synthetic revision in %s: %r\n'
419 % (e.rcs, e.comment[0]))
420 % (e.rcs, e.comment[0]))
420 e.synthetic = True
421 e.synthetic = True
421
422
422 if store:
423 if store:
423 # clean up the results and save in the log.
424 # clean up the results and save in the log.
424 store = False
425 store = False
425 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
426 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
426 e.comment = scache('\n'.join(e.comment))
427 e.comment = scache('\n'.join(e.comment))
427
428
428 revn = len(e.revision)
429 revn = len(e.revision)
429 if revn > 3 and (revn % 2) == 0:
430 if revn > 3 and (revn % 2) == 0:
430 e.branch = tags.get(e.revision[:-1], [None])[0]
431 e.branch = tags.get(e.revision[:-1], [None])[0]
431 else:
432 else:
432 e.branch = None
433 e.branch = None
433
434
434 # find the branches starting from this revision
435 # find the branches starting from this revision
435 branchpoints = set()
436 branchpoints = set()
436 for branch, revision in branchmap.iteritems():
437 for branch, revision in branchmap.iteritems():
437 revparts = tuple([int(i) for i in revision.split('.')])
438 revparts = tuple([int(i) for i in revision.split('.')])
438 if len(revparts) < 2: # bad tags
439 if len(revparts) < 2: # bad tags
439 continue
440 continue
440 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
441 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
441 # normal branch
442 # normal branch
442 if revparts[:-2] == e.revision:
443 if revparts[:-2] == e.revision:
443 branchpoints.add(branch)
444 branchpoints.add(branch)
444 elif revparts == (1, 1, 1): # vendor branch
445 elif revparts == (1, 1, 1): # vendor branch
445 if revparts in e.branches:
446 if revparts in e.branches:
446 branchpoints.add(branch)
447 branchpoints.add(branch)
447 e.branchpoints = branchpoints
448 e.branchpoints = branchpoints
448
449
449 log.append(e)
450 log.append(e)
450
451
451 rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
452 rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
452
453
453 if len(log) % 100 == 0:
454 if len(log) % 100 == 0:
454 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
455 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
455
456
456 log.sort(key=lambda x: (x.rcs, x.revision))
457 log.sort(key=lambda x: (x.rcs, x.revision))
457
458
458 # find parent revisions of individual files
459 # find parent revisions of individual files
459 versions = {}
460 versions = {}
460 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
461 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
461 rcs = e.rcs.replace('/Attic/', '/')
462 rcs = e.rcs.replace('/Attic/', '/')
462 if rcs in rcsmap:
463 if rcs in rcsmap:
463 e.rcs = rcsmap[rcs]
464 e.rcs = rcsmap[rcs]
464 branch = e.revision[:-1]
465 branch = e.revision[:-1]
465 versions[(e.rcs, branch)] = e.revision
466 versions[(e.rcs, branch)] = e.revision
466
467
467 for e in log:
468 for e in log:
468 branch = e.revision[:-1]
469 branch = e.revision[:-1]
469 p = versions.get((e.rcs, branch), None)
470 p = versions.get((e.rcs, branch), None)
470 if p is None:
471 if p is None:
471 p = e.revision[:-2]
472 p = e.revision[:-2]
472 e.parent = p
473 e.parent = p
473 versions[(e.rcs, branch)] = e.revision
474 versions[(e.rcs, branch)] = e.revision
474
475
475 # update the log cache
476 # update the log cache
476 if cache:
477 if cache:
477 if log:
478 if log:
478 # join up the old and new logs
479 # join up the old and new logs
479 log.sort(key=lambda x: x.date)
480 log.sort(key=lambda x: x.date)
480
481
481 if oldlog and oldlog[-1].date >= log[0].date:
482 if oldlog and oldlog[-1].date >= log[0].date:
482 raise logerror(_('log cache overlaps with new log entries,'
483 raise logerror(_('log cache overlaps with new log entries,'
483 ' re-run without cache.'))
484 ' re-run without cache.'))
484
485
485 log = oldlog + log
486 log = oldlog + log
486
487
487 # write the new cachefile
488 # write the new cachefile
488 ui.note(_('writing cvs log cache %s\n') % cachefile)
489 ui.note(_('writing cvs log cache %s\n') % cachefile)
489 pickle.dump(log, open(cachefile, 'wb'))
490 pickle.dump(log, open(cachefile, 'wb'))
490 else:
491 else:
491 log = oldlog
492 log = oldlog
492
493
493 ui.status(_('%d log entries\n') % len(log))
494 ui.status(_('%d log entries\n') % len(log))
494
495
495 encodings = ui.configlist('convert', 'cvsps.logencoding')
496 encodings = ui.configlist('convert', 'cvsps.logencoding')
496 if encodings:
497 if encodings:
497 def revstr(r):
498 def revstr(r):
498 # this is needed, because logentry.revision is a tuple of "int"
499 # this is needed, because logentry.revision is a tuple of "int"
499 # (e.g. (1, 2) for "1.2")
500 # (e.g. (1, 2) for "1.2")
500 return '.'.join(pycompat.maplist(pycompat.bytestr, r))
501 return '.'.join(pycompat.maplist(pycompat.bytestr, r))
501
502
502 for entry in log:
503 for entry in log:
503 comment = entry.comment
504 comment = entry.comment
504 for e in encodings:
505 for e in encodings:
505 try:
506 try:
506 entry.comment = comment.decode(e).encode('utf-8')
507 entry.comment = comment.decode(e).encode('utf-8')
507 if ui.debugflag:
508 if ui.debugflag:
508 ui.debug("transcoding by %s: %s of %s\n" %
509 ui.debug("transcoding by %s: %s of %s\n" %
509 (e, revstr(entry.revision), entry.file))
510 (e, revstr(entry.revision), entry.file))
510 break
511 break
511 except UnicodeDecodeError:
512 except UnicodeDecodeError:
512 pass # try next encoding
513 pass # try next encoding
513 except LookupError as inst: # unknown encoding, maybe
514 except LookupError as inst: # unknown encoding, maybe
514 raise error.Abort(inst,
515 raise error.Abort(inst,
515 hint=_('check convert.cvsps.logencoding'
516 hint=_('check convert.cvsps.logencoding'
516 ' configuration'))
517 ' configuration'))
517 else:
518 else:
518 raise error.Abort(_("no encoding can transcode"
519 raise error.Abort(_("no encoding can transcode"
519 " CVS log message for %s of %s")
520 " CVS log message for %s of %s")
520 % (revstr(entry.revision), entry.file),
521 % (revstr(entry.revision), entry.file),
521 hint=_('check convert.cvsps.logencoding'
522 hint=_('check convert.cvsps.logencoding'
522 ' configuration'))
523 ' configuration'))
523
524
524 hook.hook(ui, None, "cvslog", True, log=log)
525 hook.hook(ui, None, "cvslog", True, log=log)
525
526
526 return log
527 return log
527
528
528
529
529 class changeset(object):
530 class changeset(object):
530 '''Class changeset has the following attributes:
531 '''Class changeset has the following attributes:
531 .id - integer identifying this changeset (list index)
532 .id - integer identifying this changeset (list index)
532 .author - author name as CVS knows it
533 .author - author name as CVS knows it
533 .branch - name of branch this changeset is on, or None
534 .branch - name of branch this changeset is on, or None
534 .comment - commit message
535 .comment - commit message
535 .commitid - CVS commitid or None
536 .commitid - CVS commitid or None
536 .date - the commit date as a (time,tz) tuple
537 .date - the commit date as a (time,tz) tuple
537 .entries - list of logentry objects in this changeset
538 .entries - list of logentry objects in this changeset
538 .parents - list of one or two parent changesets
539 .parents - list of one or two parent changesets
539 .tags - list of tags on this changeset
540 .tags - list of tags on this changeset
540 .synthetic - from synthetic revision "file ... added on branch ..."
541 .synthetic - from synthetic revision "file ... added on branch ..."
541 .mergepoint- the branch that has been merged from or None
542 .mergepoint- the branch that has been merged from or None
542 .branchpoints- the branches that start at the current entry or empty
543 .branchpoints- the branches that start at the current entry or empty
543 '''
544 '''
544 def __init__(self, **entries):
545 def __init__(self, **entries):
545 self.id = None
546 self.id = None
546 self.synthetic = False
547 self.synthetic = False
547 self.__dict__.update(entries)
548 self.__dict__.update(entries)
548
549
549 def __repr__(self):
550 def __repr__(self):
550 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
551 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
551 return "%s(%s)"%(type(self).__name__, ", ".join(items))
552 return "%s(%s)"%(type(self).__name__, ", ".join(items))
552
553
553 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
554 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
554 '''Convert log into changesets.'''
555 '''Convert log into changesets.'''
555
556
556 ui.status(_('creating changesets\n'))
557 ui.status(_('creating changesets\n'))
557
558
558 # try to order commitids by date
559 # try to order commitids by date
559 mindate = {}
560 mindate = {}
560 for e in log:
561 for e in log:
561 if e.commitid:
562 if e.commitid:
562 mindate[e.commitid] = min(e.date, mindate.get(e.commitid))
563 mindate[e.commitid] = min(e.date, mindate.get(e.commitid))
563
564
564 # Merge changesets
565 # Merge changesets
565 log.sort(key=lambda x: (mindate.get(x.commitid), x.commitid, x.comment,
566 log.sort(key=lambda x: (mindate.get(x.commitid), x.commitid, x.comment,
566 x.author, x.branch, x.date, x.branchpoints))
567 x.author, x.branch, x.date, x.branchpoints))
567
568
568 changesets = []
569 changesets = []
569 files = set()
570 files = set()
570 c = None
571 c = None
571 for i, e in enumerate(log):
572 for i, e in enumerate(log):
572
573
573 # Check if log entry belongs to the current changeset or not.
574 # Check if log entry belongs to the current changeset or not.
574
575
575 # Since CVS is file-centric, two different file revisions with
576 # Since CVS is file-centric, two different file revisions with
576 # different branchpoints should be treated as belonging to two
577 # different branchpoints should be treated as belonging to two
577 # different changesets (and the ordering is important and not
578 # different changesets (and the ordering is important and not
578 # honoured by cvsps at this point).
579 # honoured by cvsps at this point).
579 #
580 #
580 # Consider the following case:
581 # Consider the following case:
581 # foo 1.1 branchpoints: [MYBRANCH]
582 # foo 1.1 branchpoints: [MYBRANCH]
582 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
583 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
583 #
584 #
584 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
585 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
585 # later version of foo may be in MYBRANCH2, so foo should be the
586 # later version of foo may be in MYBRANCH2, so foo should be the
586 # first changeset and bar the next and MYBRANCH and MYBRANCH2
587 # first changeset and bar the next and MYBRANCH and MYBRANCH2
587 # should both start off of the bar changeset. No provisions are
588 # should both start off of the bar changeset. No provisions are
588 # made to ensure that this is, in fact, what happens.
589 # made to ensure that this is, in fact, what happens.
589 if not (c and e.branchpoints == c.branchpoints and
590 if not (c and e.branchpoints == c.branchpoints and
590 (# cvs commitids
591 (# cvs commitids
591 (e.commitid is not None and e.commitid == c.commitid) or
592 (e.commitid is not None and e.commitid == c.commitid) or
592 (# no commitids, use fuzzy commit detection
593 (# no commitids, use fuzzy commit detection
593 (e.commitid is None or c.commitid is None) and
594 (e.commitid is None or c.commitid is None) and
594 e.comment == c.comment and
595 e.comment == c.comment and
595 e.author == c.author and
596 e.author == c.author and
596 e.branch == c.branch and
597 e.branch == c.branch and
597 ((c.date[0] + c.date[1]) <=
598 ((c.date[0] + c.date[1]) <=
598 (e.date[0] + e.date[1]) <=
599 (e.date[0] + e.date[1]) <=
599 (c.date[0] + c.date[1]) + fuzz) and
600 (c.date[0] + c.date[1]) + fuzz) and
600 e.file not in files))):
601 e.file not in files))):
601 c = changeset(comment=e.comment, author=e.author,
602 c = changeset(comment=e.comment, author=e.author,
602 branch=e.branch, date=e.date,
603 branch=e.branch, date=e.date,
603 entries=[], mergepoint=e.mergepoint,
604 entries=[], mergepoint=e.mergepoint,
604 branchpoints=e.branchpoints, commitid=e.commitid)
605 branchpoints=e.branchpoints, commitid=e.commitid)
605 changesets.append(c)
606 changesets.append(c)
606
607
607 files = set()
608 files = set()
608 if len(changesets) % 100 == 0:
609 if len(changesets) % 100 == 0:
609 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
610 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
610 ui.status(util.ellipsis(t, 80) + '\n')
611 ui.status(util.ellipsis(t, 80) + '\n')
611
612
612 c.entries.append(e)
613 c.entries.append(e)
613 files.add(e.file)
614 files.add(e.file)
614 c.date = e.date # changeset date is date of latest commit in it
615 c.date = e.date # changeset date is date of latest commit in it
615
616
616 # Mark synthetic changesets
617 # Mark synthetic changesets
617
618
618 for c in changesets:
619 for c in changesets:
619 # Synthetic revisions always get their own changeset, because
620 # Synthetic revisions always get their own changeset, because
620 # the log message includes the filename. E.g. if you add file3
621 # the log message includes the filename. E.g. if you add file3
621 # and file4 on a branch, you get four log entries and three
622 # and file4 on a branch, you get four log entries and three
622 # changesets:
623 # changesets:
623 # "File file3 was added on branch ..." (synthetic, 1 entry)
624 # "File file3 was added on branch ..." (synthetic, 1 entry)
624 # "File file4 was added on branch ..." (synthetic, 1 entry)
625 # "File file4 was added on branch ..." (synthetic, 1 entry)
625 # "Add file3 and file4 to fix ..." (real, 2 entries)
626 # "Add file3 and file4 to fix ..." (real, 2 entries)
626 # Hence the check for 1 entry here.
627 # Hence the check for 1 entry here.
627 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
628 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
628
629
629 # Sort files in each changeset
630 # Sort files in each changeset
630
631
631 def entitycompare(l, r):
632 def entitycompare(l, r):
632 'Mimic cvsps sorting order'
633 'Mimic cvsps sorting order'
633 l = l.file.split('/')
634 l = l.file.split('/')
634 r = r.file.split('/')
635 r = r.file.split('/')
635 nl = len(l)
636 nl = len(l)
636 nr = len(r)
637 nr = len(r)
637 n = min(nl, nr)
638 n = min(nl, nr)
638 for i in range(n):
639 for i in range(n):
639 if i + 1 == nl and nl < nr:
640 if i + 1 == nl and nl < nr:
640 return -1
641 return -1
641 elif i + 1 == nr and nl > nr:
642 elif i + 1 == nr and nl > nr:
642 return +1
643 return +1
643 elif l[i] < r[i]:
644 elif l[i] < r[i]:
644 return -1
645 return -1
645 elif l[i] > r[i]:
646 elif l[i] > r[i]:
646 return +1
647 return +1
647 return 0
648 return 0
648
649
649 for c in changesets:
650 for c in changesets:
650 c.entries.sort(entitycompare)
651 c.entries.sort(entitycompare)
651
652
652 # Sort changesets by date
653 # Sort changesets by date
653
654
654 odd = set()
655 odd = set()
655 def cscmp(l, r):
656 def cscmp(l, r):
656 d = sum(l.date) - sum(r.date)
657 d = sum(l.date) - sum(r.date)
657 if d:
658 if d:
658 return d
659 return d
659
660
660 # detect vendor branches and initial commits on a branch
661 # detect vendor branches and initial commits on a branch
661 le = {}
662 le = {}
662 for e in l.entries:
663 for e in l.entries:
663 le[e.rcs] = e.revision
664 le[e.rcs] = e.revision
664 re = {}
665 re = {}
665 for e in r.entries:
666 for e in r.entries:
666 re[e.rcs] = e.revision
667 re[e.rcs] = e.revision
667
668
668 d = 0
669 d = 0
669 for e in l.entries:
670 for e in l.entries:
670 if re.get(e.rcs, None) == e.parent:
671 if re.get(e.rcs, None) == e.parent:
671 assert not d
672 assert not d
672 d = 1
673 d = 1
673 break
674 break
674
675
675 for e in r.entries:
676 for e in r.entries:
676 if le.get(e.rcs, None) == e.parent:
677 if le.get(e.rcs, None) == e.parent:
677 if d:
678 if d:
678 odd.add((l, r))
679 odd.add((l, r))
679 d = -1
680 d = -1
680 break
681 break
681 # By this point, the changesets are sufficiently compared that
682 # By this point, the changesets are sufficiently compared that
682 # we don't really care about ordering. However, this leaves
683 # we don't really care about ordering. However, this leaves
683 # some race conditions in the tests, so we compare on the
684 # some race conditions in the tests, so we compare on the
684 # number of files modified, the files contained in each
685 # number of files modified, the files contained in each
685 # changeset, and the branchpoints in the change to ensure test
686 # changeset, and the branchpoints in the change to ensure test
686 # output remains stable.
687 # output remains stable.
687
688
688 # recommended replacement for cmp from
689 # recommended replacement for cmp from
689 # https://docs.python.org/3.0/whatsnew/3.0.html
690 # https://docs.python.org/3.0/whatsnew/3.0.html
690 c = lambda x, y: (x > y) - (x < y)
691 c = lambda x, y: (x > y) - (x < y)
691 # Sort bigger changes first.
692 # Sort bigger changes first.
692 if not d:
693 if not d:
693 d = c(len(l.entries), len(r.entries))
694 d = c(len(l.entries), len(r.entries))
694 # Try sorting by filename in the change.
695 # Try sorting by filename in the change.
695 if not d:
696 if not d:
696 d = c([e.file for e in l.entries], [e.file for e in r.entries])
697 d = c([e.file for e in l.entries], [e.file for e in r.entries])
697 # Try and put changes without a branch point before ones with
698 # Try and put changes without a branch point before ones with
698 # a branch point.
699 # a branch point.
699 if not d:
700 if not d:
700 d = c(len(l.branchpoints), len(r.branchpoints))
701 d = c(len(l.branchpoints), len(r.branchpoints))
701 return d
702 return d
702
703
703 changesets.sort(cscmp)
704 changesets.sort(cscmp)
704
705
705 # Collect tags
706 # Collect tags
706
707
707 globaltags = {}
708 globaltags = {}
708 for c in changesets:
709 for c in changesets:
709 for e in c.entries:
710 for e in c.entries:
710 for tag in e.tags:
711 for tag in e.tags:
711 # remember which is the latest changeset to have this tag
712 # remember which is the latest changeset to have this tag
712 globaltags[tag] = c
713 globaltags[tag] = c
713
714
714 for c in changesets:
715 for c in changesets:
715 tags = set()
716 tags = set()
716 for e in c.entries:
717 for e in c.entries:
717 tags.update(e.tags)
718 tags.update(e.tags)
718 # remember tags only if this is the latest changeset to have it
719 # remember tags only if this is the latest changeset to have it
719 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
720 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
720
721
721 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
722 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
722 # by inserting dummy changesets with two parents, and handle
723 # by inserting dummy changesets with two parents, and handle
723 # {{mergefrombranch BRANCHNAME}} by setting two parents.
724 # {{mergefrombranch BRANCHNAME}} by setting two parents.
724
725
725 if mergeto is None:
726 if mergeto is None:
726 mergeto = r'{{mergetobranch ([-\w]+)}}'
727 mergeto = r'{{mergetobranch ([-\w]+)}}'
727 if mergeto:
728 if mergeto:
728 mergeto = re.compile(mergeto)
729 mergeto = re.compile(mergeto)
729
730
730 if mergefrom is None:
731 if mergefrom is None:
731 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
732 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
732 if mergefrom:
733 if mergefrom:
733 mergefrom = re.compile(mergefrom)
734 mergefrom = re.compile(mergefrom)
734
735
735 versions = {} # changeset index where we saw any particular file version
736 versions = {} # changeset index where we saw any particular file version
736 branches = {} # changeset index where we saw a branch
737 branches = {} # changeset index where we saw a branch
737 n = len(changesets)
738 n = len(changesets)
738 i = 0
739 i = 0
739 while i < n:
740 while i < n:
740 c = changesets[i]
741 c = changesets[i]
741
742
742 for f in c.entries:
743 for f in c.entries:
743 versions[(f.rcs, f.revision)] = i
744 versions[(f.rcs, f.revision)] = i
744
745
745 p = None
746 p = None
746 if c.branch in branches:
747 if c.branch in branches:
747 p = branches[c.branch]
748 p = branches[c.branch]
748 else:
749 else:
749 # first changeset on a new branch
750 # first changeset on a new branch
750 # the parent is a changeset with the branch in its
751 # the parent is a changeset with the branch in its
751 # branchpoints such that it is the latest possible
752 # branchpoints such that it is the latest possible
752 # commit without any intervening, unrelated commits.
753 # commit without any intervening, unrelated commits.
753
754
754 for candidate in xrange(i):
755 for candidate in xrange(i):
755 if c.branch not in changesets[candidate].branchpoints:
756 if c.branch not in changesets[candidate].branchpoints:
756 if p is not None:
757 if p is not None:
757 break
758 break
758 continue
759 continue
759 p = candidate
760 p = candidate
760
761
761 c.parents = []
762 c.parents = []
762 if p is not None:
763 if p is not None:
763 p = changesets[p]
764 p = changesets[p]
764
765
765 # Ensure no changeset has a synthetic changeset as a parent.
766 # Ensure no changeset has a synthetic changeset as a parent.
766 while p.synthetic:
767 while p.synthetic:
767 assert len(p.parents) <= 1, \
768 assert len(p.parents) <= 1, \
768 _('synthetic changeset cannot have multiple parents')
769 _('synthetic changeset cannot have multiple parents')
769 if p.parents:
770 if p.parents:
770 p = p.parents[0]
771 p = p.parents[0]
771 else:
772 else:
772 p = None
773 p = None
773 break
774 break
774
775
775 if p is not None:
776 if p is not None:
776 c.parents.append(p)
777 c.parents.append(p)
777
778
778 if c.mergepoint:
779 if c.mergepoint:
779 if c.mergepoint == 'HEAD':
780 if c.mergepoint == 'HEAD':
780 c.mergepoint = None
781 c.mergepoint = None
781 c.parents.append(changesets[branches[c.mergepoint]])
782 c.parents.append(changesets[branches[c.mergepoint]])
782
783
783 if mergefrom:
784 if mergefrom:
784 m = mergefrom.search(c.comment)
785 m = mergefrom.search(c.comment)
785 if m:
786 if m:
786 m = m.group(1)
787 m = m.group(1)
787 if m == 'HEAD':
788 if m == 'HEAD':
788 m = None
789 m = None
789 try:
790 try:
790 candidate = changesets[branches[m]]
791 candidate = changesets[branches[m]]
791 except KeyError:
792 except KeyError:
792 ui.warn(_("warning: CVS commit message references "
793 ui.warn(_("warning: CVS commit message references "
793 "non-existent branch %r:\n%s\n")
794 "non-existent branch %r:\n%s\n")
794 % (m, c.comment))
795 % (m, c.comment))
795 if m in branches and c.branch != m and not candidate.synthetic:
796 if m in branches and c.branch != m and not candidate.synthetic:
796 c.parents.append(candidate)
797 c.parents.append(candidate)
797
798
798 if mergeto:
799 if mergeto:
799 m = mergeto.search(c.comment)
800 m = mergeto.search(c.comment)
800 if m:
801 if m:
801 if m.groups():
802 if m.groups():
802 m = m.group(1)
803 m = m.group(1)
803 if m == 'HEAD':
804 if m == 'HEAD':
804 m = None
805 m = None
805 else:
806 else:
806 m = None # if no group found then merge to HEAD
807 m = None # if no group found then merge to HEAD
807 if m in branches and c.branch != m:
808 if m in branches and c.branch != m:
808 # insert empty changeset for merge
809 # insert empty changeset for merge
809 cc = changeset(
810 cc = changeset(
810 author=c.author, branch=m, date=c.date,
811 author=c.author, branch=m, date=c.date,
811 comment='convert-repo: CVS merge from branch %s'
812 comment='convert-repo: CVS merge from branch %s'
812 % c.branch,
813 % c.branch,
813 entries=[], tags=[],
814 entries=[], tags=[],
814 parents=[changesets[branches[m]], c])
815 parents=[changesets[branches[m]], c])
815 changesets.insert(i + 1, cc)
816 changesets.insert(i + 1, cc)
816 branches[m] = i + 1
817 branches[m] = i + 1
817
818
818 # adjust our loop counters now we have inserted a new entry
819 # adjust our loop counters now we have inserted a new entry
819 n += 1
820 n += 1
820 i += 2
821 i += 2
821 continue
822 continue
822
823
823 branches[c.branch] = i
824 branches[c.branch] = i
824 i += 1
825 i += 1
825
826
826 # Drop synthetic changesets (safe now that we have ensured no other
827 # Drop synthetic changesets (safe now that we have ensured no other
827 # changesets can have them as parents).
828 # changesets can have them as parents).
828 i = 0
829 i = 0
829 while i < len(changesets):
830 while i < len(changesets):
830 if changesets[i].synthetic:
831 if changesets[i].synthetic:
831 del changesets[i]
832 del changesets[i]
832 else:
833 else:
833 i += 1
834 i += 1
834
835
835 # Number changesets
836 # Number changesets
836
837
837 for i, c in enumerate(changesets):
838 for i, c in enumerate(changesets):
838 c.id = i + 1
839 c.id = i + 1
839
840
840 if odd:
841 if odd:
841 for l, r in odd:
842 for l, r in odd:
842 if l.id is not None and r.id is not None:
843 if l.id is not None and r.id is not None:
843 ui.warn(_('changeset %d is both before and after %d\n')
844 ui.warn(_('changeset %d is both before and after %d\n')
844 % (l.id, r.id))
845 % (l.id, r.id))
845
846
846 ui.status(_('%d changeset entries\n') % len(changesets))
847 ui.status(_('%d changeset entries\n') % len(changesets))
847
848
848 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
849 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
849
850
850 return changesets
851 return changesets
851
852
852
853
853 def debugcvsps(ui, *args, **opts):
854 def debugcvsps(ui, *args, **opts):
854 '''Read CVS rlog for current directory or named path in
855 '''Read CVS rlog for current directory or named path in
855 repository, and convert the log to changesets based on matching
856 repository, and convert the log to changesets based on matching
856 commit log entries and dates.
857 commit log entries and dates.
857 '''
858 '''
858 opts = pycompat.byteskwargs(opts)
859 opts = pycompat.byteskwargs(opts)
859 if opts["new_cache"]:
860 if opts["new_cache"]:
860 cache = "write"
861 cache = "write"
861 elif opts["update_cache"]:
862 elif opts["update_cache"]:
862 cache = "update"
863 cache = "update"
863 else:
864 else:
864 cache = None
865 cache = None
865
866
866 revisions = opts["revisions"]
867 revisions = opts["revisions"]
867
868
868 try:
869 try:
869 if args:
870 if args:
870 log = []
871 log = []
871 for d in args:
872 for d in args:
872 log += createlog(ui, d, root=opts["root"], cache=cache)
873 log += createlog(ui, d, root=opts["root"], cache=cache)
873 else:
874 else:
874 log = createlog(ui, root=opts["root"], cache=cache)
875 log = createlog(ui, root=opts["root"], cache=cache)
875 except logerror as e:
876 except logerror as e:
876 ui.write("%r\n"%e)
877 ui.write("%r\n"%e)
877 return
878 return
878
879
879 changesets = createchangeset(ui, log, opts["fuzz"])
880 changesets = createchangeset(ui, log, opts["fuzz"])
880 del log
881 del log
881
882
882 # Print changesets (optionally filtered)
883 # Print changesets (optionally filtered)
883
884
884 off = len(revisions)
885 off = len(revisions)
885 branches = {} # latest version number in each branch
886 branches = {} # latest version number in each branch
886 ancestors = {} # parent branch
887 ancestors = {} # parent branch
887 for cs in changesets:
888 for cs in changesets:
888
889
889 if opts["ancestors"]:
890 if opts["ancestors"]:
890 if cs.branch not in branches and cs.parents and cs.parents[0].id:
891 if cs.branch not in branches and cs.parents and cs.parents[0].id:
891 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
892 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
892 cs.parents[0].id)
893 cs.parents[0].id)
893 branches[cs.branch] = cs.id
894 branches[cs.branch] = cs.id
894
895
895 # limit by branches
896 # limit by branches
896 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
897 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
897 continue
898 continue
898
899
899 if not off:
900 if not off:
900 # Note: trailing spaces on several lines here are needed to have
901 # Note: trailing spaces on several lines here are needed to have
901 # bug-for-bug compatibility with cvsps.
902 # bug-for-bug compatibility with cvsps.
902 ui.write('---------------------\n')
903 ui.write('---------------------\n')
903 ui.write(('PatchSet %d \n' % cs.id))
904 ui.write(('PatchSet %d \n' % cs.id))
904 ui.write(('Date: %s\n' % util.datestr(cs.date,
905 ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
905 '%Y/%m/%d %H:%M:%S %1%2')))
906 '%Y/%m/%d %H:%M:%S %1%2')))
906 ui.write(('Author: %s\n' % cs.author))
907 ui.write(('Author: %s\n' % cs.author))
907 ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
908 ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
908 ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
909 ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
909 ','.join(cs.tags) or '(none)')))
910 ','.join(cs.tags) or '(none)')))
910 if cs.branchpoints:
911 if cs.branchpoints:
911 ui.write(('Branchpoints: %s \n') %
912 ui.write(('Branchpoints: %s \n') %
912 ', '.join(sorted(cs.branchpoints)))
913 ', '.join(sorted(cs.branchpoints)))
913 if opts["parents"] and cs.parents:
914 if opts["parents"] and cs.parents:
914 if len(cs.parents) > 1:
915 if len(cs.parents) > 1:
915 ui.write(('Parents: %s\n' %
916 ui.write(('Parents: %s\n' %
916 (','.join([str(p.id) for p in cs.parents]))))
917 (','.join([str(p.id) for p in cs.parents]))))
917 else:
918 else:
918 ui.write(('Parent: %d\n' % cs.parents[0].id))
919 ui.write(('Parent: %d\n' % cs.parents[0].id))
919
920
920 if opts["ancestors"]:
921 if opts["ancestors"]:
921 b = cs.branch
922 b = cs.branch
922 r = []
923 r = []
923 while b:
924 while b:
924 b, c = ancestors[b]
925 b, c = ancestors[b]
925 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
926 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
926 if r:
927 if r:
927 ui.write(('Ancestors: %s\n' % (','.join(r))))
928 ui.write(('Ancestors: %s\n' % (','.join(r))))
928
929
929 ui.write(('Log:\n'))
930 ui.write(('Log:\n'))
930 ui.write('%s\n\n' % cs.comment)
931 ui.write('%s\n\n' % cs.comment)
931 ui.write(('Members: \n'))
932 ui.write(('Members: \n'))
932 for f in cs.entries:
933 for f in cs.entries:
933 fn = f.file
934 fn = f.file
934 if fn.startswith(opts["prefix"]):
935 if fn.startswith(opts["prefix"]):
935 fn = fn[len(opts["prefix"]):]
936 fn = fn[len(opts["prefix"]):]
936 ui.write('\t%s:%s->%s%s \n' % (
937 ui.write('\t%s:%s->%s%s \n' % (
937 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
938 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
938 '.'.join([str(x) for x in f.revision]),
939 '.'.join([str(x) for x in f.revision]),
939 ['', '(DEAD)'][f.dead]))
940 ['', '(DEAD)'][f.dead]))
940 ui.write('\n')
941 ui.write('\n')
941
942
942 # have we seen the start tag?
943 # have we seen the start tag?
943 if revisions and off:
944 if revisions and off:
944 if revisions[0] == str(cs.id) or \
945 if revisions[0] == str(cs.id) or \
945 revisions[0] in cs.tags:
946 revisions[0] in cs.tags:
946 off = False
947 off = False
947
948
948 # see if we reached the end tag
949 # see if we reached the end tag
949 if len(revisions) > 1 and not off:
950 if len(revisions) > 1 and not off:
950 if revisions[1] == str(cs.id) or \
951 if revisions[1] == str(cs.id) or \
951 revisions[1] in cs.tags:
952 revisions[1] in cs.tags:
952 break
953 break
@@ -1,221 +1,224 b''
1 # darcs.py - darcs support for the convert extension
1 # darcs.py - darcs support for the convert extension
2 #
2 #
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import errno
9 import errno
10 import os
10 import os
11 import re
11 import re
12 import shutil
12 import shutil
13 import tempfile
13 import tempfile
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 error,
16 error,
17 util,
17 util,
18 )
18 )
19 from mercurial.utils import dateutil
19 from . import common
20 from . import common
20 NoRepo = common.NoRepo
21 NoRepo = common.NoRepo
21
22
22 # The naming drift of ElementTree is fun!
23 # The naming drift of ElementTree is fun!
23
24
24 try:
25 try:
25 import xml.etree.cElementTree.ElementTree as ElementTree
26 import xml.etree.cElementTree.ElementTree as ElementTree
26 import xml.etree.cElementTree.XMLParser as XMLParser
27 import xml.etree.cElementTree.XMLParser as XMLParser
27 except ImportError:
28 except ImportError:
28 try:
29 try:
29 import xml.etree.ElementTree.ElementTree as ElementTree
30 import xml.etree.ElementTree.ElementTree as ElementTree
30 import xml.etree.ElementTree.XMLParser as XMLParser
31 import xml.etree.ElementTree.XMLParser as XMLParser
31 except ImportError:
32 except ImportError:
32 try:
33 try:
33 import elementtree.cElementTree.ElementTree as ElementTree
34 import elementtree.cElementTree.ElementTree as ElementTree
34 import elementtree.cElementTree.XMLParser as XMLParser
35 import elementtree.cElementTree.XMLParser as XMLParser
35 except ImportError:
36 except ImportError:
36 try:
37 try:
37 import elementtree.ElementTree.ElementTree as ElementTree
38 import elementtree.ElementTree.ElementTree as ElementTree
38 import elementtree.ElementTree.XMLParser as XMLParser
39 import elementtree.ElementTree.XMLParser as XMLParser
39 except ImportError:
40 except ImportError:
40 pass
41 pass
41
42
42 class darcs_source(common.converter_source, common.commandline):
43 class darcs_source(common.converter_source, common.commandline):
43 def __init__(self, ui, repotype, path, revs=None):
44 def __init__(self, ui, repotype, path, revs=None):
44 common.converter_source.__init__(self, ui, repotype, path, revs=revs)
45 common.converter_source.__init__(self, ui, repotype, path, revs=revs)
45 common.commandline.__init__(self, ui, 'darcs')
46 common.commandline.__init__(self, ui, 'darcs')
46
47
47 # check for _darcs, ElementTree so that we can easily skip
48 # check for _darcs, ElementTree so that we can easily skip
48 # test-convert-darcs if ElementTree is not around
49 # test-convert-darcs if ElementTree is not around
49 if not os.path.exists(os.path.join(path, '_darcs')):
50 if not os.path.exists(os.path.join(path, '_darcs')):
50 raise NoRepo(_("%s does not look like a darcs repository") % path)
51 raise NoRepo(_("%s does not look like a darcs repository") % path)
51
52
52 common.checktool('darcs')
53 common.checktool('darcs')
53 version = self.run0('--version').splitlines()[0].strip()
54 version = self.run0('--version').splitlines()[0].strip()
54 if version < '2.1':
55 if version < '2.1':
55 raise error.Abort(_('darcs version 2.1 or newer needed (found %r)')
56 raise error.Abort(_('darcs version 2.1 or newer needed (found %r)')
56 % version)
57 % version)
57
58
58 if "ElementTree" not in globals():
59 if "ElementTree" not in globals():
59 raise error.Abort(_("Python ElementTree module is not available"))
60 raise error.Abort(_("Python ElementTree module is not available"))
60
61
61 self.path = os.path.realpath(path)
62 self.path = os.path.realpath(path)
62
63
63 self.lastrev = None
64 self.lastrev = None
64 self.changes = {}
65 self.changes = {}
65 self.parents = {}
66 self.parents = {}
66 self.tags = {}
67 self.tags = {}
67
68
68 # Check darcs repository format
69 # Check darcs repository format
69 format = self.format()
70 format = self.format()
70 if format:
71 if format:
71 if format in ('darcs-1.0', 'hashed'):
72 if format in ('darcs-1.0', 'hashed'):
72 raise NoRepo(_("%s repository format is unsupported, "
73 raise NoRepo(_("%s repository format is unsupported, "
73 "please upgrade") % format)
74 "please upgrade") % format)
74 else:
75 else:
75 self.ui.warn(_('failed to detect repository format!'))
76 self.ui.warn(_('failed to detect repository format!'))
76
77
77 def before(self):
78 def before(self):
78 self.tmppath = tempfile.mkdtemp(
79 self.tmppath = tempfile.mkdtemp(
79 prefix='convert-' + os.path.basename(self.path) + '-')
80 prefix='convert-' + os.path.basename(self.path) + '-')
80 output, status = self.run('init', repodir=self.tmppath)
81 output, status = self.run('init', repodir=self.tmppath)
81 self.checkexit(status)
82 self.checkexit(status)
82
83
83 tree = self.xml('changes', xml_output=True, summary=True,
84 tree = self.xml('changes', xml_output=True, summary=True,
84 repodir=self.path)
85 repodir=self.path)
85 tagname = None
86 tagname = None
86 child = None
87 child = None
87 for elt in tree.findall('patch'):
88 for elt in tree.findall('patch'):
88 node = elt.get('hash')
89 node = elt.get('hash')
89 name = elt.findtext('name', '')
90 name = elt.findtext('name', '')
90 if name.startswith('TAG '):
91 if name.startswith('TAG '):
91 tagname = name[4:].strip()
92 tagname = name[4:].strip()
92 elif tagname is not None:
93 elif tagname is not None:
93 self.tags[tagname] = node
94 self.tags[tagname] = node
94 tagname = None
95 tagname = None
95 self.changes[node] = elt
96 self.changes[node] = elt
96 self.parents[child] = [node]
97 self.parents[child] = [node]
97 child = node
98 child = node
98 self.parents[child] = []
99 self.parents[child] = []
99
100
100 def after(self):
101 def after(self):
101 self.ui.debug('cleaning up %s\n' % self.tmppath)
102 self.ui.debug('cleaning up %s\n' % self.tmppath)
102 shutil.rmtree(self.tmppath, ignore_errors=True)
103 shutil.rmtree(self.tmppath, ignore_errors=True)
103
104
104 def recode(self, s, encoding=None):
105 def recode(self, s, encoding=None):
105 if isinstance(s, unicode):
106 if isinstance(s, unicode):
106 # XMLParser returns unicode objects for anything it can't
107 # XMLParser returns unicode objects for anything it can't
107 # encode into ASCII. We convert them back to str to get
108 # encode into ASCII. We convert them back to str to get
108 # recode's normal conversion behavior.
109 # recode's normal conversion behavior.
109 s = s.encode('latin-1')
110 s = s.encode('latin-1')
110 return super(darcs_source, self).recode(s, encoding)
111 return super(darcs_source, self).recode(s, encoding)
111
112
112 def xml(self, cmd, **kwargs):
113 def xml(self, cmd, **kwargs):
113 # NOTE: darcs is currently encoding agnostic and will print
114 # NOTE: darcs is currently encoding agnostic and will print
114 # patch metadata byte-for-byte, even in the XML changelog.
115 # patch metadata byte-for-byte, even in the XML changelog.
115 etree = ElementTree()
116 etree = ElementTree()
116 # While we are decoding the XML as latin-1 to be as liberal as
117 # While we are decoding the XML as latin-1 to be as liberal as
117 # possible, etree will still raise an exception if any
118 # possible, etree will still raise an exception if any
118 # non-printable characters are in the XML changelog.
119 # non-printable characters are in the XML changelog.
119 parser = XMLParser(encoding='latin-1')
120 parser = XMLParser(encoding='latin-1')
120 p = self._run(cmd, **kwargs)
121 p = self._run(cmd, **kwargs)
121 etree.parse(p.stdout, parser=parser)
122 etree.parse(p.stdout, parser=parser)
122 p.wait()
123 p.wait()
123 self.checkexit(p.returncode)
124 self.checkexit(p.returncode)
124 return etree.getroot()
125 return etree.getroot()
125
126
126 def format(self):
127 def format(self):
127 output, status = self.run('show', 'repo', no_files=True,
128 output, status = self.run('show', 'repo', no_files=True,
128 repodir=self.path)
129 repodir=self.path)
129 self.checkexit(status)
130 self.checkexit(status)
130 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
131 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
131 if not m:
132 if not m:
132 return None
133 return None
133 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
134 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
134
135
135 def manifest(self):
136 def manifest(self):
136 man = []
137 man = []
137 output, status = self.run('show', 'files', no_directories=True,
138 output, status = self.run('show', 'files', no_directories=True,
138 repodir=self.tmppath)
139 repodir=self.tmppath)
139 self.checkexit(status)
140 self.checkexit(status)
140 for line in output.split('\n'):
141 for line in output.split('\n'):
141 path = line[2:]
142 path = line[2:]
142 if path:
143 if path:
143 man.append(path)
144 man.append(path)
144 return man
145 return man
145
146
146 def getheads(self):
147 def getheads(self):
147 return self.parents[None]
148 return self.parents[None]
148
149
149 def getcommit(self, rev):
150 def getcommit(self, rev):
150 elt = self.changes[rev]
151 elt = self.changes[rev]
151 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
152 dateformat = '%a %b %d %H:%M:%S %Z %Y'
153 date = dateutil.strdate(elt.get('local_date'), dateformat)
152 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
154 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
153 # etree can return unicode objects for name, comment, and author,
155 # etree can return unicode objects for name, comment, and author,
154 # so recode() is used to ensure str objects are emitted.
156 # so recode() is used to ensure str objects are emitted.
157 newdateformat = '%Y-%m-%d %H:%M:%S %1%2'
155 return common.commit(author=self.recode(elt.get('author')),
158 return common.commit(author=self.recode(elt.get('author')),
156 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
159 date=dateutil.datestr(date, newdateformat),
157 desc=self.recode(desc).strip(),
160 desc=self.recode(desc).strip(),
158 parents=self.parents[rev])
161 parents=self.parents[rev])
159
162
160 def pull(self, rev):
163 def pull(self, rev):
161 output, status = self.run('pull', self.path, all=True,
164 output, status = self.run('pull', self.path, all=True,
162 match='hash %s' % rev,
165 match='hash %s' % rev,
163 no_test=True, no_posthook=True,
166 no_test=True, no_posthook=True,
164 external_merge='/bin/false',
167 external_merge='/bin/false',
165 repodir=self.tmppath)
168 repodir=self.tmppath)
166 if status:
169 if status:
167 if output.find('We have conflicts in') == -1:
170 if output.find('We have conflicts in') == -1:
168 self.checkexit(status, output)
171 self.checkexit(status, output)
169 output, status = self.run('revert', all=True, repodir=self.tmppath)
172 output, status = self.run('revert', all=True, repodir=self.tmppath)
170 self.checkexit(status, output)
173 self.checkexit(status, output)
171
174
172 def getchanges(self, rev, full):
175 def getchanges(self, rev, full):
173 if full:
176 if full:
174 raise error.Abort(_("convert from darcs does not support --full"))
177 raise error.Abort(_("convert from darcs does not support --full"))
175 copies = {}
178 copies = {}
176 changes = []
179 changes = []
177 man = None
180 man = None
178 for elt in self.changes[rev].find('summary').getchildren():
181 for elt in self.changes[rev].find('summary').getchildren():
179 if elt.tag in ('add_directory', 'remove_directory'):
182 if elt.tag in ('add_directory', 'remove_directory'):
180 continue
183 continue
181 if elt.tag == 'move':
184 if elt.tag == 'move':
182 if man is None:
185 if man is None:
183 man = self.manifest()
186 man = self.manifest()
184 source, dest = elt.get('from'), elt.get('to')
187 source, dest = elt.get('from'), elt.get('to')
185 if source in man:
188 if source in man:
186 # File move
189 # File move
187 changes.append((source, rev))
190 changes.append((source, rev))
188 changes.append((dest, rev))
191 changes.append((dest, rev))
189 copies[dest] = source
192 copies[dest] = source
190 else:
193 else:
191 # Directory move, deduce file moves from manifest
194 # Directory move, deduce file moves from manifest
192 source = source + '/'
195 source = source + '/'
193 for f in man:
196 for f in man:
194 if not f.startswith(source):
197 if not f.startswith(source):
195 continue
198 continue
196 fdest = dest + '/' + f[len(source):]
199 fdest = dest + '/' + f[len(source):]
197 changes.append((f, rev))
200 changes.append((f, rev))
198 changes.append((fdest, rev))
201 changes.append((fdest, rev))
199 copies[fdest] = f
202 copies[fdest] = f
200 else:
203 else:
201 changes.append((elt.text.strip(), rev))
204 changes.append((elt.text.strip(), rev))
202 self.pull(rev)
205 self.pull(rev)
203 self.lastrev = rev
206 self.lastrev = rev
204 return sorted(changes), copies, set()
207 return sorted(changes), copies, set()
205
208
206 def getfile(self, name, rev):
209 def getfile(self, name, rev):
207 if rev != self.lastrev:
210 if rev != self.lastrev:
208 raise error.Abort(_('internal calling inconsistency'))
211 raise error.Abort(_('internal calling inconsistency'))
209 path = os.path.join(self.tmppath, name)
212 path = os.path.join(self.tmppath, name)
210 try:
213 try:
211 data = util.readfile(path)
214 data = util.readfile(path)
212 mode = os.lstat(path).st_mode
215 mode = os.lstat(path).st_mode
213 except IOError as inst:
216 except IOError as inst:
214 if inst.errno == errno.ENOENT:
217 if inst.errno == errno.ENOENT:
215 return None, None
218 return None, None
216 raise
219 raise
217 mode = (mode & 0o111) and 'x' or ''
220 mode = (mode & 0o111) and 'x' or ''
218 return data, mode
221 return data, mode
219
222
220 def gettags(self):
223 def gettags(self):
221 return self.tags
224 return self.tags
@@ -1,352 +1,353 b''
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import email.parser as emailparser
10 import email.parser as emailparser
11 import os
11 import os
12 import shutil
12 import shutil
13 import stat
13 import stat
14 import tempfile
14 import tempfile
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial import (
17 from mercurial import (
18 encoding,
18 encoding,
19 error,
19 error,
20 util,
20 util,
21 )
21 )
22 from mercurial.utils import dateutil
22 from . import common
23 from . import common
23
24
24 class gnuarch_source(common.converter_source, common.commandline):
25 class gnuarch_source(common.converter_source, common.commandline):
25
26
26 class gnuarch_rev(object):
27 class gnuarch_rev(object):
27 def __init__(self, rev):
28 def __init__(self, rev):
28 self.rev = rev
29 self.rev = rev
29 self.summary = ''
30 self.summary = ''
30 self.date = None
31 self.date = None
31 self.author = ''
32 self.author = ''
32 self.continuationof = None
33 self.continuationof = None
33 self.add_files = []
34 self.add_files = []
34 self.mod_files = []
35 self.mod_files = []
35 self.del_files = []
36 self.del_files = []
36 self.ren_files = {}
37 self.ren_files = {}
37 self.ren_dirs = {}
38 self.ren_dirs = {}
38
39
39 def __init__(self, ui, repotype, path, revs=None):
40 def __init__(self, ui, repotype, path, revs=None):
40 super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
41 super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
41
42
42 if not os.path.exists(os.path.join(path, '{arch}')):
43 if not os.path.exists(os.path.join(path, '{arch}')):
43 raise common.NoRepo(_("%s does not look like a GNU Arch repository")
44 raise common.NoRepo(_("%s does not look like a GNU Arch repository")
44 % path)
45 % path)
45
46
46 # Could use checktool, but we want to check for baz or tla.
47 # Could use checktool, but we want to check for baz or tla.
47 self.execmd = None
48 self.execmd = None
48 if util.findexe('baz'):
49 if util.findexe('baz'):
49 self.execmd = 'baz'
50 self.execmd = 'baz'
50 else:
51 else:
51 if util.findexe('tla'):
52 if util.findexe('tla'):
52 self.execmd = 'tla'
53 self.execmd = 'tla'
53 else:
54 else:
54 raise error.Abort(_('cannot find a GNU Arch tool'))
55 raise error.Abort(_('cannot find a GNU Arch tool'))
55
56
56 common.commandline.__init__(self, ui, self.execmd)
57 common.commandline.__init__(self, ui, self.execmd)
57
58
58 self.path = os.path.realpath(path)
59 self.path = os.path.realpath(path)
59 self.tmppath = None
60 self.tmppath = None
60
61
61 self.treeversion = None
62 self.treeversion = None
62 self.lastrev = None
63 self.lastrev = None
63 self.changes = {}
64 self.changes = {}
64 self.parents = {}
65 self.parents = {}
65 self.tags = {}
66 self.tags = {}
66 self.catlogparser = emailparser.Parser()
67 self.catlogparser = emailparser.Parser()
67 self.encoding = encoding.encoding
68 self.encoding = encoding.encoding
68 self.archives = []
69 self.archives = []
69
70
70 def before(self):
71 def before(self):
71 # Get registered archives
72 # Get registered archives
72 self.archives = [i.rstrip('\n')
73 self.archives = [i.rstrip('\n')
73 for i in self.runlines0('archives', '-n')]
74 for i in self.runlines0('archives', '-n')]
74
75
75 if self.execmd == 'tla':
76 if self.execmd == 'tla':
76 output = self.run0('tree-version', self.path)
77 output = self.run0('tree-version', self.path)
77 else:
78 else:
78 output = self.run0('tree-version', '-d', self.path)
79 output = self.run0('tree-version', '-d', self.path)
79 self.treeversion = output.strip()
80 self.treeversion = output.strip()
80
81
81 # Get name of temporary directory
82 # Get name of temporary directory
82 version = self.treeversion.split('/')
83 version = self.treeversion.split('/')
83 self.tmppath = os.path.join(tempfile.gettempdir(),
84 self.tmppath = os.path.join(tempfile.gettempdir(),
84 'hg-%s' % version[1])
85 'hg-%s' % version[1])
85
86
86 # Generate parents dictionary
87 # Generate parents dictionary
87 self.parents[None] = []
88 self.parents[None] = []
88 treeversion = self.treeversion
89 treeversion = self.treeversion
89 child = None
90 child = None
90 while treeversion:
91 while treeversion:
91 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
92 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
92
93
93 archive = treeversion.split('/')[0]
94 archive = treeversion.split('/')[0]
94 if archive not in self.archives:
95 if archive not in self.archives:
95 self.ui.status(_('tree analysis stopped because it points to '
96 self.ui.status(_('tree analysis stopped because it points to '
96 'an unregistered archive %s...\n') % archive)
97 'an unregistered archive %s...\n') % archive)
97 break
98 break
98
99
99 # Get the complete list of revisions for that tree version
100 # Get the complete list of revisions for that tree version
100 output, status = self.runlines('revisions', '-r', '-f', treeversion)
101 output, status = self.runlines('revisions', '-r', '-f', treeversion)
101 self.checkexit(status, 'failed retrieving revisions for %s'
102 self.checkexit(status, 'failed retrieving revisions for %s'
102 % treeversion)
103 % treeversion)
103
104
104 # No new iteration unless a revision has a continuation-of header
105 # No new iteration unless a revision has a continuation-of header
105 treeversion = None
106 treeversion = None
106
107
107 for l in output:
108 for l in output:
108 rev = l.strip()
109 rev = l.strip()
109 self.changes[rev] = self.gnuarch_rev(rev)
110 self.changes[rev] = self.gnuarch_rev(rev)
110 self.parents[rev] = []
111 self.parents[rev] = []
111
112
112 # Read author, date and summary
113 # Read author, date and summary
113 catlog, status = self.run('cat-log', '-d', self.path, rev)
114 catlog, status = self.run('cat-log', '-d', self.path, rev)
114 if status:
115 if status:
115 catlog = self.run0('cat-archive-log', rev)
116 catlog = self.run0('cat-archive-log', rev)
116 self._parsecatlog(catlog, rev)
117 self._parsecatlog(catlog, rev)
117
118
118 # Populate the parents map
119 # Populate the parents map
119 self.parents[child].append(rev)
120 self.parents[child].append(rev)
120
121
121 # Keep track of the current revision as the child of the next
122 # Keep track of the current revision as the child of the next
122 # revision scanned
123 # revision scanned
123 child = rev
124 child = rev
124
125
125 # Check if we have to follow the usual incremental history
126 # Check if we have to follow the usual incremental history
126 # or if we have to 'jump' to a different treeversion given
127 # or if we have to 'jump' to a different treeversion given
127 # by the continuation-of header.
128 # by the continuation-of header.
128 if self.changes[rev].continuationof:
129 if self.changes[rev].continuationof:
129 treeversion = '--'.join(
130 treeversion = '--'.join(
130 self.changes[rev].continuationof.split('--')[:-1])
131 self.changes[rev].continuationof.split('--')[:-1])
131 break
132 break
132
133
133 # If we reached a base-0 revision w/o any continuation-of
134 # If we reached a base-0 revision w/o any continuation-of
134 # header, it means the tree history ends here.
135 # header, it means the tree history ends here.
135 if rev[-6:] == 'base-0':
136 if rev[-6:] == 'base-0':
136 break
137 break
137
138
138 def after(self):
139 def after(self):
139 self.ui.debug('cleaning up %s\n' % self.tmppath)
140 self.ui.debug('cleaning up %s\n' % self.tmppath)
140 shutil.rmtree(self.tmppath, ignore_errors=True)
141 shutil.rmtree(self.tmppath, ignore_errors=True)
141
142
142 def getheads(self):
143 def getheads(self):
143 return self.parents[None]
144 return self.parents[None]
144
145
145 def getfile(self, name, rev):
146 def getfile(self, name, rev):
146 if rev != self.lastrev:
147 if rev != self.lastrev:
147 raise error.Abort(_('internal calling inconsistency'))
148 raise error.Abort(_('internal calling inconsistency'))
148
149
149 if not os.path.lexists(os.path.join(self.tmppath, name)):
150 if not os.path.lexists(os.path.join(self.tmppath, name)):
150 return None, None
151 return None, None
151
152
152 return self._getfile(name, rev)
153 return self._getfile(name, rev)
153
154
154 def getchanges(self, rev, full):
155 def getchanges(self, rev, full):
155 if full:
156 if full:
156 raise error.Abort(_("convert from arch does not support --full"))
157 raise error.Abort(_("convert from arch does not support --full"))
157 self._update(rev)
158 self._update(rev)
158 changes = []
159 changes = []
159 copies = {}
160 copies = {}
160
161
161 for f in self.changes[rev].add_files:
162 for f in self.changes[rev].add_files:
162 changes.append((f, rev))
163 changes.append((f, rev))
163
164
164 for f in self.changes[rev].mod_files:
165 for f in self.changes[rev].mod_files:
165 changes.append((f, rev))
166 changes.append((f, rev))
166
167
167 for f in self.changes[rev].del_files:
168 for f in self.changes[rev].del_files:
168 changes.append((f, rev))
169 changes.append((f, rev))
169
170
170 for src in self.changes[rev].ren_files:
171 for src in self.changes[rev].ren_files:
171 to = self.changes[rev].ren_files[src]
172 to = self.changes[rev].ren_files[src]
172 changes.append((src, rev))
173 changes.append((src, rev))
173 changes.append((to, rev))
174 changes.append((to, rev))
174 copies[to] = src
175 copies[to] = src
175
176
176 for src in self.changes[rev].ren_dirs:
177 for src in self.changes[rev].ren_dirs:
177 to = self.changes[rev].ren_dirs[src]
178 to = self.changes[rev].ren_dirs[src]
178 chgs, cps = self._rendirchanges(src, to)
179 chgs, cps = self._rendirchanges(src, to)
179 changes += [(f, rev) for f in chgs]
180 changes += [(f, rev) for f in chgs]
180 copies.update(cps)
181 copies.update(cps)
181
182
182 self.lastrev = rev
183 self.lastrev = rev
183 return sorted(set(changes)), copies, set()
184 return sorted(set(changes)), copies, set()
184
185
185 def getcommit(self, rev):
186 def getcommit(self, rev):
186 changes = self.changes[rev]
187 changes = self.changes[rev]
187 return common.commit(author=changes.author, date=changes.date,
188 return common.commit(author=changes.author, date=changes.date,
188 desc=changes.summary, parents=self.parents[rev],
189 desc=changes.summary, parents=self.parents[rev],
189 rev=rev)
190 rev=rev)
190
191
191 def gettags(self):
192 def gettags(self):
192 return self.tags
193 return self.tags
193
194
194 def _execute(self, cmd, *args, **kwargs):
195 def _execute(self, cmd, *args, **kwargs):
195 cmdline = [self.execmd, cmd]
196 cmdline = [self.execmd, cmd]
196 cmdline += args
197 cmdline += args
197 cmdline = [util.shellquote(arg) for arg in cmdline]
198 cmdline = [util.shellquote(arg) for arg in cmdline]
198 cmdline += ['>', os.devnull, '2>', os.devnull]
199 cmdline += ['>', os.devnull, '2>', os.devnull]
199 cmdline = util.quotecommand(' '.join(cmdline))
200 cmdline = util.quotecommand(' '.join(cmdline))
200 self.ui.debug(cmdline, '\n')
201 self.ui.debug(cmdline, '\n')
201 return os.system(cmdline)
202 return os.system(cmdline)
202
203
203 def _update(self, rev):
204 def _update(self, rev):
204 self.ui.debug('applying revision %s...\n' % rev)
205 self.ui.debug('applying revision %s...\n' % rev)
205 changeset, status = self.runlines('replay', '-d', self.tmppath,
206 changeset, status = self.runlines('replay', '-d', self.tmppath,
206 rev)
207 rev)
207 if status:
208 if status:
208 # Something went wrong while merging (baz or tla
209 # Something went wrong while merging (baz or tla
209 # issue?), get latest revision and try from there
210 # issue?), get latest revision and try from there
210 shutil.rmtree(self.tmppath, ignore_errors=True)
211 shutil.rmtree(self.tmppath, ignore_errors=True)
211 self._obtainrevision(rev)
212 self._obtainrevision(rev)
212 else:
213 else:
213 old_rev = self.parents[rev][0]
214 old_rev = self.parents[rev][0]
214 self.ui.debug('computing changeset between %s and %s...\n'
215 self.ui.debug('computing changeset between %s and %s...\n'
215 % (old_rev, rev))
216 % (old_rev, rev))
216 self._parsechangeset(changeset, rev)
217 self._parsechangeset(changeset, rev)
217
218
218 def _getfile(self, name, rev):
219 def _getfile(self, name, rev):
219 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
220 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
220 if stat.S_ISLNK(mode):
221 if stat.S_ISLNK(mode):
221 data = os.readlink(os.path.join(self.tmppath, name))
222 data = os.readlink(os.path.join(self.tmppath, name))
222 if mode:
223 if mode:
223 mode = 'l'
224 mode = 'l'
224 else:
225 else:
225 mode = ''
226 mode = ''
226 else:
227 else:
227 data = open(os.path.join(self.tmppath, name), 'rb').read()
228 data = open(os.path.join(self.tmppath, name), 'rb').read()
228 mode = (mode & 0o111) and 'x' or ''
229 mode = (mode & 0o111) and 'x' or ''
229 return data, mode
230 return data, mode
230
231
231 def _exclude(self, name):
232 def _exclude(self, name):
232 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
233 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
233 for exc in exclude:
234 for exc in exclude:
234 if name.find(exc) != -1:
235 if name.find(exc) != -1:
235 return True
236 return True
236 return False
237 return False
237
238
238 def _readcontents(self, path):
239 def _readcontents(self, path):
239 files = []
240 files = []
240 contents = os.listdir(path)
241 contents = os.listdir(path)
241 while len(contents) > 0:
242 while len(contents) > 0:
242 c = contents.pop()
243 c = contents.pop()
243 p = os.path.join(path, c)
244 p = os.path.join(path, c)
244 # os.walk could be used, but here we avoid internal GNU
245 # os.walk could be used, but here we avoid internal GNU
245 # Arch files and directories, thus saving a lot time.
246 # Arch files and directories, thus saving a lot time.
246 if not self._exclude(p):
247 if not self._exclude(p):
247 if os.path.isdir(p):
248 if os.path.isdir(p):
248 contents += [os.path.join(c, f) for f in os.listdir(p)]
249 contents += [os.path.join(c, f) for f in os.listdir(p)]
249 else:
250 else:
250 files.append(c)
251 files.append(c)
251 return files
252 return files
252
253
253 def _rendirchanges(self, src, dest):
254 def _rendirchanges(self, src, dest):
254 changes = []
255 changes = []
255 copies = {}
256 copies = {}
256 files = self._readcontents(os.path.join(self.tmppath, dest))
257 files = self._readcontents(os.path.join(self.tmppath, dest))
257 for f in files:
258 for f in files:
258 s = os.path.join(src, f)
259 s = os.path.join(src, f)
259 d = os.path.join(dest, f)
260 d = os.path.join(dest, f)
260 changes.append(s)
261 changes.append(s)
261 changes.append(d)
262 changes.append(d)
262 copies[d] = s
263 copies[d] = s
263 return changes, copies
264 return changes, copies
264
265
265 def _obtainrevision(self, rev):
266 def _obtainrevision(self, rev):
266 self.ui.debug('obtaining revision %s...\n' % rev)
267 self.ui.debug('obtaining revision %s...\n' % rev)
267 output = self._execute('get', rev, self.tmppath)
268 output = self._execute('get', rev, self.tmppath)
268 self.checkexit(output)
269 self.checkexit(output)
269 self.ui.debug('analyzing revision %s...\n' % rev)
270 self.ui.debug('analyzing revision %s...\n' % rev)
270 files = self._readcontents(self.tmppath)
271 files = self._readcontents(self.tmppath)
271 self.changes[rev].add_files += files
272 self.changes[rev].add_files += files
272
273
273 def _stripbasepath(self, path):
274 def _stripbasepath(self, path):
274 if path.startswith('./'):
275 if path.startswith('./'):
275 return path[2:]
276 return path[2:]
276 return path
277 return path
277
278
278 def _parsecatlog(self, data, rev):
279 def _parsecatlog(self, data, rev):
279 try:
280 try:
280 catlog = self.catlogparser.parsestr(data)
281 catlog = self.catlogparser.parsestr(data)
281
282
282 # Commit date
283 # Commit date
283 self.changes[rev].date = util.datestr(
284 self.changes[rev].date = dateutil.datestr(
284 util.strdate(catlog['Standard-date'],
285 dateutil.strdate(catlog['Standard-date'],
285 '%Y-%m-%d %H:%M:%S'))
286 '%Y-%m-%d %H:%M:%S'))
286
287
287 # Commit author
288 # Commit author
288 self.changes[rev].author = self.recode(catlog['Creator'])
289 self.changes[rev].author = self.recode(catlog['Creator'])
289
290
290 # Commit description
291 # Commit description
291 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
292 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
292 catlog.get_payload()))
293 catlog.get_payload()))
293 self.changes[rev].summary = self.recode(self.changes[rev].summary)
294 self.changes[rev].summary = self.recode(self.changes[rev].summary)
294
295
295 # Commit revision origin when dealing with a branch or tag
296 # Commit revision origin when dealing with a branch or tag
296 if 'Continuation-of' in catlog:
297 if 'Continuation-of' in catlog:
297 self.changes[rev].continuationof = self.recode(
298 self.changes[rev].continuationof = self.recode(
298 catlog['Continuation-of'])
299 catlog['Continuation-of'])
299 except Exception:
300 except Exception:
300 raise error.Abort(_('could not parse cat-log of %s') % rev)
301 raise error.Abort(_('could not parse cat-log of %s') % rev)
301
302
302 def _parsechangeset(self, data, rev):
303 def _parsechangeset(self, data, rev):
303 for l in data:
304 for l in data:
304 l = l.strip()
305 l = l.strip()
305 # Added file (ignore added directory)
306 # Added file (ignore added directory)
306 if l.startswith('A') and not l.startswith('A/'):
307 if l.startswith('A') and not l.startswith('A/'):
307 file = self._stripbasepath(l[1:].strip())
308 file = self._stripbasepath(l[1:].strip())
308 if not self._exclude(file):
309 if not self._exclude(file):
309 self.changes[rev].add_files.append(file)
310 self.changes[rev].add_files.append(file)
310 # Deleted file (ignore deleted directory)
311 # Deleted file (ignore deleted directory)
311 elif l.startswith('D') and not l.startswith('D/'):
312 elif l.startswith('D') and not l.startswith('D/'):
312 file = self._stripbasepath(l[1:].strip())
313 file = self._stripbasepath(l[1:].strip())
313 if not self._exclude(file):
314 if not self._exclude(file):
314 self.changes[rev].del_files.append(file)
315 self.changes[rev].del_files.append(file)
315 # Modified binary file
316 # Modified binary file
316 elif l.startswith('Mb'):
317 elif l.startswith('Mb'):
317 file = self._stripbasepath(l[2:].strip())
318 file = self._stripbasepath(l[2:].strip())
318 if not self._exclude(file):
319 if not self._exclude(file):
319 self.changes[rev].mod_files.append(file)
320 self.changes[rev].mod_files.append(file)
320 # Modified link
321 # Modified link
321 elif l.startswith('M->'):
322 elif l.startswith('M->'):
322 file = self._stripbasepath(l[3:].strip())
323 file = self._stripbasepath(l[3:].strip())
323 if not self._exclude(file):
324 if not self._exclude(file):
324 self.changes[rev].mod_files.append(file)
325 self.changes[rev].mod_files.append(file)
325 # Modified file
326 # Modified file
326 elif l.startswith('M'):
327 elif l.startswith('M'):
327 file = self._stripbasepath(l[1:].strip())
328 file = self._stripbasepath(l[1:].strip())
328 if not self._exclude(file):
329 if not self._exclude(file):
329 self.changes[rev].mod_files.append(file)
330 self.changes[rev].mod_files.append(file)
330 # Renamed file (or link)
331 # Renamed file (or link)
331 elif l.startswith('=>'):
332 elif l.startswith('=>'):
332 files = l[2:].strip().split(' ')
333 files = l[2:].strip().split(' ')
333 if len(files) == 1:
334 if len(files) == 1:
334 files = l[2:].strip().split('\t')
335 files = l[2:].strip().split('\t')
335 src = self._stripbasepath(files[0])
336 src = self._stripbasepath(files[0])
336 dst = self._stripbasepath(files[1])
337 dst = self._stripbasepath(files[1])
337 if not self._exclude(src) and not self._exclude(dst):
338 if not self._exclude(src) and not self._exclude(dst):
338 self.changes[rev].ren_files[src] = dst
339 self.changes[rev].ren_files[src] = dst
339 # Conversion from file to link or from link to file (modified)
340 # Conversion from file to link or from link to file (modified)
340 elif l.startswith('ch'):
341 elif l.startswith('ch'):
341 file = self._stripbasepath(l[2:].strip())
342 file = self._stripbasepath(l[2:].strip())
342 if not self._exclude(file):
343 if not self._exclude(file):
343 self.changes[rev].mod_files.append(file)
344 self.changes[rev].mod_files.append(file)
344 # Renamed directory
345 # Renamed directory
345 elif l.startswith('/>'):
346 elif l.startswith('/>'):
346 dirs = l[2:].strip().split(' ')
347 dirs = l[2:].strip().split(' ')
347 if len(dirs) == 1:
348 if len(dirs) == 1:
348 dirs = l[2:].strip().split('\t')
349 dirs = l[2:].strip().split('\t')
349 src = self._stripbasepath(dirs[0])
350 src = self._stripbasepath(dirs[0])
350 dst = self._stripbasepath(dirs[1])
351 dst = self._stripbasepath(dirs[1])
351 if not self._exclude(src) and not self._exclude(dst):
352 if not self._exclude(src) and not self._exclude(dst):
352 self.changes[rev].ren_dirs[src] = dst
353 self.changes[rev].ren_dirs[src] = dst
@@ -1,650 +1,651 b''
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 import os
21 import os
22 import re
22 import re
23 import time
23 import time
24
24
25 from mercurial.i18n import _
25 from mercurial.i18n import _
26 from mercurial import (
26 from mercurial import (
27 bookmarks,
27 bookmarks,
28 context,
28 context,
29 error,
29 error,
30 exchange,
30 exchange,
31 hg,
31 hg,
32 lock as lockmod,
32 lock as lockmod,
33 merge as mergemod,
33 merge as mergemod,
34 node as nodemod,
34 node as nodemod,
35 phases,
35 phases,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 )
38 )
39 from mercurial.utils import dateutil
39 stringio = util.stringio
40 stringio = util.stringio
40
41
41 from . import common
42 from . import common
42 mapfile = common.mapfile
43 mapfile = common.mapfile
43 NoRepo = common.NoRepo
44 NoRepo = common.NoRepo
44
45
45 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
46 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
46
47
47 class mercurial_sink(common.converter_sink):
48 class mercurial_sink(common.converter_sink):
48 def __init__(self, ui, repotype, path):
49 def __init__(self, ui, repotype, path):
49 common.converter_sink.__init__(self, ui, repotype, path)
50 common.converter_sink.__init__(self, ui, repotype, path)
50 self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
51 self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
51 self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
52 self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
52 self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
53 self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
53 self.lastbranch = None
54 self.lastbranch = None
54 if os.path.isdir(path) and len(os.listdir(path)) > 0:
55 if os.path.isdir(path) and len(os.listdir(path)) > 0:
55 try:
56 try:
56 self.repo = hg.repository(self.ui, path)
57 self.repo = hg.repository(self.ui, path)
57 if not self.repo.local():
58 if not self.repo.local():
58 raise NoRepo(_('%s is not a local Mercurial repository')
59 raise NoRepo(_('%s is not a local Mercurial repository')
59 % path)
60 % path)
60 except error.RepoError as err:
61 except error.RepoError as err:
61 ui.traceback()
62 ui.traceback()
62 raise NoRepo(err.args[0])
63 raise NoRepo(err.args[0])
63 else:
64 else:
64 try:
65 try:
65 ui.status(_('initializing destination %s repository\n') % path)
66 ui.status(_('initializing destination %s repository\n') % path)
66 self.repo = hg.repository(self.ui, path, create=True)
67 self.repo = hg.repository(self.ui, path, create=True)
67 if not self.repo.local():
68 if not self.repo.local():
68 raise NoRepo(_('%s is not a local Mercurial repository')
69 raise NoRepo(_('%s is not a local Mercurial repository')
69 % path)
70 % path)
70 self.created.append(path)
71 self.created.append(path)
71 except error.RepoError:
72 except error.RepoError:
72 ui.traceback()
73 ui.traceback()
73 raise NoRepo(_("could not create hg repository %s as sink")
74 raise NoRepo(_("could not create hg repository %s as sink")
74 % path)
75 % path)
75 self.lock = None
76 self.lock = None
76 self.wlock = None
77 self.wlock = None
77 self.filemapmode = False
78 self.filemapmode = False
78 self.subrevmaps = {}
79 self.subrevmaps = {}
79
80
80 def before(self):
81 def before(self):
81 self.ui.debug('run hg sink pre-conversion action\n')
82 self.ui.debug('run hg sink pre-conversion action\n')
82 self.wlock = self.repo.wlock()
83 self.wlock = self.repo.wlock()
83 self.lock = self.repo.lock()
84 self.lock = self.repo.lock()
84
85
85 def after(self):
86 def after(self):
86 self.ui.debug('run hg sink post-conversion action\n')
87 self.ui.debug('run hg sink post-conversion action\n')
87 if self.lock:
88 if self.lock:
88 self.lock.release()
89 self.lock.release()
89 if self.wlock:
90 if self.wlock:
90 self.wlock.release()
91 self.wlock.release()
91
92
92 def revmapfile(self):
93 def revmapfile(self):
93 return self.repo.vfs.join("shamap")
94 return self.repo.vfs.join("shamap")
94
95
95 def authorfile(self):
96 def authorfile(self):
96 return self.repo.vfs.join("authormap")
97 return self.repo.vfs.join("authormap")
97
98
98 def setbranch(self, branch, pbranches):
99 def setbranch(self, branch, pbranches):
99 if not self.clonebranches:
100 if not self.clonebranches:
100 return
101 return
101
102
102 setbranch = (branch != self.lastbranch)
103 setbranch = (branch != self.lastbranch)
103 self.lastbranch = branch
104 self.lastbranch = branch
104 if not branch:
105 if not branch:
105 branch = 'default'
106 branch = 'default'
106 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
107 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
107 if pbranches:
108 if pbranches:
108 pbranch = pbranches[0][1]
109 pbranch = pbranches[0][1]
109 else:
110 else:
110 pbranch = 'default'
111 pbranch = 'default'
111
112
112 branchpath = os.path.join(self.path, branch)
113 branchpath = os.path.join(self.path, branch)
113 if setbranch:
114 if setbranch:
114 self.after()
115 self.after()
115 try:
116 try:
116 self.repo = hg.repository(self.ui, branchpath)
117 self.repo = hg.repository(self.ui, branchpath)
117 except Exception:
118 except Exception:
118 self.repo = hg.repository(self.ui, branchpath, create=True)
119 self.repo = hg.repository(self.ui, branchpath, create=True)
119 self.before()
120 self.before()
120
121
121 # pbranches may bring revisions from other branches (merge parents)
122 # pbranches may bring revisions from other branches (merge parents)
122 # Make sure we have them, or pull them.
123 # Make sure we have them, or pull them.
123 missings = {}
124 missings = {}
124 for b in pbranches:
125 for b in pbranches:
125 try:
126 try:
126 self.repo.lookup(b[0])
127 self.repo.lookup(b[0])
127 except Exception:
128 except Exception:
128 missings.setdefault(b[1], []).append(b[0])
129 missings.setdefault(b[1], []).append(b[0])
129
130
130 if missings:
131 if missings:
131 self.after()
132 self.after()
132 for pbranch, heads in sorted(missings.iteritems()):
133 for pbranch, heads in sorted(missings.iteritems()):
133 pbranchpath = os.path.join(self.path, pbranch)
134 pbranchpath = os.path.join(self.path, pbranch)
134 prepo = hg.peer(self.ui, {}, pbranchpath)
135 prepo = hg.peer(self.ui, {}, pbranchpath)
135 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
136 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
136 exchange.pull(self.repo, prepo,
137 exchange.pull(self.repo, prepo,
137 [prepo.lookup(h) for h in heads])
138 [prepo.lookup(h) for h in heads])
138 self.before()
139 self.before()
139
140
140 def _rewritetags(self, source, revmap, data):
141 def _rewritetags(self, source, revmap, data):
141 fp = stringio()
142 fp = stringio()
142 for line in data.splitlines():
143 for line in data.splitlines():
143 s = line.split(' ', 1)
144 s = line.split(' ', 1)
144 if len(s) != 2:
145 if len(s) != 2:
145 continue
146 continue
146 revid = revmap.get(source.lookuprev(s[0]))
147 revid = revmap.get(source.lookuprev(s[0]))
147 if not revid:
148 if not revid:
148 if s[0] == nodemod.nullhex:
149 if s[0] == nodemod.nullhex:
149 revid = s[0]
150 revid = s[0]
150 else:
151 else:
151 continue
152 continue
152 fp.write('%s %s\n' % (revid, s[1]))
153 fp.write('%s %s\n' % (revid, s[1]))
153 return fp.getvalue()
154 return fp.getvalue()
154
155
155 def _rewritesubstate(self, source, data):
156 def _rewritesubstate(self, source, data):
156 fp = stringio()
157 fp = stringio()
157 for line in data.splitlines():
158 for line in data.splitlines():
158 s = line.split(' ', 1)
159 s = line.split(' ', 1)
159 if len(s) != 2:
160 if len(s) != 2:
160 continue
161 continue
161
162
162 revid = s[0]
163 revid = s[0]
163 subpath = s[1]
164 subpath = s[1]
164 if revid != nodemod.nullhex:
165 if revid != nodemod.nullhex:
165 revmap = self.subrevmaps.get(subpath)
166 revmap = self.subrevmaps.get(subpath)
166 if revmap is None:
167 if revmap is None:
167 revmap = mapfile(self.ui,
168 revmap = mapfile(self.ui,
168 self.repo.wjoin(subpath, '.hg/shamap'))
169 self.repo.wjoin(subpath, '.hg/shamap'))
169 self.subrevmaps[subpath] = revmap
170 self.subrevmaps[subpath] = revmap
170
171
171 # It is reasonable that one or more of the subrepos don't
172 # It is reasonable that one or more of the subrepos don't
172 # need to be converted, in which case they can be cloned
173 # need to be converted, in which case they can be cloned
173 # into place instead of converted. Therefore, only warn
174 # into place instead of converted. Therefore, only warn
174 # once.
175 # once.
175 msg = _('no ".hgsubstate" updates will be made for "%s"\n')
176 msg = _('no ".hgsubstate" updates will be made for "%s"\n')
176 if len(revmap) == 0:
177 if len(revmap) == 0:
177 sub = self.repo.wvfs.reljoin(subpath, '.hg')
178 sub = self.repo.wvfs.reljoin(subpath, '.hg')
178
179
179 if self.repo.wvfs.exists(sub):
180 if self.repo.wvfs.exists(sub):
180 self.ui.warn(msg % subpath)
181 self.ui.warn(msg % subpath)
181
182
182 newid = revmap.get(revid)
183 newid = revmap.get(revid)
183 if not newid:
184 if not newid:
184 if len(revmap) > 0:
185 if len(revmap) > 0:
185 self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
186 self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
186 (revid, subpath))
187 (revid, subpath))
187 else:
188 else:
188 revid = newid
189 revid = newid
189
190
190 fp.write('%s %s\n' % (revid, subpath))
191 fp.write('%s %s\n' % (revid, subpath))
191
192
192 return fp.getvalue()
193 return fp.getvalue()
193
194
194 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
195 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
195 """Calculates the files from p2 that we need to pull in when merging p1
196 """Calculates the files from p2 that we need to pull in when merging p1
196 and p2, given that the merge is coming from the given source.
197 and p2, given that the merge is coming from the given source.
197
198
198 This prevents us from losing files that only exist in the target p2 and
199 This prevents us from losing files that only exist in the target p2 and
199 that don't come from the source repo (like if you're merging multiple
200 that don't come from the source repo (like if you're merging multiple
200 repositories together).
201 repositories together).
201 """
202 """
202 anc = [p1ctx.ancestor(p2ctx)]
203 anc = [p1ctx.ancestor(p2ctx)]
203 # Calculate what files are coming from p2
204 # Calculate what files are coming from p2
204 actions, diverge, rename = mergemod.calculateupdates(
205 actions, diverge, rename = mergemod.calculateupdates(
205 self.repo, p1ctx, p2ctx, anc,
206 self.repo, p1ctx, p2ctx, anc,
206 True, # branchmerge
207 True, # branchmerge
207 True, # force
208 True, # force
208 False, # acceptremote
209 False, # acceptremote
209 False, # followcopies
210 False, # followcopies
210 )
211 )
211
212
212 for file, (action, info, msg) in actions.iteritems():
213 for file, (action, info, msg) in actions.iteritems():
213 if source.targetfilebelongstosource(file):
214 if source.targetfilebelongstosource(file):
214 # If the file belongs to the source repo, ignore the p2
215 # If the file belongs to the source repo, ignore the p2
215 # since it will be covered by the existing fileset.
216 # since it will be covered by the existing fileset.
216 continue
217 continue
217
218
218 # If the file requires actual merging, abort. We don't have enough
219 # If the file requires actual merging, abort. We don't have enough
219 # context to resolve merges correctly.
220 # context to resolve merges correctly.
220 if action in ['m', 'dm', 'cd', 'dc']:
221 if action in ['m', 'dm', 'cd', 'dc']:
221 raise error.Abort(_("unable to convert merge commit "
222 raise error.Abort(_("unable to convert merge commit "
222 "since target parents do not merge cleanly (file "
223 "since target parents do not merge cleanly (file "
223 "%s, parents %s and %s)") % (file, p1ctx,
224 "%s, parents %s and %s)") % (file, p1ctx,
224 p2ctx))
225 p2ctx))
225 elif action == 'k':
226 elif action == 'k':
226 # 'keep' means nothing changed from p1
227 # 'keep' means nothing changed from p1
227 continue
228 continue
228 else:
229 else:
229 # Any other change means we want to take the p2 version
230 # Any other change means we want to take the p2 version
230 yield file
231 yield file
231
232
232 def putcommit(self, files, copies, parents, commit, source, revmap, full,
233 def putcommit(self, files, copies, parents, commit, source, revmap, full,
233 cleanp2):
234 cleanp2):
234 files = dict(files)
235 files = dict(files)
235
236
236 def getfilectx(repo, memctx, f):
237 def getfilectx(repo, memctx, f):
237 if p2ctx and f in p2files and f not in copies:
238 if p2ctx and f in p2files and f not in copies:
238 self.ui.debug('reusing %s from p2\n' % f)
239 self.ui.debug('reusing %s from p2\n' % f)
239 try:
240 try:
240 return p2ctx[f]
241 return p2ctx[f]
241 except error.ManifestLookupError:
242 except error.ManifestLookupError:
242 # If the file doesn't exist in p2, then we're syncing a
243 # If the file doesn't exist in p2, then we're syncing a
243 # delete, so just return None.
244 # delete, so just return None.
244 return None
245 return None
245 try:
246 try:
246 v = files[f]
247 v = files[f]
247 except KeyError:
248 except KeyError:
248 return None
249 return None
249 data, mode = source.getfile(f, v)
250 data, mode = source.getfile(f, v)
250 if data is None:
251 if data is None:
251 return None
252 return None
252 if f == '.hgtags':
253 if f == '.hgtags':
253 data = self._rewritetags(source, revmap, data)
254 data = self._rewritetags(source, revmap, data)
254 if f == '.hgsubstate':
255 if f == '.hgsubstate':
255 data = self._rewritesubstate(source, data)
256 data = self._rewritesubstate(source, data)
256 return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
257 return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
257 'x' in mode, copies.get(f))
258 'x' in mode, copies.get(f))
258
259
259 pl = []
260 pl = []
260 for p in parents:
261 for p in parents:
261 if p not in pl:
262 if p not in pl:
262 pl.append(p)
263 pl.append(p)
263 parents = pl
264 parents = pl
264 nparents = len(parents)
265 nparents = len(parents)
265 if self.filemapmode and nparents == 1:
266 if self.filemapmode and nparents == 1:
266 m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
267 m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
267 parent = parents[0]
268 parent = parents[0]
268
269
269 if len(parents) < 2:
270 if len(parents) < 2:
270 parents.append(nodemod.nullid)
271 parents.append(nodemod.nullid)
271 if len(parents) < 2:
272 if len(parents) < 2:
272 parents.append(nodemod.nullid)
273 parents.append(nodemod.nullid)
273 p2 = parents.pop(0)
274 p2 = parents.pop(0)
274
275
275 text = commit.desc
276 text = commit.desc
276
277
277 sha1s = re.findall(sha1re, text)
278 sha1s = re.findall(sha1re, text)
278 for sha1 in sha1s:
279 for sha1 in sha1s:
279 oldrev = source.lookuprev(sha1)
280 oldrev = source.lookuprev(sha1)
280 newrev = revmap.get(oldrev)
281 newrev = revmap.get(oldrev)
281 if newrev is not None:
282 if newrev is not None:
282 text = text.replace(sha1, newrev[:len(sha1)])
283 text = text.replace(sha1, newrev[:len(sha1)])
283
284
284 extra = commit.extra.copy()
285 extra = commit.extra.copy()
285
286
286 sourcename = self.repo.ui.config('convert', 'hg.sourcename')
287 sourcename = self.repo.ui.config('convert', 'hg.sourcename')
287 if sourcename:
288 if sourcename:
288 extra['convert_source'] = sourcename
289 extra['convert_source'] = sourcename
289
290
290 for label in ('source', 'transplant_source', 'rebase_source',
291 for label in ('source', 'transplant_source', 'rebase_source',
291 'intermediate-source'):
292 'intermediate-source'):
292 node = extra.get(label)
293 node = extra.get(label)
293
294
294 if node is None:
295 if node is None:
295 continue
296 continue
296
297
297 # Only transplant stores its reference in binary
298 # Only transplant stores its reference in binary
298 if label == 'transplant_source':
299 if label == 'transplant_source':
299 node = nodemod.hex(node)
300 node = nodemod.hex(node)
300
301
301 newrev = revmap.get(node)
302 newrev = revmap.get(node)
302 if newrev is not None:
303 if newrev is not None:
303 if label == 'transplant_source':
304 if label == 'transplant_source':
304 newrev = nodemod.bin(newrev)
305 newrev = nodemod.bin(newrev)
305
306
306 extra[label] = newrev
307 extra[label] = newrev
307
308
308 if self.branchnames and commit.branch:
309 if self.branchnames and commit.branch:
309 extra['branch'] = commit.branch
310 extra['branch'] = commit.branch
310 if commit.rev and commit.saverev:
311 if commit.rev and commit.saverev:
311 extra['convert_revision'] = commit.rev
312 extra['convert_revision'] = commit.rev
312
313
313 while parents:
314 while parents:
314 p1 = p2
315 p1 = p2
315 p2 = parents.pop(0)
316 p2 = parents.pop(0)
316 p1ctx = self.repo[p1]
317 p1ctx = self.repo[p1]
317 p2ctx = None
318 p2ctx = None
318 if p2 != nodemod.nullid:
319 if p2 != nodemod.nullid:
319 p2ctx = self.repo[p2]
320 p2ctx = self.repo[p2]
320 fileset = set(files)
321 fileset = set(files)
321 if full:
322 if full:
322 fileset.update(self.repo[p1])
323 fileset.update(self.repo[p1])
323 fileset.update(self.repo[p2])
324 fileset.update(self.repo[p2])
324
325
325 if p2ctx:
326 if p2ctx:
326 p2files = set(cleanp2)
327 p2files = set(cleanp2)
327 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
328 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
328 p2files.add(file)
329 p2files.add(file)
329 fileset.add(file)
330 fileset.add(file)
330
331
331 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
332 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
332 getfilectx, commit.author, commit.date, extra)
333 getfilectx, commit.author, commit.date, extra)
333
334
334 # We won't know if the conversion changes the node until after the
335 # We won't know if the conversion changes the node until after the
335 # commit, so copy the source's phase for now.
336 # commit, so copy the source's phase for now.
336 self.repo.ui.setconfig('phases', 'new-commit',
337 self.repo.ui.setconfig('phases', 'new-commit',
337 phases.phasenames[commit.phase], 'convert')
338 phases.phasenames[commit.phase], 'convert')
338
339
339 with self.repo.transaction("convert") as tr:
340 with self.repo.transaction("convert") as tr:
340 node = nodemod.hex(self.repo.commitctx(ctx))
341 node = nodemod.hex(self.repo.commitctx(ctx))
341
342
342 # If the node value has changed, but the phase is lower than
343 # If the node value has changed, but the phase is lower than
343 # draft, set it back to draft since it hasn't been exposed
344 # draft, set it back to draft since it hasn't been exposed
344 # anywhere.
345 # anywhere.
345 if commit.rev != node:
346 if commit.rev != node:
346 ctx = self.repo[node]
347 ctx = self.repo[node]
347 if ctx.phase() < phases.draft:
348 if ctx.phase() < phases.draft:
348 phases.registernew(self.repo, tr, phases.draft,
349 phases.registernew(self.repo, tr, phases.draft,
349 [ctx.node()])
350 [ctx.node()])
350
351
351 text = "(octopus merge fixup)\n"
352 text = "(octopus merge fixup)\n"
352 p2 = node
353 p2 = node
353
354
354 if self.filemapmode and nparents == 1:
355 if self.filemapmode and nparents == 1:
355 man = self.repo.manifestlog._revlog
356 man = self.repo.manifestlog._revlog
356 mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
357 mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
357 closed = 'close' in commit.extra
358 closed = 'close' in commit.extra
358 if not closed and not man.cmp(m1node, man.revision(mnode)):
359 if not closed and not man.cmp(m1node, man.revision(mnode)):
359 self.ui.status(_("filtering out empty revision\n"))
360 self.ui.status(_("filtering out empty revision\n"))
360 self.repo.rollback(force=True)
361 self.repo.rollback(force=True)
361 return parent
362 return parent
362 return p2
363 return p2
363
364
364 def puttags(self, tags):
365 def puttags(self, tags):
365 try:
366 try:
366 parentctx = self.repo[self.tagsbranch]
367 parentctx = self.repo[self.tagsbranch]
367 tagparent = parentctx.node()
368 tagparent = parentctx.node()
368 except error.RepoError:
369 except error.RepoError:
369 parentctx = None
370 parentctx = None
370 tagparent = nodemod.nullid
371 tagparent = nodemod.nullid
371
372
372 oldlines = set()
373 oldlines = set()
373 for branch, heads in self.repo.branchmap().iteritems():
374 for branch, heads in self.repo.branchmap().iteritems():
374 for h in heads:
375 for h in heads:
375 if '.hgtags' in self.repo[h]:
376 if '.hgtags' in self.repo[h]:
376 oldlines.update(
377 oldlines.update(
377 set(self.repo[h]['.hgtags'].data().splitlines(True)))
378 set(self.repo[h]['.hgtags'].data().splitlines(True)))
378 oldlines = sorted(list(oldlines))
379 oldlines = sorted(list(oldlines))
379
380
380 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
381 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
381 if newlines == oldlines:
382 if newlines == oldlines:
382 return None, None
383 return None, None
383
384
384 # if the old and new tags match, then there is nothing to update
385 # if the old and new tags match, then there is nothing to update
385 oldtags = set()
386 oldtags = set()
386 newtags = set()
387 newtags = set()
387 for line in oldlines:
388 for line in oldlines:
388 s = line.strip().split(' ', 1)
389 s = line.strip().split(' ', 1)
389 if len(s) != 2:
390 if len(s) != 2:
390 continue
391 continue
391 oldtags.add(s[1])
392 oldtags.add(s[1])
392 for line in newlines:
393 for line in newlines:
393 s = line.strip().split(' ', 1)
394 s = line.strip().split(' ', 1)
394 if len(s) != 2:
395 if len(s) != 2:
395 continue
396 continue
396 if s[1] not in oldtags:
397 if s[1] not in oldtags:
397 newtags.add(s[1].strip())
398 newtags.add(s[1].strip())
398
399
399 if not newtags:
400 if not newtags:
400 return None, None
401 return None, None
401
402
402 data = "".join(newlines)
403 data = "".join(newlines)
403 def getfilectx(repo, memctx, f):
404 def getfilectx(repo, memctx, f):
404 return context.memfilectx(repo, memctx, f, data, False, False, None)
405 return context.memfilectx(repo, memctx, f, data, False, False, None)
405
406
406 self.ui.status(_("updating tags\n"))
407 self.ui.status(_("updating tags\n"))
407 date = "%s 0" % int(time.mktime(time.gmtime()))
408 date = "%s 0" % int(time.mktime(time.gmtime()))
408 extra = {'branch': self.tagsbranch}
409 extra = {'branch': self.tagsbranch}
409 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
410 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
410 [".hgtags"], getfilectx, "convert-repo", date,
411 [".hgtags"], getfilectx, "convert-repo", date,
411 extra)
412 extra)
412 node = self.repo.commitctx(ctx)
413 node = self.repo.commitctx(ctx)
413 return nodemod.hex(node), nodemod.hex(tagparent)
414 return nodemod.hex(node), nodemod.hex(tagparent)
414
415
415 def setfilemapmode(self, active):
416 def setfilemapmode(self, active):
416 self.filemapmode = active
417 self.filemapmode = active
417
418
418 def putbookmarks(self, updatedbookmark):
419 def putbookmarks(self, updatedbookmark):
419 if not len(updatedbookmark):
420 if not len(updatedbookmark):
420 return
421 return
421 wlock = lock = tr = None
422 wlock = lock = tr = None
422 try:
423 try:
423 wlock = self.repo.wlock()
424 wlock = self.repo.wlock()
424 lock = self.repo.lock()
425 lock = self.repo.lock()
425 tr = self.repo.transaction('bookmark')
426 tr = self.repo.transaction('bookmark')
426 self.ui.status(_("updating bookmarks\n"))
427 self.ui.status(_("updating bookmarks\n"))
427 destmarks = self.repo._bookmarks
428 destmarks = self.repo._bookmarks
428 changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
429 changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
429 for bookmark in updatedbookmark]
430 for bookmark in updatedbookmark]
430 destmarks.applychanges(self.repo, tr, changes)
431 destmarks.applychanges(self.repo, tr, changes)
431 tr.close()
432 tr.close()
432 finally:
433 finally:
433 lockmod.release(lock, wlock, tr)
434 lockmod.release(lock, wlock, tr)
434
435
435 def hascommitfrommap(self, rev):
436 def hascommitfrommap(self, rev):
436 # the exact semantics of clonebranches is unclear so we can't say no
437 # the exact semantics of clonebranches is unclear so we can't say no
437 return rev in self.repo or self.clonebranches
438 return rev in self.repo or self.clonebranches
438
439
439 def hascommitforsplicemap(self, rev):
440 def hascommitforsplicemap(self, rev):
440 if rev not in self.repo and self.clonebranches:
441 if rev not in self.repo and self.clonebranches:
441 raise error.Abort(_('revision %s not found in destination '
442 raise error.Abort(_('revision %s not found in destination '
442 'repository (lookups with clonebranches=true '
443 'repository (lookups with clonebranches=true '
443 'are not implemented)') % rev)
444 'are not implemented)') % rev)
444 return rev in self.repo
445 return rev in self.repo
445
446
446 class mercurial_source(common.converter_source):
447 class mercurial_source(common.converter_source):
447 def __init__(self, ui, repotype, path, revs=None):
448 def __init__(self, ui, repotype, path, revs=None):
448 common.converter_source.__init__(self, ui, repotype, path, revs)
449 common.converter_source.__init__(self, ui, repotype, path, revs)
449 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
450 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
450 self.ignored = set()
451 self.ignored = set()
451 self.saverev = ui.configbool('convert', 'hg.saverev')
452 self.saverev = ui.configbool('convert', 'hg.saverev')
452 try:
453 try:
453 self.repo = hg.repository(self.ui, path)
454 self.repo = hg.repository(self.ui, path)
454 # try to provoke an exception if this isn't really a hg
455 # try to provoke an exception if this isn't really a hg
455 # repo, but some other bogus compatible-looking url
456 # repo, but some other bogus compatible-looking url
456 if not self.repo.local():
457 if not self.repo.local():
457 raise error.RepoError
458 raise error.RepoError
458 except error.RepoError:
459 except error.RepoError:
459 ui.traceback()
460 ui.traceback()
460 raise NoRepo(_("%s is not a local Mercurial repository") % path)
461 raise NoRepo(_("%s is not a local Mercurial repository") % path)
461 self.lastrev = None
462 self.lastrev = None
462 self.lastctx = None
463 self.lastctx = None
463 self._changescache = None, None
464 self._changescache = None, None
464 self.convertfp = None
465 self.convertfp = None
465 # Restrict converted revisions to startrev descendants
466 # Restrict converted revisions to startrev descendants
466 startnode = ui.config('convert', 'hg.startrev')
467 startnode = ui.config('convert', 'hg.startrev')
467 hgrevs = ui.config('convert', 'hg.revs')
468 hgrevs = ui.config('convert', 'hg.revs')
468 if hgrevs is None:
469 if hgrevs is None:
469 if startnode is not None:
470 if startnode is not None:
470 try:
471 try:
471 startnode = self.repo.lookup(startnode)
472 startnode = self.repo.lookup(startnode)
472 except error.RepoError:
473 except error.RepoError:
473 raise error.Abort(_('%s is not a valid start revision')
474 raise error.Abort(_('%s is not a valid start revision')
474 % startnode)
475 % startnode)
475 startrev = self.repo.changelog.rev(startnode)
476 startrev = self.repo.changelog.rev(startnode)
476 children = {startnode: 1}
477 children = {startnode: 1}
477 for r in self.repo.changelog.descendants([startrev]):
478 for r in self.repo.changelog.descendants([startrev]):
478 children[self.repo.changelog.node(r)] = 1
479 children[self.repo.changelog.node(r)] = 1
479 self.keep = children.__contains__
480 self.keep = children.__contains__
480 else:
481 else:
481 self.keep = util.always
482 self.keep = util.always
482 if revs:
483 if revs:
483 self._heads = [self.repo[r].node() for r in revs]
484 self._heads = [self.repo[r].node() for r in revs]
484 else:
485 else:
485 self._heads = self.repo.heads()
486 self._heads = self.repo.heads()
486 else:
487 else:
487 if revs or startnode is not None:
488 if revs or startnode is not None:
488 raise error.Abort(_('hg.revs cannot be combined with '
489 raise error.Abort(_('hg.revs cannot be combined with '
489 'hg.startrev or --rev'))
490 'hg.startrev or --rev'))
490 nodes = set()
491 nodes = set()
491 parents = set()
492 parents = set()
492 for r in scmutil.revrange(self.repo, [hgrevs]):
493 for r in scmutil.revrange(self.repo, [hgrevs]):
493 ctx = self.repo[r]
494 ctx = self.repo[r]
494 nodes.add(ctx.node())
495 nodes.add(ctx.node())
495 parents.update(p.node() for p in ctx.parents())
496 parents.update(p.node() for p in ctx.parents())
496 self.keep = nodes.__contains__
497 self.keep = nodes.__contains__
497 self._heads = nodes - parents
498 self._heads = nodes - parents
498
499
499 def _changectx(self, rev):
500 def _changectx(self, rev):
500 if self.lastrev != rev:
501 if self.lastrev != rev:
501 self.lastctx = self.repo[rev]
502 self.lastctx = self.repo[rev]
502 self.lastrev = rev
503 self.lastrev = rev
503 return self.lastctx
504 return self.lastctx
504
505
505 def _parents(self, ctx):
506 def _parents(self, ctx):
506 return [p for p in ctx.parents() if p and self.keep(p.node())]
507 return [p for p in ctx.parents() if p and self.keep(p.node())]
507
508
508 def getheads(self):
509 def getheads(self):
509 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
510 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
510
511
511 def getfile(self, name, rev):
512 def getfile(self, name, rev):
512 try:
513 try:
513 fctx = self._changectx(rev)[name]
514 fctx = self._changectx(rev)[name]
514 return fctx.data(), fctx.flags()
515 return fctx.data(), fctx.flags()
515 except error.LookupError:
516 except error.LookupError:
516 return None, None
517 return None, None
517
518
518 def _changedfiles(self, ctx1, ctx2):
519 def _changedfiles(self, ctx1, ctx2):
519 ma, r = [], []
520 ma, r = [], []
520 maappend = ma.append
521 maappend = ma.append
521 rappend = r.append
522 rappend = r.append
522 d = ctx1.manifest().diff(ctx2.manifest())
523 d = ctx1.manifest().diff(ctx2.manifest())
523 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
524 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
524 if node2 is None:
525 if node2 is None:
525 rappend(f)
526 rappend(f)
526 else:
527 else:
527 maappend(f)
528 maappend(f)
528 return ma, r
529 return ma, r
529
530
530 def getchanges(self, rev, full):
531 def getchanges(self, rev, full):
531 ctx = self._changectx(rev)
532 ctx = self._changectx(rev)
532 parents = self._parents(ctx)
533 parents = self._parents(ctx)
533 if full or not parents:
534 if full or not parents:
534 files = copyfiles = ctx.manifest()
535 files = copyfiles = ctx.manifest()
535 if parents:
536 if parents:
536 if self._changescache[0] == rev:
537 if self._changescache[0] == rev:
537 ma, r = self._changescache[1]
538 ma, r = self._changescache[1]
538 else:
539 else:
539 ma, r = self._changedfiles(parents[0], ctx)
540 ma, r = self._changedfiles(parents[0], ctx)
540 if not full:
541 if not full:
541 files = ma + r
542 files = ma + r
542 copyfiles = ma
543 copyfiles = ma
543 # _getcopies() is also run for roots and before filtering so missing
544 # _getcopies() is also run for roots and before filtering so missing
544 # revlogs are detected early
545 # revlogs are detected early
545 copies = self._getcopies(ctx, parents, copyfiles)
546 copies = self._getcopies(ctx, parents, copyfiles)
546 cleanp2 = set()
547 cleanp2 = set()
547 if len(parents) == 2:
548 if len(parents) == 2:
548 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
549 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
549 for f, value in d.iteritems():
550 for f, value in d.iteritems():
550 if value is None:
551 if value is None:
551 cleanp2.add(f)
552 cleanp2.add(f)
552 changes = [(f, rev) for f in files if f not in self.ignored]
553 changes = [(f, rev) for f in files if f not in self.ignored]
553 changes.sort()
554 changes.sort()
554 return changes, copies, cleanp2
555 return changes, copies, cleanp2
555
556
556 def _getcopies(self, ctx, parents, files):
557 def _getcopies(self, ctx, parents, files):
557 copies = {}
558 copies = {}
558 for name in files:
559 for name in files:
559 if name in self.ignored:
560 if name in self.ignored:
560 continue
561 continue
561 try:
562 try:
562 copysource, _copynode = ctx.filectx(name).renamed()
563 copysource, _copynode = ctx.filectx(name).renamed()
563 if copysource in self.ignored:
564 if copysource in self.ignored:
564 continue
565 continue
565 # Ignore copy sources not in parent revisions
566 # Ignore copy sources not in parent revisions
566 if not any(copysource in p for p in parents):
567 if not any(copysource in p for p in parents):
567 continue
568 continue
568 copies[name] = copysource
569 copies[name] = copysource
569 except TypeError:
570 except TypeError:
570 pass
571 pass
571 except error.LookupError as e:
572 except error.LookupError as e:
572 if not self.ignoreerrors:
573 if not self.ignoreerrors:
573 raise
574 raise
574 self.ignored.add(name)
575 self.ignored.add(name)
575 self.ui.warn(_('ignoring: %s\n') % e)
576 self.ui.warn(_('ignoring: %s\n') % e)
576 return copies
577 return copies
577
578
578 def getcommit(self, rev):
579 def getcommit(self, rev):
579 ctx = self._changectx(rev)
580 ctx = self._changectx(rev)
580 _parents = self._parents(ctx)
581 _parents = self._parents(ctx)
581 parents = [p.hex() for p in _parents]
582 parents = [p.hex() for p in _parents]
582 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
583 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
583 crev = rev
584 crev = rev
584
585
585 return common.commit(author=ctx.user(),
586 return common.commit(author=ctx.user(),
586 date=util.datestr(ctx.date(),
587 date=dateutil.datestr(ctx.date(),
587 '%Y-%m-%d %H:%M:%S %1%2'),
588 '%Y-%m-%d %H:%M:%S %1%2'),
588 desc=ctx.description(),
589 desc=ctx.description(),
589 rev=crev,
590 rev=crev,
590 parents=parents,
591 parents=parents,
591 optparents=optparents,
592 optparents=optparents,
592 branch=ctx.branch(),
593 branch=ctx.branch(),
593 extra=ctx.extra(),
594 extra=ctx.extra(),
594 sortkey=ctx.rev(),
595 sortkey=ctx.rev(),
595 saverev=self.saverev,
596 saverev=self.saverev,
596 phase=ctx.phase())
597 phase=ctx.phase())
597
598
598 def gettags(self):
599 def gettags(self):
599 # This will get written to .hgtags, filter non global tags out.
600 # This will get written to .hgtags, filter non global tags out.
600 tags = [t for t in self.repo.tagslist()
601 tags = [t for t in self.repo.tagslist()
601 if self.repo.tagtype(t[0]) == 'global']
602 if self.repo.tagtype(t[0]) == 'global']
602 return dict([(name, nodemod.hex(node)) for name, node in tags
603 return dict([(name, nodemod.hex(node)) for name, node in tags
603 if self.keep(node)])
604 if self.keep(node)])
604
605
605 def getchangedfiles(self, rev, i):
606 def getchangedfiles(self, rev, i):
606 ctx = self._changectx(rev)
607 ctx = self._changectx(rev)
607 parents = self._parents(ctx)
608 parents = self._parents(ctx)
608 if not parents and i is None:
609 if not parents and i is None:
609 i = 0
610 i = 0
610 ma, r = ctx.manifest().keys(), []
611 ma, r = ctx.manifest().keys(), []
611 else:
612 else:
612 i = i or 0
613 i = i or 0
613 ma, r = self._changedfiles(parents[i], ctx)
614 ma, r = self._changedfiles(parents[i], ctx)
614 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
615 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
615
616
616 if i == 0:
617 if i == 0:
617 self._changescache = (rev, (ma, r))
618 self._changescache = (rev, (ma, r))
618
619
619 return ma + r
620 return ma + r
620
621
621 def converted(self, rev, destrev):
622 def converted(self, rev, destrev):
622 if self.convertfp is None:
623 if self.convertfp is None:
623 self.convertfp = open(self.repo.vfs.join('shamap'), 'ab')
624 self.convertfp = open(self.repo.vfs.join('shamap'), 'ab')
624 self.convertfp.write(util.tonativeeol('%s %s\n' % (destrev, rev)))
625 self.convertfp.write(util.tonativeeol('%s %s\n' % (destrev, rev)))
625 self.convertfp.flush()
626 self.convertfp.flush()
626
627
627 def before(self):
628 def before(self):
628 self.ui.debug('run hg source pre-conversion action\n')
629 self.ui.debug('run hg source pre-conversion action\n')
629
630
630 def after(self):
631 def after(self):
631 self.ui.debug('run hg source post-conversion action\n')
632 self.ui.debug('run hg source post-conversion action\n')
632
633
633 def hasnativeorder(self):
634 def hasnativeorder(self):
634 return True
635 return True
635
636
636 def hasnativeclose(self):
637 def hasnativeclose(self):
637 return True
638 return True
638
639
639 def lookuprev(self, rev):
640 def lookuprev(self, rev):
640 try:
641 try:
641 return nodemod.hex(self.repo.lookup(rev))
642 return nodemod.hex(self.repo.lookup(rev))
642 except (error.RepoError, error.LookupError):
643 except (error.RepoError, error.LookupError):
643 return None
644 return None
644
645
645 def getbookmarks(self):
646 def getbookmarks(self):
646 return bookmarks.listbookmarks(self.repo)
647 return bookmarks.listbookmarks(self.repo)
647
648
648 def checkrevformat(self, revstr, mapname='splicemap'):
649 def checkrevformat(self, revstr, mapname='splicemap'):
649 """ Mercurial, revision string is a 40 byte hex """
650 """ Mercurial, revision string is a 40 byte hex """
650 self.checkhexformat(revstr, mapname)
651 self.checkhexformat(revstr, mapname)
@@ -1,373 +1,374 b''
1 # monotone.py - monotone support for the convert extension
1 # monotone.py - monotone support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 # others
4 # others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import re
11 import re
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial import (
14 from mercurial import (
15 error,
15 error,
16 pycompat,
16 pycompat,
17 util,
18 )
17 )
18 from mercurial.utils import dateutil
19
19
20 from . import common
20 from . import common
21
21
22 class monotone_source(common.converter_source, common.commandline):
22 class monotone_source(common.converter_source, common.commandline):
23 def __init__(self, ui, repotype, path=None, revs=None):
23 def __init__(self, ui, repotype, path=None, revs=None):
24 common.converter_source.__init__(self, ui, repotype, path, revs)
24 common.converter_source.__init__(self, ui, repotype, path, revs)
25 if revs and len(revs) > 1:
25 if revs and len(revs) > 1:
26 raise error.Abort(_('monotone source does not support specifying '
26 raise error.Abort(_('monotone source does not support specifying '
27 'multiple revs'))
27 'multiple revs'))
28 common.commandline.__init__(self, ui, 'mtn')
28 common.commandline.__init__(self, ui, 'mtn')
29
29
30 self.ui = ui
30 self.ui = ui
31 self.path = path
31 self.path = path
32 self.automatestdio = False
32 self.automatestdio = False
33 self.revs = revs
33 self.revs = revs
34
34
35 norepo = common.NoRepo(_("%s does not look like a monotone repository")
35 norepo = common.NoRepo(_("%s does not look like a monotone repository")
36 % path)
36 % path)
37 if not os.path.exists(os.path.join(path, '_MTN')):
37 if not os.path.exists(os.path.join(path, '_MTN')):
38 # Could be a monotone repository (SQLite db file)
38 # Could be a monotone repository (SQLite db file)
39 try:
39 try:
40 f = open(path, 'rb')
40 f = open(path, 'rb')
41 header = f.read(16)
41 header = f.read(16)
42 f.close()
42 f.close()
43 except IOError:
43 except IOError:
44 header = ''
44 header = ''
45 if header != 'SQLite format 3\x00':
45 if header != 'SQLite format 3\x00':
46 raise norepo
46 raise norepo
47
47
48 # regular expressions for parsing monotone output
48 # regular expressions for parsing monotone output
49 space = br'\s*'
49 space = br'\s*'
50 name = br'\s+"((?:\\"|[^"])*)"\s*'
50 name = br'\s+"((?:\\"|[^"])*)"\s*'
51 value = name
51 value = name
52 revision = br'\s+\[(\w+)\]\s*'
52 revision = br'\s+\[(\w+)\]\s*'
53 lines = br'(?:.|\n)+'
53 lines = br'(?:.|\n)+'
54
54
55 self.dir_re = re.compile(space + "dir" + name)
55 self.dir_re = re.compile(space + "dir" + name)
56 self.file_re = re.compile(space + "file" + name +
56 self.file_re = re.compile(space + "file" + name +
57 "content" + revision)
57 "content" + revision)
58 self.add_file_re = re.compile(space + "add_file" + name +
58 self.add_file_re = re.compile(space + "add_file" + name +
59 "content" + revision)
59 "content" + revision)
60 self.patch_re = re.compile(space + "patch" + name +
60 self.patch_re = re.compile(space + "patch" + name +
61 "from" + revision + "to" + revision)
61 "from" + revision + "to" + revision)
62 self.rename_re = re.compile(space + "rename" + name + "to" + name)
62 self.rename_re = re.compile(space + "rename" + name + "to" + name)
63 self.delete_re = re.compile(space + "delete" + name)
63 self.delete_re = re.compile(space + "delete" + name)
64 self.tag_re = re.compile(space + "tag" + name + "revision" +
64 self.tag_re = re.compile(space + "tag" + name + "revision" +
65 revision)
65 revision)
66 self.cert_re = re.compile(lines + space + "name" + name +
66 self.cert_re = re.compile(lines + space + "name" + name +
67 "value" + value)
67 "value" + value)
68
68
69 attr = space + "file" + lines + space + "attr" + space
69 attr = space + "file" + lines + space + "attr" + space
70 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
70 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
71 space + '"true"')
71 space + '"true"')
72
72
73 # cached data
73 # cached data
74 self.manifest_rev = None
74 self.manifest_rev = None
75 self.manifest = None
75 self.manifest = None
76 self.files = None
76 self.files = None
77 self.dirs = None
77 self.dirs = None
78
78
79 common.checktool('mtn', abort=False)
79 common.checktool('mtn', abort=False)
80
80
81 def mtnrun(self, *args, **kwargs):
81 def mtnrun(self, *args, **kwargs):
82 if self.automatestdio:
82 if self.automatestdio:
83 return self.mtnrunstdio(*args, **kwargs)
83 return self.mtnrunstdio(*args, **kwargs)
84 else:
84 else:
85 return self.mtnrunsingle(*args, **kwargs)
85 return self.mtnrunsingle(*args, **kwargs)
86
86
87 def mtnrunsingle(self, *args, **kwargs):
87 def mtnrunsingle(self, *args, **kwargs):
88 kwargs[r'd'] = self.path
88 kwargs[r'd'] = self.path
89 return self.run0('automate', *args, **kwargs)
89 return self.run0('automate', *args, **kwargs)
90
90
91 def mtnrunstdio(self, *args, **kwargs):
91 def mtnrunstdio(self, *args, **kwargs):
92 # Prepare the command in automate stdio format
92 # Prepare the command in automate stdio format
93 kwargs = pycompat.byteskwargs(kwargs)
93 kwargs = pycompat.byteskwargs(kwargs)
94 command = []
94 command = []
95 for k, v in kwargs.iteritems():
95 for k, v in kwargs.iteritems():
96 command.append("%s:%s" % (len(k), k))
96 command.append("%s:%s" % (len(k), k))
97 if v:
97 if v:
98 command.append("%s:%s" % (len(v), v))
98 command.append("%s:%s" % (len(v), v))
99 if command:
99 if command:
100 command.insert(0, 'o')
100 command.insert(0, 'o')
101 command.append('e')
101 command.append('e')
102
102
103 command.append('l')
103 command.append('l')
104 for arg in args:
104 for arg in args:
105 command += "%s:%s" % (len(arg), arg)
105 command += "%s:%s" % (len(arg), arg)
106 command.append('e')
106 command.append('e')
107 command = ''.join(command)
107 command = ''.join(command)
108
108
109 self.ui.debug("mtn: sending '%s'\n" % command)
109 self.ui.debug("mtn: sending '%s'\n" % command)
110 self.mtnwritefp.write(command)
110 self.mtnwritefp.write(command)
111 self.mtnwritefp.flush()
111 self.mtnwritefp.flush()
112
112
113 return self.mtnstdioreadcommandoutput(command)
113 return self.mtnstdioreadcommandoutput(command)
114
114
115 def mtnstdioreadpacket(self):
115 def mtnstdioreadpacket(self):
116 read = None
116 read = None
117 commandnbr = ''
117 commandnbr = ''
118 while read != ':':
118 while read != ':':
119 read = self.mtnreadfp.read(1)
119 read = self.mtnreadfp.read(1)
120 if not read:
120 if not read:
121 raise error.Abort(_('bad mtn packet - no end of commandnbr'))
121 raise error.Abort(_('bad mtn packet - no end of commandnbr'))
122 commandnbr += read
122 commandnbr += read
123 commandnbr = commandnbr[:-1]
123 commandnbr = commandnbr[:-1]
124
124
125 stream = self.mtnreadfp.read(1)
125 stream = self.mtnreadfp.read(1)
126 if stream not in 'mewptl':
126 if stream not in 'mewptl':
127 raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
127 raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
128
128
129 read = self.mtnreadfp.read(1)
129 read = self.mtnreadfp.read(1)
130 if read != ':':
130 if read != ':':
131 raise error.Abort(_('bad mtn packet - no divider before size'))
131 raise error.Abort(_('bad mtn packet - no divider before size'))
132
132
133 read = None
133 read = None
134 lengthstr = ''
134 lengthstr = ''
135 while read != ':':
135 while read != ':':
136 read = self.mtnreadfp.read(1)
136 read = self.mtnreadfp.read(1)
137 if not read:
137 if not read:
138 raise error.Abort(_('bad mtn packet - no end of packet size'))
138 raise error.Abort(_('bad mtn packet - no end of packet size'))
139 lengthstr += read
139 lengthstr += read
140 try:
140 try:
141 length = long(lengthstr[:-1])
141 length = long(lengthstr[:-1])
142 except TypeError:
142 except TypeError:
143 raise error.Abort(_('bad mtn packet - bad packet size %s')
143 raise error.Abort(_('bad mtn packet - bad packet size %s')
144 % lengthstr)
144 % lengthstr)
145
145
146 read = self.mtnreadfp.read(length)
146 read = self.mtnreadfp.read(length)
147 if len(read) != length:
147 if len(read) != length:
148 raise error.Abort(_("bad mtn packet - unable to read full packet "
148 raise error.Abort(_("bad mtn packet - unable to read full packet "
149 "read %s of %s") % (len(read), length))
149 "read %s of %s") % (len(read), length))
150
150
151 return (commandnbr, stream, length, read)
151 return (commandnbr, stream, length, read)
152
152
153 def mtnstdioreadcommandoutput(self, command):
153 def mtnstdioreadcommandoutput(self, command):
154 retval = []
154 retval = []
155 while True:
155 while True:
156 commandnbr, stream, length, output = self.mtnstdioreadpacket()
156 commandnbr, stream, length, output = self.mtnstdioreadpacket()
157 self.ui.debug('mtn: read packet %s:%s:%s\n' %
157 self.ui.debug('mtn: read packet %s:%s:%s\n' %
158 (commandnbr, stream, length))
158 (commandnbr, stream, length))
159
159
160 if stream == 'l':
160 if stream == 'l':
161 # End of command
161 # End of command
162 if output != '0':
162 if output != '0':
163 raise error.Abort(_("mtn command '%s' returned %s") %
163 raise error.Abort(_("mtn command '%s' returned %s") %
164 (command, output))
164 (command, output))
165 break
165 break
166 elif stream in 'ew':
166 elif stream in 'ew':
167 # Error, warning output
167 # Error, warning output
168 self.ui.warn(_('%s error:\n') % self.command)
168 self.ui.warn(_('%s error:\n') % self.command)
169 self.ui.warn(output)
169 self.ui.warn(output)
170 elif stream == 'p':
170 elif stream == 'p':
171 # Progress messages
171 # Progress messages
172 self.ui.debug('mtn: ' + output)
172 self.ui.debug('mtn: ' + output)
173 elif stream == 'm':
173 elif stream == 'm':
174 # Main stream - command output
174 # Main stream - command output
175 retval.append(output)
175 retval.append(output)
176
176
177 return ''.join(retval)
177 return ''.join(retval)
178
178
179 def mtnloadmanifest(self, rev):
179 def mtnloadmanifest(self, rev):
180 if self.manifest_rev == rev:
180 if self.manifest_rev == rev:
181 return
181 return
182 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
182 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
183 self.manifest_rev = rev
183 self.manifest_rev = rev
184 self.files = {}
184 self.files = {}
185 self.dirs = {}
185 self.dirs = {}
186
186
187 for e in self.manifest:
187 for e in self.manifest:
188 m = self.file_re.match(e)
188 m = self.file_re.match(e)
189 if m:
189 if m:
190 attr = ""
190 attr = ""
191 name = m.group(1)
191 name = m.group(1)
192 node = m.group(2)
192 node = m.group(2)
193 if self.attr_execute_re.match(e):
193 if self.attr_execute_re.match(e):
194 attr += "x"
194 attr += "x"
195 self.files[name] = (node, attr)
195 self.files[name] = (node, attr)
196 m = self.dir_re.match(e)
196 m = self.dir_re.match(e)
197 if m:
197 if m:
198 self.dirs[m.group(1)] = True
198 self.dirs[m.group(1)] = True
199
199
200 def mtnisfile(self, name, rev):
200 def mtnisfile(self, name, rev):
201 # a non-file could be a directory or a deleted or renamed file
201 # a non-file could be a directory or a deleted or renamed file
202 self.mtnloadmanifest(rev)
202 self.mtnloadmanifest(rev)
203 return name in self.files
203 return name in self.files
204
204
205 def mtnisdir(self, name, rev):
205 def mtnisdir(self, name, rev):
206 self.mtnloadmanifest(rev)
206 self.mtnloadmanifest(rev)
207 return name in self.dirs
207 return name in self.dirs
208
208
209 def mtngetcerts(self, rev):
209 def mtngetcerts(self, rev):
210 certs = {"author":"<missing>", "date":"<missing>",
210 certs = {"author":"<missing>", "date":"<missing>",
211 "changelog":"<missing>", "branch":"<missing>"}
211 "changelog":"<missing>", "branch":"<missing>"}
212 certlist = self.mtnrun("certs", rev)
212 certlist = self.mtnrun("certs", rev)
213 # mtn < 0.45:
213 # mtn < 0.45:
214 # key "test@selenic.com"
214 # key "test@selenic.com"
215 # mtn >= 0.45:
215 # mtn >= 0.45:
216 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
216 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
217 certlist = re.split('\n\n key ["\[]', certlist)
217 certlist = re.split('\n\n key ["\[]', certlist)
218 for e in certlist:
218 for e in certlist:
219 m = self.cert_re.match(e)
219 m = self.cert_re.match(e)
220 if m:
220 if m:
221 name, value = m.groups()
221 name, value = m.groups()
222 value = value.replace(r'\"', '"')
222 value = value.replace(r'\"', '"')
223 value = value.replace(r'\\', '\\')
223 value = value.replace(r'\\', '\\')
224 certs[name] = value
224 certs[name] = value
225 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
225 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
226 # and all times are stored in UTC
226 # and all times are stored in UTC
227 certs["date"] = certs["date"].split('.')[0] + " UTC"
227 certs["date"] = certs["date"].split('.')[0] + " UTC"
228 return certs
228 return certs
229
229
230 # implement the converter_source interface:
230 # implement the converter_source interface:
231
231
232 def getheads(self):
232 def getheads(self):
233 if not self.revs:
233 if not self.revs:
234 return self.mtnrun("leaves").splitlines()
234 return self.mtnrun("leaves").splitlines()
235 else:
235 else:
236 return self.revs
236 return self.revs
237
237
238 def getchanges(self, rev, full):
238 def getchanges(self, rev, full):
239 if full:
239 if full:
240 raise error.Abort(_("convert from monotone does not support "
240 raise error.Abort(_("convert from monotone does not support "
241 "--full"))
241 "--full"))
242 revision = self.mtnrun("get_revision", rev).split("\n\n")
242 revision = self.mtnrun("get_revision", rev).split("\n\n")
243 files = {}
243 files = {}
244 ignoremove = {}
244 ignoremove = {}
245 renameddirs = []
245 renameddirs = []
246 copies = {}
246 copies = {}
247 for e in revision:
247 for e in revision:
248 m = self.add_file_re.match(e)
248 m = self.add_file_re.match(e)
249 if m:
249 if m:
250 files[m.group(1)] = rev
250 files[m.group(1)] = rev
251 ignoremove[m.group(1)] = rev
251 ignoremove[m.group(1)] = rev
252 m = self.patch_re.match(e)
252 m = self.patch_re.match(e)
253 if m:
253 if m:
254 files[m.group(1)] = rev
254 files[m.group(1)] = rev
255 # Delete/rename is handled later when the convert engine
255 # Delete/rename is handled later when the convert engine
256 # discovers an IOError exception from getfile,
256 # discovers an IOError exception from getfile,
257 # but only if we add the "from" file to the list of changes.
257 # but only if we add the "from" file to the list of changes.
258 m = self.delete_re.match(e)
258 m = self.delete_re.match(e)
259 if m:
259 if m:
260 files[m.group(1)] = rev
260 files[m.group(1)] = rev
261 m = self.rename_re.match(e)
261 m = self.rename_re.match(e)
262 if m:
262 if m:
263 toname = m.group(2)
263 toname = m.group(2)
264 fromname = m.group(1)
264 fromname = m.group(1)
265 if self.mtnisfile(toname, rev):
265 if self.mtnisfile(toname, rev):
266 ignoremove[toname] = 1
266 ignoremove[toname] = 1
267 copies[toname] = fromname
267 copies[toname] = fromname
268 files[toname] = rev
268 files[toname] = rev
269 files[fromname] = rev
269 files[fromname] = rev
270 elif self.mtnisdir(toname, rev):
270 elif self.mtnisdir(toname, rev):
271 renameddirs.append((fromname, toname))
271 renameddirs.append((fromname, toname))
272
272
273 # Directory renames can be handled only once we have recorded
273 # Directory renames can be handled only once we have recorded
274 # all new files
274 # all new files
275 for fromdir, todir in renameddirs:
275 for fromdir, todir in renameddirs:
276 renamed = {}
276 renamed = {}
277 for tofile in self.files:
277 for tofile in self.files:
278 if tofile in ignoremove:
278 if tofile in ignoremove:
279 continue
279 continue
280 if tofile.startswith(todir + '/'):
280 if tofile.startswith(todir + '/'):
281 renamed[tofile] = fromdir + tofile[len(todir):]
281 renamed[tofile] = fromdir + tofile[len(todir):]
282 # Avoid chained moves like:
282 # Avoid chained moves like:
283 # d1(/a) => d3/d1(/a)
283 # d1(/a) => d3/d1(/a)
284 # d2 => d3
284 # d2 => d3
285 ignoremove[tofile] = 1
285 ignoremove[tofile] = 1
286 for tofile, fromfile in renamed.items():
286 for tofile, fromfile in renamed.items():
287 self.ui.debug (_("copying file in renamed directory "
287 self.ui.debug (_("copying file in renamed directory "
288 "from '%s' to '%s'")
288 "from '%s' to '%s'")
289 % (fromfile, tofile), '\n')
289 % (fromfile, tofile), '\n')
290 files[tofile] = rev
290 files[tofile] = rev
291 copies[tofile] = fromfile
291 copies[tofile] = fromfile
292 for fromfile in renamed.values():
292 for fromfile in renamed.values():
293 files[fromfile] = rev
293 files[fromfile] = rev
294
294
295 return (files.items(), copies, set())
295 return (files.items(), copies, set())
296
296
297 def getfile(self, name, rev):
297 def getfile(self, name, rev):
298 if not self.mtnisfile(name, rev):
298 if not self.mtnisfile(name, rev):
299 return None, None
299 return None, None
300 try:
300 try:
301 data = self.mtnrun("get_file_of", name, r=rev)
301 data = self.mtnrun("get_file_of", name, r=rev)
302 except Exception:
302 except Exception:
303 return None, None
303 return None, None
304 self.mtnloadmanifest(rev)
304 self.mtnloadmanifest(rev)
305 node, attr = self.files.get(name, (None, ""))
305 node, attr = self.files.get(name, (None, ""))
306 return data, attr
306 return data, attr
307
307
308 def getcommit(self, rev):
308 def getcommit(self, rev):
309 extra = {}
309 extra = {}
310 certs = self.mtngetcerts(rev)
310 certs = self.mtngetcerts(rev)
311 if certs.get('suspend') == certs["branch"]:
311 if certs.get('suspend') == certs["branch"]:
312 extra['close'] = 1
312 extra['close'] = 1
313 dateformat = "%Y-%m-%dT%H:%M:%S"
313 return common.commit(
314 return common.commit(
314 author=certs["author"],
315 author=certs["author"],
315 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
316 date=dateutil.datestr(dateutil.strdate(certs["date"], dateformat)),
316 desc=certs["changelog"],
317 desc=certs["changelog"],
317 rev=rev,
318 rev=rev,
318 parents=self.mtnrun("parents", rev).splitlines(),
319 parents=self.mtnrun("parents", rev).splitlines(),
319 branch=certs["branch"],
320 branch=certs["branch"],
320 extra=extra)
321 extra=extra)
321
322
322 def gettags(self):
323 def gettags(self):
323 tags = {}
324 tags = {}
324 for e in self.mtnrun("tags").split("\n\n"):
325 for e in self.mtnrun("tags").split("\n\n"):
325 m = self.tag_re.match(e)
326 m = self.tag_re.match(e)
326 if m:
327 if m:
327 tags[m.group(1)] = m.group(2)
328 tags[m.group(1)] = m.group(2)
328 return tags
329 return tags
329
330
330 def getchangedfiles(self, rev, i):
331 def getchangedfiles(self, rev, i):
331 # This function is only needed to support --filemap
332 # This function is only needed to support --filemap
332 # ... and we don't support that
333 # ... and we don't support that
333 raise NotImplementedError
334 raise NotImplementedError
334
335
335 def before(self):
336 def before(self):
336 # Check if we have a new enough version to use automate stdio
337 # Check if we have a new enough version to use automate stdio
337 version = 0.0
338 version = 0.0
338 try:
339 try:
339 versionstr = self.mtnrunsingle("interface_version")
340 versionstr = self.mtnrunsingle("interface_version")
340 version = float(versionstr)
341 version = float(versionstr)
341 except Exception:
342 except Exception:
342 raise error.Abort(_("unable to determine mtn automate interface "
343 raise error.Abort(_("unable to determine mtn automate interface "
343 "version"))
344 "version"))
344
345
345 if version >= 12.0:
346 if version >= 12.0:
346 self.automatestdio = True
347 self.automatestdio = True
347 self.ui.debug("mtn automate version %s - using automate stdio\n" %
348 self.ui.debug("mtn automate version %s - using automate stdio\n" %
348 version)
349 version)
349
350
350 # launch the long-running automate stdio process
351 # launch the long-running automate stdio process
351 self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
352 self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
352 '-d', self.path)
353 '-d', self.path)
353 # read the headers
354 # read the headers
354 read = self.mtnreadfp.readline()
355 read = self.mtnreadfp.readline()
355 if read != 'format-version: 2\n':
356 if read != 'format-version: 2\n':
356 raise error.Abort(_('mtn automate stdio header unexpected: %s')
357 raise error.Abort(_('mtn automate stdio header unexpected: %s')
357 % read)
358 % read)
358 while read != '\n':
359 while read != '\n':
359 read = self.mtnreadfp.readline()
360 read = self.mtnreadfp.readline()
360 if not read:
361 if not read:
361 raise error.Abort(_("failed to reach end of mtn automate "
362 raise error.Abort(_("failed to reach end of mtn automate "
362 "stdio headers"))
363 "stdio headers"))
363 else:
364 else:
364 self.ui.debug("mtn automate version %s - not using automate stdio "
365 self.ui.debug("mtn automate version %s - not using automate stdio "
365 "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
366 "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
366
367
367 def after(self):
368 def after(self):
368 if self.automatestdio:
369 if self.automatestdio:
369 self.mtnwritefp.close()
370 self.mtnwritefp.close()
370 self.mtnwritefp = None
371 self.mtnwritefp = None
371 self.mtnreadfp.close()
372 self.mtnreadfp.close()
372 self.mtnreadfp = None
373 self.mtnreadfp = None
373
374
@@ -1,373 +1,374 b''
1 # Perforce source for convert extension.
1 # Perforce source for convert extension.
2 #
2 #
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import marshal
9 import marshal
10 import re
10 import re
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 error,
14 error,
15 util,
15 util,
16 )
16 )
17 from mercurial.utils import dateutil
17
18
18 from . import common
19 from . import common
19
20
20 def loaditer(f):
21 def loaditer(f):
21 "Yield the dictionary objects generated by p4"
22 "Yield the dictionary objects generated by p4"
22 try:
23 try:
23 while True:
24 while True:
24 d = marshal.load(f)
25 d = marshal.load(f)
25 if not d:
26 if not d:
26 break
27 break
27 yield d
28 yield d
28 except EOFError:
29 except EOFError:
29 pass
30 pass
30
31
31 def decodefilename(filename):
32 def decodefilename(filename):
32 """Perforce escapes special characters @, #, *, or %
33 """Perforce escapes special characters @, #, *, or %
33 with %40, %23, %2A, or %25 respectively
34 with %40, %23, %2A, or %25 respectively
34
35
35 >>> decodefilename(b'portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
36 >>> decodefilename(b'portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
36 'portable-net45%2Bnetcore45%2Bwp8%2BMonoAndroid'
37 'portable-net45%2Bnetcore45%2Bwp8%2BMonoAndroid'
37 >>> decodefilename(b'//Depot/Directory/%2525/%2523/%23%40.%2A')
38 >>> decodefilename(b'//Depot/Directory/%2525/%2523/%23%40.%2A')
38 '//Depot/Directory/%25/%23/#@.*'
39 '//Depot/Directory/%25/%23/#@.*'
39 """
40 """
40 replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
41 replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
41 for k, v in replacements:
42 for k, v in replacements:
42 filename = filename.replace(k, v)
43 filename = filename.replace(k, v)
43 return filename
44 return filename
44
45
45 class p4_source(common.converter_source):
46 class p4_source(common.converter_source):
46 def __init__(self, ui, repotype, path, revs=None):
47 def __init__(self, ui, repotype, path, revs=None):
47 # avoid import cycle
48 # avoid import cycle
48 from . import convcmd
49 from . import convcmd
49
50
50 super(p4_source, self).__init__(ui, repotype, path, revs=revs)
51 super(p4_source, self).__init__(ui, repotype, path, revs=revs)
51
52
52 if "/" in path and not path.startswith('//'):
53 if "/" in path and not path.startswith('//'):
53 raise common.NoRepo(_('%s does not look like a P4 repository') %
54 raise common.NoRepo(_('%s does not look like a P4 repository') %
54 path)
55 path)
55
56
56 common.checktool('p4', abort=False)
57 common.checktool('p4', abort=False)
57
58
58 self.revmap = {}
59 self.revmap = {}
59 self.encoding = self.ui.config('convert', 'p4.encoding',
60 self.encoding = self.ui.config('convert', 'p4.encoding',
60 convcmd.orig_encoding)
61 convcmd.orig_encoding)
61 self.re_type = re.compile(
62 self.re_type = re.compile(
62 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
63 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
63 "(\+\w+)?$")
64 "(\+\w+)?$")
64 self.re_keywords = re.compile(
65 self.re_keywords = re.compile(
65 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
66 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
66 r":[^$\n]*\$")
67 r":[^$\n]*\$")
67 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
68 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
68
69
69 if revs and len(revs) > 1:
70 if revs and len(revs) > 1:
70 raise error.Abort(_("p4 source does not support specifying "
71 raise error.Abort(_("p4 source does not support specifying "
71 "multiple revisions"))
72 "multiple revisions"))
72
73
73 def setrevmap(self, revmap):
74 def setrevmap(self, revmap):
74 """Sets the parsed revmap dictionary.
75 """Sets the parsed revmap dictionary.
75
76
76 Revmap stores mappings from a source revision to a target revision.
77 Revmap stores mappings from a source revision to a target revision.
77 It is set in convertcmd.convert and provided by the user as a file
78 It is set in convertcmd.convert and provided by the user as a file
78 on the commandline.
79 on the commandline.
79
80
80 Revisions in the map are considered beeing present in the
81 Revisions in the map are considered beeing present in the
81 repository and ignored during _parse(). This allows for incremental
82 repository and ignored during _parse(). This allows for incremental
82 imports if a revmap is provided.
83 imports if a revmap is provided.
83 """
84 """
84 self.revmap = revmap
85 self.revmap = revmap
85
86
86 def _parse_view(self, path):
87 def _parse_view(self, path):
87 "Read changes affecting the path"
88 "Read changes affecting the path"
88 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
89 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
89 stdout = util.popen(cmd, mode='rb')
90 stdout = util.popen(cmd, mode='rb')
90 p4changes = {}
91 p4changes = {}
91 for d in loaditer(stdout):
92 for d in loaditer(stdout):
92 c = d.get("change", None)
93 c = d.get("change", None)
93 if c:
94 if c:
94 p4changes[c] = True
95 p4changes[c] = True
95 return p4changes
96 return p4changes
96
97
97 def _parse(self, ui, path):
98 def _parse(self, ui, path):
98 "Prepare list of P4 filenames and revisions to import"
99 "Prepare list of P4 filenames and revisions to import"
99 p4changes = {}
100 p4changes = {}
100 changeset = {}
101 changeset = {}
101 files_map = {}
102 files_map = {}
102 copies_map = {}
103 copies_map = {}
103 localname = {}
104 localname = {}
104 depotname = {}
105 depotname = {}
105 heads = []
106 heads = []
106
107
107 ui.status(_('reading p4 views\n'))
108 ui.status(_('reading p4 views\n'))
108
109
109 # read client spec or view
110 # read client spec or view
110 if "/" in path:
111 if "/" in path:
111 p4changes.update(self._parse_view(path))
112 p4changes.update(self._parse_view(path))
112 if path.startswith("//") and path.endswith("/..."):
113 if path.startswith("//") and path.endswith("/..."):
113 views = {path[:-3]:""}
114 views = {path[:-3]:""}
114 else:
115 else:
115 views = {"//": ""}
116 views = {"//": ""}
116 else:
117 else:
117 cmd = 'p4 -G client -o %s' % util.shellquote(path)
118 cmd = 'p4 -G client -o %s' % util.shellquote(path)
118 clientspec = marshal.load(util.popen(cmd, mode='rb'))
119 clientspec = marshal.load(util.popen(cmd, mode='rb'))
119
120
120 views = {}
121 views = {}
121 for client in clientspec:
122 for client in clientspec:
122 if client.startswith("View"):
123 if client.startswith("View"):
123 sview, cview = clientspec[client].split()
124 sview, cview = clientspec[client].split()
124 p4changes.update(self._parse_view(sview))
125 p4changes.update(self._parse_view(sview))
125 if sview.endswith("...") and cview.endswith("..."):
126 if sview.endswith("...") and cview.endswith("..."):
126 sview = sview[:-3]
127 sview = sview[:-3]
127 cview = cview[:-3]
128 cview = cview[:-3]
128 cview = cview[2:]
129 cview = cview[2:]
129 cview = cview[cview.find("/") + 1:]
130 cview = cview[cview.find("/") + 1:]
130 views[sview] = cview
131 views[sview] = cview
131
132
132 # list of changes that affect our source files
133 # list of changes that affect our source files
133 p4changes = p4changes.keys()
134 p4changes = p4changes.keys()
134 p4changes.sort(key=int)
135 p4changes.sort(key=int)
135
136
136 # list with depot pathnames, longest first
137 # list with depot pathnames, longest first
137 vieworder = views.keys()
138 vieworder = views.keys()
138 vieworder.sort(key=len, reverse=True)
139 vieworder.sort(key=len, reverse=True)
139
140
140 # handle revision limiting
141 # handle revision limiting
141 startrev = self.ui.config('convert', 'p4.startrev')
142 startrev = self.ui.config('convert', 'p4.startrev')
142
143
143 # now read the full changelists to get the list of file revisions
144 # now read the full changelists to get the list of file revisions
144 ui.status(_('collecting p4 changelists\n'))
145 ui.status(_('collecting p4 changelists\n'))
145 lastid = None
146 lastid = None
146 for change in p4changes:
147 for change in p4changes:
147 if startrev and int(change) < int(startrev):
148 if startrev and int(change) < int(startrev):
148 continue
149 continue
149 if self.revs and int(change) > int(self.revs[0]):
150 if self.revs and int(change) > int(self.revs[0]):
150 continue
151 continue
151 if change in self.revmap:
152 if change in self.revmap:
152 # Ignore already present revisions, but set the parent pointer.
153 # Ignore already present revisions, but set the parent pointer.
153 lastid = change
154 lastid = change
154 continue
155 continue
155
156
156 if lastid:
157 if lastid:
157 parents = [lastid]
158 parents = [lastid]
158 else:
159 else:
159 parents = []
160 parents = []
160
161
161 d = self._fetch_revision(change)
162 d = self._fetch_revision(change)
162 c = self._construct_commit(d, parents)
163 c = self._construct_commit(d, parents)
163
164
164 descarr = c.desc.splitlines(True)
165 descarr = c.desc.splitlines(True)
165 if len(descarr) > 0:
166 if len(descarr) > 0:
166 shortdesc = descarr[0].rstrip('\r\n')
167 shortdesc = descarr[0].rstrip('\r\n')
167 else:
168 else:
168 shortdesc = '**empty changelist description**'
169 shortdesc = '**empty changelist description**'
169
170
170 t = '%s %s' % (c.rev, repr(shortdesc)[1:-1])
171 t = '%s %s' % (c.rev, repr(shortdesc)[1:-1])
171 ui.status(util.ellipsis(t, 80) + '\n')
172 ui.status(util.ellipsis(t, 80) + '\n')
172
173
173 files = []
174 files = []
174 copies = {}
175 copies = {}
175 copiedfiles = []
176 copiedfiles = []
176 i = 0
177 i = 0
177 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
178 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
178 oldname = d["depotFile%d" % i]
179 oldname = d["depotFile%d" % i]
179 filename = None
180 filename = None
180 for v in vieworder:
181 for v in vieworder:
181 if oldname.lower().startswith(v.lower()):
182 if oldname.lower().startswith(v.lower()):
182 filename = decodefilename(views[v] + oldname[len(v):])
183 filename = decodefilename(views[v] + oldname[len(v):])
183 break
184 break
184 if filename:
185 if filename:
185 files.append((filename, d["rev%d" % i]))
186 files.append((filename, d["rev%d" % i]))
186 depotname[filename] = oldname
187 depotname[filename] = oldname
187 if (d.get("action%d" % i) == "move/add"):
188 if (d.get("action%d" % i) == "move/add"):
188 copiedfiles.append(filename)
189 copiedfiles.append(filename)
189 localname[oldname] = filename
190 localname[oldname] = filename
190 i += 1
191 i += 1
191
192
192 # Collect information about copied files
193 # Collect information about copied files
193 for filename in copiedfiles:
194 for filename in copiedfiles:
194 oldname = depotname[filename]
195 oldname = depotname[filename]
195
196
196 flcmd = 'p4 -G filelog %s' \
197 flcmd = 'p4 -G filelog %s' \
197 % util.shellquote(oldname)
198 % util.shellquote(oldname)
198 flstdout = util.popen(flcmd, mode='rb')
199 flstdout = util.popen(flcmd, mode='rb')
199
200
200 copiedfilename = None
201 copiedfilename = None
201 for d in loaditer(flstdout):
202 for d in loaditer(flstdout):
202 copiedoldname = None
203 copiedoldname = None
203
204
204 i = 0
205 i = 0
205 while ("change%d" % i) in d:
206 while ("change%d" % i) in d:
206 if (d["change%d" % i] == change and
207 if (d["change%d" % i] == change and
207 d["action%d" % i] == "move/add"):
208 d["action%d" % i] == "move/add"):
208 j = 0
209 j = 0
209 while ("file%d,%d" % (i, j)) in d:
210 while ("file%d,%d" % (i, j)) in d:
210 if d["how%d,%d" % (i, j)] == "moved from":
211 if d["how%d,%d" % (i, j)] == "moved from":
211 copiedoldname = d["file%d,%d" % (i, j)]
212 copiedoldname = d["file%d,%d" % (i, j)]
212 break
213 break
213 j += 1
214 j += 1
214 i += 1
215 i += 1
215
216
216 if copiedoldname and copiedoldname in localname:
217 if copiedoldname and copiedoldname in localname:
217 copiedfilename = localname[copiedoldname]
218 copiedfilename = localname[copiedoldname]
218 break
219 break
219
220
220 if copiedfilename:
221 if copiedfilename:
221 copies[filename] = copiedfilename
222 copies[filename] = copiedfilename
222 else:
223 else:
223 ui.warn(_("cannot find source for copied file: %s@%s\n")
224 ui.warn(_("cannot find source for copied file: %s@%s\n")
224 % (filename, change))
225 % (filename, change))
225
226
226 changeset[change] = c
227 changeset[change] = c
227 files_map[change] = files
228 files_map[change] = files
228 copies_map[change] = copies
229 copies_map[change] = copies
229 lastid = change
230 lastid = change
230
231
231 if lastid and len(changeset) > 0:
232 if lastid and len(changeset) > 0:
232 heads = [lastid]
233 heads = [lastid]
233
234
234 return {
235 return {
235 'changeset': changeset,
236 'changeset': changeset,
236 'files': files_map,
237 'files': files_map,
237 'copies': copies_map,
238 'copies': copies_map,
238 'heads': heads,
239 'heads': heads,
239 'depotname': depotname,
240 'depotname': depotname,
240 }
241 }
241
242
242 @util.propertycache
243 @util.propertycache
243 def _parse_once(self):
244 def _parse_once(self):
244 return self._parse(self.ui, self.path)
245 return self._parse(self.ui, self.path)
245
246
246 @util.propertycache
247 @util.propertycache
247 def copies(self):
248 def copies(self):
248 return self._parse_once['copies']
249 return self._parse_once['copies']
249
250
250 @util.propertycache
251 @util.propertycache
251 def files(self):
252 def files(self):
252 return self._parse_once['files']
253 return self._parse_once['files']
253
254
254 @util.propertycache
255 @util.propertycache
255 def changeset(self):
256 def changeset(self):
256 return self._parse_once['changeset']
257 return self._parse_once['changeset']
257
258
258 @util.propertycache
259 @util.propertycache
259 def heads(self):
260 def heads(self):
260 return self._parse_once['heads']
261 return self._parse_once['heads']
261
262
262 @util.propertycache
263 @util.propertycache
263 def depotname(self):
264 def depotname(self):
264 return self._parse_once['depotname']
265 return self._parse_once['depotname']
265
266
266 def getheads(self):
267 def getheads(self):
267 return self.heads
268 return self.heads
268
269
269 def getfile(self, name, rev):
270 def getfile(self, name, rev):
270 cmd = 'p4 -G print %s' \
271 cmd = 'p4 -G print %s' \
271 % util.shellquote("%s#%s" % (self.depotname[name], rev))
272 % util.shellquote("%s#%s" % (self.depotname[name], rev))
272
273
273 lasterror = None
274 lasterror = None
274 while True:
275 while True:
275 stdout = util.popen(cmd, mode='rb')
276 stdout = util.popen(cmd, mode='rb')
276
277
277 mode = None
278 mode = None
278 contents = []
279 contents = []
279 keywords = None
280 keywords = None
280
281
281 for d in loaditer(stdout):
282 for d in loaditer(stdout):
282 code = d["code"]
283 code = d["code"]
283 data = d.get("data")
284 data = d.get("data")
284
285
285 if code == "error":
286 if code == "error":
286 # if this is the first time error happened
287 # if this is the first time error happened
287 # re-attempt getting the file
288 # re-attempt getting the file
288 if not lasterror:
289 if not lasterror:
289 lasterror = IOError(d["generic"], data)
290 lasterror = IOError(d["generic"], data)
290 # this will exit inner-most for-loop
291 # this will exit inner-most for-loop
291 break
292 break
292 else:
293 else:
293 raise lasterror
294 raise lasterror
294
295
295 elif code == "stat":
296 elif code == "stat":
296 action = d.get("action")
297 action = d.get("action")
297 if action in ["purge", "delete", "move/delete"]:
298 if action in ["purge", "delete", "move/delete"]:
298 return None, None
299 return None, None
299 p4type = self.re_type.match(d["type"])
300 p4type = self.re_type.match(d["type"])
300 if p4type:
301 if p4type:
301 mode = ""
302 mode = ""
302 flags = ((p4type.group(1) or "")
303 flags = ((p4type.group(1) or "")
303 + (p4type.group(3) or ""))
304 + (p4type.group(3) or ""))
304 if "x" in flags:
305 if "x" in flags:
305 mode = "x"
306 mode = "x"
306 if p4type.group(2) == "symlink":
307 if p4type.group(2) == "symlink":
307 mode = "l"
308 mode = "l"
308 if "ko" in flags:
309 if "ko" in flags:
309 keywords = self.re_keywords_old
310 keywords = self.re_keywords_old
310 elif "k" in flags:
311 elif "k" in flags:
311 keywords = self.re_keywords
312 keywords = self.re_keywords
312
313
313 elif code == "text" or code == "binary":
314 elif code == "text" or code == "binary":
314 contents.append(data)
315 contents.append(data)
315
316
316 lasterror = None
317 lasterror = None
317
318
318 if not lasterror:
319 if not lasterror:
319 break
320 break
320
321
321 if mode is None:
322 if mode is None:
322 return None, None
323 return None, None
323
324
324 contents = ''.join(contents)
325 contents = ''.join(contents)
325
326
326 if keywords:
327 if keywords:
327 contents = keywords.sub("$\\1$", contents)
328 contents = keywords.sub("$\\1$", contents)
328 if mode == "l" and contents.endswith("\n"):
329 if mode == "l" and contents.endswith("\n"):
329 contents = contents[:-1]
330 contents = contents[:-1]
330
331
331 return contents, mode
332 return contents, mode
332
333
333 def getchanges(self, rev, full):
334 def getchanges(self, rev, full):
334 if full:
335 if full:
335 raise error.Abort(_("convert from p4 does not support --full"))
336 raise error.Abort(_("convert from p4 does not support --full"))
336 return self.files[rev], self.copies[rev], set()
337 return self.files[rev], self.copies[rev], set()
337
338
338 def _construct_commit(self, obj, parents=None):
339 def _construct_commit(self, obj, parents=None):
339 """
340 """
340 Constructs a common.commit object from an unmarshalled
341 Constructs a common.commit object from an unmarshalled
341 `p4 describe` output
342 `p4 describe` output
342 """
343 """
343 desc = self.recode(obj.get("desc", ""))
344 desc = self.recode(obj.get("desc", ""))
344 date = (int(obj["time"]), 0) # timezone not set
345 date = (int(obj["time"]), 0) # timezone not set
345 if parents is None:
346 if parents is None:
346 parents = []
347 parents = []
347
348
348 return common.commit(author=self.recode(obj["user"]),
349 return common.commit(author=self.recode(obj["user"]),
349 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
350 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
350 parents=parents, desc=desc, branch=None, rev=obj['change'],
351 parents=parents, desc=desc, branch=None, rev=obj['change'],
351 extra={"p4": obj['change'], "convert_revision": obj['change']})
352 extra={"p4": obj['change'], "convert_revision": obj['change']})
352
353
353 def _fetch_revision(self, rev):
354 def _fetch_revision(self, rev):
354 """Return an output of `p4 describe` including author, commit date as
355 """Return an output of `p4 describe` including author, commit date as
355 a dictionary."""
356 a dictionary."""
356 cmd = "p4 -G describe -s %s" % rev
357 cmd = "p4 -G describe -s %s" % rev
357 stdout = util.popen(cmd, mode='rb')
358 stdout = util.popen(cmd, mode='rb')
358 return marshal.load(stdout)
359 return marshal.load(stdout)
359
360
360 def getcommit(self, rev):
361 def getcommit(self, rev):
361 if rev in self.changeset:
362 if rev in self.changeset:
362 return self.changeset[rev]
363 return self.changeset[rev]
363 elif rev in self.revmap:
364 elif rev in self.revmap:
364 d = self._fetch_revision(rev)
365 d = self._fetch_revision(rev)
365 return self._construct_commit(d, parents=None)
366 return self._construct_commit(d, parents=None)
366 raise error.Abort(
367 raise error.Abort(
367 _("cannot find %s in the revmap or parsed changesets") % rev)
368 _("cannot find %s in the revmap or parsed changesets") % rev)
368
369
369 def gettags(self):
370 def gettags(self):
370 return {}
371 return {}
371
372
372 def getchangedfiles(self, rev, i):
373 def getchangedfiles(self, rev, i):
373 return sorted([x[0] for x in self.files[rev]])
374 return sorted([x[0] for x in self.files[rev]])
@@ -1,1356 +1,1357 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5
5
6 import os
6 import os
7 import re
7 import re
8 import tempfile
8 import tempfile
9 import xml.dom.minidom
9 import xml.dom.minidom
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import (
12 from mercurial import (
13 encoding,
13 encoding,
14 error,
14 error,
15 pycompat,
15 pycompat,
16 util,
16 util,
17 vfs as vfsmod,
17 vfs as vfsmod,
18 )
18 )
19 from mercurial.utils import dateutil
19
20
20 from . import common
21 from . import common
21
22
22 pickle = util.pickle
23 pickle = util.pickle
23 stringio = util.stringio
24 stringio = util.stringio
24 propertycache = util.propertycache
25 propertycache = util.propertycache
25 urlerr = util.urlerr
26 urlerr = util.urlerr
26 urlreq = util.urlreq
27 urlreq = util.urlreq
27
28
28 commandline = common.commandline
29 commandline = common.commandline
29 commit = common.commit
30 commit = common.commit
30 converter_sink = common.converter_sink
31 converter_sink = common.converter_sink
31 converter_source = common.converter_source
32 converter_source = common.converter_source
32 decodeargs = common.decodeargs
33 decodeargs = common.decodeargs
33 encodeargs = common.encodeargs
34 encodeargs = common.encodeargs
34 makedatetimestamp = common.makedatetimestamp
35 makedatetimestamp = common.makedatetimestamp
35 mapfile = common.mapfile
36 mapfile = common.mapfile
36 MissingTool = common.MissingTool
37 MissingTool = common.MissingTool
37 NoRepo = common.NoRepo
38 NoRepo = common.NoRepo
38
39
39 # Subversion stuff. Works best with very recent Python SVN bindings
40 # Subversion stuff. Works best with very recent Python SVN bindings
40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
41 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
41 # these bindings.
42 # these bindings.
42
43
43 try:
44 try:
44 import svn
45 import svn
45 import svn.client
46 import svn.client
46 import svn.core
47 import svn.core
47 import svn.ra
48 import svn.ra
48 import svn.delta
49 import svn.delta
49 from . import transport
50 from . import transport
50 import warnings
51 import warnings
51 warnings.filterwarnings('ignore',
52 warnings.filterwarnings('ignore',
52 module='svn.core',
53 module='svn.core',
53 category=DeprecationWarning)
54 category=DeprecationWarning)
54 svn.core.SubversionException # trigger import to catch error
55 svn.core.SubversionException # trigger import to catch error
55
56
56 except ImportError:
57 except ImportError:
57 svn = None
58 svn = None
58
59
59 class SvnPathNotFound(Exception):
60 class SvnPathNotFound(Exception):
60 pass
61 pass
61
62
62 def revsplit(rev):
63 def revsplit(rev):
63 """Parse a revision string and return (uuid, path, revnum).
64 """Parse a revision string and return (uuid, path, revnum).
64 >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
65 >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
65 ... b'/proj%20B/mytrunk/mytrunk@1')
66 ... b'/proj%20B/mytrunk/mytrunk@1')
66 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
67 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
67 >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
68 >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
68 ('', '', 1)
69 ('', '', 1)
69 >>> revsplit(b'@7')
70 >>> revsplit(b'@7')
70 ('', '', 7)
71 ('', '', 7)
71 >>> revsplit(b'7')
72 >>> revsplit(b'7')
72 ('', '', 0)
73 ('', '', 0)
73 >>> revsplit(b'bad')
74 >>> revsplit(b'bad')
74 ('', '', 0)
75 ('', '', 0)
75 """
76 """
76 parts = rev.rsplit('@', 1)
77 parts = rev.rsplit('@', 1)
77 revnum = 0
78 revnum = 0
78 if len(parts) > 1:
79 if len(parts) > 1:
79 revnum = int(parts[1])
80 revnum = int(parts[1])
80 parts = parts[0].split('/', 1)
81 parts = parts[0].split('/', 1)
81 uuid = ''
82 uuid = ''
82 mod = ''
83 mod = ''
83 if len(parts) > 1 and parts[0].startswith('svn:'):
84 if len(parts) > 1 and parts[0].startswith('svn:'):
84 uuid = parts[0][4:]
85 uuid = parts[0][4:]
85 mod = '/' + parts[1]
86 mod = '/' + parts[1]
86 return uuid, mod, revnum
87 return uuid, mod, revnum
87
88
88 def quote(s):
89 def quote(s):
89 # As of svn 1.7, many svn calls expect "canonical" paths. In
90 # As of svn 1.7, many svn calls expect "canonical" paths. In
90 # theory, we should call svn.core.*canonicalize() on all paths
91 # theory, we should call svn.core.*canonicalize() on all paths
91 # before passing them to the API. Instead, we assume the base url
92 # before passing them to the API. Instead, we assume the base url
92 # is canonical and copy the behaviour of svn URL encoding function
93 # is canonical and copy the behaviour of svn URL encoding function
93 # so we can extend it safely with new components. The "safe"
94 # so we can extend it safely with new components. The "safe"
94 # characters were taken from the "svn_uri__char_validity" table in
95 # characters were taken from the "svn_uri__char_validity" table in
95 # libsvn_subr/path.c.
96 # libsvn_subr/path.c.
96 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
97 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
97
98
98 def geturl(path):
99 def geturl(path):
99 try:
100 try:
100 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
101 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
101 except svn.core.SubversionException:
102 except svn.core.SubversionException:
102 # svn.client.url_from_path() fails with local repositories
103 # svn.client.url_from_path() fails with local repositories
103 pass
104 pass
104 if os.path.isdir(path):
105 if os.path.isdir(path):
105 path = os.path.normpath(os.path.abspath(path))
106 path = os.path.normpath(os.path.abspath(path))
106 if pycompat.iswindows:
107 if pycompat.iswindows:
107 path = '/' + util.normpath(path)
108 path = '/' + util.normpath(path)
108 # Module URL is later compared with the repository URL returned
109 # Module URL is later compared with the repository URL returned
109 # by svn API, which is UTF-8.
110 # by svn API, which is UTF-8.
110 path = encoding.tolocal(path)
111 path = encoding.tolocal(path)
111 path = 'file://%s' % quote(path)
112 path = 'file://%s' % quote(path)
112 return svn.core.svn_path_canonicalize(path)
113 return svn.core.svn_path_canonicalize(path)
113
114
114 def optrev(number):
115 def optrev(number):
115 optrev = svn.core.svn_opt_revision_t()
116 optrev = svn.core.svn_opt_revision_t()
116 optrev.kind = svn.core.svn_opt_revision_number
117 optrev.kind = svn.core.svn_opt_revision_number
117 optrev.value.number = number
118 optrev.value.number = number
118 return optrev
119 return optrev
119
120
120 class changedpath(object):
121 class changedpath(object):
121 def __init__(self, p):
122 def __init__(self, p):
122 self.copyfrom_path = p.copyfrom_path
123 self.copyfrom_path = p.copyfrom_path
123 self.copyfrom_rev = p.copyfrom_rev
124 self.copyfrom_rev = p.copyfrom_rev
124 self.action = p.action
125 self.action = p.action
125
126
126 def get_log_child(fp, url, paths, start, end, limit=0,
127 def get_log_child(fp, url, paths, start, end, limit=0,
127 discover_changed_paths=True, strict_node_history=False):
128 discover_changed_paths=True, strict_node_history=False):
128 protocol = -1
129 protocol = -1
129 def receiver(orig_paths, revnum, author, date, message, pool):
130 def receiver(orig_paths, revnum, author, date, message, pool):
130 paths = {}
131 paths = {}
131 if orig_paths is not None:
132 if orig_paths is not None:
132 for k, v in orig_paths.iteritems():
133 for k, v in orig_paths.iteritems():
133 paths[k] = changedpath(v)
134 paths[k] = changedpath(v)
134 pickle.dump((paths, revnum, author, date, message),
135 pickle.dump((paths, revnum, author, date, message),
135 fp, protocol)
136 fp, protocol)
136
137
137 try:
138 try:
138 # Use an ra of our own so that our parent can consume
139 # Use an ra of our own so that our parent can consume
139 # our results without confusing the server.
140 # our results without confusing the server.
140 t = transport.SvnRaTransport(url=url)
141 t = transport.SvnRaTransport(url=url)
141 svn.ra.get_log(t.ra, paths, start, end, limit,
142 svn.ra.get_log(t.ra, paths, start, end, limit,
142 discover_changed_paths,
143 discover_changed_paths,
143 strict_node_history,
144 strict_node_history,
144 receiver)
145 receiver)
145 except IOError:
146 except IOError:
146 # Caller may interrupt the iteration
147 # Caller may interrupt the iteration
147 pickle.dump(None, fp, protocol)
148 pickle.dump(None, fp, protocol)
148 except Exception as inst:
149 except Exception as inst:
149 pickle.dump(str(inst), fp, protocol)
150 pickle.dump(str(inst), fp, protocol)
150 else:
151 else:
151 pickle.dump(None, fp, protocol)
152 pickle.dump(None, fp, protocol)
152 fp.flush()
153 fp.flush()
153 # With large history, cleanup process goes crazy and suddenly
154 # With large history, cleanup process goes crazy and suddenly
154 # consumes *huge* amount of memory. The output file being closed,
155 # consumes *huge* amount of memory. The output file being closed,
155 # there is no need for clean termination.
156 # there is no need for clean termination.
156 os._exit(0)
157 os._exit(0)
157
158
158 def debugsvnlog(ui, **opts):
159 def debugsvnlog(ui, **opts):
159 """Fetch SVN log in a subprocess and channel them back to parent to
160 """Fetch SVN log in a subprocess and channel them back to parent to
160 avoid memory collection issues.
161 avoid memory collection issues.
161 """
162 """
162 if svn is None:
163 if svn is None:
163 raise error.Abort(_('debugsvnlog could not load Subversion python '
164 raise error.Abort(_('debugsvnlog could not load Subversion python '
164 'bindings'))
165 'bindings'))
165
166
166 args = decodeargs(ui.fin.read())
167 args = decodeargs(ui.fin.read())
167 get_log_child(ui.fout, *args)
168 get_log_child(ui.fout, *args)
168
169
169 class logstream(object):
170 class logstream(object):
170 """Interruptible revision log iterator."""
171 """Interruptible revision log iterator."""
171 def __init__(self, stdout):
172 def __init__(self, stdout):
172 self._stdout = stdout
173 self._stdout = stdout
173
174
174 def __iter__(self):
175 def __iter__(self):
175 while True:
176 while True:
176 try:
177 try:
177 entry = pickle.load(self._stdout)
178 entry = pickle.load(self._stdout)
178 except EOFError:
179 except EOFError:
179 raise error.Abort(_('Mercurial failed to run itself, check'
180 raise error.Abort(_('Mercurial failed to run itself, check'
180 ' hg executable is in PATH'))
181 ' hg executable is in PATH'))
181 try:
182 try:
182 orig_paths, revnum, author, date, message = entry
183 orig_paths, revnum, author, date, message = entry
183 except (TypeError, ValueError):
184 except (TypeError, ValueError):
184 if entry is None:
185 if entry is None:
185 break
186 break
186 raise error.Abort(_("log stream exception '%s'") % entry)
187 raise error.Abort(_("log stream exception '%s'") % entry)
187 yield entry
188 yield entry
188
189
189 def close(self):
190 def close(self):
190 if self._stdout:
191 if self._stdout:
191 self._stdout.close()
192 self._stdout.close()
192 self._stdout = None
193 self._stdout = None
193
194
194 class directlogstream(list):
195 class directlogstream(list):
195 """Direct revision log iterator.
196 """Direct revision log iterator.
196 This can be used for debugging and development but it will probably leak
197 This can be used for debugging and development but it will probably leak
197 memory and is not suitable for real conversions."""
198 memory and is not suitable for real conversions."""
198 def __init__(self, url, paths, start, end, limit=0,
199 def __init__(self, url, paths, start, end, limit=0,
199 discover_changed_paths=True, strict_node_history=False):
200 discover_changed_paths=True, strict_node_history=False):
200
201
201 def receiver(orig_paths, revnum, author, date, message, pool):
202 def receiver(orig_paths, revnum, author, date, message, pool):
202 paths = {}
203 paths = {}
203 if orig_paths is not None:
204 if orig_paths is not None:
204 for k, v in orig_paths.iteritems():
205 for k, v in orig_paths.iteritems():
205 paths[k] = changedpath(v)
206 paths[k] = changedpath(v)
206 self.append((paths, revnum, author, date, message))
207 self.append((paths, revnum, author, date, message))
207
208
208 # Use an ra of our own so that our parent can consume
209 # Use an ra of our own so that our parent can consume
209 # our results without confusing the server.
210 # our results without confusing the server.
210 t = transport.SvnRaTransport(url=url)
211 t = transport.SvnRaTransport(url=url)
211 svn.ra.get_log(t.ra, paths, start, end, limit,
212 svn.ra.get_log(t.ra, paths, start, end, limit,
212 discover_changed_paths,
213 discover_changed_paths,
213 strict_node_history,
214 strict_node_history,
214 receiver)
215 receiver)
215
216
216 def close(self):
217 def close(self):
217 pass
218 pass
218
219
219 # Check to see if the given path is a local Subversion repo. Verify this by
220 # Check to see if the given path is a local Subversion repo. Verify this by
220 # looking for several svn-specific files and directories in the given
221 # looking for several svn-specific files and directories in the given
221 # directory.
222 # directory.
222 def filecheck(ui, path, proto):
223 def filecheck(ui, path, proto):
223 for x in ('locks', 'hooks', 'format', 'db'):
224 for x in ('locks', 'hooks', 'format', 'db'):
224 if not os.path.exists(os.path.join(path, x)):
225 if not os.path.exists(os.path.join(path, x)):
225 return False
226 return False
226 return True
227 return True
227
228
228 # Check to see if a given path is the root of an svn repo over http. We verify
229 # Check to see if a given path is the root of an svn repo over http. We verify
229 # this by requesting a version-controlled URL we know can't exist and looking
230 # this by requesting a version-controlled URL we know can't exist and looking
230 # for the svn-specific "not found" XML.
231 # for the svn-specific "not found" XML.
231 def httpcheck(ui, path, proto):
232 def httpcheck(ui, path, proto):
232 try:
233 try:
233 opener = urlreq.buildopener()
234 opener = urlreq.buildopener()
234 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path), 'rb')
235 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path), 'rb')
235 data = rsp.read()
236 data = rsp.read()
236 except urlerr.httperror as inst:
237 except urlerr.httperror as inst:
237 if inst.code != 404:
238 if inst.code != 404:
238 # Except for 404 we cannot know for sure this is not an svn repo
239 # Except for 404 we cannot know for sure this is not an svn repo
239 ui.warn(_('svn: cannot probe remote repository, assume it could '
240 ui.warn(_('svn: cannot probe remote repository, assume it could '
240 'be a subversion repository. Use --source-type if you '
241 'be a subversion repository. Use --source-type if you '
241 'know better.\n'))
242 'know better.\n'))
242 return True
243 return True
243 data = inst.fp.read()
244 data = inst.fp.read()
244 except Exception:
245 except Exception:
245 # Could be urlerr.urlerror if the URL is invalid or anything else.
246 # Could be urlerr.urlerror if the URL is invalid or anything else.
246 return False
247 return False
247 return '<m:human-readable errcode="160013">' in data
248 return '<m:human-readable errcode="160013">' in data
248
249
249 protomap = {'http': httpcheck,
250 protomap = {'http': httpcheck,
250 'https': httpcheck,
251 'https': httpcheck,
251 'file': filecheck,
252 'file': filecheck,
252 }
253 }
253 def issvnurl(ui, url):
254 def issvnurl(ui, url):
254 try:
255 try:
255 proto, path = url.split('://', 1)
256 proto, path = url.split('://', 1)
256 if proto == 'file':
257 if proto == 'file':
257 if (pycompat.iswindows and path[:1] == '/'
258 if (pycompat.iswindows and path[:1] == '/'
258 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
259 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
259 path = path[:2] + ':/' + path[6:]
260 path = path[:2] + ':/' + path[6:]
260 path = urlreq.url2pathname(path)
261 path = urlreq.url2pathname(path)
261 except ValueError:
262 except ValueError:
262 proto = 'file'
263 proto = 'file'
263 path = os.path.abspath(url)
264 path = os.path.abspath(url)
264 if proto == 'file':
265 if proto == 'file':
265 path = util.pconvert(path)
266 path = util.pconvert(path)
266 check = protomap.get(proto, lambda *args: False)
267 check = protomap.get(proto, lambda *args: False)
267 while '/' in path:
268 while '/' in path:
268 if check(ui, path, proto):
269 if check(ui, path, proto):
269 return True
270 return True
270 path = path.rsplit('/', 1)[0]
271 path = path.rsplit('/', 1)[0]
271 return False
272 return False
272
273
273 # SVN conversion code stolen from bzr-svn and tailor
274 # SVN conversion code stolen from bzr-svn and tailor
274 #
275 #
275 # Subversion looks like a versioned filesystem, branches structures
276 # Subversion looks like a versioned filesystem, branches structures
276 # are defined by conventions and not enforced by the tool. First,
277 # are defined by conventions and not enforced by the tool. First,
277 # we define the potential branches (modules) as "trunk" and "branches"
278 # we define the potential branches (modules) as "trunk" and "branches"
278 # children directories. Revisions are then identified by their
279 # children directories. Revisions are then identified by their
279 # module and revision number (and a repository identifier).
280 # module and revision number (and a repository identifier).
280 #
281 #
281 # The revision graph is really a tree (or a forest). By default, a
282 # The revision graph is really a tree (or a forest). By default, a
282 # revision parent is the previous revision in the same module. If the
283 # revision parent is the previous revision in the same module. If the
283 # module directory is copied/moved from another module then the
284 # module directory is copied/moved from another module then the
284 # revision is the module root and its parent the source revision in
285 # revision is the module root and its parent the source revision in
285 # the parent module. A revision has at most one parent.
286 # the parent module. A revision has at most one parent.
286 #
287 #
287 class svn_source(converter_source):
288 class svn_source(converter_source):
288 def __init__(self, ui, repotype, url, revs=None):
289 def __init__(self, ui, repotype, url, revs=None):
289 super(svn_source, self).__init__(ui, repotype, url, revs=revs)
290 super(svn_source, self).__init__(ui, repotype, url, revs=revs)
290
291
291 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
292 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
292 (os.path.exists(url) and
293 (os.path.exists(url) and
293 os.path.exists(os.path.join(url, '.svn'))) or
294 os.path.exists(os.path.join(url, '.svn'))) or
294 issvnurl(ui, url)):
295 issvnurl(ui, url)):
295 raise NoRepo(_("%s does not look like a Subversion repository")
296 raise NoRepo(_("%s does not look like a Subversion repository")
296 % url)
297 % url)
297 if svn is None:
298 if svn is None:
298 raise MissingTool(_('could not load Subversion python bindings'))
299 raise MissingTool(_('could not load Subversion python bindings'))
299
300
300 try:
301 try:
301 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
302 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
302 if version < (1, 4):
303 if version < (1, 4):
303 raise MissingTool(_('Subversion python bindings %d.%d found, '
304 raise MissingTool(_('Subversion python bindings %d.%d found, '
304 '1.4 or later required') % version)
305 '1.4 or later required') % version)
305 except AttributeError:
306 except AttributeError:
306 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
307 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
307 'or later required'))
308 'or later required'))
308
309
309 self.lastrevs = {}
310 self.lastrevs = {}
310
311
311 latest = None
312 latest = None
312 try:
313 try:
313 # Support file://path@rev syntax. Useful e.g. to convert
314 # Support file://path@rev syntax. Useful e.g. to convert
314 # deleted branches.
315 # deleted branches.
315 at = url.rfind('@')
316 at = url.rfind('@')
316 if at >= 0:
317 if at >= 0:
317 latest = int(url[at + 1:])
318 latest = int(url[at + 1:])
318 url = url[:at]
319 url = url[:at]
319 except ValueError:
320 except ValueError:
320 pass
321 pass
321 self.url = geturl(url)
322 self.url = geturl(url)
322 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
323 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
323 try:
324 try:
324 self.transport = transport.SvnRaTransport(url=self.url)
325 self.transport = transport.SvnRaTransport(url=self.url)
325 self.ra = self.transport.ra
326 self.ra = self.transport.ra
326 self.ctx = self.transport.client
327 self.ctx = self.transport.client
327 self.baseurl = svn.ra.get_repos_root(self.ra)
328 self.baseurl = svn.ra.get_repos_root(self.ra)
328 # Module is either empty or a repository path starting with
329 # Module is either empty or a repository path starting with
329 # a slash and not ending with a slash.
330 # a slash and not ending with a slash.
330 self.module = urlreq.unquote(self.url[len(self.baseurl):])
331 self.module = urlreq.unquote(self.url[len(self.baseurl):])
331 self.prevmodule = None
332 self.prevmodule = None
332 self.rootmodule = self.module
333 self.rootmodule = self.module
333 self.commits = {}
334 self.commits = {}
334 self.paths = {}
335 self.paths = {}
335 self.uuid = svn.ra.get_uuid(self.ra)
336 self.uuid = svn.ra.get_uuid(self.ra)
336 except svn.core.SubversionException:
337 except svn.core.SubversionException:
337 ui.traceback()
338 ui.traceback()
338 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
339 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
339 svn.core.SVN_VER_MINOR,
340 svn.core.SVN_VER_MINOR,
340 svn.core.SVN_VER_MICRO)
341 svn.core.SVN_VER_MICRO)
341 raise NoRepo(_("%s does not look like a Subversion repository "
342 raise NoRepo(_("%s does not look like a Subversion repository "
342 "to libsvn version %s")
343 "to libsvn version %s")
343 % (self.url, svnversion))
344 % (self.url, svnversion))
344
345
345 if revs:
346 if revs:
346 if len(revs) > 1:
347 if len(revs) > 1:
347 raise error.Abort(_('subversion source does not support '
348 raise error.Abort(_('subversion source does not support '
348 'specifying multiple revisions'))
349 'specifying multiple revisions'))
349 try:
350 try:
350 latest = int(revs[0])
351 latest = int(revs[0])
351 except ValueError:
352 except ValueError:
352 raise error.Abort(_('svn: revision %s is not an integer') %
353 raise error.Abort(_('svn: revision %s is not an integer') %
353 revs[0])
354 revs[0])
354
355
355 trunkcfg = self.ui.config('convert', 'svn.trunk')
356 trunkcfg = self.ui.config('convert', 'svn.trunk')
356 if trunkcfg is None:
357 if trunkcfg is None:
357 trunkcfg = 'trunk'
358 trunkcfg = 'trunk'
358 self.trunkname = trunkcfg.strip('/')
359 self.trunkname = trunkcfg.strip('/')
359 self.startrev = self.ui.config('convert', 'svn.startrev')
360 self.startrev = self.ui.config('convert', 'svn.startrev')
360 try:
361 try:
361 self.startrev = int(self.startrev)
362 self.startrev = int(self.startrev)
362 if self.startrev < 0:
363 if self.startrev < 0:
363 self.startrev = 0
364 self.startrev = 0
364 except ValueError:
365 except ValueError:
365 raise error.Abort(_('svn: start revision %s is not an integer')
366 raise error.Abort(_('svn: start revision %s is not an integer')
366 % self.startrev)
367 % self.startrev)
367
368
368 try:
369 try:
369 self.head = self.latest(self.module, latest)
370 self.head = self.latest(self.module, latest)
370 except SvnPathNotFound:
371 except SvnPathNotFound:
371 self.head = None
372 self.head = None
372 if not self.head:
373 if not self.head:
373 raise error.Abort(_('no revision found in module %s')
374 raise error.Abort(_('no revision found in module %s')
374 % self.module)
375 % self.module)
375 self.last_changed = self.revnum(self.head)
376 self.last_changed = self.revnum(self.head)
376
377
377 self._changescache = (None, None)
378 self._changescache = (None, None)
378
379
379 if os.path.exists(os.path.join(url, '.svn/entries')):
380 if os.path.exists(os.path.join(url, '.svn/entries')):
380 self.wc = url
381 self.wc = url
381 else:
382 else:
382 self.wc = None
383 self.wc = None
383 self.convertfp = None
384 self.convertfp = None
384
385
385 def setrevmap(self, revmap):
386 def setrevmap(self, revmap):
386 lastrevs = {}
387 lastrevs = {}
387 for revid in revmap:
388 for revid in revmap:
388 uuid, module, revnum = revsplit(revid)
389 uuid, module, revnum = revsplit(revid)
389 lastrevnum = lastrevs.setdefault(module, revnum)
390 lastrevnum = lastrevs.setdefault(module, revnum)
390 if revnum > lastrevnum:
391 if revnum > lastrevnum:
391 lastrevs[module] = revnum
392 lastrevs[module] = revnum
392 self.lastrevs = lastrevs
393 self.lastrevs = lastrevs
393
394
394 def exists(self, path, optrev):
395 def exists(self, path, optrev):
395 try:
396 try:
396 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
397 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
397 optrev, False, self.ctx)
398 optrev, False, self.ctx)
398 return True
399 return True
399 except svn.core.SubversionException:
400 except svn.core.SubversionException:
400 return False
401 return False
401
402
402 def getheads(self):
403 def getheads(self):
403
404
404 def isdir(path, revnum):
405 def isdir(path, revnum):
405 kind = self._checkpath(path, revnum)
406 kind = self._checkpath(path, revnum)
406 return kind == svn.core.svn_node_dir
407 return kind == svn.core.svn_node_dir
407
408
408 def getcfgpath(name, rev):
409 def getcfgpath(name, rev):
409 cfgpath = self.ui.config('convert', 'svn.' + name)
410 cfgpath = self.ui.config('convert', 'svn.' + name)
410 if cfgpath is not None and cfgpath.strip() == '':
411 if cfgpath is not None and cfgpath.strip() == '':
411 return None
412 return None
412 path = (cfgpath or name).strip('/')
413 path = (cfgpath or name).strip('/')
413 if not self.exists(path, rev):
414 if not self.exists(path, rev):
414 if self.module.endswith(path) and name == 'trunk':
415 if self.module.endswith(path) and name == 'trunk':
415 # we are converting from inside this directory
416 # we are converting from inside this directory
416 return None
417 return None
417 if cfgpath:
418 if cfgpath:
418 raise error.Abort(_('expected %s to be at %r, but not found'
419 raise error.Abort(_('expected %s to be at %r, but not found'
419 ) % (name, path))
420 ) % (name, path))
420 return None
421 return None
421 self.ui.note(_('found %s at %r\n') % (name, path))
422 self.ui.note(_('found %s at %r\n') % (name, path))
422 return path
423 return path
423
424
424 rev = optrev(self.last_changed)
425 rev = optrev(self.last_changed)
425 oldmodule = ''
426 oldmodule = ''
426 trunk = getcfgpath('trunk', rev)
427 trunk = getcfgpath('trunk', rev)
427 self.tags = getcfgpath('tags', rev)
428 self.tags = getcfgpath('tags', rev)
428 branches = getcfgpath('branches', rev)
429 branches = getcfgpath('branches', rev)
429
430
430 # If the project has a trunk or branches, we will extract heads
431 # If the project has a trunk or branches, we will extract heads
431 # from them. We keep the project root otherwise.
432 # from them. We keep the project root otherwise.
432 if trunk:
433 if trunk:
433 oldmodule = self.module or ''
434 oldmodule = self.module or ''
434 self.module += '/' + trunk
435 self.module += '/' + trunk
435 self.head = self.latest(self.module, self.last_changed)
436 self.head = self.latest(self.module, self.last_changed)
436 if not self.head:
437 if not self.head:
437 raise error.Abort(_('no revision found in module %s')
438 raise error.Abort(_('no revision found in module %s')
438 % self.module)
439 % self.module)
439
440
440 # First head in the list is the module's head
441 # First head in the list is the module's head
441 self.heads = [self.head]
442 self.heads = [self.head]
442 if self.tags is not None:
443 if self.tags is not None:
443 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
444 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
444
445
445 # Check if branches bring a few more heads to the list
446 # Check if branches bring a few more heads to the list
446 if branches:
447 if branches:
447 rpath = self.url.strip('/')
448 rpath = self.url.strip('/')
448 branchnames = svn.client.ls(rpath + '/' + quote(branches),
449 branchnames = svn.client.ls(rpath + '/' + quote(branches),
449 rev, False, self.ctx)
450 rev, False, self.ctx)
450 for branch in sorted(branchnames):
451 for branch in sorted(branchnames):
451 module = '%s/%s/%s' % (oldmodule, branches, branch)
452 module = '%s/%s/%s' % (oldmodule, branches, branch)
452 if not isdir(module, self.last_changed):
453 if not isdir(module, self.last_changed):
453 continue
454 continue
454 brevid = self.latest(module, self.last_changed)
455 brevid = self.latest(module, self.last_changed)
455 if not brevid:
456 if not brevid:
456 self.ui.note(_('ignoring empty branch %s\n') % branch)
457 self.ui.note(_('ignoring empty branch %s\n') % branch)
457 continue
458 continue
458 self.ui.note(_('found branch %s at %d\n') %
459 self.ui.note(_('found branch %s at %d\n') %
459 (branch, self.revnum(brevid)))
460 (branch, self.revnum(brevid)))
460 self.heads.append(brevid)
461 self.heads.append(brevid)
461
462
462 if self.startrev and self.heads:
463 if self.startrev and self.heads:
463 if len(self.heads) > 1:
464 if len(self.heads) > 1:
464 raise error.Abort(_('svn: start revision is not supported '
465 raise error.Abort(_('svn: start revision is not supported '
465 'with more than one branch'))
466 'with more than one branch'))
466 revnum = self.revnum(self.heads[0])
467 revnum = self.revnum(self.heads[0])
467 if revnum < self.startrev:
468 if revnum < self.startrev:
468 raise error.Abort(
469 raise error.Abort(
469 _('svn: no revision found after start revision %d')
470 _('svn: no revision found after start revision %d')
470 % self.startrev)
471 % self.startrev)
471
472
472 return self.heads
473 return self.heads
473
474
474 def _getchanges(self, rev, full):
475 def _getchanges(self, rev, full):
475 (paths, parents) = self.paths[rev]
476 (paths, parents) = self.paths[rev]
476 copies = {}
477 copies = {}
477 if parents:
478 if parents:
478 files, self.removed, copies = self.expandpaths(rev, paths, parents)
479 files, self.removed, copies = self.expandpaths(rev, paths, parents)
479 if full or not parents:
480 if full or not parents:
480 # Perform a full checkout on roots
481 # Perform a full checkout on roots
481 uuid, module, revnum = revsplit(rev)
482 uuid, module, revnum = revsplit(rev)
482 entries = svn.client.ls(self.baseurl + quote(module),
483 entries = svn.client.ls(self.baseurl + quote(module),
483 optrev(revnum), True, self.ctx)
484 optrev(revnum), True, self.ctx)
484 files = [n for n, e in entries.iteritems()
485 files = [n for n, e in entries.iteritems()
485 if e.kind == svn.core.svn_node_file]
486 if e.kind == svn.core.svn_node_file]
486 self.removed = set()
487 self.removed = set()
487
488
488 files.sort()
489 files.sort()
489 files = zip(files, [rev] * len(files))
490 files = zip(files, [rev] * len(files))
490 return (files, copies)
491 return (files, copies)
491
492
492 def getchanges(self, rev, full):
493 def getchanges(self, rev, full):
493 # reuse cache from getchangedfiles
494 # reuse cache from getchangedfiles
494 if self._changescache[0] == rev and not full:
495 if self._changescache[0] == rev and not full:
495 (files, copies) = self._changescache[1]
496 (files, copies) = self._changescache[1]
496 else:
497 else:
497 (files, copies) = self._getchanges(rev, full)
498 (files, copies) = self._getchanges(rev, full)
498 # caller caches the result, so free it here to release memory
499 # caller caches the result, so free it here to release memory
499 del self.paths[rev]
500 del self.paths[rev]
500 return (files, copies, set())
501 return (files, copies, set())
501
502
502 def getchangedfiles(self, rev, i):
503 def getchangedfiles(self, rev, i):
503 # called from filemap - cache computed values for reuse in getchanges
504 # called from filemap - cache computed values for reuse in getchanges
504 (files, copies) = self._getchanges(rev, False)
505 (files, copies) = self._getchanges(rev, False)
505 self._changescache = (rev, (files, copies))
506 self._changescache = (rev, (files, copies))
506 return [f[0] for f in files]
507 return [f[0] for f in files]
507
508
508 def getcommit(self, rev):
509 def getcommit(self, rev):
509 if rev not in self.commits:
510 if rev not in self.commits:
510 uuid, module, revnum = revsplit(rev)
511 uuid, module, revnum = revsplit(rev)
511 self.module = module
512 self.module = module
512 self.reparent(module)
513 self.reparent(module)
513 # We assume that:
514 # We assume that:
514 # - requests for revisions after "stop" come from the
515 # - requests for revisions after "stop" come from the
515 # revision graph backward traversal. Cache all of them
516 # revision graph backward traversal. Cache all of them
516 # down to stop, they will be used eventually.
517 # down to stop, they will be used eventually.
517 # - requests for revisions before "stop" come to get
518 # - requests for revisions before "stop" come to get
518 # isolated branches parents. Just fetch what is needed.
519 # isolated branches parents. Just fetch what is needed.
519 stop = self.lastrevs.get(module, 0)
520 stop = self.lastrevs.get(module, 0)
520 if revnum < stop:
521 if revnum < stop:
521 stop = revnum + 1
522 stop = revnum + 1
522 self._fetch_revisions(revnum, stop)
523 self._fetch_revisions(revnum, stop)
523 if rev not in self.commits:
524 if rev not in self.commits:
524 raise error.Abort(_('svn: revision %s not found') % revnum)
525 raise error.Abort(_('svn: revision %s not found') % revnum)
525 revcommit = self.commits[rev]
526 revcommit = self.commits[rev]
526 # caller caches the result, so free it here to release memory
527 # caller caches the result, so free it here to release memory
527 del self.commits[rev]
528 del self.commits[rev]
528 return revcommit
529 return revcommit
529
530
530 def checkrevformat(self, revstr, mapname='splicemap'):
531 def checkrevformat(self, revstr, mapname='splicemap'):
531 """ fails if revision format does not match the correct format"""
532 """ fails if revision format does not match the correct format"""
532 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
533 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
533 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
534 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
534 r'{12,12}(.*)\@[0-9]+$',revstr):
535 r'{12,12}(.*)\@[0-9]+$',revstr):
535 raise error.Abort(_('%s entry %s is not a valid revision'
536 raise error.Abort(_('%s entry %s is not a valid revision'
536 ' identifier') % (mapname, revstr))
537 ' identifier') % (mapname, revstr))
537
538
538 def numcommits(self):
539 def numcommits(self):
539 return int(self.head.rsplit('@', 1)[1]) - self.startrev
540 return int(self.head.rsplit('@', 1)[1]) - self.startrev
540
541
541 def gettags(self):
542 def gettags(self):
542 tags = {}
543 tags = {}
543 if self.tags is None:
544 if self.tags is None:
544 return tags
545 return tags
545
546
546 # svn tags are just a convention, project branches left in a
547 # svn tags are just a convention, project branches left in a
547 # 'tags' directory. There is no other relationship than
548 # 'tags' directory. There is no other relationship than
548 # ancestry, which is expensive to discover and makes them hard
549 # ancestry, which is expensive to discover and makes them hard
549 # to update incrementally. Worse, past revisions may be
550 # to update incrementally. Worse, past revisions may be
550 # referenced by tags far away in the future, requiring a deep
551 # referenced by tags far away in the future, requiring a deep
551 # history traversal on every calculation. Current code
552 # history traversal on every calculation. Current code
552 # performs a single backward traversal, tracking moves within
553 # performs a single backward traversal, tracking moves within
553 # the tags directory (tag renaming) and recording a new tag
554 # the tags directory (tag renaming) and recording a new tag
554 # everytime a project is copied from outside the tags
555 # everytime a project is copied from outside the tags
555 # directory. It also lists deleted tags, this behaviour may
556 # directory. It also lists deleted tags, this behaviour may
556 # change in the future.
557 # change in the future.
557 pendings = []
558 pendings = []
558 tagspath = self.tags
559 tagspath = self.tags
559 start = svn.ra.get_latest_revnum(self.ra)
560 start = svn.ra.get_latest_revnum(self.ra)
560 stream = self._getlog([self.tags], start, self.startrev)
561 stream = self._getlog([self.tags], start, self.startrev)
561 try:
562 try:
562 for entry in stream:
563 for entry in stream:
563 origpaths, revnum, author, date, message = entry
564 origpaths, revnum, author, date, message = entry
564 if not origpaths:
565 if not origpaths:
565 origpaths = []
566 origpaths = []
566 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
567 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
567 in origpaths.iteritems() if e.copyfrom_path]
568 in origpaths.iteritems() if e.copyfrom_path]
568 # Apply moves/copies from more specific to general
569 # Apply moves/copies from more specific to general
569 copies.sort(reverse=True)
570 copies.sort(reverse=True)
570
571
571 srctagspath = tagspath
572 srctagspath = tagspath
572 if copies and copies[-1][2] == tagspath:
573 if copies and copies[-1][2] == tagspath:
573 # Track tags directory moves
574 # Track tags directory moves
574 srctagspath = copies.pop()[0]
575 srctagspath = copies.pop()[0]
575
576
576 for source, sourcerev, dest in copies:
577 for source, sourcerev, dest in copies:
577 if not dest.startswith(tagspath + '/'):
578 if not dest.startswith(tagspath + '/'):
578 continue
579 continue
579 for tag in pendings:
580 for tag in pendings:
580 if tag[0].startswith(dest):
581 if tag[0].startswith(dest):
581 tagpath = source + tag[0][len(dest):]
582 tagpath = source + tag[0][len(dest):]
582 tag[:2] = [tagpath, sourcerev]
583 tag[:2] = [tagpath, sourcerev]
583 break
584 break
584 else:
585 else:
585 pendings.append([source, sourcerev, dest])
586 pendings.append([source, sourcerev, dest])
586
587
587 # Filter out tags with children coming from different
588 # Filter out tags with children coming from different
588 # parts of the repository like:
589 # parts of the repository like:
589 # /tags/tag.1 (from /trunk:10)
590 # /tags/tag.1 (from /trunk:10)
590 # /tags/tag.1/foo (from /branches/foo:12)
591 # /tags/tag.1/foo (from /branches/foo:12)
591 # Here/tags/tag.1 discarded as well as its children.
592 # Here/tags/tag.1 discarded as well as its children.
592 # It happens with tools like cvs2svn. Such tags cannot
593 # It happens with tools like cvs2svn. Such tags cannot
593 # be represented in mercurial.
594 # be represented in mercurial.
594 addeds = dict((p, e.copyfrom_path) for p, e
595 addeds = dict((p, e.copyfrom_path) for p, e
595 in origpaths.iteritems()
596 in origpaths.iteritems()
596 if e.action == 'A' and e.copyfrom_path)
597 if e.action == 'A' and e.copyfrom_path)
597 badroots = set()
598 badroots = set()
598 for destroot in addeds:
599 for destroot in addeds:
599 for source, sourcerev, dest in pendings:
600 for source, sourcerev, dest in pendings:
600 if (not dest.startswith(destroot + '/')
601 if (not dest.startswith(destroot + '/')
601 or source.startswith(addeds[destroot] + '/')):
602 or source.startswith(addeds[destroot] + '/')):
602 continue
603 continue
603 badroots.add(destroot)
604 badroots.add(destroot)
604 break
605 break
605
606
606 for badroot in badroots:
607 for badroot in badroots:
607 pendings = [p for p in pendings if p[2] != badroot
608 pendings = [p for p in pendings if p[2] != badroot
608 and not p[2].startswith(badroot + '/')]
609 and not p[2].startswith(badroot + '/')]
609
610
610 # Tell tag renamings from tag creations
611 # Tell tag renamings from tag creations
611 renamings = []
612 renamings = []
612 for source, sourcerev, dest in pendings:
613 for source, sourcerev, dest in pendings:
613 tagname = dest.split('/')[-1]
614 tagname = dest.split('/')[-1]
614 if source.startswith(srctagspath):
615 if source.startswith(srctagspath):
615 renamings.append([source, sourcerev, tagname])
616 renamings.append([source, sourcerev, tagname])
616 continue
617 continue
617 if tagname in tags:
618 if tagname in tags:
618 # Keep the latest tag value
619 # Keep the latest tag value
619 continue
620 continue
620 # From revision may be fake, get one with changes
621 # From revision may be fake, get one with changes
621 try:
622 try:
622 tagid = self.latest(source, sourcerev)
623 tagid = self.latest(source, sourcerev)
623 if tagid and tagname not in tags:
624 if tagid and tagname not in tags:
624 tags[tagname] = tagid
625 tags[tagname] = tagid
625 except SvnPathNotFound:
626 except SvnPathNotFound:
626 # It happens when we are following directories
627 # It happens when we are following directories
627 # we assumed were copied with their parents
628 # we assumed were copied with their parents
628 # but were really created in the tag
629 # but were really created in the tag
629 # directory.
630 # directory.
630 pass
631 pass
631 pendings = renamings
632 pendings = renamings
632 tagspath = srctagspath
633 tagspath = srctagspath
633 finally:
634 finally:
634 stream.close()
635 stream.close()
635 return tags
636 return tags
636
637
637 def converted(self, rev, destrev):
638 def converted(self, rev, destrev):
638 if not self.wc:
639 if not self.wc:
639 return
640 return
640 if self.convertfp is None:
641 if self.convertfp is None:
641 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
642 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
642 'ab')
643 'ab')
643 self.convertfp.write(util.tonativeeol('%s %d\n'
644 self.convertfp.write(util.tonativeeol('%s %d\n'
644 % (destrev, self.revnum(rev))))
645 % (destrev, self.revnum(rev))))
645 self.convertfp.flush()
646 self.convertfp.flush()
646
647
647 def revid(self, revnum, module=None):
648 def revid(self, revnum, module=None):
648 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
649 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
649
650
650 def revnum(self, rev):
651 def revnum(self, rev):
651 return int(rev.split('@')[-1])
652 return int(rev.split('@')[-1])
652
653
653 def latest(self, path, stop=None):
654 def latest(self, path, stop=None):
654 """Find the latest revid affecting path, up to stop revision
655 """Find the latest revid affecting path, up to stop revision
655 number. If stop is None, default to repository latest
656 number. If stop is None, default to repository latest
656 revision. It may return a revision in a different module,
657 revision. It may return a revision in a different module,
657 since a branch may be moved without a change being
658 since a branch may be moved without a change being
658 reported. Return None if computed module does not belong to
659 reported. Return None if computed module does not belong to
659 rootmodule subtree.
660 rootmodule subtree.
660 """
661 """
661 def findchanges(path, start, stop=None):
662 def findchanges(path, start, stop=None):
662 stream = self._getlog([path], start, stop or 1)
663 stream = self._getlog([path], start, stop or 1)
663 try:
664 try:
664 for entry in stream:
665 for entry in stream:
665 paths, revnum, author, date, message = entry
666 paths, revnum, author, date, message = entry
666 if stop is None and paths:
667 if stop is None and paths:
667 # We do not know the latest changed revision,
668 # We do not know the latest changed revision,
668 # keep the first one with changed paths.
669 # keep the first one with changed paths.
669 break
670 break
670 if revnum <= stop:
671 if revnum <= stop:
671 break
672 break
672
673
673 for p in paths:
674 for p in paths:
674 if (not path.startswith(p) or
675 if (not path.startswith(p) or
675 not paths[p].copyfrom_path):
676 not paths[p].copyfrom_path):
676 continue
677 continue
677 newpath = paths[p].copyfrom_path + path[len(p):]
678 newpath = paths[p].copyfrom_path + path[len(p):]
678 self.ui.debug("branch renamed from %s to %s at %d\n" %
679 self.ui.debug("branch renamed from %s to %s at %d\n" %
679 (path, newpath, revnum))
680 (path, newpath, revnum))
680 path = newpath
681 path = newpath
681 break
682 break
682 if not paths:
683 if not paths:
683 revnum = None
684 revnum = None
684 return revnum, path
685 return revnum, path
685 finally:
686 finally:
686 stream.close()
687 stream.close()
687
688
688 if not path.startswith(self.rootmodule):
689 if not path.startswith(self.rootmodule):
689 # Requests on foreign branches may be forbidden at server level
690 # Requests on foreign branches may be forbidden at server level
690 self.ui.debug('ignoring foreign branch %r\n' % path)
691 self.ui.debug('ignoring foreign branch %r\n' % path)
691 return None
692 return None
692
693
693 if stop is None:
694 if stop is None:
694 stop = svn.ra.get_latest_revnum(self.ra)
695 stop = svn.ra.get_latest_revnum(self.ra)
695 try:
696 try:
696 prevmodule = self.reparent('')
697 prevmodule = self.reparent('')
697 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
698 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
698 self.reparent(prevmodule)
699 self.reparent(prevmodule)
699 except svn.core.SubversionException:
700 except svn.core.SubversionException:
700 dirent = None
701 dirent = None
701 if not dirent:
702 if not dirent:
702 raise SvnPathNotFound(_('%s not found up to revision %d')
703 raise SvnPathNotFound(_('%s not found up to revision %d')
703 % (path, stop))
704 % (path, stop))
704
705
705 # stat() gives us the previous revision on this line of
706 # stat() gives us the previous revision on this line of
706 # development, but it might be in *another module*. Fetch the
707 # development, but it might be in *another module*. Fetch the
707 # log and detect renames down to the latest revision.
708 # log and detect renames down to the latest revision.
708 revnum, realpath = findchanges(path, stop, dirent.created_rev)
709 revnum, realpath = findchanges(path, stop, dirent.created_rev)
709 if revnum is None:
710 if revnum is None:
710 # Tools like svnsync can create empty revision, when
711 # Tools like svnsync can create empty revision, when
711 # synchronizing only a subtree for instance. These empty
712 # synchronizing only a subtree for instance. These empty
712 # revisions created_rev still have their original values
713 # revisions created_rev still have their original values
713 # despite all changes having disappeared and can be
714 # despite all changes having disappeared and can be
714 # returned by ra.stat(), at least when stating the root
715 # returned by ra.stat(), at least when stating the root
715 # module. In that case, do not trust created_rev and scan
716 # module. In that case, do not trust created_rev and scan
716 # the whole history.
717 # the whole history.
717 revnum, realpath = findchanges(path, stop)
718 revnum, realpath = findchanges(path, stop)
718 if revnum is None:
719 if revnum is None:
719 self.ui.debug('ignoring empty branch %r\n' % realpath)
720 self.ui.debug('ignoring empty branch %r\n' % realpath)
720 return None
721 return None
721
722
722 if not realpath.startswith(self.rootmodule):
723 if not realpath.startswith(self.rootmodule):
723 self.ui.debug('ignoring foreign branch %r\n' % realpath)
724 self.ui.debug('ignoring foreign branch %r\n' % realpath)
724 return None
725 return None
725 return self.revid(revnum, realpath)
726 return self.revid(revnum, realpath)
726
727
727 def reparent(self, module):
728 def reparent(self, module):
728 """Reparent the svn transport and return the previous parent."""
729 """Reparent the svn transport and return the previous parent."""
729 if self.prevmodule == module:
730 if self.prevmodule == module:
730 return module
731 return module
731 svnurl = self.baseurl + quote(module)
732 svnurl = self.baseurl + quote(module)
732 prevmodule = self.prevmodule
733 prevmodule = self.prevmodule
733 if prevmodule is None:
734 if prevmodule is None:
734 prevmodule = ''
735 prevmodule = ''
735 self.ui.debug("reparent to %s\n" % svnurl)
736 self.ui.debug("reparent to %s\n" % svnurl)
736 svn.ra.reparent(self.ra, svnurl)
737 svn.ra.reparent(self.ra, svnurl)
737 self.prevmodule = module
738 self.prevmodule = module
738 return prevmodule
739 return prevmodule
739
740
740 def expandpaths(self, rev, paths, parents):
741 def expandpaths(self, rev, paths, parents):
741 changed, removed = set(), set()
742 changed, removed = set(), set()
742 copies = {}
743 copies = {}
743
744
744 new_module, revnum = revsplit(rev)[1:]
745 new_module, revnum = revsplit(rev)[1:]
745 if new_module != self.module:
746 if new_module != self.module:
746 self.module = new_module
747 self.module = new_module
747 self.reparent(self.module)
748 self.reparent(self.module)
748
749
749 for i, (path, ent) in enumerate(paths):
750 for i, (path, ent) in enumerate(paths):
750 self.ui.progress(_('scanning paths'), i, item=path,
751 self.ui.progress(_('scanning paths'), i, item=path,
751 total=len(paths), unit=_('paths'))
752 total=len(paths), unit=_('paths'))
752 entrypath = self.getrelpath(path)
753 entrypath = self.getrelpath(path)
753
754
754 kind = self._checkpath(entrypath, revnum)
755 kind = self._checkpath(entrypath, revnum)
755 if kind == svn.core.svn_node_file:
756 if kind == svn.core.svn_node_file:
756 changed.add(self.recode(entrypath))
757 changed.add(self.recode(entrypath))
757 if not ent.copyfrom_path or not parents:
758 if not ent.copyfrom_path or not parents:
758 continue
759 continue
759 # Copy sources not in parent revisions cannot be
760 # Copy sources not in parent revisions cannot be
760 # represented, ignore their origin for now
761 # represented, ignore their origin for now
761 pmodule, prevnum = revsplit(parents[0])[1:]
762 pmodule, prevnum = revsplit(parents[0])[1:]
762 if ent.copyfrom_rev < prevnum:
763 if ent.copyfrom_rev < prevnum:
763 continue
764 continue
764 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
765 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
765 if not copyfrom_path:
766 if not copyfrom_path:
766 continue
767 continue
767 self.ui.debug("copied to %s from %s@%s\n" %
768 self.ui.debug("copied to %s from %s@%s\n" %
768 (entrypath, copyfrom_path, ent.copyfrom_rev))
769 (entrypath, copyfrom_path, ent.copyfrom_rev))
769 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
770 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
770 elif kind == 0: # gone, but had better be a deleted *file*
771 elif kind == 0: # gone, but had better be a deleted *file*
771 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
772 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
772 pmodule, prevnum = revsplit(parents[0])[1:]
773 pmodule, prevnum = revsplit(parents[0])[1:]
773 parentpath = pmodule + "/" + entrypath
774 parentpath = pmodule + "/" + entrypath
774 fromkind = self._checkpath(entrypath, prevnum, pmodule)
775 fromkind = self._checkpath(entrypath, prevnum, pmodule)
775
776
776 if fromkind == svn.core.svn_node_file:
777 if fromkind == svn.core.svn_node_file:
777 removed.add(self.recode(entrypath))
778 removed.add(self.recode(entrypath))
778 elif fromkind == svn.core.svn_node_dir:
779 elif fromkind == svn.core.svn_node_dir:
779 oroot = parentpath.strip('/')
780 oroot = parentpath.strip('/')
780 nroot = path.strip('/')
781 nroot = path.strip('/')
781 children = self._iterfiles(oroot, prevnum)
782 children = self._iterfiles(oroot, prevnum)
782 for childpath in children:
783 for childpath in children:
783 childpath = childpath.replace(oroot, nroot)
784 childpath = childpath.replace(oroot, nroot)
784 childpath = self.getrelpath("/" + childpath, pmodule)
785 childpath = self.getrelpath("/" + childpath, pmodule)
785 if childpath:
786 if childpath:
786 removed.add(self.recode(childpath))
787 removed.add(self.recode(childpath))
787 else:
788 else:
788 self.ui.debug('unknown path in revision %d: %s\n' % \
789 self.ui.debug('unknown path in revision %d: %s\n' % \
789 (revnum, path))
790 (revnum, path))
790 elif kind == svn.core.svn_node_dir:
791 elif kind == svn.core.svn_node_dir:
791 if ent.action == 'M':
792 if ent.action == 'M':
792 # If the directory just had a prop change,
793 # If the directory just had a prop change,
793 # then we shouldn't need to look for its children.
794 # then we shouldn't need to look for its children.
794 continue
795 continue
795 if ent.action == 'R' and parents:
796 if ent.action == 'R' and parents:
796 # If a directory is replacing a file, mark the previous
797 # If a directory is replacing a file, mark the previous
797 # file as deleted
798 # file as deleted
798 pmodule, prevnum = revsplit(parents[0])[1:]
799 pmodule, prevnum = revsplit(parents[0])[1:]
799 pkind = self._checkpath(entrypath, prevnum, pmodule)
800 pkind = self._checkpath(entrypath, prevnum, pmodule)
800 if pkind == svn.core.svn_node_file:
801 if pkind == svn.core.svn_node_file:
801 removed.add(self.recode(entrypath))
802 removed.add(self.recode(entrypath))
802 elif pkind == svn.core.svn_node_dir:
803 elif pkind == svn.core.svn_node_dir:
803 # We do not know what files were kept or removed,
804 # We do not know what files were kept or removed,
804 # mark them all as changed.
805 # mark them all as changed.
805 for childpath in self._iterfiles(pmodule, prevnum):
806 for childpath in self._iterfiles(pmodule, prevnum):
806 childpath = self.getrelpath("/" + childpath)
807 childpath = self.getrelpath("/" + childpath)
807 if childpath:
808 if childpath:
808 changed.add(self.recode(childpath))
809 changed.add(self.recode(childpath))
809
810
810 for childpath in self._iterfiles(path, revnum):
811 for childpath in self._iterfiles(path, revnum):
811 childpath = self.getrelpath("/" + childpath)
812 childpath = self.getrelpath("/" + childpath)
812 if childpath:
813 if childpath:
813 changed.add(self.recode(childpath))
814 changed.add(self.recode(childpath))
814
815
815 # Handle directory copies
816 # Handle directory copies
816 if not ent.copyfrom_path or not parents:
817 if not ent.copyfrom_path or not parents:
817 continue
818 continue
818 # Copy sources not in parent revisions cannot be
819 # Copy sources not in parent revisions cannot be
819 # represented, ignore their origin for now
820 # represented, ignore their origin for now
820 pmodule, prevnum = revsplit(parents[0])[1:]
821 pmodule, prevnum = revsplit(parents[0])[1:]
821 if ent.copyfrom_rev < prevnum:
822 if ent.copyfrom_rev < prevnum:
822 continue
823 continue
823 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
824 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
824 if not copyfrompath:
825 if not copyfrompath:
825 continue
826 continue
826 self.ui.debug("mark %s came from %s:%d\n"
827 self.ui.debug("mark %s came from %s:%d\n"
827 % (path, copyfrompath, ent.copyfrom_rev))
828 % (path, copyfrompath, ent.copyfrom_rev))
828 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
829 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
829 for childpath in children:
830 for childpath in children:
830 childpath = self.getrelpath("/" + childpath, pmodule)
831 childpath = self.getrelpath("/" + childpath, pmodule)
831 if not childpath:
832 if not childpath:
832 continue
833 continue
833 copytopath = path + childpath[len(copyfrompath):]
834 copytopath = path + childpath[len(copyfrompath):]
834 copytopath = self.getrelpath(copytopath)
835 copytopath = self.getrelpath(copytopath)
835 copies[self.recode(copytopath)] = self.recode(childpath)
836 copies[self.recode(copytopath)] = self.recode(childpath)
836
837
837 self.ui.progress(_('scanning paths'), None)
838 self.ui.progress(_('scanning paths'), None)
838 changed.update(removed)
839 changed.update(removed)
839 return (list(changed), removed, copies)
840 return (list(changed), removed, copies)
840
841
841 def _fetch_revisions(self, from_revnum, to_revnum):
842 def _fetch_revisions(self, from_revnum, to_revnum):
842 if from_revnum < to_revnum:
843 if from_revnum < to_revnum:
843 from_revnum, to_revnum = to_revnum, from_revnum
844 from_revnum, to_revnum = to_revnum, from_revnum
844
845
845 self.child_cset = None
846 self.child_cset = None
846
847
847 def parselogentry(orig_paths, revnum, author, date, message):
848 def parselogentry(orig_paths, revnum, author, date, message):
848 """Return the parsed commit object or None, and True if
849 """Return the parsed commit object or None, and True if
849 the revision is a branch root.
850 the revision is a branch root.
850 """
851 """
851 self.ui.debug("parsing revision %d (%d changes)\n" %
852 self.ui.debug("parsing revision %d (%d changes)\n" %
852 (revnum, len(orig_paths)))
853 (revnum, len(orig_paths)))
853
854
854 branched = False
855 branched = False
855 rev = self.revid(revnum)
856 rev = self.revid(revnum)
856 # branch log might return entries for a parent we already have
857 # branch log might return entries for a parent we already have
857
858
858 if rev in self.commits or revnum < to_revnum:
859 if rev in self.commits or revnum < to_revnum:
859 return None, branched
860 return None, branched
860
861
861 parents = []
862 parents = []
862 # check whether this revision is the start of a branch or part
863 # check whether this revision is the start of a branch or part
863 # of a branch renaming
864 # of a branch renaming
864 orig_paths = sorted(orig_paths.iteritems())
865 orig_paths = sorted(orig_paths.iteritems())
865 root_paths = [(p, e) for p, e in orig_paths
866 root_paths = [(p, e) for p, e in orig_paths
866 if self.module.startswith(p)]
867 if self.module.startswith(p)]
867 if root_paths:
868 if root_paths:
868 path, ent = root_paths[-1]
869 path, ent = root_paths[-1]
869 if ent.copyfrom_path:
870 if ent.copyfrom_path:
870 branched = True
871 branched = True
871 newpath = ent.copyfrom_path + self.module[len(path):]
872 newpath = ent.copyfrom_path + self.module[len(path):]
872 # ent.copyfrom_rev may not be the actual last revision
873 # ent.copyfrom_rev may not be the actual last revision
873 previd = self.latest(newpath, ent.copyfrom_rev)
874 previd = self.latest(newpath, ent.copyfrom_rev)
874 if previd is not None:
875 if previd is not None:
875 prevmodule, prevnum = revsplit(previd)[1:]
876 prevmodule, prevnum = revsplit(previd)[1:]
876 if prevnum >= self.startrev:
877 if prevnum >= self.startrev:
877 parents = [previd]
878 parents = [previd]
878 self.ui.note(
879 self.ui.note(
879 _('found parent of branch %s at %d: %s\n') %
880 _('found parent of branch %s at %d: %s\n') %
880 (self.module, prevnum, prevmodule))
881 (self.module, prevnum, prevmodule))
881 else:
882 else:
882 self.ui.debug("no copyfrom path, don't know what to do.\n")
883 self.ui.debug("no copyfrom path, don't know what to do.\n")
883
884
884 paths = []
885 paths = []
885 # filter out unrelated paths
886 # filter out unrelated paths
886 for path, ent in orig_paths:
887 for path, ent in orig_paths:
887 if self.getrelpath(path) is None:
888 if self.getrelpath(path) is None:
888 continue
889 continue
889 paths.append((path, ent))
890 paths.append((path, ent))
890
891
891 # Example SVN datetime. Includes microseconds.
892 # Example SVN datetime. Includes microseconds.
892 # ISO-8601 conformant
893 # ISO-8601 conformant
893 # '2007-01-04T17:35:00.902377Z'
894 # '2007-01-04T17:35:00.902377Z'
894 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
895 date = dateutil.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
895 if self.ui.configbool('convert', 'localtimezone'):
896 if self.ui.configbool('convert', 'localtimezone'):
896 date = makedatetimestamp(date[0])
897 date = makedatetimestamp(date[0])
897
898
898 if message:
899 if message:
899 log = self.recode(message)
900 log = self.recode(message)
900 else:
901 else:
901 log = ''
902 log = ''
902
903
903 if author:
904 if author:
904 author = self.recode(author)
905 author = self.recode(author)
905 else:
906 else:
906 author = ''
907 author = ''
907
908
908 try:
909 try:
909 branch = self.module.split("/")[-1]
910 branch = self.module.split("/")[-1]
910 if branch == self.trunkname:
911 if branch == self.trunkname:
911 branch = None
912 branch = None
912 except IndexError:
913 except IndexError:
913 branch = None
914 branch = None
914
915
915 cset = commit(author=author,
916 cset = commit(author=author,
916 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
917 date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
917 desc=log,
918 desc=log,
918 parents=parents,
919 parents=parents,
919 branch=branch,
920 branch=branch,
920 rev=rev)
921 rev=rev)
921
922
922 self.commits[rev] = cset
923 self.commits[rev] = cset
923 # The parents list is *shared* among self.paths and the
924 # The parents list is *shared* among self.paths and the
924 # commit object. Both will be updated below.
925 # commit object. Both will be updated below.
925 self.paths[rev] = (paths, cset.parents)
926 self.paths[rev] = (paths, cset.parents)
926 if self.child_cset and not self.child_cset.parents:
927 if self.child_cset and not self.child_cset.parents:
927 self.child_cset.parents[:] = [rev]
928 self.child_cset.parents[:] = [rev]
928 self.child_cset = cset
929 self.child_cset = cset
929 return cset, branched
930 return cset, branched
930
931
931 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
932 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
932 (self.module, from_revnum, to_revnum))
933 (self.module, from_revnum, to_revnum))
933
934
934 try:
935 try:
935 firstcset = None
936 firstcset = None
936 lastonbranch = False
937 lastonbranch = False
937 stream = self._getlog([self.module], from_revnum, to_revnum)
938 stream = self._getlog([self.module], from_revnum, to_revnum)
938 try:
939 try:
939 for entry in stream:
940 for entry in stream:
940 paths, revnum, author, date, message = entry
941 paths, revnum, author, date, message = entry
941 if revnum < self.startrev:
942 if revnum < self.startrev:
942 lastonbranch = True
943 lastonbranch = True
943 break
944 break
944 if not paths:
945 if not paths:
945 self.ui.debug('revision %d has no entries\n' % revnum)
946 self.ui.debug('revision %d has no entries\n' % revnum)
946 # If we ever leave the loop on an empty
947 # If we ever leave the loop on an empty
947 # revision, do not try to get a parent branch
948 # revision, do not try to get a parent branch
948 lastonbranch = lastonbranch or revnum == 0
949 lastonbranch = lastonbranch or revnum == 0
949 continue
950 continue
950 cset, lastonbranch = parselogentry(paths, revnum, author,
951 cset, lastonbranch = parselogentry(paths, revnum, author,
951 date, message)
952 date, message)
952 if cset:
953 if cset:
953 firstcset = cset
954 firstcset = cset
954 if lastonbranch:
955 if lastonbranch:
955 break
956 break
956 finally:
957 finally:
957 stream.close()
958 stream.close()
958
959
959 if not lastonbranch and firstcset and not firstcset.parents:
960 if not lastonbranch and firstcset and not firstcset.parents:
960 # The first revision of the sequence (the last fetched one)
961 # The first revision of the sequence (the last fetched one)
961 # has invalid parents if not a branch root. Find the parent
962 # has invalid parents if not a branch root. Find the parent
962 # revision now, if any.
963 # revision now, if any.
963 try:
964 try:
964 firstrevnum = self.revnum(firstcset.rev)
965 firstrevnum = self.revnum(firstcset.rev)
965 if firstrevnum > 1:
966 if firstrevnum > 1:
966 latest = self.latest(self.module, firstrevnum - 1)
967 latest = self.latest(self.module, firstrevnum - 1)
967 if latest:
968 if latest:
968 firstcset.parents.append(latest)
969 firstcset.parents.append(latest)
969 except SvnPathNotFound:
970 except SvnPathNotFound:
970 pass
971 pass
971 except svn.core.SubversionException as xxx_todo_changeme:
972 except svn.core.SubversionException as xxx_todo_changeme:
972 (inst, num) = xxx_todo_changeme.args
973 (inst, num) = xxx_todo_changeme.args
973 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
974 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
974 raise error.Abort(_('svn: branch has no revision %s')
975 raise error.Abort(_('svn: branch has no revision %s')
975 % to_revnum)
976 % to_revnum)
976 raise
977 raise
977
978
978 def getfile(self, file, rev):
979 def getfile(self, file, rev):
979 # TODO: ra.get_file transmits the whole file instead of diffs.
980 # TODO: ra.get_file transmits the whole file instead of diffs.
980 if file in self.removed:
981 if file in self.removed:
981 return None, None
982 return None, None
982 mode = ''
983 mode = ''
983 try:
984 try:
984 new_module, revnum = revsplit(rev)[1:]
985 new_module, revnum = revsplit(rev)[1:]
985 if self.module != new_module:
986 if self.module != new_module:
986 self.module = new_module
987 self.module = new_module
987 self.reparent(self.module)
988 self.reparent(self.module)
988 io = stringio()
989 io = stringio()
989 info = svn.ra.get_file(self.ra, file, revnum, io)
990 info = svn.ra.get_file(self.ra, file, revnum, io)
990 data = io.getvalue()
991 data = io.getvalue()
991 # ra.get_file() seems to keep a reference on the input buffer
992 # ra.get_file() seems to keep a reference on the input buffer
992 # preventing collection. Release it explicitly.
993 # preventing collection. Release it explicitly.
993 io.close()
994 io.close()
994 if isinstance(info, list):
995 if isinstance(info, list):
995 info = info[-1]
996 info = info[-1]
996 mode = ("svn:executable" in info) and 'x' or ''
997 mode = ("svn:executable" in info) and 'x' or ''
997 mode = ("svn:special" in info) and 'l' or mode
998 mode = ("svn:special" in info) and 'l' or mode
998 except svn.core.SubversionException as e:
999 except svn.core.SubversionException as e:
999 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
1000 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
1000 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
1001 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
1001 if e.apr_err in notfound: # File not found
1002 if e.apr_err in notfound: # File not found
1002 return None, None
1003 return None, None
1003 raise
1004 raise
1004 if mode == 'l':
1005 if mode == 'l':
1005 link_prefix = "link "
1006 link_prefix = "link "
1006 if data.startswith(link_prefix):
1007 if data.startswith(link_prefix):
1007 data = data[len(link_prefix):]
1008 data = data[len(link_prefix):]
1008 return data, mode
1009 return data, mode
1009
1010
1010 def _iterfiles(self, path, revnum):
1011 def _iterfiles(self, path, revnum):
1011 """Enumerate all files in path at revnum, recursively."""
1012 """Enumerate all files in path at revnum, recursively."""
1012 path = path.strip('/')
1013 path = path.strip('/')
1013 pool = svn.core.Pool()
1014 pool = svn.core.Pool()
1014 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1015 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1015 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1016 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1016 if path:
1017 if path:
1017 path += '/'
1018 path += '/'
1018 return ((path + p) for p, e in entries.iteritems()
1019 return ((path + p) for p, e in entries.iteritems()
1019 if e.kind == svn.core.svn_node_file)
1020 if e.kind == svn.core.svn_node_file)
1020
1021
1021 def getrelpath(self, path, module=None):
1022 def getrelpath(self, path, module=None):
1022 if module is None:
1023 if module is None:
1023 module = self.module
1024 module = self.module
1024 # Given the repository url of this wc, say
1025 # Given the repository url of this wc, say
1025 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1026 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1026 # extract the "entry" portion (a relative path) from what
1027 # extract the "entry" portion (a relative path) from what
1027 # svn log --xml says, i.e.
1028 # svn log --xml says, i.e.
1028 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1029 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1029 # that is to say "tests/PloneTestCase.py"
1030 # that is to say "tests/PloneTestCase.py"
1030 if path.startswith(module):
1031 if path.startswith(module):
1031 relative = path.rstrip('/')[len(module):]
1032 relative = path.rstrip('/')[len(module):]
1032 if relative.startswith('/'):
1033 if relative.startswith('/'):
1033 return relative[1:]
1034 return relative[1:]
1034 elif relative == '':
1035 elif relative == '':
1035 return relative
1036 return relative
1036
1037
1037 # The path is outside our tracked tree...
1038 # The path is outside our tracked tree...
1038 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1039 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1039 return None
1040 return None
1040
1041
1041 def _checkpath(self, path, revnum, module=None):
1042 def _checkpath(self, path, revnum, module=None):
1042 if module is not None:
1043 if module is not None:
1043 prevmodule = self.reparent('')
1044 prevmodule = self.reparent('')
1044 path = module + '/' + path
1045 path = module + '/' + path
1045 try:
1046 try:
1046 # ra.check_path does not like leading slashes very much, it leads
1047 # ra.check_path does not like leading slashes very much, it leads
1047 # to PROPFIND subversion errors
1048 # to PROPFIND subversion errors
1048 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1049 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1049 finally:
1050 finally:
1050 if module is not None:
1051 if module is not None:
1051 self.reparent(prevmodule)
1052 self.reparent(prevmodule)
1052
1053
1053 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1054 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1054 strict_node_history=False):
1055 strict_node_history=False):
1055 # Normalize path names, svn >= 1.5 only wants paths relative to
1056 # Normalize path names, svn >= 1.5 only wants paths relative to
1056 # supplied URL
1057 # supplied URL
1057 relpaths = []
1058 relpaths = []
1058 for p in paths:
1059 for p in paths:
1059 if not p.startswith('/'):
1060 if not p.startswith('/'):
1060 p = self.module + '/' + p
1061 p = self.module + '/' + p
1061 relpaths.append(p.strip('/'))
1062 relpaths.append(p.strip('/'))
1062 args = [self.baseurl, relpaths, start, end, limit,
1063 args = [self.baseurl, relpaths, start, end, limit,
1063 discover_changed_paths, strict_node_history]
1064 discover_changed_paths, strict_node_history]
1064 # developer config: convert.svn.debugsvnlog
1065 # developer config: convert.svn.debugsvnlog
1065 if not self.ui.configbool('convert', 'svn.debugsvnlog'):
1066 if not self.ui.configbool('convert', 'svn.debugsvnlog'):
1066 return directlogstream(*args)
1067 return directlogstream(*args)
1067 arg = encodeargs(args)
1068 arg = encodeargs(args)
1068 hgexe = util.hgexecutable()
1069 hgexe = util.hgexecutable()
1069 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1070 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1070 stdin, stdout = util.popen2(util.quotecommand(cmd))
1071 stdin, stdout = util.popen2(util.quotecommand(cmd))
1071 stdin.write(arg)
1072 stdin.write(arg)
1072 try:
1073 try:
1073 stdin.close()
1074 stdin.close()
1074 except IOError:
1075 except IOError:
1075 raise error.Abort(_('Mercurial failed to run itself, check'
1076 raise error.Abort(_('Mercurial failed to run itself, check'
1076 ' hg executable is in PATH'))
1077 ' hg executable is in PATH'))
1077 return logstream(stdout)
1078 return logstream(stdout)
1078
1079
1079 pre_revprop_change = '''#!/bin/sh
1080 pre_revprop_change = '''#!/bin/sh
1080
1081
1081 REPOS="$1"
1082 REPOS="$1"
1082 REV="$2"
1083 REV="$2"
1083 USER="$3"
1084 USER="$3"
1084 PROPNAME="$4"
1085 PROPNAME="$4"
1085 ACTION="$5"
1086 ACTION="$5"
1086
1087
1087 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1088 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1088 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1089 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1089 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1090 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1090
1091
1091 echo "Changing prohibited revision property" >&2
1092 echo "Changing prohibited revision property" >&2
1092 exit 1
1093 exit 1
1093 '''
1094 '''
1094
1095
1095 class svn_sink(converter_sink, commandline):
1096 class svn_sink(converter_sink, commandline):
1096 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1097 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1097 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1098 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1098
1099
1099 def prerun(self):
1100 def prerun(self):
1100 if self.wc:
1101 if self.wc:
1101 os.chdir(self.wc)
1102 os.chdir(self.wc)
1102
1103
1103 def postrun(self):
1104 def postrun(self):
1104 if self.wc:
1105 if self.wc:
1105 os.chdir(self.cwd)
1106 os.chdir(self.cwd)
1106
1107
1107 def join(self, name):
1108 def join(self, name):
1108 return os.path.join(self.wc, '.svn', name)
1109 return os.path.join(self.wc, '.svn', name)
1109
1110
1110 def revmapfile(self):
1111 def revmapfile(self):
1111 return self.join('hg-shamap')
1112 return self.join('hg-shamap')
1112
1113
1113 def authorfile(self):
1114 def authorfile(self):
1114 return self.join('hg-authormap')
1115 return self.join('hg-authormap')
1115
1116
1116 def __init__(self, ui, repotype, path):
1117 def __init__(self, ui, repotype, path):
1117
1118
1118 converter_sink.__init__(self, ui, repotype, path)
1119 converter_sink.__init__(self, ui, repotype, path)
1119 commandline.__init__(self, ui, 'svn')
1120 commandline.__init__(self, ui, 'svn')
1120 self.delete = []
1121 self.delete = []
1121 self.setexec = []
1122 self.setexec = []
1122 self.delexec = []
1123 self.delexec = []
1123 self.copies = []
1124 self.copies = []
1124 self.wc = None
1125 self.wc = None
1125 self.cwd = pycompat.getcwd()
1126 self.cwd = pycompat.getcwd()
1126
1127
1127 created = False
1128 created = False
1128 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1129 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1129 self.wc = os.path.realpath(path)
1130 self.wc = os.path.realpath(path)
1130 self.run0('update')
1131 self.run0('update')
1131 else:
1132 else:
1132 if not re.search(br'^(file|http|https|svn|svn\+ssh)\://', path):
1133 if not re.search(br'^(file|http|https|svn|svn\+ssh)\://', path):
1133 path = os.path.realpath(path)
1134 path = os.path.realpath(path)
1134 if os.path.isdir(os.path.dirname(path)):
1135 if os.path.isdir(os.path.dirname(path)):
1135 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1136 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1136 ui.status(_('initializing svn repository %r\n') %
1137 ui.status(_('initializing svn repository %r\n') %
1137 os.path.basename(path))
1138 os.path.basename(path))
1138 commandline(ui, 'svnadmin').run0('create', path)
1139 commandline(ui, 'svnadmin').run0('create', path)
1139 created = path
1140 created = path
1140 path = util.normpath(path)
1141 path = util.normpath(path)
1141 if not path.startswith('/'):
1142 if not path.startswith('/'):
1142 path = '/' + path
1143 path = '/' + path
1143 path = 'file://' + path
1144 path = 'file://' + path
1144
1145
1145 wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
1146 wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
1146 '-wc')
1147 '-wc')
1147 ui.status(_('initializing svn working copy %r\n')
1148 ui.status(_('initializing svn working copy %r\n')
1148 % os.path.basename(wcpath))
1149 % os.path.basename(wcpath))
1149 self.run0('checkout', path, wcpath)
1150 self.run0('checkout', path, wcpath)
1150
1151
1151 self.wc = wcpath
1152 self.wc = wcpath
1152 self.opener = vfsmod.vfs(self.wc)
1153 self.opener = vfsmod.vfs(self.wc)
1153 self.wopener = vfsmod.vfs(self.wc)
1154 self.wopener = vfsmod.vfs(self.wc)
1154 self.childmap = mapfile(ui, self.join('hg-childmap'))
1155 self.childmap = mapfile(ui, self.join('hg-childmap'))
1155 if util.checkexec(self.wc):
1156 if util.checkexec(self.wc):
1156 self.is_exec = util.isexec
1157 self.is_exec = util.isexec
1157 else:
1158 else:
1158 self.is_exec = None
1159 self.is_exec = None
1159
1160
1160 if created:
1161 if created:
1161 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1162 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1162 fp = open(hook, 'wb')
1163 fp = open(hook, 'wb')
1163 fp.write(pre_revprop_change)
1164 fp.write(pre_revprop_change)
1164 fp.close()
1165 fp.close()
1165 util.setflags(hook, False, True)
1166 util.setflags(hook, False, True)
1166
1167
1167 output = self.run0('info')
1168 output = self.run0('info')
1168 self.uuid = self.uuid_re.search(output).group(1).strip()
1169 self.uuid = self.uuid_re.search(output).group(1).strip()
1169
1170
1170 def wjoin(self, *names):
1171 def wjoin(self, *names):
1171 return os.path.join(self.wc, *names)
1172 return os.path.join(self.wc, *names)
1172
1173
1173 @propertycache
1174 @propertycache
1174 def manifest(self):
1175 def manifest(self):
1175 # As of svn 1.7, the "add" command fails when receiving
1176 # As of svn 1.7, the "add" command fails when receiving
1176 # already tracked entries, so we have to track and filter them
1177 # already tracked entries, so we have to track and filter them
1177 # ourselves.
1178 # ourselves.
1178 m = set()
1179 m = set()
1179 output = self.run0('ls', recursive=True, xml=True)
1180 output = self.run0('ls', recursive=True, xml=True)
1180 doc = xml.dom.minidom.parseString(output)
1181 doc = xml.dom.minidom.parseString(output)
1181 for e in doc.getElementsByTagName('entry'):
1182 for e in doc.getElementsByTagName('entry'):
1182 for n in e.childNodes:
1183 for n in e.childNodes:
1183 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1184 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1184 continue
1185 continue
1185 name = ''.join(c.data for c in n.childNodes
1186 name = ''.join(c.data for c in n.childNodes
1186 if c.nodeType == c.TEXT_NODE)
1187 if c.nodeType == c.TEXT_NODE)
1187 # Entries are compared with names coming from
1188 # Entries are compared with names coming from
1188 # mercurial, so bytes with undefined encoding. Our
1189 # mercurial, so bytes with undefined encoding. Our
1189 # best bet is to assume they are in local
1190 # best bet is to assume they are in local
1190 # encoding. They will be passed to command line calls
1191 # encoding. They will be passed to command line calls
1191 # later anyway, so they better be.
1192 # later anyway, so they better be.
1192 m.add(encoding.unitolocal(name))
1193 m.add(encoding.unitolocal(name))
1193 break
1194 break
1194 return m
1195 return m
1195
1196
1196 def putfile(self, filename, flags, data):
1197 def putfile(self, filename, flags, data):
1197 if 'l' in flags:
1198 if 'l' in flags:
1198 self.wopener.symlink(data, filename)
1199 self.wopener.symlink(data, filename)
1199 else:
1200 else:
1200 try:
1201 try:
1201 if os.path.islink(self.wjoin(filename)):
1202 if os.path.islink(self.wjoin(filename)):
1202 os.unlink(filename)
1203 os.unlink(filename)
1203 except OSError:
1204 except OSError:
1204 pass
1205 pass
1205 self.wopener.write(filename, data)
1206 self.wopener.write(filename, data)
1206
1207
1207 if self.is_exec:
1208 if self.is_exec:
1208 if self.is_exec(self.wjoin(filename)):
1209 if self.is_exec(self.wjoin(filename)):
1209 if 'x' not in flags:
1210 if 'x' not in flags:
1210 self.delexec.append(filename)
1211 self.delexec.append(filename)
1211 else:
1212 else:
1212 if 'x' in flags:
1213 if 'x' in flags:
1213 self.setexec.append(filename)
1214 self.setexec.append(filename)
1214 util.setflags(self.wjoin(filename), False, 'x' in flags)
1215 util.setflags(self.wjoin(filename), False, 'x' in flags)
1215
1216
1216 def _copyfile(self, source, dest):
1217 def _copyfile(self, source, dest):
1217 # SVN's copy command pukes if the destination file exists, but
1218 # SVN's copy command pukes if the destination file exists, but
1218 # our copyfile method expects to record a copy that has
1219 # our copyfile method expects to record a copy that has
1219 # already occurred. Cross the semantic gap.
1220 # already occurred. Cross the semantic gap.
1220 wdest = self.wjoin(dest)
1221 wdest = self.wjoin(dest)
1221 exists = os.path.lexists(wdest)
1222 exists = os.path.lexists(wdest)
1222 if exists:
1223 if exists:
1223 fd, tempname = tempfile.mkstemp(
1224 fd, tempname = tempfile.mkstemp(
1224 prefix='hg-copy-', dir=os.path.dirname(wdest))
1225 prefix='hg-copy-', dir=os.path.dirname(wdest))
1225 os.close(fd)
1226 os.close(fd)
1226 os.unlink(tempname)
1227 os.unlink(tempname)
1227 os.rename(wdest, tempname)
1228 os.rename(wdest, tempname)
1228 try:
1229 try:
1229 self.run0('copy', source, dest)
1230 self.run0('copy', source, dest)
1230 finally:
1231 finally:
1231 self.manifest.add(dest)
1232 self.manifest.add(dest)
1232 if exists:
1233 if exists:
1233 try:
1234 try:
1234 os.unlink(wdest)
1235 os.unlink(wdest)
1235 except OSError:
1236 except OSError:
1236 pass
1237 pass
1237 os.rename(tempname, wdest)
1238 os.rename(tempname, wdest)
1238
1239
1239 def dirs_of(self, files):
1240 def dirs_of(self, files):
1240 dirs = set()
1241 dirs = set()
1241 for f in files:
1242 for f in files:
1242 if os.path.isdir(self.wjoin(f)):
1243 if os.path.isdir(self.wjoin(f)):
1243 dirs.add(f)
1244 dirs.add(f)
1244 i = len(f)
1245 i = len(f)
1245 for i in iter(lambda: f.rfind('/', 0, i), -1):
1246 for i in iter(lambda: f.rfind('/', 0, i), -1):
1246 dirs.add(f[:i])
1247 dirs.add(f[:i])
1247 return dirs
1248 return dirs
1248
1249
1249 def add_dirs(self, files):
1250 def add_dirs(self, files):
1250 add_dirs = [d for d in sorted(self.dirs_of(files))
1251 add_dirs = [d for d in sorted(self.dirs_of(files))
1251 if d not in self.manifest]
1252 if d not in self.manifest]
1252 if add_dirs:
1253 if add_dirs:
1253 self.manifest.update(add_dirs)
1254 self.manifest.update(add_dirs)
1254 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1255 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1255 return add_dirs
1256 return add_dirs
1256
1257
1257 def add_files(self, files):
1258 def add_files(self, files):
1258 files = [f for f in files if f not in self.manifest]
1259 files = [f for f in files if f not in self.manifest]
1259 if files:
1260 if files:
1260 self.manifest.update(files)
1261 self.manifest.update(files)
1261 self.xargs(files, 'add', quiet=True)
1262 self.xargs(files, 'add', quiet=True)
1262 return files
1263 return files
1263
1264
1264 def addchild(self, parent, child):
1265 def addchild(self, parent, child):
1265 self.childmap[parent] = child
1266 self.childmap[parent] = child
1266
1267
1267 def revid(self, rev):
1268 def revid(self, rev):
1268 return u"svn:%s@%s" % (self.uuid, rev)
1269 return u"svn:%s@%s" % (self.uuid, rev)
1269
1270
1270 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1271 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1271 cleanp2):
1272 cleanp2):
1272 for parent in parents:
1273 for parent in parents:
1273 try:
1274 try:
1274 return self.revid(self.childmap[parent])
1275 return self.revid(self.childmap[parent])
1275 except KeyError:
1276 except KeyError:
1276 pass
1277 pass
1277
1278
1278 # Apply changes to working copy
1279 # Apply changes to working copy
1279 for f, v in files:
1280 for f, v in files:
1280 data, mode = source.getfile(f, v)
1281 data, mode = source.getfile(f, v)
1281 if data is None:
1282 if data is None:
1282 self.delete.append(f)
1283 self.delete.append(f)
1283 else:
1284 else:
1284 self.putfile(f, mode, data)
1285 self.putfile(f, mode, data)
1285 if f in copies:
1286 if f in copies:
1286 self.copies.append([copies[f], f])
1287 self.copies.append([copies[f], f])
1287 if full:
1288 if full:
1288 self.delete.extend(sorted(self.manifest.difference(files)))
1289 self.delete.extend(sorted(self.manifest.difference(files)))
1289 files = [f[0] for f in files]
1290 files = [f[0] for f in files]
1290
1291
1291 entries = set(self.delete)
1292 entries = set(self.delete)
1292 files = frozenset(files)
1293 files = frozenset(files)
1293 entries.update(self.add_dirs(files.difference(entries)))
1294 entries.update(self.add_dirs(files.difference(entries)))
1294 if self.copies:
1295 if self.copies:
1295 for s, d in self.copies:
1296 for s, d in self.copies:
1296 self._copyfile(s, d)
1297 self._copyfile(s, d)
1297 self.copies = []
1298 self.copies = []
1298 if self.delete:
1299 if self.delete:
1299 self.xargs(self.delete, 'delete')
1300 self.xargs(self.delete, 'delete')
1300 for f in self.delete:
1301 for f in self.delete:
1301 self.manifest.remove(f)
1302 self.manifest.remove(f)
1302 self.delete = []
1303 self.delete = []
1303 entries.update(self.add_files(files.difference(entries)))
1304 entries.update(self.add_files(files.difference(entries)))
1304 if self.delexec:
1305 if self.delexec:
1305 self.xargs(self.delexec, 'propdel', 'svn:executable')
1306 self.xargs(self.delexec, 'propdel', 'svn:executable')
1306 self.delexec = []
1307 self.delexec = []
1307 if self.setexec:
1308 if self.setexec:
1308 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1309 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1309 self.setexec = []
1310 self.setexec = []
1310
1311
1311 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1312 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1312 fp = os.fdopen(fd, pycompat.sysstr('wb'))
1313 fp = os.fdopen(fd, pycompat.sysstr('wb'))
1313 fp.write(util.tonativeeol(commit.desc))
1314 fp.write(util.tonativeeol(commit.desc))
1314 fp.close()
1315 fp.close()
1315 try:
1316 try:
1316 output = self.run0('commit',
1317 output = self.run0('commit',
1317 username=util.shortuser(commit.author),
1318 username=util.shortuser(commit.author),
1318 file=messagefile,
1319 file=messagefile,
1319 encoding='utf-8')
1320 encoding='utf-8')
1320 try:
1321 try:
1321 rev = self.commit_re.search(output).group(1)
1322 rev = self.commit_re.search(output).group(1)
1322 except AttributeError:
1323 except AttributeError:
1323 if parents and not files:
1324 if parents and not files:
1324 return parents[0]
1325 return parents[0]
1325 self.ui.warn(_('unexpected svn output:\n'))
1326 self.ui.warn(_('unexpected svn output:\n'))
1326 self.ui.warn(output)
1327 self.ui.warn(output)
1327 raise error.Abort(_('unable to cope with svn output'))
1328 raise error.Abort(_('unable to cope with svn output'))
1328 if commit.rev:
1329 if commit.rev:
1329 self.run('propset', 'hg:convert-rev', commit.rev,
1330 self.run('propset', 'hg:convert-rev', commit.rev,
1330 revprop=True, revision=rev)
1331 revprop=True, revision=rev)
1331 if commit.branch and commit.branch != 'default':
1332 if commit.branch and commit.branch != 'default':
1332 self.run('propset', 'hg:convert-branch', commit.branch,
1333 self.run('propset', 'hg:convert-branch', commit.branch,
1333 revprop=True, revision=rev)
1334 revprop=True, revision=rev)
1334 for parent in parents:
1335 for parent in parents:
1335 self.addchild(parent, rev)
1336 self.addchild(parent, rev)
1336 return self.revid(rev)
1337 return self.revid(rev)
1337 finally:
1338 finally:
1338 os.unlink(messagefile)
1339 os.unlink(messagefile)
1339
1340
1340 def puttags(self, tags):
1341 def puttags(self, tags):
1341 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1342 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1342 return None, None
1343 return None, None
1343
1344
1344 def hascommitfrommap(self, rev):
1345 def hascommitfrommap(self, rev):
1345 # We trust that revisions referenced in a map still is present
1346 # We trust that revisions referenced in a map still is present
1346 # TODO: implement something better if necessary and feasible
1347 # TODO: implement something better if necessary and feasible
1347 return True
1348 return True
1348
1349
1349 def hascommitforsplicemap(self, rev):
1350 def hascommitforsplicemap(self, rev):
1350 # This is not correct as one can convert to an existing subversion
1351 # This is not correct as one can convert to an existing subversion
1351 # repository and childmap would not list all revisions. Too bad.
1352 # repository and childmap would not list all revisions. Too bad.
1352 if rev in self.childmap:
1353 if rev in self.childmap:
1353 return True
1354 return True
1354 raise error.Abort(_('splice map revision %s not found in subversion '
1355 raise error.Abort(_('splice map revision %s not found in subversion '
1355 'child map (revision lookups are not implemented)')
1356 'child map (revision lookups are not implemented)')
1356 % rev)
1357 % rev)
@@ -1,167 +1,168 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.node import (
13 from mercurial.node import (
14 short,
14 short,
15 )
15 )
16 from mercurial import (
16 from mercurial import (
17 cmdutil,
17 cmdutil,
18 error,
18 error,
19 exchange,
19 exchange,
20 hg,
20 hg,
21 lock,
21 lock,
22 pycompat,
22 pycompat,
23 registrar,
23 registrar,
24 util,
24 util,
25 )
25 )
26 from mercurial.utils import dateutil
26
27
27 release = lock.release
28 release = lock.release
28 cmdtable = {}
29 cmdtable = {}
29 command = registrar.command(cmdtable)
30 command = registrar.command(cmdtable)
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
33 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
34 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
35 testedwith = 'ships-with-hg-core'
35
36
36 @command('fetch',
37 @command('fetch',
37 [('r', 'rev', [],
38 [('r', 'rev', [],
38 _('a specific revision you would like to pull'), _('REV')),
39 _('a specific revision you would like to pull'), _('REV')),
39 ('', 'edit', None, _('invoke editor on commit messages')),
40 ('', 'edit', None, _('invoke editor on commit messages')),
40 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
41 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
41 ('', 'switch-parent', None, _('switch parents when merging')),
42 ('', 'switch-parent', None, _('switch parents when merging')),
42 ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
43 ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
43 _('hg fetch [SOURCE]'))
44 _('hg fetch [SOURCE]'))
44 def fetch(ui, repo, source='default', **opts):
45 def fetch(ui, repo, source='default', **opts):
45 '''pull changes from a remote repository, merge new changes if needed.
46 '''pull changes from a remote repository, merge new changes if needed.
46
47
47 This finds all changes from the repository at the specified path
48 This finds all changes from the repository at the specified path
48 or URL and adds them to the local repository.
49 or URL and adds them to the local repository.
49
50
50 If the pulled changes add a new branch head, the head is
51 If the pulled changes add a new branch head, the head is
51 automatically merged, and the result of the merge is committed.
52 automatically merged, and the result of the merge is committed.
52 Otherwise, the working directory is updated to include the new
53 Otherwise, the working directory is updated to include the new
53 changes.
54 changes.
54
55
55 When a merge is needed, the working directory is first updated to
56 When a merge is needed, the working directory is first updated to
56 the newly pulled changes. Local changes are then merged into the
57 the newly pulled changes. Local changes are then merged into the
57 pulled changes. To switch the merge order, use --switch-parent.
58 pulled changes. To switch the merge order, use --switch-parent.
58
59
59 See :hg:`help dates` for a list of formats valid for -d/--date.
60 See :hg:`help dates` for a list of formats valid for -d/--date.
60
61
61 Returns 0 on success.
62 Returns 0 on success.
62 '''
63 '''
63
64
64 opts = pycompat.byteskwargs(opts)
65 opts = pycompat.byteskwargs(opts)
65 date = opts.get('date')
66 date = opts.get('date')
66 if date:
67 if date:
67 opts['date'] = util.parsedate(date)
68 opts['date'] = dateutil.parsedate(date)
68
69
69 parent, _p2 = repo.dirstate.parents()
70 parent, _p2 = repo.dirstate.parents()
70 branch = repo.dirstate.branch()
71 branch = repo.dirstate.branch()
71 try:
72 try:
72 branchnode = repo.branchtip(branch)
73 branchnode = repo.branchtip(branch)
73 except error.RepoLookupError:
74 except error.RepoLookupError:
74 branchnode = None
75 branchnode = None
75 if parent != branchnode:
76 if parent != branchnode:
76 raise error.Abort(_('working directory not at branch tip'),
77 raise error.Abort(_('working directory not at branch tip'),
77 hint=_("use 'hg update' to check out branch tip"))
78 hint=_("use 'hg update' to check out branch tip"))
78
79
79 wlock = lock = None
80 wlock = lock = None
80 try:
81 try:
81 wlock = repo.wlock()
82 wlock = repo.wlock()
82 lock = repo.lock()
83 lock = repo.lock()
83
84
84 cmdutil.bailifchanged(repo)
85 cmdutil.bailifchanged(repo)
85
86
86 bheads = repo.branchheads(branch)
87 bheads = repo.branchheads(branch)
87 bheads = [head for head in bheads if len(repo[head].children()) == 0]
88 bheads = [head for head in bheads if len(repo[head].children()) == 0]
88 if len(bheads) > 1:
89 if len(bheads) > 1:
89 raise error.Abort(_('multiple heads in this branch '
90 raise error.Abort(_('multiple heads in this branch '
90 '(use "hg heads ." and "hg merge" to merge)'))
91 '(use "hg heads ." and "hg merge" to merge)'))
91
92
92 other = hg.peer(repo, opts, ui.expandpath(source))
93 other = hg.peer(repo, opts, ui.expandpath(source))
93 ui.status(_('pulling from %s\n') %
94 ui.status(_('pulling from %s\n') %
94 util.hidepassword(ui.expandpath(source)))
95 util.hidepassword(ui.expandpath(source)))
95 revs = None
96 revs = None
96 if opts['rev']:
97 if opts['rev']:
97 try:
98 try:
98 revs = [other.lookup(rev) for rev in opts['rev']]
99 revs = [other.lookup(rev) for rev in opts['rev']]
99 except error.CapabilityError:
100 except error.CapabilityError:
100 err = _("other repository doesn't support revision lookup, "
101 err = _("other repository doesn't support revision lookup, "
101 "so a rev cannot be specified.")
102 "so a rev cannot be specified.")
102 raise error.Abort(err)
103 raise error.Abort(err)
103
104
104 # Are there any changes at all?
105 # Are there any changes at all?
105 modheads = exchange.pull(repo, other, heads=revs).cgresult
106 modheads = exchange.pull(repo, other, heads=revs).cgresult
106 if modheads == 0:
107 if modheads == 0:
107 return 0
108 return 0
108
109
109 # Is this a simple fast-forward along the current branch?
110 # Is this a simple fast-forward along the current branch?
110 newheads = repo.branchheads(branch)
111 newheads = repo.branchheads(branch)
111 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
112 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
112 if len(newheads) == 1 and len(newchildren):
113 if len(newheads) == 1 and len(newchildren):
113 if newchildren[0] != parent:
114 if newchildren[0] != parent:
114 return hg.update(repo, newchildren[0])
115 return hg.update(repo, newchildren[0])
115 else:
116 else:
116 return 0
117 return 0
117
118
118 # Are there more than one additional branch heads?
119 # Are there more than one additional branch heads?
119 newchildren = [n for n in newchildren if n != parent]
120 newchildren = [n for n in newchildren if n != parent]
120 newparent = parent
121 newparent = parent
121 if newchildren:
122 if newchildren:
122 newparent = newchildren[0]
123 newparent = newchildren[0]
123 hg.clean(repo, newparent)
124 hg.clean(repo, newparent)
124 newheads = [n for n in newheads if n != newparent]
125 newheads = [n for n in newheads if n != newparent]
125 if len(newheads) > 1:
126 if len(newheads) > 1:
126 ui.status(_('not merging with %d other new branch heads '
127 ui.status(_('not merging with %d other new branch heads '
127 '(use "hg heads ." and "hg merge" to merge them)\n') %
128 '(use "hg heads ." and "hg merge" to merge them)\n') %
128 (len(newheads) - 1))
129 (len(newheads) - 1))
129 return 1
130 return 1
130
131
131 if not newheads:
132 if not newheads:
132 return 0
133 return 0
133
134
134 # Otherwise, let's merge.
135 # Otherwise, let's merge.
135 err = False
136 err = False
136 if newheads:
137 if newheads:
137 # By default, we consider the repository we're pulling
138 # By default, we consider the repository we're pulling
138 # *from* as authoritative, so we merge our changes into
139 # *from* as authoritative, so we merge our changes into
139 # theirs.
140 # theirs.
140 if opts['switch_parent']:
141 if opts['switch_parent']:
141 firstparent, secondparent = newparent, newheads[0]
142 firstparent, secondparent = newparent, newheads[0]
142 else:
143 else:
143 firstparent, secondparent = newheads[0], newparent
144 firstparent, secondparent = newheads[0], newparent
144 ui.status(_('updating to %d:%s\n') %
145 ui.status(_('updating to %d:%s\n') %
145 (repo.changelog.rev(firstparent),
146 (repo.changelog.rev(firstparent),
146 short(firstparent)))
147 short(firstparent)))
147 hg.clean(repo, firstparent)
148 hg.clean(repo, firstparent)
148 ui.status(_('merging with %d:%s\n') %
149 ui.status(_('merging with %d:%s\n') %
149 (repo.changelog.rev(secondparent), short(secondparent)))
150 (repo.changelog.rev(secondparent), short(secondparent)))
150 err = hg.merge(repo, secondparent, remind=False)
151 err = hg.merge(repo, secondparent, remind=False)
151
152
152 if not err:
153 if not err:
153 # we don't translate commit messages
154 # we don't translate commit messages
154 message = (cmdutil.logmessage(ui, opts) or
155 message = (cmdutil.logmessage(ui, opts) or
155 ('Automated merge with %s' %
156 ('Automated merge with %s' %
156 util.removeauth(other.url())))
157 util.removeauth(other.url())))
157 editopt = opts.get('edit') or opts.get('force_editor')
158 editopt = opts.get('edit') or opts.get('force_editor')
158 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
159 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
159 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
160 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
160 ui.status(_('new changeset %d:%s merges remote changes '
161 ui.status(_('new changeset %d:%s merges remote changes '
161 'with local\n') % (repo.changelog.rev(n),
162 'with local\n') % (repo.changelog.rev(n),
162 short(n)))
163 short(n)))
163
164
164 return err
165 return err
165
166
166 finally:
167 finally:
167 release(lock, wlock)
168 release(lock, wlock)
@@ -1,327 +1,328 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import os
11 import os
12 import tempfile
12 import tempfile
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 cmdutil,
16 cmdutil,
17 error,
17 error,
18 match,
18 match,
19 node as hgnode,
19 node as hgnode,
20 pycompat,
20 pycompat,
21 registrar,
21 registrar,
22 util,
22 util,
23 )
23 )
24 from mercurial.utils import dateutil
24
25
25 cmdtable = {}
26 cmdtable = {}
26 command = registrar.command(cmdtable)
27 command = registrar.command(cmdtable)
27 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
28 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # be specifying the version(s) of Mercurial they are tested with, or
30 # be specifying the version(s) of Mercurial they are tested with, or
30 # leave the attribute unspecified.
31 # leave the attribute unspecified.
31 testedwith = 'ships-with-hg-core'
32 testedwith = 'ships-with-hg-core'
32
33
33 configtable = {}
34 configtable = {}
34 configitem = registrar.configitem(configtable)
35 configitem = registrar.configitem(configtable)
35
36
36 configitem('gpg', 'cmd',
37 configitem('gpg', 'cmd',
37 default='gpg',
38 default='gpg',
38 )
39 )
39 configitem('gpg', 'key',
40 configitem('gpg', 'key',
40 default=None,
41 default=None,
41 )
42 )
42 configitem('gpg', '.*',
43 configitem('gpg', '.*',
43 default=None,
44 default=None,
44 generic=True,
45 generic=True,
45 )
46 )
46
47
47 class gpg(object):
48 class gpg(object):
48 def __init__(self, path, key=None):
49 def __init__(self, path, key=None):
49 self.path = path
50 self.path = path
50 self.key = (key and " --local-user \"%s\"" % key) or ""
51 self.key = (key and " --local-user \"%s\"" % key) or ""
51
52
52 def sign(self, data):
53 def sign(self, data):
53 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
54 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
54 return util.filter(data, gpgcmd)
55 return util.filter(data, gpgcmd)
55
56
56 def verify(self, data, sig):
57 def verify(self, data, sig):
57 """ returns of the good and bad signatures"""
58 """ returns of the good and bad signatures"""
58 sigfile = datafile = None
59 sigfile = datafile = None
59 try:
60 try:
60 # create temporary files
61 # create temporary files
61 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
62 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
62 fp = os.fdopen(fd, pycompat.sysstr('wb'))
63 fp = os.fdopen(fd, pycompat.sysstr('wb'))
63 fp.write(sig)
64 fp.write(sig)
64 fp.close()
65 fp.close()
65 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
66 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
66 fp = os.fdopen(fd, pycompat.sysstr('wb'))
67 fp = os.fdopen(fd, pycompat.sysstr('wb'))
67 fp.write(data)
68 fp.write(data)
68 fp.close()
69 fp.close()
69 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
70 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
70 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
71 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
71 ret = util.filter("", gpgcmd)
72 ret = util.filter("", gpgcmd)
72 finally:
73 finally:
73 for f in (sigfile, datafile):
74 for f in (sigfile, datafile):
74 try:
75 try:
75 if f:
76 if f:
76 os.unlink(f)
77 os.unlink(f)
77 except OSError:
78 except OSError:
78 pass
79 pass
79 keys = []
80 keys = []
80 key, fingerprint = None, None
81 key, fingerprint = None, None
81 for l in ret.splitlines():
82 for l in ret.splitlines():
82 # see DETAILS in the gnupg documentation
83 # see DETAILS in the gnupg documentation
83 # filter the logger output
84 # filter the logger output
84 if not l.startswith("[GNUPG:]"):
85 if not l.startswith("[GNUPG:]"):
85 continue
86 continue
86 l = l[9:]
87 l = l[9:]
87 if l.startswith("VALIDSIG"):
88 if l.startswith("VALIDSIG"):
88 # fingerprint of the primary key
89 # fingerprint of the primary key
89 fingerprint = l.split()[10]
90 fingerprint = l.split()[10]
90 elif l.startswith("ERRSIG"):
91 elif l.startswith("ERRSIG"):
91 key = l.split(" ", 3)[:2]
92 key = l.split(" ", 3)[:2]
92 key.append("")
93 key.append("")
93 fingerprint = None
94 fingerprint = None
94 elif (l.startswith("GOODSIG") or
95 elif (l.startswith("GOODSIG") or
95 l.startswith("EXPSIG") or
96 l.startswith("EXPSIG") or
96 l.startswith("EXPKEYSIG") or
97 l.startswith("EXPKEYSIG") or
97 l.startswith("BADSIG")):
98 l.startswith("BADSIG")):
98 if key is not None:
99 if key is not None:
99 keys.append(key + [fingerprint])
100 keys.append(key + [fingerprint])
100 key = l.split(" ", 2)
101 key = l.split(" ", 2)
101 fingerprint = None
102 fingerprint = None
102 if key is not None:
103 if key is not None:
103 keys.append(key + [fingerprint])
104 keys.append(key + [fingerprint])
104 return keys
105 return keys
105
106
106 def newgpg(ui, **opts):
107 def newgpg(ui, **opts):
107 """create a new gpg instance"""
108 """create a new gpg instance"""
108 gpgpath = ui.config("gpg", "cmd")
109 gpgpath = ui.config("gpg", "cmd")
109 gpgkey = opts.get(r'key')
110 gpgkey = opts.get(r'key')
110 if not gpgkey:
111 if not gpgkey:
111 gpgkey = ui.config("gpg", "key")
112 gpgkey = ui.config("gpg", "key")
112 return gpg(gpgpath, gpgkey)
113 return gpg(gpgpath, gpgkey)
113
114
114 def sigwalk(repo):
115 def sigwalk(repo):
115 """
116 """
116 walk over every sigs, yields a couple
117 walk over every sigs, yields a couple
117 ((node, version, sig), (filename, linenumber))
118 ((node, version, sig), (filename, linenumber))
118 """
119 """
119 def parsefile(fileiter, context):
120 def parsefile(fileiter, context):
120 ln = 1
121 ln = 1
121 for l in fileiter:
122 for l in fileiter:
122 if not l:
123 if not l:
123 continue
124 continue
124 yield (l.split(" ", 2), (context, ln))
125 yield (l.split(" ", 2), (context, ln))
125 ln += 1
126 ln += 1
126
127
127 # read the heads
128 # read the heads
128 fl = repo.file(".hgsigs")
129 fl = repo.file(".hgsigs")
129 for r in reversed(fl.heads()):
130 for r in reversed(fl.heads()):
130 fn = ".hgsigs|%s" % hgnode.short(r)
131 fn = ".hgsigs|%s" % hgnode.short(r)
131 for item in parsefile(fl.read(r).splitlines(), fn):
132 for item in parsefile(fl.read(r).splitlines(), fn):
132 yield item
133 yield item
133 try:
134 try:
134 # read local signatures
135 # read local signatures
135 fn = "localsigs"
136 fn = "localsigs"
136 for item in parsefile(repo.vfs(fn), fn):
137 for item in parsefile(repo.vfs(fn), fn):
137 yield item
138 yield item
138 except IOError:
139 except IOError:
139 pass
140 pass
140
141
141 def getkeys(ui, repo, mygpg, sigdata, context):
142 def getkeys(ui, repo, mygpg, sigdata, context):
142 """get the keys who signed a data"""
143 """get the keys who signed a data"""
143 fn, ln = context
144 fn, ln = context
144 node, version, sig = sigdata
145 node, version, sig = sigdata
145 prefix = "%s:%d" % (fn, ln)
146 prefix = "%s:%d" % (fn, ln)
146 node = hgnode.bin(node)
147 node = hgnode.bin(node)
147
148
148 data = node2txt(repo, node, version)
149 data = node2txt(repo, node, version)
149 sig = binascii.a2b_base64(sig)
150 sig = binascii.a2b_base64(sig)
150 keys = mygpg.verify(data, sig)
151 keys = mygpg.verify(data, sig)
151
152
152 validkeys = []
153 validkeys = []
153 # warn for expired key and/or sigs
154 # warn for expired key and/or sigs
154 for key in keys:
155 for key in keys:
155 if key[0] == "ERRSIG":
156 if key[0] == "ERRSIG":
156 ui.write(_("%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
157 ui.write(_("%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
157 continue
158 continue
158 if key[0] == "BADSIG":
159 if key[0] == "BADSIG":
159 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
160 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
160 continue
161 continue
161 if key[0] == "EXPSIG":
162 if key[0] == "EXPSIG":
162 ui.write(_("%s Note: Signature has expired"
163 ui.write(_("%s Note: Signature has expired"
163 " (signed by: \"%s\")\n") % (prefix, key[2]))
164 " (signed by: \"%s\")\n") % (prefix, key[2]))
164 elif key[0] == "EXPKEYSIG":
165 elif key[0] == "EXPKEYSIG":
165 ui.write(_("%s Note: This key has expired"
166 ui.write(_("%s Note: This key has expired"
166 " (signed by: \"%s\")\n") % (prefix, key[2]))
167 " (signed by: \"%s\")\n") % (prefix, key[2]))
167 validkeys.append((key[1], key[2], key[3]))
168 validkeys.append((key[1], key[2], key[3]))
168 return validkeys
169 return validkeys
169
170
170 @command("sigs", [], _('hg sigs'))
171 @command("sigs", [], _('hg sigs'))
171 def sigs(ui, repo):
172 def sigs(ui, repo):
172 """list signed changesets"""
173 """list signed changesets"""
173 mygpg = newgpg(ui)
174 mygpg = newgpg(ui)
174 revs = {}
175 revs = {}
175
176
176 for data, context in sigwalk(repo):
177 for data, context in sigwalk(repo):
177 node, version, sig = data
178 node, version, sig = data
178 fn, ln = context
179 fn, ln = context
179 try:
180 try:
180 n = repo.lookup(node)
181 n = repo.lookup(node)
181 except KeyError:
182 except KeyError:
182 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
183 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
183 continue
184 continue
184 r = repo.changelog.rev(n)
185 r = repo.changelog.rev(n)
185 keys = getkeys(ui, repo, mygpg, data, context)
186 keys = getkeys(ui, repo, mygpg, data, context)
186 if not keys:
187 if not keys:
187 continue
188 continue
188 revs.setdefault(r, [])
189 revs.setdefault(r, [])
189 revs[r].extend(keys)
190 revs[r].extend(keys)
190 for rev in sorted(revs, reverse=True):
191 for rev in sorted(revs, reverse=True):
191 for k in revs[rev]:
192 for k in revs[rev]:
192 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
193 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
193 ui.write("%-30s %s\n" % (keystr(ui, k), r))
194 ui.write("%-30s %s\n" % (keystr(ui, k), r))
194
195
195 @command("sigcheck", [], _('hg sigcheck REV'))
196 @command("sigcheck", [], _('hg sigcheck REV'))
196 def sigcheck(ui, repo, rev):
197 def sigcheck(ui, repo, rev):
197 """verify all the signatures there may be for a particular revision"""
198 """verify all the signatures there may be for a particular revision"""
198 mygpg = newgpg(ui)
199 mygpg = newgpg(ui)
199 rev = repo.lookup(rev)
200 rev = repo.lookup(rev)
200 hexrev = hgnode.hex(rev)
201 hexrev = hgnode.hex(rev)
201 keys = []
202 keys = []
202
203
203 for data, context in sigwalk(repo):
204 for data, context in sigwalk(repo):
204 node, version, sig = data
205 node, version, sig = data
205 if node == hexrev:
206 if node == hexrev:
206 k = getkeys(ui, repo, mygpg, data, context)
207 k = getkeys(ui, repo, mygpg, data, context)
207 if k:
208 if k:
208 keys.extend(k)
209 keys.extend(k)
209
210
210 if not keys:
211 if not keys:
211 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
212 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
212 return
213 return
213
214
214 # print summary
215 # print summary
215 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
216 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
216 for key in keys:
217 for key in keys:
217 ui.write(" %s\n" % keystr(ui, key))
218 ui.write(" %s\n" % keystr(ui, key))
218
219
219 def keystr(ui, key):
220 def keystr(ui, key):
220 """associate a string to a key (username, comment)"""
221 """associate a string to a key (username, comment)"""
221 keyid, user, fingerprint = key
222 keyid, user, fingerprint = key
222 comment = ui.config("gpg", fingerprint)
223 comment = ui.config("gpg", fingerprint)
223 if comment:
224 if comment:
224 return "%s (%s)" % (user, comment)
225 return "%s (%s)" % (user, comment)
225 else:
226 else:
226 return user
227 return user
227
228
228 @command("sign",
229 @command("sign",
229 [('l', 'local', None, _('make the signature local')),
230 [('l', 'local', None, _('make the signature local')),
230 ('f', 'force', None, _('sign even if the sigfile is modified')),
231 ('f', 'force', None, _('sign even if the sigfile is modified')),
231 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
232 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
232 ('k', 'key', '',
233 ('k', 'key', '',
233 _('the key id to sign with'), _('ID')),
234 _('the key id to sign with'), _('ID')),
234 ('m', 'message', '',
235 ('m', 'message', '',
235 _('use text as commit message'), _('TEXT')),
236 _('use text as commit message'), _('TEXT')),
236 ('e', 'edit', False, _('invoke editor on commit messages')),
237 ('e', 'edit', False, _('invoke editor on commit messages')),
237 ] + cmdutil.commitopts2,
238 ] + cmdutil.commitopts2,
238 _('hg sign [OPTION]... [REV]...'))
239 _('hg sign [OPTION]... [REV]...'))
239 def sign(ui, repo, *revs, **opts):
240 def sign(ui, repo, *revs, **opts):
240 """add a signature for the current or given revision
241 """add a signature for the current or given revision
241
242
242 If no revision is given, the parent of the working directory is used,
243 If no revision is given, the parent of the working directory is used,
243 or tip if no revision is checked out.
244 or tip if no revision is checked out.
244
245
245 The ``gpg.cmd`` config setting can be used to specify the command
246 The ``gpg.cmd`` config setting can be used to specify the command
246 to run. A default key can be specified with ``gpg.key``.
247 to run. A default key can be specified with ``gpg.key``.
247
248
248 See :hg:`help dates` for a list of formats valid for -d/--date.
249 See :hg:`help dates` for a list of formats valid for -d/--date.
249 """
250 """
250 with repo.wlock():
251 with repo.wlock():
251 return _dosign(ui, repo, *revs, **opts)
252 return _dosign(ui, repo, *revs, **opts)
252
253
253 def _dosign(ui, repo, *revs, **opts):
254 def _dosign(ui, repo, *revs, **opts):
254 mygpg = newgpg(ui, **opts)
255 mygpg = newgpg(ui, **opts)
255 opts = pycompat.byteskwargs(opts)
256 opts = pycompat.byteskwargs(opts)
256 sigver = "0"
257 sigver = "0"
257 sigmessage = ""
258 sigmessage = ""
258
259
259 date = opts.get('date')
260 date = opts.get('date')
260 if date:
261 if date:
261 opts['date'] = util.parsedate(date)
262 opts['date'] = dateutil.parsedate(date)
262
263
263 if revs:
264 if revs:
264 nodes = [repo.lookup(n) for n in revs]
265 nodes = [repo.lookup(n) for n in revs]
265 else:
266 else:
266 nodes = [node for node in repo.dirstate.parents()
267 nodes = [node for node in repo.dirstate.parents()
267 if node != hgnode.nullid]
268 if node != hgnode.nullid]
268 if len(nodes) > 1:
269 if len(nodes) > 1:
269 raise error.Abort(_('uncommitted merge - please provide a '
270 raise error.Abort(_('uncommitted merge - please provide a '
270 'specific revision'))
271 'specific revision'))
271 if not nodes:
272 if not nodes:
272 nodes = [repo.changelog.tip()]
273 nodes = [repo.changelog.tip()]
273
274
274 for n in nodes:
275 for n in nodes:
275 hexnode = hgnode.hex(n)
276 hexnode = hgnode.hex(n)
276 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
277 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
277 hgnode.short(n)))
278 hgnode.short(n)))
278 # build data
279 # build data
279 data = node2txt(repo, n, sigver)
280 data = node2txt(repo, n, sigver)
280 sig = mygpg.sign(data)
281 sig = mygpg.sign(data)
281 if not sig:
282 if not sig:
282 raise error.Abort(_("error while signing"))
283 raise error.Abort(_("error while signing"))
283 sig = binascii.b2a_base64(sig)
284 sig = binascii.b2a_base64(sig)
284 sig = sig.replace("\n", "")
285 sig = sig.replace("\n", "")
285 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
286 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
286
287
287 # write it
288 # write it
288 if opts['local']:
289 if opts['local']:
289 repo.vfs.append("localsigs", sigmessage)
290 repo.vfs.append("localsigs", sigmessage)
290 return
291 return
291
292
292 if not opts["force"]:
293 if not opts["force"]:
293 msigs = match.exact(repo.root, '', ['.hgsigs'])
294 msigs = match.exact(repo.root, '', ['.hgsigs'])
294 if any(repo.status(match=msigs, unknown=True, ignored=True)):
295 if any(repo.status(match=msigs, unknown=True, ignored=True)):
295 raise error.Abort(_("working copy of .hgsigs is changed "),
296 raise error.Abort(_("working copy of .hgsigs is changed "),
296 hint=_("please commit .hgsigs manually"))
297 hint=_("please commit .hgsigs manually"))
297
298
298 sigsfile = repo.wvfs(".hgsigs", "ab")
299 sigsfile = repo.wvfs(".hgsigs", "ab")
299 sigsfile.write(sigmessage)
300 sigsfile.write(sigmessage)
300 sigsfile.close()
301 sigsfile.close()
301
302
302 if '.hgsigs' not in repo.dirstate:
303 if '.hgsigs' not in repo.dirstate:
303 repo[None].add([".hgsigs"])
304 repo[None].add([".hgsigs"])
304
305
305 if opts["no_commit"]:
306 if opts["no_commit"]:
306 return
307 return
307
308
308 message = opts['message']
309 message = opts['message']
309 if not message:
310 if not message:
310 # we don't translate commit messages
311 # we don't translate commit messages
311 message = "\n".join(["Added signature for changeset %s"
312 message = "\n".join(["Added signature for changeset %s"
312 % hgnode.short(n)
313 % hgnode.short(n)
313 for n in nodes])
314 for n in nodes])
314 try:
315 try:
315 editor = cmdutil.getcommiteditor(editform='gpg.sign',
316 editor = cmdutil.getcommiteditor(editform='gpg.sign',
316 **pycompat.strkwargs(opts))
317 **pycompat.strkwargs(opts))
317 repo.commit(message, opts['user'], opts['date'], match=msigs,
318 repo.commit(message, opts['user'], opts['date'], match=msigs,
318 editor=editor)
319 editor=editor)
319 except ValueError as inst:
320 except ValueError as inst:
320 raise error.Abort(str(inst))
321 raise error.Abort(str(inst))
321
322
322 def node2txt(repo, node, ver):
323 def node2txt(repo, node, ver):
323 """map a manifest into some text"""
324 """map a manifest into some text"""
324 if ver == "0":
325 if ver == "0":
325 return "%s\n" % hgnode.hex(node)
326 return "%s\n" % hgnode.hex(node)
326 else:
327 else:
327 raise error.Abort(_("unknown signature version"))
328 raise error.Abort(_("unknown signature version"))
@@ -1,516 +1,517 b''
1 # journal.py
1 # journal.py
2 #
2 #
3 # Copyright 2014-2016 Facebook, Inc.
3 # Copyright 2014-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """track previous positions of bookmarks (EXPERIMENTAL)
7 """track previous positions of bookmarks (EXPERIMENTAL)
8
8
9 This extension adds a new command: `hg journal`, which shows you where
9 This extension adds a new command: `hg journal`, which shows you where
10 bookmarks were previously located.
10 bookmarks were previously located.
11
11
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import collections
16 import collections
17 import errno
17 import errno
18 import os
18 import os
19 import weakref
19 import weakref
20
20
21 from mercurial.i18n import _
21 from mercurial.i18n import _
22
22
23 from mercurial import (
23 from mercurial import (
24 bookmarks,
24 bookmarks,
25 cmdutil,
25 cmdutil,
26 dispatch,
26 dispatch,
27 error,
27 error,
28 extensions,
28 extensions,
29 hg,
29 hg,
30 localrepo,
30 localrepo,
31 lock,
31 lock,
32 logcmdutil,
32 logcmdutil,
33 node,
33 node,
34 pycompat,
34 pycompat,
35 registrar,
35 registrar,
36 util,
36 util,
37 )
37 )
38 from mercurial.utils import dateutil
38
39
39 cmdtable = {}
40 cmdtable = {}
40 command = registrar.command(cmdtable)
41 command = registrar.command(cmdtable)
41
42
42 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
43 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
43 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
44 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
44 # be specifying the version(s) of Mercurial they are tested with, or
45 # be specifying the version(s) of Mercurial they are tested with, or
45 # leave the attribute unspecified.
46 # leave the attribute unspecified.
46 testedwith = 'ships-with-hg-core'
47 testedwith = 'ships-with-hg-core'
47
48
48 # storage format version; increment when the format changes
49 # storage format version; increment when the format changes
49 storageversion = 0
50 storageversion = 0
50
51
51 # namespaces
52 # namespaces
52 bookmarktype = 'bookmark'
53 bookmarktype = 'bookmark'
53 wdirparenttype = 'wdirparent'
54 wdirparenttype = 'wdirparent'
54 # In a shared repository, what shared feature name is used
55 # In a shared repository, what shared feature name is used
55 # to indicate this namespace is shared with the source?
56 # to indicate this namespace is shared with the source?
56 sharednamespaces = {
57 sharednamespaces = {
57 bookmarktype: hg.sharedbookmarks,
58 bookmarktype: hg.sharedbookmarks,
58 }
59 }
59
60
60 # Journal recording, register hooks and storage object
61 # Journal recording, register hooks and storage object
61 def extsetup(ui):
62 def extsetup(ui):
62 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
63 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
63 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
64 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
64 extensions.wrapfilecache(
65 extensions.wrapfilecache(
65 localrepo.localrepository, 'dirstate', wrapdirstate)
66 localrepo.localrepository, 'dirstate', wrapdirstate)
66 extensions.wrapfunction(hg, 'postshare', wrappostshare)
67 extensions.wrapfunction(hg, 'postshare', wrappostshare)
67 extensions.wrapfunction(hg, 'copystore', unsharejournal)
68 extensions.wrapfunction(hg, 'copystore', unsharejournal)
68
69
69 def reposetup(ui, repo):
70 def reposetup(ui, repo):
70 if repo.local():
71 if repo.local():
71 repo.journal = journalstorage(repo)
72 repo.journal = journalstorage(repo)
72 repo._wlockfreeprefix.add('namejournal')
73 repo._wlockfreeprefix.add('namejournal')
73
74
74 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
75 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
75 if cached:
76 if cached:
76 # already instantiated dirstate isn't yet marked as
77 # already instantiated dirstate isn't yet marked as
77 # "journal"-ing, even though repo.dirstate() was already
78 # "journal"-ing, even though repo.dirstate() was already
78 # wrapped by own wrapdirstate()
79 # wrapped by own wrapdirstate()
79 _setupdirstate(repo, dirstate)
80 _setupdirstate(repo, dirstate)
80
81
81 def runcommand(orig, lui, repo, cmd, fullargs, *args):
82 def runcommand(orig, lui, repo, cmd, fullargs, *args):
82 """Track the command line options for recording in the journal"""
83 """Track the command line options for recording in the journal"""
83 journalstorage.recordcommand(*fullargs)
84 journalstorage.recordcommand(*fullargs)
84 return orig(lui, repo, cmd, fullargs, *args)
85 return orig(lui, repo, cmd, fullargs, *args)
85
86
86 def _setupdirstate(repo, dirstate):
87 def _setupdirstate(repo, dirstate):
87 dirstate.journalstorage = repo.journal
88 dirstate.journalstorage = repo.journal
88 dirstate.addparentchangecallback('journal', recorddirstateparents)
89 dirstate.addparentchangecallback('journal', recorddirstateparents)
89
90
90 # hooks to record dirstate changes
91 # hooks to record dirstate changes
91 def wrapdirstate(orig, repo):
92 def wrapdirstate(orig, repo):
92 """Make journal storage available to the dirstate object"""
93 """Make journal storage available to the dirstate object"""
93 dirstate = orig(repo)
94 dirstate = orig(repo)
94 if util.safehasattr(repo, 'journal'):
95 if util.safehasattr(repo, 'journal'):
95 _setupdirstate(repo, dirstate)
96 _setupdirstate(repo, dirstate)
96 return dirstate
97 return dirstate
97
98
98 def recorddirstateparents(dirstate, old, new):
99 def recorddirstateparents(dirstate, old, new):
99 """Records all dirstate parent changes in the journal."""
100 """Records all dirstate parent changes in the journal."""
100 old = list(old)
101 old = list(old)
101 new = list(new)
102 new = list(new)
102 if util.safehasattr(dirstate, 'journalstorage'):
103 if util.safehasattr(dirstate, 'journalstorage'):
103 # only record two hashes if there was a merge
104 # only record two hashes if there was a merge
104 oldhashes = old[:1] if old[1] == node.nullid else old
105 oldhashes = old[:1] if old[1] == node.nullid else old
105 newhashes = new[:1] if new[1] == node.nullid else new
106 newhashes = new[:1] if new[1] == node.nullid else new
106 dirstate.journalstorage.record(
107 dirstate.journalstorage.record(
107 wdirparenttype, '.', oldhashes, newhashes)
108 wdirparenttype, '.', oldhashes, newhashes)
108
109
109 # hooks to record bookmark changes (both local and remote)
110 # hooks to record bookmark changes (both local and remote)
110 def recordbookmarks(orig, store, fp):
111 def recordbookmarks(orig, store, fp):
111 """Records all bookmark changes in the journal."""
112 """Records all bookmark changes in the journal."""
112 repo = store._repo
113 repo = store._repo
113 if util.safehasattr(repo, 'journal'):
114 if util.safehasattr(repo, 'journal'):
114 oldmarks = bookmarks.bmstore(repo)
115 oldmarks = bookmarks.bmstore(repo)
115 for mark, value in store.iteritems():
116 for mark, value in store.iteritems():
116 oldvalue = oldmarks.get(mark, node.nullid)
117 oldvalue = oldmarks.get(mark, node.nullid)
117 if value != oldvalue:
118 if value != oldvalue:
118 repo.journal.record(bookmarktype, mark, oldvalue, value)
119 repo.journal.record(bookmarktype, mark, oldvalue, value)
119 return orig(store, fp)
120 return orig(store, fp)
120
121
121 # shared repository support
122 # shared repository support
122 def _readsharedfeatures(repo):
123 def _readsharedfeatures(repo):
123 """A set of shared features for this repository"""
124 """A set of shared features for this repository"""
124 try:
125 try:
125 return set(repo.vfs.read('shared').splitlines())
126 return set(repo.vfs.read('shared').splitlines())
126 except IOError as inst:
127 except IOError as inst:
127 if inst.errno != errno.ENOENT:
128 if inst.errno != errno.ENOENT:
128 raise
129 raise
129 return set()
130 return set()
130
131
131 def _mergeentriesiter(*iterables, **kwargs):
132 def _mergeentriesiter(*iterables, **kwargs):
132 """Given a set of sorted iterables, yield the next entry in merged order
133 """Given a set of sorted iterables, yield the next entry in merged order
133
134
134 Note that by default entries go from most recent to oldest.
135 Note that by default entries go from most recent to oldest.
135 """
136 """
136 order = kwargs.pop(r'order', max)
137 order = kwargs.pop(r'order', max)
137 iterables = [iter(it) for it in iterables]
138 iterables = [iter(it) for it in iterables]
138 # this tracks still active iterables; iterables are deleted as they are
139 # this tracks still active iterables; iterables are deleted as they are
139 # exhausted, which is why this is a dictionary and why each entry also
140 # exhausted, which is why this is a dictionary and why each entry also
140 # stores the key. Entries are mutable so we can store the next value each
141 # stores the key. Entries are mutable so we can store the next value each
141 # time.
142 # time.
142 iterable_map = {}
143 iterable_map = {}
143 for key, it in enumerate(iterables):
144 for key, it in enumerate(iterables):
144 try:
145 try:
145 iterable_map[key] = [next(it), key, it]
146 iterable_map[key] = [next(it), key, it]
146 except StopIteration:
147 except StopIteration:
147 # empty entry, can be ignored
148 # empty entry, can be ignored
148 pass
149 pass
149
150
150 while iterable_map:
151 while iterable_map:
151 value, key, it = order(iterable_map.itervalues())
152 value, key, it = order(iterable_map.itervalues())
152 yield value
153 yield value
153 try:
154 try:
154 iterable_map[key][0] = next(it)
155 iterable_map[key][0] = next(it)
155 except StopIteration:
156 except StopIteration:
156 # this iterable is empty, remove it from consideration
157 # this iterable is empty, remove it from consideration
157 del iterable_map[key]
158 del iterable_map[key]
158
159
159 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
160 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
160 """Mark this shared working copy as sharing journal information"""
161 """Mark this shared working copy as sharing journal information"""
161 with destrepo.wlock():
162 with destrepo.wlock():
162 orig(sourcerepo, destrepo, **kwargs)
163 orig(sourcerepo, destrepo, **kwargs)
163 with destrepo.vfs('shared', 'a') as fp:
164 with destrepo.vfs('shared', 'a') as fp:
164 fp.write('journal\n')
165 fp.write('journal\n')
165
166
166 def unsharejournal(orig, ui, repo, repopath):
167 def unsharejournal(orig, ui, repo, repopath):
167 """Copy shared journal entries into this repo when unsharing"""
168 """Copy shared journal entries into this repo when unsharing"""
168 if (repo.path == repopath and repo.shared() and
169 if (repo.path == repopath and repo.shared() and
169 util.safehasattr(repo, 'journal')):
170 util.safehasattr(repo, 'journal')):
170 sharedrepo = hg.sharedreposource(repo)
171 sharedrepo = hg.sharedreposource(repo)
171 sharedfeatures = _readsharedfeatures(repo)
172 sharedfeatures = _readsharedfeatures(repo)
172 if sharedrepo and sharedfeatures > {'journal'}:
173 if sharedrepo and sharedfeatures > {'journal'}:
173 # there is a shared repository and there are shared journal entries
174 # there is a shared repository and there are shared journal entries
174 # to copy. move shared date over from source to destination but
175 # to copy. move shared date over from source to destination but
175 # move the local file first
176 # move the local file first
176 if repo.vfs.exists('namejournal'):
177 if repo.vfs.exists('namejournal'):
177 journalpath = repo.vfs.join('namejournal')
178 journalpath = repo.vfs.join('namejournal')
178 util.rename(journalpath, journalpath + '.bak')
179 util.rename(journalpath, journalpath + '.bak')
179 storage = repo.journal
180 storage = repo.journal
180 local = storage._open(
181 local = storage._open(
181 repo.vfs, filename='namejournal.bak', _newestfirst=False)
182 repo.vfs, filename='namejournal.bak', _newestfirst=False)
182 shared = (
183 shared = (
183 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
184 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
184 if sharednamespaces.get(e.namespace) in sharedfeatures)
185 if sharednamespaces.get(e.namespace) in sharedfeatures)
185 for entry in _mergeentriesiter(local, shared, order=min):
186 for entry in _mergeentriesiter(local, shared, order=min):
186 storage._write(repo.vfs, entry)
187 storage._write(repo.vfs, entry)
187
188
188 return orig(ui, repo, repopath)
189 return orig(ui, repo, repopath)
189
190
190 class journalentry(collections.namedtuple(
191 class journalentry(collections.namedtuple(
191 u'journalentry',
192 u'journalentry',
192 u'timestamp user command namespace name oldhashes newhashes')):
193 u'timestamp user command namespace name oldhashes newhashes')):
193 """Individual journal entry
194 """Individual journal entry
194
195
195 * timestamp: a mercurial (time, timezone) tuple
196 * timestamp: a mercurial (time, timezone) tuple
196 * user: the username that ran the command
197 * user: the username that ran the command
197 * namespace: the entry namespace, an opaque string
198 * namespace: the entry namespace, an opaque string
198 * name: the name of the changed item, opaque string with meaning in the
199 * name: the name of the changed item, opaque string with meaning in the
199 namespace
200 namespace
200 * command: the hg command that triggered this record
201 * command: the hg command that triggered this record
201 * oldhashes: a tuple of one or more binary hashes for the old location
202 * oldhashes: a tuple of one or more binary hashes for the old location
202 * newhashes: a tuple of one or more binary hashes for the new location
203 * newhashes: a tuple of one or more binary hashes for the new location
203
204
204 Handles serialisation from and to the storage format. Fields are
205 Handles serialisation from and to the storage format. Fields are
205 separated by newlines, hashes are written out in hex separated by commas,
206 separated by newlines, hashes are written out in hex separated by commas,
206 timestamp and timezone are separated by a space.
207 timestamp and timezone are separated by a space.
207
208
208 """
209 """
209 @classmethod
210 @classmethod
210 def fromstorage(cls, line):
211 def fromstorage(cls, line):
211 (time, user, command, namespace, name,
212 (time, user, command, namespace, name,
212 oldhashes, newhashes) = line.split('\n')
213 oldhashes, newhashes) = line.split('\n')
213 timestamp, tz = time.split()
214 timestamp, tz = time.split()
214 timestamp, tz = float(timestamp), int(tz)
215 timestamp, tz = float(timestamp), int(tz)
215 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
216 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
216 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
217 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
217 return cls(
218 return cls(
218 (timestamp, tz), user, command, namespace, name,
219 (timestamp, tz), user, command, namespace, name,
219 oldhashes, newhashes)
220 oldhashes, newhashes)
220
221
221 def __str__(self):
222 def __str__(self):
222 """String representation for storage"""
223 """String representation for storage"""
223 time = ' '.join(map(str, self.timestamp))
224 time = ' '.join(map(str, self.timestamp))
224 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
225 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
225 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
226 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
226 return '\n'.join((
227 return '\n'.join((
227 time, self.user, self.command, self.namespace, self.name,
228 time, self.user, self.command, self.namespace, self.name,
228 oldhashes, newhashes))
229 oldhashes, newhashes))
229
230
230 class journalstorage(object):
231 class journalstorage(object):
231 """Storage for journal entries
232 """Storage for journal entries
232
233
233 Entries are divided over two files; one with entries that pertain to the
234 Entries are divided over two files; one with entries that pertain to the
234 local working copy *only*, and one with entries that are shared across
235 local working copy *only*, and one with entries that are shared across
235 multiple working copies when shared using the share extension.
236 multiple working copies when shared using the share extension.
236
237
237 Entries are stored with NUL bytes as separators. See the journalentry
238 Entries are stored with NUL bytes as separators. See the journalentry
238 class for the per-entry structure.
239 class for the per-entry structure.
239
240
240 The file format starts with an integer version, delimited by a NUL.
241 The file format starts with an integer version, delimited by a NUL.
241
242
242 This storage uses a dedicated lock; this makes it easier to avoid issues
243 This storage uses a dedicated lock; this makes it easier to avoid issues
243 with adding entries that added when the regular wlock is unlocked (e.g.
244 with adding entries that added when the regular wlock is unlocked (e.g.
244 the dirstate).
245 the dirstate).
245
246
246 """
247 """
247 _currentcommand = ()
248 _currentcommand = ()
248 _lockref = None
249 _lockref = None
249
250
250 def __init__(self, repo):
251 def __init__(self, repo):
251 self.user = util.getuser()
252 self.user = util.getuser()
252 self.ui = repo.ui
253 self.ui = repo.ui
253 self.vfs = repo.vfs
254 self.vfs = repo.vfs
254
255
255 # is this working copy using a shared storage?
256 # is this working copy using a shared storage?
256 self.sharedfeatures = self.sharedvfs = None
257 self.sharedfeatures = self.sharedvfs = None
257 if repo.shared():
258 if repo.shared():
258 features = _readsharedfeatures(repo)
259 features = _readsharedfeatures(repo)
259 sharedrepo = hg.sharedreposource(repo)
260 sharedrepo = hg.sharedreposource(repo)
260 if sharedrepo is not None and 'journal' in features:
261 if sharedrepo is not None and 'journal' in features:
261 self.sharedvfs = sharedrepo.vfs
262 self.sharedvfs = sharedrepo.vfs
262 self.sharedfeatures = features
263 self.sharedfeatures = features
263
264
264 # track the current command for recording in journal entries
265 # track the current command for recording in journal entries
265 @property
266 @property
266 def command(self):
267 def command(self):
267 commandstr = ' '.join(
268 commandstr = ' '.join(
268 map(util.shellquote, journalstorage._currentcommand))
269 map(util.shellquote, journalstorage._currentcommand))
269 if '\n' in commandstr:
270 if '\n' in commandstr:
270 # truncate multi-line commands
271 # truncate multi-line commands
271 commandstr = commandstr.partition('\n')[0] + ' ...'
272 commandstr = commandstr.partition('\n')[0] + ' ...'
272 return commandstr
273 return commandstr
273
274
274 @classmethod
275 @classmethod
275 def recordcommand(cls, *fullargs):
276 def recordcommand(cls, *fullargs):
276 """Set the current hg arguments, stored with recorded entries"""
277 """Set the current hg arguments, stored with recorded entries"""
277 # Set the current command on the class because we may have started
278 # Set the current command on the class because we may have started
278 # with a non-local repo (cloning for example).
279 # with a non-local repo (cloning for example).
279 cls._currentcommand = fullargs
280 cls._currentcommand = fullargs
280
281
281 def _currentlock(self, lockref):
282 def _currentlock(self, lockref):
282 """Returns the lock if it's held, or None if it's not.
283 """Returns the lock if it's held, or None if it's not.
283
284
284 (This is copied from the localrepo class)
285 (This is copied from the localrepo class)
285 """
286 """
286 if lockref is None:
287 if lockref is None:
287 return None
288 return None
288 l = lockref()
289 l = lockref()
289 if l is None or not l.held:
290 if l is None or not l.held:
290 return None
291 return None
291 return l
292 return l
292
293
293 def jlock(self, vfs):
294 def jlock(self, vfs):
294 """Create a lock for the journal file"""
295 """Create a lock for the journal file"""
295 if self._currentlock(self._lockref) is not None:
296 if self._currentlock(self._lockref) is not None:
296 raise error.Abort(_('journal lock does not support nesting'))
297 raise error.Abort(_('journal lock does not support nesting'))
297 desc = _('journal of %s') % vfs.base
298 desc = _('journal of %s') % vfs.base
298 try:
299 try:
299 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
300 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
300 except error.LockHeld as inst:
301 except error.LockHeld as inst:
301 self.ui.warn(
302 self.ui.warn(
302 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
303 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
303 # default to 600 seconds timeout
304 # default to 600 seconds timeout
304 l = lock.lock(
305 l = lock.lock(
305 vfs, 'namejournal.lock',
306 vfs, 'namejournal.lock',
306 self.ui.configint("ui", "timeout"), desc=desc)
307 self.ui.configint("ui", "timeout"), desc=desc)
307 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
308 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
308 self._lockref = weakref.ref(l)
309 self._lockref = weakref.ref(l)
309 return l
310 return l
310
311
311 def record(self, namespace, name, oldhashes, newhashes):
312 def record(self, namespace, name, oldhashes, newhashes):
312 """Record a new journal entry
313 """Record a new journal entry
313
314
314 * namespace: an opaque string; this can be used to filter on the type
315 * namespace: an opaque string; this can be used to filter on the type
315 of recorded entries.
316 of recorded entries.
316 * name: the name defining this entry; for bookmarks, this is the
317 * name: the name defining this entry; for bookmarks, this is the
317 bookmark name. Can be filtered on when retrieving entries.
318 bookmark name. Can be filtered on when retrieving entries.
318 * oldhashes and newhashes: each a single binary hash, or a list of
319 * oldhashes and newhashes: each a single binary hash, or a list of
319 binary hashes. These represent the old and new position of the named
320 binary hashes. These represent the old and new position of the named
320 item.
321 item.
321
322
322 """
323 """
323 if not isinstance(oldhashes, list):
324 if not isinstance(oldhashes, list):
324 oldhashes = [oldhashes]
325 oldhashes = [oldhashes]
325 if not isinstance(newhashes, list):
326 if not isinstance(newhashes, list):
326 newhashes = [newhashes]
327 newhashes = [newhashes]
327
328
328 entry = journalentry(
329 entry = journalentry(
329 util.makedate(), self.user, self.command, namespace, name,
330 dateutil.makedate(), self.user, self.command, namespace, name,
330 oldhashes, newhashes)
331 oldhashes, newhashes)
331
332
332 vfs = self.vfs
333 vfs = self.vfs
333 if self.sharedvfs is not None:
334 if self.sharedvfs is not None:
334 # write to the shared repository if this feature is being
335 # write to the shared repository if this feature is being
335 # shared between working copies.
336 # shared between working copies.
336 if sharednamespaces.get(namespace) in self.sharedfeatures:
337 if sharednamespaces.get(namespace) in self.sharedfeatures:
337 vfs = self.sharedvfs
338 vfs = self.sharedvfs
338
339
339 self._write(vfs, entry)
340 self._write(vfs, entry)
340
341
341 def _write(self, vfs, entry):
342 def _write(self, vfs, entry):
342 with self.jlock(vfs):
343 with self.jlock(vfs):
343 version = None
344 version = None
344 # open file in amend mode to ensure it is created if missing
345 # open file in amend mode to ensure it is created if missing
345 with vfs('namejournal', mode='a+b') as f:
346 with vfs('namejournal', mode='a+b') as f:
346 f.seek(0, os.SEEK_SET)
347 f.seek(0, os.SEEK_SET)
347 # Read just enough bytes to get a version number (up to 2
348 # Read just enough bytes to get a version number (up to 2
348 # digits plus separator)
349 # digits plus separator)
349 version = f.read(3).partition('\0')[0]
350 version = f.read(3).partition('\0')[0]
350 if version and version != str(storageversion):
351 if version and version != str(storageversion):
351 # different version of the storage. Exit early (and not
352 # different version of the storage. Exit early (and not
352 # write anything) if this is not a version we can handle or
353 # write anything) if this is not a version we can handle or
353 # the file is corrupt. In future, perhaps rotate the file
354 # the file is corrupt. In future, perhaps rotate the file
354 # instead?
355 # instead?
355 self.ui.warn(
356 self.ui.warn(
356 _("unsupported journal file version '%s'\n") % version)
357 _("unsupported journal file version '%s'\n") % version)
357 return
358 return
358 if not version:
359 if not version:
359 # empty file, write version first
360 # empty file, write version first
360 f.write(str(storageversion) + '\0')
361 f.write(str(storageversion) + '\0')
361 f.seek(0, os.SEEK_END)
362 f.seek(0, os.SEEK_END)
362 f.write(str(entry) + '\0')
363 f.write(str(entry) + '\0')
363
364
364 def filtered(self, namespace=None, name=None):
365 def filtered(self, namespace=None, name=None):
365 """Yield all journal entries with the given namespace or name
366 """Yield all journal entries with the given namespace or name
366
367
367 Both the namespace and the name are optional; if neither is given all
368 Both the namespace and the name are optional; if neither is given all
368 entries in the journal are produced.
369 entries in the journal are produced.
369
370
370 Matching supports regular expressions by using the `re:` prefix
371 Matching supports regular expressions by using the `re:` prefix
371 (use `literal:` to match names or namespaces that start with `re:`)
372 (use `literal:` to match names or namespaces that start with `re:`)
372
373
373 """
374 """
374 if namespace is not None:
375 if namespace is not None:
375 namespace = util.stringmatcher(namespace)[-1]
376 namespace = util.stringmatcher(namespace)[-1]
376 if name is not None:
377 if name is not None:
377 name = util.stringmatcher(name)[-1]
378 name = util.stringmatcher(name)[-1]
378 for entry in self:
379 for entry in self:
379 if namespace is not None and not namespace(entry.namespace):
380 if namespace is not None and not namespace(entry.namespace):
380 continue
381 continue
381 if name is not None and not name(entry.name):
382 if name is not None and not name(entry.name):
382 continue
383 continue
383 yield entry
384 yield entry
384
385
385 def __iter__(self):
386 def __iter__(self):
386 """Iterate over the storage
387 """Iterate over the storage
387
388
388 Yields journalentry instances for each contained journal record.
389 Yields journalentry instances for each contained journal record.
389
390
390 """
391 """
391 local = self._open(self.vfs)
392 local = self._open(self.vfs)
392
393
393 if self.sharedvfs is None:
394 if self.sharedvfs is None:
394 return local
395 return local
395
396
396 # iterate over both local and shared entries, but only those
397 # iterate over both local and shared entries, but only those
397 # shared entries that are among the currently shared features
398 # shared entries that are among the currently shared features
398 shared = (
399 shared = (
399 e for e in self._open(self.sharedvfs)
400 e for e in self._open(self.sharedvfs)
400 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
401 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
401 return _mergeentriesiter(local, shared)
402 return _mergeentriesiter(local, shared)
402
403
403 def _open(self, vfs, filename='namejournal', _newestfirst=True):
404 def _open(self, vfs, filename='namejournal', _newestfirst=True):
404 if not vfs.exists(filename):
405 if not vfs.exists(filename):
405 return
406 return
406
407
407 with vfs(filename) as f:
408 with vfs(filename) as f:
408 raw = f.read()
409 raw = f.read()
409
410
410 lines = raw.split('\0')
411 lines = raw.split('\0')
411 version = lines and lines[0]
412 version = lines and lines[0]
412 if version != str(storageversion):
413 if version != str(storageversion):
413 version = version or _('not available')
414 version = version or _('not available')
414 raise error.Abort(_("unknown journal file version '%s'") % version)
415 raise error.Abort(_("unknown journal file version '%s'") % version)
415
416
416 # Skip the first line, it's a version number. Normally we iterate over
417 # Skip the first line, it's a version number. Normally we iterate over
417 # these in reverse order to list newest first; only when copying across
418 # these in reverse order to list newest first; only when copying across
418 # a shared storage do we forgo reversing.
419 # a shared storage do we forgo reversing.
419 lines = lines[1:]
420 lines = lines[1:]
420 if _newestfirst:
421 if _newestfirst:
421 lines = reversed(lines)
422 lines = reversed(lines)
422 for line in lines:
423 for line in lines:
423 if not line:
424 if not line:
424 continue
425 continue
425 yield journalentry.fromstorage(line)
426 yield journalentry.fromstorage(line)
426
427
427 # journal reading
428 # journal reading
428 # log options that don't make sense for journal
429 # log options that don't make sense for journal
429 _ignoreopts = ('no-merges', 'graph')
430 _ignoreopts = ('no-merges', 'graph')
430 @command(
431 @command(
431 'journal', [
432 'journal', [
432 ('', 'all', None, 'show history for all names'),
433 ('', 'all', None, 'show history for all names'),
433 ('c', 'commits', None, 'show commit metadata'),
434 ('c', 'commits', None, 'show commit metadata'),
434 ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
435 ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
435 '[OPTION]... [BOOKMARKNAME]')
436 '[OPTION]... [BOOKMARKNAME]')
436 def journal(ui, repo, *args, **opts):
437 def journal(ui, repo, *args, **opts):
437 """show the previous position of bookmarks and the working copy
438 """show the previous position of bookmarks and the working copy
438
439
439 The journal is used to see the previous commits that bookmarks and the
440 The journal is used to see the previous commits that bookmarks and the
440 working copy pointed to. By default the previous locations for the working
441 working copy pointed to. By default the previous locations for the working
441 copy. Passing a bookmark name will show all the previous positions of
442 copy. Passing a bookmark name will show all the previous positions of
442 that bookmark. Use the --all switch to show previous locations for all
443 that bookmark. Use the --all switch to show previous locations for all
443 bookmarks and the working copy; each line will then include the bookmark
444 bookmarks and the working copy; each line will then include the bookmark
444 name, or '.' for the working copy, as well.
445 name, or '.' for the working copy, as well.
445
446
446 If `name` starts with `re:`, the remainder of the name is treated as
447 If `name` starts with `re:`, the remainder of the name is treated as
447 a regular expression. To match a name that actually starts with `re:`,
448 a regular expression. To match a name that actually starts with `re:`,
448 use the prefix `literal:`.
449 use the prefix `literal:`.
449
450
450 By default hg journal only shows the commit hash and the command that was
451 By default hg journal only shows the commit hash and the command that was
451 running at that time. -v/--verbose will show the prior hash, the user, and
452 running at that time. -v/--verbose will show the prior hash, the user, and
452 the time at which it happened.
453 the time at which it happened.
453
454
454 Use -c/--commits to output log information on each commit hash; at this
455 Use -c/--commits to output log information on each commit hash; at this
455 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
456 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
456 switches to alter the log output for these.
457 switches to alter the log output for these.
457
458
458 `hg journal -T json` can be used to produce machine readable output.
459 `hg journal -T json` can be used to produce machine readable output.
459
460
460 """
461 """
461 opts = pycompat.byteskwargs(opts)
462 opts = pycompat.byteskwargs(opts)
462 name = '.'
463 name = '.'
463 if opts.get('all'):
464 if opts.get('all'):
464 if args:
465 if args:
465 raise error.Abort(
466 raise error.Abort(
466 _("You can't combine --all and filtering on a name"))
467 _("You can't combine --all and filtering on a name"))
467 name = None
468 name = None
468 if args:
469 if args:
469 name = args[0]
470 name = args[0]
470
471
471 fm = ui.formatter('journal', opts)
472 fm = ui.formatter('journal', opts)
472
473
473 if opts.get("template") != "json":
474 if opts.get("template") != "json":
474 if name is None:
475 if name is None:
475 displayname = _('the working copy and bookmarks')
476 displayname = _('the working copy and bookmarks')
476 else:
477 else:
477 displayname = "'%s'" % name
478 displayname = "'%s'" % name
478 ui.status(_("previous locations of %s:\n") % displayname)
479 ui.status(_("previous locations of %s:\n") % displayname)
479
480
480 limit = logcmdutil.getlimit(opts)
481 limit = logcmdutil.getlimit(opts)
481 entry = None
482 entry = None
482 ui.pager('journal')
483 ui.pager('journal')
483 for count, entry in enumerate(repo.journal.filtered(name=name)):
484 for count, entry in enumerate(repo.journal.filtered(name=name)):
484 if count == limit:
485 if count == limit:
485 break
486 break
486 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
487 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
487 name='node', sep=',')
488 name='node', sep=',')
488 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
489 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
489 name='node', sep=',')
490 name='node', sep=',')
490
491
491 fm.startitem()
492 fm.startitem()
492 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
493 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
493 fm.write('newhashes', '%s', newhashesstr)
494 fm.write('newhashes', '%s', newhashesstr)
494 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
495 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
495 fm.condwrite(
496 fm.condwrite(
496 opts.get('all') or name.startswith('re:'),
497 opts.get('all') or name.startswith('re:'),
497 'name', ' %-8s', entry.name)
498 'name', ' %-8s', entry.name)
498
499
499 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
500 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
500 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
501 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
501 fm.write('command', ' %s\n', entry.command)
502 fm.write('command', ' %s\n', entry.command)
502
503
503 if opts.get("commits"):
504 if opts.get("commits"):
504 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
505 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
505 for hash in entry.newhashes:
506 for hash in entry.newhashes:
506 try:
507 try:
507 ctx = repo[hash]
508 ctx = repo[hash]
508 displayer.show(ctx)
509 displayer.show(ctx)
509 except error.RepoLookupError as e:
510 except error.RepoLookupError as e:
510 fm.write('repolookuperror', "%s\n\n", str(e))
511 fm.write('repolookuperror', "%s\n\n", str(e))
511 displayer.close()
512 displayer.close()
512
513
513 fm.end()
514 fm.end()
514
515
515 if entry is None:
516 if entry is None:
516 ui.status(_("no recorded locations\n"))
517 ui.status(_("no recorded locations\n"))
@@ -1,808 +1,811 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns the less you
57 The more specific you are in your filename patterns the less you
58 lose speed in huge repositories.
58 lose speed in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
86 from __future__ import absolute_import
87
87
88 import os
88 import os
89 import re
89 import re
90 import tempfile
90 import tempfile
91 import weakref
91 import weakref
92
92
93 from mercurial.i18n import _
93 from mercurial.i18n import _
94 from mercurial.hgweb import webcommands
94 from mercurial.hgweb import webcommands
95
95
96 from mercurial import (
96 from mercurial import (
97 cmdutil,
97 cmdutil,
98 context,
98 context,
99 dispatch,
99 dispatch,
100 error,
100 error,
101 extensions,
101 extensions,
102 filelog,
102 filelog,
103 localrepo,
103 localrepo,
104 logcmdutil,
104 logcmdutil,
105 match,
105 match,
106 patch,
106 patch,
107 pathutil,
107 pathutil,
108 pycompat,
108 pycompat,
109 registrar,
109 registrar,
110 scmutil,
110 scmutil,
111 templatefilters,
111 templatefilters,
112 util,
112 util,
113 )
113 )
114 from mercurial.utils import dateutil
114
115
115 cmdtable = {}
116 cmdtable = {}
116 command = registrar.command(cmdtable)
117 command = registrar.command(cmdtable)
117 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
118 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
118 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
119 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
119 # be specifying the version(s) of Mercurial they are tested with, or
120 # be specifying the version(s) of Mercurial they are tested with, or
120 # leave the attribute unspecified.
121 # leave the attribute unspecified.
121 testedwith = 'ships-with-hg-core'
122 testedwith = 'ships-with-hg-core'
122
123
123 # hg commands that do not act on keywords
124 # hg commands that do not act on keywords
124 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
125 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
125 ' outgoing push tip verify convert email glog')
126 ' outgoing push tip verify convert email glog')
126
127
127 # webcommands that do not act on keywords
128 # webcommands that do not act on keywords
128 nokwwebcommands = ('annotate changeset rev filediff diff comparison')
129 nokwwebcommands = ('annotate changeset rev filediff diff comparison')
129
130
130 # hg commands that trigger expansion only when writing to working dir,
131 # hg commands that trigger expansion only when writing to working dir,
131 # not when reading filelog, and unexpand when reading from working dir
132 # not when reading filelog, and unexpand when reading from working dir
132 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
133 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
133 ' unshelve rebase graft backout histedit fetch')
134 ' unshelve rebase graft backout histedit fetch')
134
135
135 # names of extensions using dorecord
136 # names of extensions using dorecord
136 recordextensions = 'record'
137 recordextensions = 'record'
137
138
138 colortable = {
139 colortable = {
139 'kwfiles.enabled': 'green bold',
140 'kwfiles.enabled': 'green bold',
140 'kwfiles.deleted': 'cyan bold underline',
141 'kwfiles.deleted': 'cyan bold underline',
141 'kwfiles.enabledunknown': 'green',
142 'kwfiles.enabledunknown': 'green',
142 'kwfiles.ignored': 'bold',
143 'kwfiles.ignored': 'bold',
143 'kwfiles.ignoredunknown': 'none'
144 'kwfiles.ignoredunknown': 'none'
144 }
145 }
145
146
146 templatefilter = registrar.templatefilter()
147 templatefilter = registrar.templatefilter()
147
148
148 configtable = {}
149 configtable = {}
149 configitem = registrar.configitem(configtable)
150 configitem = registrar.configitem(configtable)
150
151
151 configitem('keywordset', 'svn',
152 configitem('keywordset', 'svn',
152 default=False,
153 default=False,
153 )
154 )
154 # date like in cvs' $Date
155 # date like in cvs' $Date
155 @templatefilter('utcdate')
156 @templatefilter('utcdate')
156 def utcdate(text):
157 def utcdate(text):
157 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
158 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
158 '''
159 '''
159 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
160 dateformat = '%Y/%m/%d %H:%M:%S'
161 return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat)
160 # date like in svn's $Date
162 # date like in svn's $Date
161 @templatefilter('svnisodate')
163 @templatefilter('svnisodate')
162 def svnisodate(text):
164 def svnisodate(text):
163 '''Date. Returns a date in this format: "2009-08-18 13:00:13
165 '''Date. Returns a date in this format: "2009-08-18 13:00:13
164 +0200 (Tue, 18 Aug 2009)".
166 +0200 (Tue, 18 Aug 2009)".
165 '''
167 '''
166 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
168 return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
167 # date like in svn's $Id
169 # date like in svn's $Id
168 @templatefilter('svnutcdate')
170 @templatefilter('svnutcdate')
169 def svnutcdate(text):
171 def svnutcdate(text):
170 '''Date. Returns a UTC-date in this format: "2009-08-18
172 '''Date. Returns a UTC-date in this format: "2009-08-18
171 11:00:13Z".
173 11:00:13Z".
172 '''
174 '''
173 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
175 dateformat = '%Y-%m-%d %H:%M:%SZ'
176 return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat)
174
177
175 # make keyword tools accessible
178 # make keyword tools accessible
176 kwtools = {'hgcmd': ''}
179 kwtools = {'hgcmd': ''}
177
180
178 def _defaultkwmaps(ui):
181 def _defaultkwmaps(ui):
179 '''Returns default keywordmaps according to keywordset configuration.'''
182 '''Returns default keywordmaps according to keywordset configuration.'''
180 templates = {
183 templates = {
181 'Revision': '{node|short}',
184 'Revision': '{node|short}',
182 'Author': '{author|user}',
185 'Author': '{author|user}',
183 }
186 }
184 kwsets = ({
187 kwsets = ({
185 'Date': '{date|utcdate}',
188 'Date': '{date|utcdate}',
186 'RCSfile': '{file|basename},v',
189 'RCSfile': '{file|basename},v',
187 'RCSFile': '{file|basename},v', # kept for backwards compatibility
190 'RCSFile': '{file|basename},v', # kept for backwards compatibility
188 # with hg-keyword
191 # with hg-keyword
189 'Source': '{root}/{file},v',
192 'Source': '{root}/{file},v',
190 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
193 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
191 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
194 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
192 }, {
195 }, {
193 'Date': '{date|svnisodate}',
196 'Date': '{date|svnisodate}',
194 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
197 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
195 'LastChangedRevision': '{node|short}',
198 'LastChangedRevision': '{node|short}',
196 'LastChangedBy': '{author|user}',
199 'LastChangedBy': '{author|user}',
197 'LastChangedDate': '{date|svnisodate}',
200 'LastChangedDate': '{date|svnisodate}',
198 })
201 })
199 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
202 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
200 return templates
203 return templates
201
204
202 def _shrinktext(text, subfunc):
205 def _shrinktext(text, subfunc):
203 '''Helper for keyword expansion removal in text.
206 '''Helper for keyword expansion removal in text.
204 Depending on subfunc also returns number of substitutions.'''
207 Depending on subfunc also returns number of substitutions.'''
205 return subfunc(r'$\1$', text)
208 return subfunc(r'$\1$', text)
206
209
207 def _preselect(wstatus, changed):
210 def _preselect(wstatus, changed):
208 '''Retrieves modified and added files from a working directory state
211 '''Retrieves modified and added files from a working directory state
209 and returns the subset of each contained in given changed files
212 and returns the subset of each contained in given changed files
210 retrieved from a change context.'''
213 retrieved from a change context.'''
211 modified = [f for f in wstatus.modified if f in changed]
214 modified = [f for f in wstatus.modified if f in changed]
212 added = [f for f in wstatus.added if f in changed]
215 added = [f for f in wstatus.added if f in changed]
213 return modified, added
216 return modified, added
214
217
215
218
216 class kwtemplater(object):
219 class kwtemplater(object):
217 '''
220 '''
218 Sets up keyword templates, corresponding keyword regex, and
221 Sets up keyword templates, corresponding keyword regex, and
219 provides keyword substitution functions.
222 provides keyword substitution functions.
220 '''
223 '''
221
224
222 def __init__(self, ui, repo, inc, exc):
225 def __init__(self, ui, repo, inc, exc):
223 self.ui = ui
226 self.ui = ui
224 self._repo = weakref.ref(repo)
227 self._repo = weakref.ref(repo)
225 self.match = match.match(repo.root, '', [], inc, exc)
228 self.match = match.match(repo.root, '', [], inc, exc)
226 self.restrict = kwtools['hgcmd'] in restricted.split()
229 self.restrict = kwtools['hgcmd'] in restricted.split()
227 self.postcommit = False
230 self.postcommit = False
228
231
229 kwmaps = self.ui.configitems('keywordmaps')
232 kwmaps = self.ui.configitems('keywordmaps')
230 if kwmaps: # override default templates
233 if kwmaps: # override default templates
231 self.templates = dict(kwmaps)
234 self.templates = dict(kwmaps)
232 else:
235 else:
233 self.templates = _defaultkwmaps(self.ui)
236 self.templates = _defaultkwmaps(self.ui)
234
237
235 @property
238 @property
236 def repo(self):
239 def repo(self):
237 return self._repo()
240 return self._repo()
238
241
239 @util.propertycache
242 @util.propertycache
240 def escape(self):
243 def escape(self):
241 '''Returns bar-separated and escaped keywords.'''
244 '''Returns bar-separated and escaped keywords.'''
242 return '|'.join(map(re.escape, self.templates.keys()))
245 return '|'.join(map(re.escape, self.templates.keys()))
243
246
244 @util.propertycache
247 @util.propertycache
245 def rekw(self):
248 def rekw(self):
246 '''Returns regex for unexpanded keywords.'''
249 '''Returns regex for unexpanded keywords.'''
247 return re.compile(r'\$(%s)\$' % self.escape)
250 return re.compile(r'\$(%s)\$' % self.escape)
248
251
249 @util.propertycache
252 @util.propertycache
250 def rekwexp(self):
253 def rekwexp(self):
251 '''Returns regex for expanded keywords.'''
254 '''Returns regex for expanded keywords.'''
252 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
255 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
253
256
254 def substitute(self, data, path, ctx, subfunc):
257 def substitute(self, data, path, ctx, subfunc):
255 '''Replaces keywords in data with expanded template.'''
258 '''Replaces keywords in data with expanded template.'''
256 def kwsub(mobj):
259 def kwsub(mobj):
257 kw = mobj.group(1)
260 kw = mobj.group(1)
258 ct = logcmdutil.maketemplater(self.ui, self.repo,
261 ct = logcmdutil.maketemplater(self.ui, self.repo,
259 self.templates[kw])
262 self.templates[kw])
260 self.ui.pushbuffer()
263 self.ui.pushbuffer()
261 ct.show(ctx, root=self.repo.root, file=path)
264 ct.show(ctx, root=self.repo.root, file=path)
262 ekw = templatefilters.firstline(self.ui.popbuffer())
265 ekw = templatefilters.firstline(self.ui.popbuffer())
263 return '$%s: %s $' % (kw, ekw)
266 return '$%s: %s $' % (kw, ekw)
264 return subfunc(kwsub, data)
267 return subfunc(kwsub, data)
265
268
266 def linkctx(self, path, fileid):
269 def linkctx(self, path, fileid):
267 '''Similar to filelog.linkrev, but returns a changectx.'''
270 '''Similar to filelog.linkrev, but returns a changectx.'''
268 return self.repo.filectx(path, fileid=fileid).changectx()
271 return self.repo.filectx(path, fileid=fileid).changectx()
269
272
270 def expand(self, path, node, data):
273 def expand(self, path, node, data):
271 '''Returns data with keywords expanded.'''
274 '''Returns data with keywords expanded.'''
272 if not self.restrict and self.match(path) and not util.binary(data):
275 if not self.restrict and self.match(path) and not util.binary(data):
273 ctx = self.linkctx(path, node)
276 ctx = self.linkctx(path, node)
274 return self.substitute(data, path, ctx, self.rekw.sub)
277 return self.substitute(data, path, ctx, self.rekw.sub)
275 return data
278 return data
276
279
277 def iskwfile(self, cand, ctx):
280 def iskwfile(self, cand, ctx):
278 '''Returns subset of candidates which are configured for keyword
281 '''Returns subset of candidates which are configured for keyword
279 expansion but are not symbolic links.'''
282 expansion but are not symbolic links.'''
280 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
283 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
281
284
282 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
285 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
283 '''Overwrites selected files expanding/shrinking keywords.'''
286 '''Overwrites selected files expanding/shrinking keywords.'''
284 if self.restrict or lookup or self.postcommit: # exclude kw_copy
287 if self.restrict or lookup or self.postcommit: # exclude kw_copy
285 candidates = self.iskwfile(candidates, ctx)
288 candidates = self.iskwfile(candidates, ctx)
286 if not candidates:
289 if not candidates:
287 return
290 return
288 kwcmd = self.restrict and lookup # kwexpand/kwshrink
291 kwcmd = self.restrict and lookup # kwexpand/kwshrink
289 if self.restrict or expand and lookup:
292 if self.restrict or expand and lookup:
290 mf = ctx.manifest()
293 mf = ctx.manifest()
291 if self.restrict or rekw:
294 if self.restrict or rekw:
292 re_kw = self.rekw
295 re_kw = self.rekw
293 else:
296 else:
294 re_kw = self.rekwexp
297 re_kw = self.rekwexp
295 if expand:
298 if expand:
296 msg = _('overwriting %s expanding keywords\n')
299 msg = _('overwriting %s expanding keywords\n')
297 else:
300 else:
298 msg = _('overwriting %s shrinking keywords\n')
301 msg = _('overwriting %s shrinking keywords\n')
299 for f in candidates:
302 for f in candidates:
300 if self.restrict:
303 if self.restrict:
301 data = self.repo.file(f).read(mf[f])
304 data = self.repo.file(f).read(mf[f])
302 else:
305 else:
303 data = self.repo.wread(f)
306 data = self.repo.wread(f)
304 if util.binary(data):
307 if util.binary(data):
305 continue
308 continue
306 if expand:
309 if expand:
307 parents = ctx.parents()
310 parents = ctx.parents()
308 if lookup:
311 if lookup:
309 ctx = self.linkctx(f, mf[f])
312 ctx = self.linkctx(f, mf[f])
310 elif self.restrict and len(parents) > 1:
313 elif self.restrict and len(parents) > 1:
311 # merge commit
314 # merge commit
312 # in case of conflict f is in modified state during
315 # in case of conflict f is in modified state during
313 # merge, even if f does not differ from f in parent
316 # merge, even if f does not differ from f in parent
314 for p in parents:
317 for p in parents:
315 if f in p and not p[f].cmp(ctx[f]):
318 if f in p and not p[f].cmp(ctx[f]):
316 ctx = p[f].changectx()
319 ctx = p[f].changectx()
317 break
320 break
318 data, found = self.substitute(data, f, ctx, re_kw.subn)
321 data, found = self.substitute(data, f, ctx, re_kw.subn)
319 elif self.restrict:
322 elif self.restrict:
320 found = re_kw.search(data)
323 found = re_kw.search(data)
321 else:
324 else:
322 data, found = _shrinktext(data, re_kw.subn)
325 data, found = _shrinktext(data, re_kw.subn)
323 if found:
326 if found:
324 self.ui.note(msg % f)
327 self.ui.note(msg % f)
325 fp = self.repo.wvfs(f, "wb", atomictemp=True)
328 fp = self.repo.wvfs(f, "wb", atomictemp=True)
326 fp.write(data)
329 fp.write(data)
327 fp.close()
330 fp.close()
328 if kwcmd:
331 if kwcmd:
329 self.repo.dirstate.normal(f)
332 self.repo.dirstate.normal(f)
330 elif self.postcommit:
333 elif self.postcommit:
331 self.repo.dirstate.normallookup(f)
334 self.repo.dirstate.normallookup(f)
332
335
333 def shrink(self, fname, text):
336 def shrink(self, fname, text):
334 '''Returns text with all keyword substitutions removed.'''
337 '''Returns text with all keyword substitutions removed.'''
335 if self.match(fname) and not util.binary(text):
338 if self.match(fname) and not util.binary(text):
336 return _shrinktext(text, self.rekwexp.sub)
339 return _shrinktext(text, self.rekwexp.sub)
337 return text
340 return text
338
341
339 def shrinklines(self, fname, lines):
342 def shrinklines(self, fname, lines):
340 '''Returns lines with keyword substitutions removed.'''
343 '''Returns lines with keyword substitutions removed.'''
341 if self.match(fname):
344 if self.match(fname):
342 text = ''.join(lines)
345 text = ''.join(lines)
343 if not util.binary(text):
346 if not util.binary(text):
344 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
347 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
345 return lines
348 return lines
346
349
347 def wread(self, fname, data):
350 def wread(self, fname, data):
348 '''If in restricted mode returns data read from wdir with
351 '''If in restricted mode returns data read from wdir with
349 keyword substitutions removed.'''
352 keyword substitutions removed.'''
350 if self.restrict:
353 if self.restrict:
351 return self.shrink(fname, data)
354 return self.shrink(fname, data)
352 return data
355 return data
353
356
354 class kwfilelog(filelog.filelog):
357 class kwfilelog(filelog.filelog):
355 '''
358 '''
356 Subclass of filelog to hook into its read, add, cmp methods.
359 Subclass of filelog to hook into its read, add, cmp methods.
357 Keywords are "stored" unexpanded, and processed on reading.
360 Keywords are "stored" unexpanded, and processed on reading.
358 '''
361 '''
359 def __init__(self, opener, kwt, path):
362 def __init__(self, opener, kwt, path):
360 super(kwfilelog, self).__init__(opener, path)
363 super(kwfilelog, self).__init__(opener, path)
361 self.kwt = kwt
364 self.kwt = kwt
362 self.path = path
365 self.path = path
363
366
364 def read(self, node):
367 def read(self, node):
365 '''Expands keywords when reading filelog.'''
368 '''Expands keywords when reading filelog.'''
366 data = super(kwfilelog, self).read(node)
369 data = super(kwfilelog, self).read(node)
367 if self.renamed(node):
370 if self.renamed(node):
368 return data
371 return data
369 return self.kwt.expand(self.path, node, data)
372 return self.kwt.expand(self.path, node, data)
370
373
371 def add(self, text, meta, tr, link, p1=None, p2=None):
374 def add(self, text, meta, tr, link, p1=None, p2=None):
372 '''Removes keyword substitutions when adding to filelog.'''
375 '''Removes keyword substitutions when adding to filelog.'''
373 text = self.kwt.shrink(self.path, text)
376 text = self.kwt.shrink(self.path, text)
374 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
377 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
375
378
376 def cmp(self, node, text):
379 def cmp(self, node, text):
377 '''Removes keyword substitutions for comparison.'''
380 '''Removes keyword substitutions for comparison.'''
378 text = self.kwt.shrink(self.path, text)
381 text = self.kwt.shrink(self.path, text)
379 return super(kwfilelog, self).cmp(node, text)
382 return super(kwfilelog, self).cmp(node, text)
380
383
381 def _status(ui, repo, wctx, kwt, *pats, **opts):
384 def _status(ui, repo, wctx, kwt, *pats, **opts):
382 '''Bails out if [keyword] configuration is not active.
385 '''Bails out if [keyword] configuration is not active.
383 Returns status of working directory.'''
386 Returns status of working directory.'''
384 if kwt:
387 if kwt:
385 opts = pycompat.byteskwargs(opts)
388 opts = pycompat.byteskwargs(opts)
386 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
389 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
387 unknown=opts.get('unknown') or opts.get('all'))
390 unknown=opts.get('unknown') or opts.get('all'))
388 if ui.configitems('keyword'):
391 if ui.configitems('keyword'):
389 raise error.Abort(_('[keyword] patterns cannot match'))
392 raise error.Abort(_('[keyword] patterns cannot match'))
390 raise error.Abort(_('no [keyword] patterns configured'))
393 raise error.Abort(_('no [keyword] patterns configured'))
391
394
392 def _kwfwrite(ui, repo, expand, *pats, **opts):
395 def _kwfwrite(ui, repo, expand, *pats, **opts):
393 '''Selects files and passes them to kwtemplater.overwrite.'''
396 '''Selects files and passes them to kwtemplater.overwrite.'''
394 wctx = repo[None]
397 wctx = repo[None]
395 if len(wctx.parents()) > 1:
398 if len(wctx.parents()) > 1:
396 raise error.Abort(_('outstanding uncommitted merge'))
399 raise error.Abort(_('outstanding uncommitted merge'))
397 kwt = getattr(repo, '_keywordkwt', None)
400 kwt = getattr(repo, '_keywordkwt', None)
398 with repo.wlock():
401 with repo.wlock():
399 status = _status(ui, repo, wctx, kwt, *pats, **opts)
402 status = _status(ui, repo, wctx, kwt, *pats, **opts)
400 if status.modified or status.added or status.removed or status.deleted:
403 if status.modified or status.added or status.removed or status.deleted:
401 raise error.Abort(_('outstanding uncommitted changes'))
404 raise error.Abort(_('outstanding uncommitted changes'))
402 kwt.overwrite(wctx, status.clean, True, expand)
405 kwt.overwrite(wctx, status.clean, True, expand)
403
406
404 @command('kwdemo',
407 @command('kwdemo',
405 [('d', 'default', None, _('show default keyword template maps')),
408 [('d', 'default', None, _('show default keyword template maps')),
406 ('f', 'rcfile', '',
409 ('f', 'rcfile', '',
407 _('read maps from rcfile'), _('FILE'))],
410 _('read maps from rcfile'), _('FILE'))],
408 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
411 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
409 optionalrepo=True)
412 optionalrepo=True)
410 def demo(ui, repo, *args, **opts):
413 def demo(ui, repo, *args, **opts):
411 '''print [keywordmaps] configuration and an expansion example
414 '''print [keywordmaps] configuration and an expansion example
412
415
413 Show current, custom, or default keyword template maps and their
416 Show current, custom, or default keyword template maps and their
414 expansions.
417 expansions.
415
418
416 Extend the current configuration by specifying maps as arguments
419 Extend the current configuration by specifying maps as arguments
417 and using -f/--rcfile to source an external hgrc file.
420 and using -f/--rcfile to source an external hgrc file.
418
421
419 Use -d/--default to disable current configuration.
422 Use -d/--default to disable current configuration.
420
423
421 See :hg:`help templates` for information on templates and filters.
424 See :hg:`help templates` for information on templates and filters.
422 '''
425 '''
423 def demoitems(section, items):
426 def demoitems(section, items):
424 ui.write('[%s]\n' % section)
427 ui.write('[%s]\n' % section)
425 for k, v in sorted(items):
428 for k, v in sorted(items):
426 ui.write('%s = %s\n' % (k, v))
429 ui.write('%s = %s\n' % (k, v))
427
430
428 fn = 'demo.txt'
431 fn = 'demo.txt'
429 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
432 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
430 ui.note(_('creating temporary repository at %s\n') % tmpdir)
433 ui.note(_('creating temporary repository at %s\n') % tmpdir)
431 if repo is None:
434 if repo is None:
432 baseui = ui
435 baseui = ui
433 else:
436 else:
434 baseui = repo.baseui
437 baseui = repo.baseui
435 repo = localrepo.localrepository(baseui, tmpdir, True)
438 repo = localrepo.localrepository(baseui, tmpdir, True)
436 ui.setconfig('keyword', fn, '', 'keyword')
439 ui.setconfig('keyword', fn, '', 'keyword')
437 svn = ui.configbool('keywordset', 'svn')
440 svn = ui.configbool('keywordset', 'svn')
438 # explicitly set keywordset for demo output
441 # explicitly set keywordset for demo output
439 ui.setconfig('keywordset', 'svn', svn, 'keyword')
442 ui.setconfig('keywordset', 'svn', svn, 'keyword')
440
443
441 uikwmaps = ui.configitems('keywordmaps')
444 uikwmaps = ui.configitems('keywordmaps')
442 if args or opts.get(r'rcfile'):
445 if args or opts.get(r'rcfile'):
443 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
446 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
444 if uikwmaps:
447 if uikwmaps:
445 ui.status(_('\textending current template maps\n'))
448 ui.status(_('\textending current template maps\n'))
446 if opts.get(r'default') or not uikwmaps:
449 if opts.get(r'default') or not uikwmaps:
447 if svn:
450 if svn:
448 ui.status(_('\toverriding default svn keywordset\n'))
451 ui.status(_('\toverriding default svn keywordset\n'))
449 else:
452 else:
450 ui.status(_('\toverriding default cvs keywordset\n'))
453 ui.status(_('\toverriding default cvs keywordset\n'))
451 if opts.get(r'rcfile'):
454 if opts.get(r'rcfile'):
452 ui.readconfig(opts.get('rcfile'))
455 ui.readconfig(opts.get('rcfile'))
453 if args:
456 if args:
454 # simulate hgrc parsing
457 # simulate hgrc parsing
455 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
458 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
456 repo.vfs.write('hgrc', rcmaps)
459 repo.vfs.write('hgrc', rcmaps)
457 ui.readconfig(repo.vfs.join('hgrc'))
460 ui.readconfig(repo.vfs.join('hgrc'))
458 kwmaps = dict(ui.configitems('keywordmaps'))
461 kwmaps = dict(ui.configitems('keywordmaps'))
459 elif opts.get(r'default'):
462 elif opts.get(r'default'):
460 if svn:
463 if svn:
461 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
464 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
462 else:
465 else:
463 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
466 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
464 kwmaps = _defaultkwmaps(ui)
467 kwmaps = _defaultkwmaps(ui)
465 if uikwmaps:
468 if uikwmaps:
466 ui.status(_('\tdisabling current template maps\n'))
469 ui.status(_('\tdisabling current template maps\n'))
467 for k, v in kwmaps.iteritems():
470 for k, v in kwmaps.iteritems():
468 ui.setconfig('keywordmaps', k, v, 'keyword')
471 ui.setconfig('keywordmaps', k, v, 'keyword')
469 else:
472 else:
470 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
473 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
471 if uikwmaps:
474 if uikwmaps:
472 kwmaps = dict(uikwmaps)
475 kwmaps = dict(uikwmaps)
473 else:
476 else:
474 kwmaps = _defaultkwmaps(ui)
477 kwmaps = _defaultkwmaps(ui)
475
478
476 uisetup(ui)
479 uisetup(ui)
477 reposetup(ui, repo)
480 reposetup(ui, repo)
478 ui.write(('[extensions]\nkeyword =\n'))
481 ui.write(('[extensions]\nkeyword =\n'))
479 demoitems('keyword', ui.configitems('keyword'))
482 demoitems('keyword', ui.configitems('keyword'))
480 demoitems('keywordset', ui.configitems('keywordset'))
483 demoitems('keywordset', ui.configitems('keywordset'))
481 demoitems('keywordmaps', kwmaps.iteritems())
484 demoitems('keywordmaps', kwmaps.iteritems())
482 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
485 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
483 repo.wvfs.write(fn, keywords)
486 repo.wvfs.write(fn, keywords)
484 repo[None].add([fn])
487 repo[None].add([fn])
485 ui.note(_('\nkeywords written to %s:\n') % fn)
488 ui.note(_('\nkeywords written to %s:\n') % fn)
486 ui.note(keywords)
489 ui.note(keywords)
487 with repo.wlock():
490 with repo.wlock():
488 repo.dirstate.setbranch('demobranch')
491 repo.dirstate.setbranch('demobranch')
489 for name, cmd in ui.configitems('hooks'):
492 for name, cmd in ui.configitems('hooks'):
490 if name.split('.', 1)[0].find('commit') > -1:
493 if name.split('.', 1)[0].find('commit') > -1:
491 repo.ui.setconfig('hooks', name, '', 'keyword')
494 repo.ui.setconfig('hooks', name, '', 'keyword')
492 msg = _('hg keyword configuration and expansion example')
495 msg = _('hg keyword configuration and expansion example')
493 ui.note(("hg ci -m '%s'\n" % msg))
496 ui.note(("hg ci -m '%s'\n" % msg))
494 repo.commit(text=msg)
497 repo.commit(text=msg)
495 ui.status(_('\n\tkeywords expanded\n'))
498 ui.status(_('\n\tkeywords expanded\n'))
496 ui.write(repo.wread(fn))
499 ui.write(repo.wread(fn))
497 repo.wvfs.rmtree(repo.root)
500 repo.wvfs.rmtree(repo.root)
498
501
499 @command('kwexpand',
502 @command('kwexpand',
500 cmdutil.walkopts,
503 cmdutil.walkopts,
501 _('hg kwexpand [OPTION]... [FILE]...'),
504 _('hg kwexpand [OPTION]... [FILE]...'),
502 inferrepo=True)
505 inferrepo=True)
503 def expand(ui, repo, *pats, **opts):
506 def expand(ui, repo, *pats, **opts):
504 '''expand keywords in the working directory
507 '''expand keywords in the working directory
505
508
506 Run after (re)enabling keyword expansion.
509 Run after (re)enabling keyword expansion.
507
510
508 kwexpand refuses to run if given files contain local changes.
511 kwexpand refuses to run if given files contain local changes.
509 '''
512 '''
510 # 3rd argument sets expansion to True
513 # 3rd argument sets expansion to True
511 _kwfwrite(ui, repo, True, *pats, **opts)
514 _kwfwrite(ui, repo, True, *pats, **opts)
512
515
513 @command('kwfiles',
516 @command('kwfiles',
514 [('A', 'all', None, _('show keyword status flags of all files')),
517 [('A', 'all', None, _('show keyword status flags of all files')),
515 ('i', 'ignore', None, _('show files excluded from expansion')),
518 ('i', 'ignore', None, _('show files excluded from expansion')),
516 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
519 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
517 ] + cmdutil.walkopts,
520 ] + cmdutil.walkopts,
518 _('hg kwfiles [OPTION]... [FILE]...'),
521 _('hg kwfiles [OPTION]... [FILE]...'),
519 inferrepo=True)
522 inferrepo=True)
520 def files(ui, repo, *pats, **opts):
523 def files(ui, repo, *pats, **opts):
521 '''show files configured for keyword expansion
524 '''show files configured for keyword expansion
522
525
523 List which files in the working directory are matched by the
526 List which files in the working directory are matched by the
524 [keyword] configuration patterns.
527 [keyword] configuration patterns.
525
528
526 Useful to prevent inadvertent keyword expansion and to speed up
529 Useful to prevent inadvertent keyword expansion and to speed up
527 execution by including only files that are actual candidates for
530 execution by including only files that are actual candidates for
528 expansion.
531 expansion.
529
532
530 See :hg:`help keyword` on how to construct patterns both for
533 See :hg:`help keyword` on how to construct patterns both for
531 inclusion and exclusion of files.
534 inclusion and exclusion of files.
532
535
533 With -A/--all and -v/--verbose the codes used to show the status
536 With -A/--all and -v/--verbose the codes used to show the status
534 of files are::
537 of files are::
535
538
536 K = keyword expansion candidate
539 K = keyword expansion candidate
537 k = keyword expansion candidate (not tracked)
540 k = keyword expansion candidate (not tracked)
538 I = ignored
541 I = ignored
539 i = ignored (not tracked)
542 i = ignored (not tracked)
540 '''
543 '''
541 kwt = getattr(repo, '_keywordkwt', None)
544 kwt = getattr(repo, '_keywordkwt', None)
542 wctx = repo[None]
545 wctx = repo[None]
543 status = _status(ui, repo, wctx, kwt, *pats, **opts)
546 status = _status(ui, repo, wctx, kwt, *pats, **opts)
544 if pats:
547 if pats:
545 cwd = repo.getcwd()
548 cwd = repo.getcwd()
546 else:
549 else:
547 cwd = ''
550 cwd = ''
548 files = []
551 files = []
549 opts = pycompat.byteskwargs(opts)
552 opts = pycompat.byteskwargs(opts)
550 if not opts.get('unknown') or opts.get('all'):
553 if not opts.get('unknown') or opts.get('all'):
551 files = sorted(status.modified + status.added + status.clean)
554 files = sorted(status.modified + status.added + status.clean)
552 kwfiles = kwt.iskwfile(files, wctx)
555 kwfiles = kwt.iskwfile(files, wctx)
553 kwdeleted = kwt.iskwfile(status.deleted, wctx)
556 kwdeleted = kwt.iskwfile(status.deleted, wctx)
554 kwunknown = kwt.iskwfile(status.unknown, wctx)
557 kwunknown = kwt.iskwfile(status.unknown, wctx)
555 if not opts.get('ignore') or opts.get('all'):
558 if not opts.get('ignore') or opts.get('all'):
556 showfiles = kwfiles, kwdeleted, kwunknown
559 showfiles = kwfiles, kwdeleted, kwunknown
557 else:
560 else:
558 showfiles = [], [], []
561 showfiles = [], [], []
559 if opts.get('all') or opts.get('ignore'):
562 if opts.get('all') or opts.get('ignore'):
560 showfiles += ([f for f in files if f not in kwfiles],
563 showfiles += ([f for f in files if f not in kwfiles],
561 [f for f in status.unknown if f not in kwunknown])
564 [f for f in status.unknown if f not in kwunknown])
562 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
565 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
563 kwstates = zip(kwlabels, 'K!kIi', showfiles)
566 kwstates = zip(kwlabels, 'K!kIi', showfiles)
564 fm = ui.formatter('kwfiles', opts)
567 fm = ui.formatter('kwfiles', opts)
565 fmt = '%.0s%s\n'
568 fmt = '%.0s%s\n'
566 if opts.get('all') or ui.verbose:
569 if opts.get('all') or ui.verbose:
567 fmt = '%s %s\n'
570 fmt = '%s %s\n'
568 for kwstate, char, filenames in kwstates:
571 for kwstate, char, filenames in kwstates:
569 label = 'kwfiles.' + kwstate
572 label = 'kwfiles.' + kwstate
570 for f in filenames:
573 for f in filenames:
571 fm.startitem()
574 fm.startitem()
572 fm.write('kwstatus path', fmt, char,
575 fm.write('kwstatus path', fmt, char,
573 repo.pathto(f, cwd), label=label)
576 repo.pathto(f, cwd), label=label)
574 fm.end()
577 fm.end()
575
578
576 @command('kwshrink',
579 @command('kwshrink',
577 cmdutil.walkopts,
580 cmdutil.walkopts,
578 _('hg kwshrink [OPTION]... [FILE]...'),
581 _('hg kwshrink [OPTION]... [FILE]...'),
579 inferrepo=True)
582 inferrepo=True)
580 def shrink(ui, repo, *pats, **opts):
583 def shrink(ui, repo, *pats, **opts):
581 '''revert expanded keywords in the working directory
584 '''revert expanded keywords in the working directory
582
585
583 Must be run before changing/disabling active keywords.
586 Must be run before changing/disabling active keywords.
584
587
585 kwshrink refuses to run if given files contain local changes.
588 kwshrink refuses to run if given files contain local changes.
586 '''
589 '''
587 # 3rd argument sets expansion to False
590 # 3rd argument sets expansion to False
588 _kwfwrite(ui, repo, False, *pats, **opts)
591 _kwfwrite(ui, repo, False, *pats, **opts)
589
592
590 # monkeypatches
593 # monkeypatches
591
594
592 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
595 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
593 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
596 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
594 rejects or conflicts due to expanded keywords in working dir.'''
597 rejects or conflicts due to expanded keywords in working dir.'''
595 orig(self, ui, gp, backend, store, eolmode)
598 orig(self, ui, gp, backend, store, eolmode)
596 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
599 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
597 if kwt:
600 if kwt:
598 # shrink keywords read from working dir
601 # shrink keywords read from working dir
599 self.lines = kwt.shrinklines(self.fname, self.lines)
602 self.lines = kwt.shrinklines(self.fname, self.lines)
600
603
601 def kwdiff(orig, repo, *args, **kwargs):
604 def kwdiff(orig, repo, *args, **kwargs):
602 '''Monkeypatch patch.diff to avoid expansion.'''
605 '''Monkeypatch patch.diff to avoid expansion.'''
603 kwt = getattr(repo, '_keywordkwt', None)
606 kwt = getattr(repo, '_keywordkwt', None)
604 if kwt:
607 if kwt:
605 restrict = kwt.restrict
608 restrict = kwt.restrict
606 kwt.restrict = True
609 kwt.restrict = True
607 try:
610 try:
608 for chunk in orig(repo, *args, **kwargs):
611 for chunk in orig(repo, *args, **kwargs):
609 yield chunk
612 yield chunk
610 finally:
613 finally:
611 if kwt:
614 if kwt:
612 kwt.restrict = restrict
615 kwt.restrict = restrict
613
616
614 def kwweb_skip(orig, web, req, tmpl):
617 def kwweb_skip(orig, web, req, tmpl):
615 '''Wraps webcommands.x turning off keyword expansion.'''
618 '''Wraps webcommands.x turning off keyword expansion.'''
616 kwt = getattr(web.repo, '_keywordkwt', None)
619 kwt = getattr(web.repo, '_keywordkwt', None)
617 if kwt:
620 if kwt:
618 origmatch = kwt.match
621 origmatch = kwt.match
619 kwt.match = util.never
622 kwt.match = util.never
620 try:
623 try:
621 for chunk in orig(web, req, tmpl):
624 for chunk in orig(web, req, tmpl):
622 yield chunk
625 yield chunk
623 finally:
626 finally:
624 if kwt:
627 if kwt:
625 kwt.match = origmatch
628 kwt.match = origmatch
626
629
627 def kw_amend(orig, ui, repo, old, extra, pats, opts):
630 def kw_amend(orig, ui, repo, old, extra, pats, opts):
628 '''Wraps cmdutil.amend expanding keywords after amend.'''
631 '''Wraps cmdutil.amend expanding keywords after amend.'''
629 kwt = getattr(repo, '_keywordkwt', None)
632 kwt = getattr(repo, '_keywordkwt', None)
630 if kwt is None:
633 if kwt is None:
631 return orig(ui, repo, old, extra, pats, opts)
634 return orig(ui, repo, old, extra, pats, opts)
632 with repo.wlock():
635 with repo.wlock():
633 kwt.postcommit = True
636 kwt.postcommit = True
634 newid = orig(ui, repo, old, extra, pats, opts)
637 newid = orig(ui, repo, old, extra, pats, opts)
635 if newid != old.node():
638 if newid != old.node():
636 ctx = repo[newid]
639 ctx = repo[newid]
637 kwt.restrict = True
640 kwt.restrict = True
638 kwt.overwrite(ctx, ctx.files(), False, True)
641 kwt.overwrite(ctx, ctx.files(), False, True)
639 kwt.restrict = False
642 kwt.restrict = False
640 return newid
643 return newid
641
644
642 def kw_copy(orig, ui, repo, pats, opts, rename=False):
645 def kw_copy(orig, ui, repo, pats, opts, rename=False):
643 '''Wraps cmdutil.copy so that copy/rename destinations do not
646 '''Wraps cmdutil.copy so that copy/rename destinations do not
644 contain expanded keywords.
647 contain expanded keywords.
645 Note that the source of a regular file destination may also be a
648 Note that the source of a regular file destination may also be a
646 symlink:
649 symlink:
647 hg cp sym x -> x is symlink
650 hg cp sym x -> x is symlink
648 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
651 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
649 For the latter we have to follow the symlink to find out whether its
652 For the latter we have to follow the symlink to find out whether its
650 target is configured for expansion and we therefore must unexpand the
653 target is configured for expansion and we therefore must unexpand the
651 keywords in the destination.'''
654 keywords in the destination.'''
652 kwt = getattr(repo, '_keywordkwt', None)
655 kwt = getattr(repo, '_keywordkwt', None)
653 if kwt is None:
656 if kwt is None:
654 return orig(ui, repo, pats, opts, rename)
657 return orig(ui, repo, pats, opts, rename)
655 with repo.wlock():
658 with repo.wlock():
656 orig(ui, repo, pats, opts, rename)
659 orig(ui, repo, pats, opts, rename)
657 if opts.get('dry_run'):
660 if opts.get('dry_run'):
658 return
661 return
659 wctx = repo[None]
662 wctx = repo[None]
660 cwd = repo.getcwd()
663 cwd = repo.getcwd()
661
664
662 def haskwsource(dest):
665 def haskwsource(dest):
663 '''Returns true if dest is a regular file and configured for
666 '''Returns true if dest is a regular file and configured for
664 expansion or a symlink which points to a file configured for
667 expansion or a symlink which points to a file configured for
665 expansion. '''
668 expansion. '''
666 source = repo.dirstate.copied(dest)
669 source = repo.dirstate.copied(dest)
667 if 'l' in wctx.flags(source):
670 if 'l' in wctx.flags(source):
668 source = pathutil.canonpath(repo.root, cwd,
671 source = pathutil.canonpath(repo.root, cwd,
669 os.path.realpath(source))
672 os.path.realpath(source))
670 return kwt.match(source)
673 return kwt.match(source)
671
674
672 candidates = [f for f in repo.dirstate.copies() if
675 candidates = [f for f in repo.dirstate.copies() if
673 'l' not in wctx.flags(f) and haskwsource(f)]
676 'l' not in wctx.flags(f) and haskwsource(f)]
674 kwt.overwrite(wctx, candidates, False, False)
677 kwt.overwrite(wctx, candidates, False, False)
675
678
676 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
679 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
677 '''Wraps record.dorecord expanding keywords after recording.'''
680 '''Wraps record.dorecord expanding keywords after recording.'''
678 kwt = getattr(repo, '_keywordkwt', None)
681 kwt = getattr(repo, '_keywordkwt', None)
679 if kwt is None:
682 if kwt is None:
680 return orig(ui, repo, commitfunc, *pats, **opts)
683 return orig(ui, repo, commitfunc, *pats, **opts)
681 with repo.wlock():
684 with repo.wlock():
682 # record returns 0 even when nothing has changed
685 # record returns 0 even when nothing has changed
683 # therefore compare nodes before and after
686 # therefore compare nodes before and after
684 kwt.postcommit = True
687 kwt.postcommit = True
685 ctx = repo['.']
688 ctx = repo['.']
686 wstatus = ctx.status()
689 wstatus = ctx.status()
687 ret = orig(ui, repo, commitfunc, *pats, **opts)
690 ret = orig(ui, repo, commitfunc, *pats, **opts)
688 recctx = repo['.']
691 recctx = repo['.']
689 if ctx != recctx:
692 if ctx != recctx:
690 modified, added = _preselect(wstatus, recctx.files())
693 modified, added = _preselect(wstatus, recctx.files())
691 kwt.restrict = False
694 kwt.restrict = False
692 kwt.overwrite(recctx, modified, False, True)
695 kwt.overwrite(recctx, modified, False, True)
693 kwt.overwrite(recctx, added, False, True, True)
696 kwt.overwrite(recctx, added, False, True, True)
694 kwt.restrict = True
697 kwt.restrict = True
695 return ret
698 return ret
696
699
697 def kwfilectx_cmp(orig, self, fctx):
700 def kwfilectx_cmp(orig, self, fctx):
698 if fctx._customcmp:
701 if fctx._customcmp:
699 return fctx.cmp(self)
702 return fctx.cmp(self)
700 kwt = getattr(self._repo, '_keywordkwt', None)
703 kwt = getattr(self._repo, '_keywordkwt', None)
701 if kwt is None:
704 if kwt is None:
702 return orig(self, fctx)
705 return orig(self, fctx)
703 # keyword affects data size, comparing wdir and filelog size does
706 # keyword affects data size, comparing wdir and filelog size does
704 # not make sense
707 # not make sense
705 if (fctx._filenode is None and
708 if (fctx._filenode is None and
706 (self._repo._encodefilterpats or
709 (self._repo._encodefilterpats or
707 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
710 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
708 self.size() - 4 == fctx.size()) or
711 self.size() - 4 == fctx.size()) or
709 self.size() == fctx.size()):
712 self.size() == fctx.size()):
710 return self._filelog.cmp(self._filenode, fctx.data())
713 return self._filelog.cmp(self._filenode, fctx.data())
711 return True
714 return True
712
715
713 def uisetup(ui):
716 def uisetup(ui):
714 ''' Monkeypatches dispatch._parse to retrieve user command.
717 ''' Monkeypatches dispatch._parse to retrieve user command.
715 Overrides file method to return kwfilelog instead of filelog
718 Overrides file method to return kwfilelog instead of filelog
716 if file matches user configuration.
719 if file matches user configuration.
717 Wraps commit to overwrite configured files with updated
720 Wraps commit to overwrite configured files with updated
718 keyword substitutions.
721 keyword substitutions.
719 Monkeypatches patch and webcommands.'''
722 Monkeypatches patch and webcommands.'''
720
723
721 def kwdispatch_parse(orig, ui, args):
724 def kwdispatch_parse(orig, ui, args):
722 '''Monkeypatch dispatch._parse to obtain running hg command.'''
725 '''Monkeypatch dispatch._parse to obtain running hg command.'''
723 cmd, func, args, options, cmdoptions = orig(ui, args)
726 cmd, func, args, options, cmdoptions = orig(ui, args)
724 kwtools['hgcmd'] = cmd
727 kwtools['hgcmd'] = cmd
725 return cmd, func, args, options, cmdoptions
728 return cmd, func, args, options, cmdoptions
726
729
727 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
730 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
728
731
729 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
732 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
730 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
733 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
731 extensions.wrapfunction(patch, 'diff', kwdiff)
734 extensions.wrapfunction(patch, 'diff', kwdiff)
732 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
735 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
733 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
736 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
734 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
737 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
735 for c in nokwwebcommands.split():
738 for c in nokwwebcommands.split():
736 extensions.wrapfunction(webcommands, c, kwweb_skip)
739 extensions.wrapfunction(webcommands, c, kwweb_skip)
737
740
738 def reposetup(ui, repo):
741 def reposetup(ui, repo):
739 '''Sets up repo as kwrepo for keyword substitution.'''
742 '''Sets up repo as kwrepo for keyword substitution.'''
740
743
741 try:
744 try:
742 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
745 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
743 or '.hg' in util.splitpath(repo.root)
746 or '.hg' in util.splitpath(repo.root)
744 or repo._url.startswith('bundle:')):
747 or repo._url.startswith('bundle:')):
745 return
748 return
746 except AttributeError:
749 except AttributeError:
747 pass
750 pass
748
751
749 inc, exc = [], ['.hg*']
752 inc, exc = [], ['.hg*']
750 for pat, opt in ui.configitems('keyword'):
753 for pat, opt in ui.configitems('keyword'):
751 if opt != 'ignore':
754 if opt != 'ignore':
752 inc.append(pat)
755 inc.append(pat)
753 else:
756 else:
754 exc.append(pat)
757 exc.append(pat)
755 if not inc:
758 if not inc:
756 return
759 return
757
760
758 kwt = kwtemplater(ui, repo, inc, exc)
761 kwt = kwtemplater(ui, repo, inc, exc)
759
762
760 class kwrepo(repo.__class__):
763 class kwrepo(repo.__class__):
761 def file(self, f):
764 def file(self, f):
762 if f[0] == '/':
765 if f[0] == '/':
763 f = f[1:]
766 f = f[1:]
764 return kwfilelog(self.svfs, kwt, f)
767 return kwfilelog(self.svfs, kwt, f)
765
768
766 def wread(self, filename):
769 def wread(self, filename):
767 data = super(kwrepo, self).wread(filename)
770 data = super(kwrepo, self).wread(filename)
768 return kwt.wread(filename, data)
771 return kwt.wread(filename, data)
769
772
770 def commit(self, *args, **opts):
773 def commit(self, *args, **opts):
771 # use custom commitctx for user commands
774 # use custom commitctx for user commands
772 # other extensions can still wrap repo.commitctx directly
775 # other extensions can still wrap repo.commitctx directly
773 self.commitctx = self.kwcommitctx
776 self.commitctx = self.kwcommitctx
774 try:
777 try:
775 return super(kwrepo, self).commit(*args, **opts)
778 return super(kwrepo, self).commit(*args, **opts)
776 finally:
779 finally:
777 del self.commitctx
780 del self.commitctx
778
781
779 def kwcommitctx(self, ctx, error=False):
782 def kwcommitctx(self, ctx, error=False):
780 n = super(kwrepo, self).commitctx(ctx, error)
783 n = super(kwrepo, self).commitctx(ctx, error)
781 # no lock needed, only called from repo.commit() which already locks
784 # no lock needed, only called from repo.commit() which already locks
782 if not kwt.postcommit:
785 if not kwt.postcommit:
783 restrict = kwt.restrict
786 restrict = kwt.restrict
784 kwt.restrict = True
787 kwt.restrict = True
785 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
788 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
786 False, True)
789 False, True)
787 kwt.restrict = restrict
790 kwt.restrict = restrict
788 return n
791 return n
789
792
790 def rollback(self, dryrun=False, force=False):
793 def rollback(self, dryrun=False, force=False):
791 with self.wlock():
794 with self.wlock():
792 origrestrict = kwt.restrict
795 origrestrict = kwt.restrict
793 try:
796 try:
794 if not dryrun:
797 if not dryrun:
795 changed = self['.'].files()
798 changed = self['.'].files()
796 ret = super(kwrepo, self).rollback(dryrun, force)
799 ret = super(kwrepo, self).rollback(dryrun, force)
797 if not dryrun:
800 if not dryrun:
798 ctx = self['.']
801 ctx = self['.']
799 modified, added = _preselect(ctx.status(), changed)
802 modified, added = _preselect(ctx.status(), changed)
800 kwt.restrict = False
803 kwt.restrict = False
801 kwt.overwrite(ctx, modified, True, True)
804 kwt.overwrite(ctx, modified, True, True)
802 kwt.overwrite(ctx, added, True, False)
805 kwt.overwrite(ctx, added, True, False)
803 return ret
806 return ret
804 finally:
807 finally:
805 kwt.restrict = origrestrict
808 kwt.restrict = origrestrict
806
809
807 repo.__class__ = kwrepo
810 repo.__class__ = kwrepo
808 repo._keywordkwt = kwt
811 repo._keywordkwt = kwt
@@ -1,3654 +1,3655 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import, print_function
65 from __future__ import absolute_import, print_function
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import (
72 from mercurial.node import (
73 bin,
73 bin,
74 hex,
74 hex,
75 nullid,
75 nullid,
76 nullrev,
76 nullrev,
77 short,
77 short,
78 )
78 )
79 from mercurial import (
79 from mercurial import (
80 cmdutil,
80 cmdutil,
81 commands,
81 commands,
82 dirstateguard,
82 dirstateguard,
83 encoding,
83 encoding,
84 error,
84 error,
85 extensions,
85 extensions,
86 hg,
86 hg,
87 localrepo,
87 localrepo,
88 lock as lockmod,
88 lock as lockmod,
89 logcmdutil,
89 logcmdutil,
90 patch as patchmod,
90 patch as patchmod,
91 phases,
91 phases,
92 pycompat,
92 pycompat,
93 registrar,
93 registrar,
94 revsetlang,
94 revsetlang,
95 scmutil,
95 scmutil,
96 smartset,
96 smartset,
97 subrepoutil,
97 subrepoutil,
98 util,
98 util,
99 vfs as vfsmod,
99 vfs as vfsmod,
100 )
100 )
101 from mercurial.utils import dateutil
101
102
102 release = lockmod.release
103 release = lockmod.release
103 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
104 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
104
105
105 cmdtable = {}
106 cmdtable = {}
106 command = registrar.command(cmdtable)
107 command = registrar.command(cmdtable)
107 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
108 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
108 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
109 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
109 # be specifying the version(s) of Mercurial they are tested with, or
110 # be specifying the version(s) of Mercurial they are tested with, or
110 # leave the attribute unspecified.
111 # leave the attribute unspecified.
111 testedwith = 'ships-with-hg-core'
112 testedwith = 'ships-with-hg-core'
112
113
113 configtable = {}
114 configtable = {}
114 configitem = registrar.configitem(configtable)
115 configitem = registrar.configitem(configtable)
115
116
116 configitem('mq', 'git',
117 configitem('mq', 'git',
117 default='auto',
118 default='auto',
118 )
119 )
119 configitem('mq', 'keepchanges',
120 configitem('mq', 'keepchanges',
120 default=False,
121 default=False,
121 )
122 )
122 configitem('mq', 'plain',
123 configitem('mq', 'plain',
123 default=False,
124 default=False,
124 )
125 )
125 configitem('mq', 'secret',
126 configitem('mq', 'secret',
126 default=False,
127 default=False,
127 )
128 )
128
129
129 # force load strip extension formerly included in mq and import some utility
130 # force load strip extension formerly included in mq and import some utility
130 try:
131 try:
131 stripext = extensions.find('strip')
132 stripext = extensions.find('strip')
132 except KeyError:
133 except KeyError:
133 # note: load is lazy so we could avoid the try-except,
134 # note: load is lazy so we could avoid the try-except,
134 # but I (marmoute) prefer this explicit code.
135 # but I (marmoute) prefer this explicit code.
135 class dummyui(object):
136 class dummyui(object):
136 def debug(self, msg):
137 def debug(self, msg):
137 pass
138 pass
138 stripext = extensions.load(dummyui(), 'strip', '')
139 stripext = extensions.load(dummyui(), 'strip', '')
139
140
140 strip = stripext.strip
141 strip = stripext.strip
141 checksubstate = stripext.checksubstate
142 checksubstate = stripext.checksubstate
142 checklocalchanges = stripext.checklocalchanges
143 checklocalchanges = stripext.checklocalchanges
143
144
144
145
145 # Patch names looks like unix-file names.
146 # Patch names looks like unix-file names.
146 # They must be joinable with queue directory and result in the patch path.
147 # They must be joinable with queue directory and result in the patch path.
147 normname = util.normpath
148 normname = util.normpath
148
149
149 class statusentry(object):
150 class statusentry(object):
150 def __init__(self, node, name):
151 def __init__(self, node, name):
151 self.node, self.name = node, name
152 self.node, self.name = node, name
152
153
153 def __bytes__(self):
154 def __bytes__(self):
154 return hex(self.node) + ':' + self.name
155 return hex(self.node) + ':' + self.name
155
156
156 __str__ = encoding.strmethod(__bytes__)
157 __str__ = encoding.strmethod(__bytes__)
157 __repr__ = encoding.strmethod(__bytes__)
158 __repr__ = encoding.strmethod(__bytes__)
158
159
159 # The order of the headers in 'hg export' HG patches:
160 # The order of the headers in 'hg export' HG patches:
160 HGHEADERS = [
161 HGHEADERS = [
161 # '# HG changeset patch',
162 # '# HG changeset patch',
162 '# User ',
163 '# User ',
163 '# Date ',
164 '# Date ',
164 '# ',
165 '# ',
165 '# Branch ',
166 '# Branch ',
166 '# Node ID ',
167 '# Node ID ',
167 '# Parent ', # can occur twice for merges - but that is not relevant for mq
168 '# Parent ', # can occur twice for merges - but that is not relevant for mq
168 ]
169 ]
169 # The order of headers in plain 'mail style' patches:
170 # The order of headers in plain 'mail style' patches:
170 PLAINHEADERS = {
171 PLAINHEADERS = {
171 'from': 0,
172 'from': 0,
172 'date': 1,
173 'date': 1,
173 'subject': 2,
174 'subject': 2,
174 }
175 }
175
176
176 def inserthgheader(lines, header, value):
177 def inserthgheader(lines, header, value):
177 """Assuming lines contains a HG patch header, add a header line with value.
178 """Assuming lines contains a HG patch header, add a header line with value.
178 >>> try: inserthgheader([], b'# Date ', b'z')
179 >>> try: inserthgheader([], b'# Date ', b'z')
179 ... except ValueError as inst: print("oops")
180 ... except ValueError as inst: print("oops")
180 oops
181 oops
181 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
182 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
182 ['# HG changeset patch', '# Date z']
183 ['# HG changeset patch', '# Date z']
183 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
184 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
184 ['# HG changeset patch', '# Date z', '']
185 ['# HG changeset patch', '# Date z', '']
185 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
186 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
186 ['# HG changeset patch', '# User y', '# Date z']
187 ['# HG changeset patch', '# User y', '# Date z']
187 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
188 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
188 ... b'# User ', b'z')
189 ... b'# User ', b'z')
189 ['# HG changeset patch', '# Date x', '# User z']
190 ['# HG changeset patch', '# Date x', '# User z']
190 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
191 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
191 ['# HG changeset patch', '# Date z']
192 ['# HG changeset patch', '# Date z']
192 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
193 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
193 ... b'# Date ', b'z')
194 ... b'# Date ', b'z')
194 ['# HG changeset patch', '# Date z', '', '# Date y']
195 ['# HG changeset patch', '# Date z', '', '# Date y']
195 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
196 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
196 ... b'# Date ', b'z')
197 ... b'# Date ', b'z')
197 ['# HG changeset patch', '# Date z', '# Parent y']
198 ['# HG changeset patch', '# Date z', '# Parent y']
198 """
199 """
199 start = lines.index('# HG changeset patch') + 1
200 start = lines.index('# HG changeset patch') + 1
200 newindex = HGHEADERS.index(header)
201 newindex = HGHEADERS.index(header)
201 bestpos = len(lines)
202 bestpos = len(lines)
202 for i in range(start, len(lines)):
203 for i in range(start, len(lines)):
203 line = lines[i]
204 line = lines[i]
204 if not line.startswith('# '):
205 if not line.startswith('# '):
205 bestpos = min(bestpos, i)
206 bestpos = min(bestpos, i)
206 break
207 break
207 for lineindex, h in enumerate(HGHEADERS):
208 for lineindex, h in enumerate(HGHEADERS):
208 if line.startswith(h):
209 if line.startswith(h):
209 if lineindex == newindex:
210 if lineindex == newindex:
210 lines[i] = header + value
211 lines[i] = header + value
211 return lines
212 return lines
212 if lineindex > newindex:
213 if lineindex > newindex:
213 bestpos = min(bestpos, i)
214 bestpos = min(bestpos, i)
214 break # next line
215 break # next line
215 lines.insert(bestpos, header + value)
216 lines.insert(bestpos, header + value)
216 return lines
217 return lines
217
218
218 def insertplainheader(lines, header, value):
219 def insertplainheader(lines, header, value):
219 """For lines containing a plain patch header, add a header line with value.
220 """For lines containing a plain patch header, add a header line with value.
220 >>> insertplainheader([], b'Date', b'z')
221 >>> insertplainheader([], b'Date', b'z')
221 ['Date: z']
222 ['Date: z']
222 >>> insertplainheader([b''], b'Date', b'z')
223 >>> insertplainheader([b''], b'Date', b'z')
223 ['Date: z', '']
224 ['Date: z', '']
224 >>> insertplainheader([b'x'], b'Date', b'z')
225 >>> insertplainheader([b'x'], b'Date', b'z')
225 ['Date: z', '', 'x']
226 ['Date: z', '', 'x']
226 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
227 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
227 ['From: y', 'Date: z', '', 'x']
228 ['From: y', 'Date: z', '', 'x']
228 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
229 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
229 [' date : x', 'From: z', '']
230 [' date : x', 'From: z', '']
230 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
231 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
231 ['Date: z', '', 'Date: y']
232 ['Date: z', '', 'Date: y']
232 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
233 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
233 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
234 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
234 """
235 """
235 newprio = PLAINHEADERS[header.lower()]
236 newprio = PLAINHEADERS[header.lower()]
236 bestpos = len(lines)
237 bestpos = len(lines)
237 for i, line in enumerate(lines):
238 for i, line in enumerate(lines):
238 if ':' in line:
239 if ':' in line:
239 lheader = line.split(':', 1)[0].strip().lower()
240 lheader = line.split(':', 1)[0].strip().lower()
240 lprio = PLAINHEADERS.get(lheader, newprio + 1)
241 lprio = PLAINHEADERS.get(lheader, newprio + 1)
241 if lprio == newprio:
242 if lprio == newprio:
242 lines[i] = '%s: %s' % (header, value)
243 lines[i] = '%s: %s' % (header, value)
243 return lines
244 return lines
244 if lprio > newprio and i < bestpos:
245 if lprio > newprio and i < bestpos:
245 bestpos = i
246 bestpos = i
246 else:
247 else:
247 if line:
248 if line:
248 lines.insert(i, '')
249 lines.insert(i, '')
249 if i < bestpos:
250 if i < bestpos:
250 bestpos = i
251 bestpos = i
251 break
252 break
252 lines.insert(bestpos, '%s: %s' % (header, value))
253 lines.insert(bestpos, '%s: %s' % (header, value))
253 return lines
254 return lines
254
255
255 class patchheader(object):
256 class patchheader(object):
256 def __init__(self, pf, plainmode=False):
257 def __init__(self, pf, plainmode=False):
257 def eatdiff(lines):
258 def eatdiff(lines):
258 while lines:
259 while lines:
259 l = lines[-1]
260 l = lines[-1]
260 if (l.startswith("diff -") or
261 if (l.startswith("diff -") or
261 l.startswith("Index:") or
262 l.startswith("Index:") or
262 l.startswith("===========")):
263 l.startswith("===========")):
263 del lines[-1]
264 del lines[-1]
264 else:
265 else:
265 break
266 break
266 def eatempty(lines):
267 def eatempty(lines):
267 while lines:
268 while lines:
268 if not lines[-1].strip():
269 if not lines[-1].strip():
269 del lines[-1]
270 del lines[-1]
270 else:
271 else:
271 break
272 break
272
273
273 message = []
274 message = []
274 comments = []
275 comments = []
275 user = None
276 user = None
276 date = None
277 date = None
277 parent = None
278 parent = None
278 format = None
279 format = None
279 subject = None
280 subject = None
280 branch = None
281 branch = None
281 nodeid = None
282 nodeid = None
282 diffstart = 0
283 diffstart = 0
283
284
284 for line in open(pf, 'rb'):
285 for line in open(pf, 'rb'):
285 line = line.rstrip()
286 line = line.rstrip()
286 if (line.startswith('diff --git')
287 if (line.startswith('diff --git')
287 or (diffstart and line.startswith('+++ '))):
288 or (diffstart and line.startswith('+++ '))):
288 diffstart = 2
289 diffstart = 2
289 break
290 break
290 diffstart = 0 # reset
291 diffstart = 0 # reset
291 if line.startswith("--- "):
292 if line.startswith("--- "):
292 diffstart = 1
293 diffstart = 1
293 continue
294 continue
294 elif format == "hgpatch":
295 elif format == "hgpatch":
295 # parse values when importing the result of an hg export
296 # parse values when importing the result of an hg export
296 if line.startswith("# User "):
297 if line.startswith("# User "):
297 user = line[7:]
298 user = line[7:]
298 elif line.startswith("# Date "):
299 elif line.startswith("# Date "):
299 date = line[7:]
300 date = line[7:]
300 elif line.startswith("# Parent "):
301 elif line.startswith("# Parent "):
301 parent = line[9:].lstrip() # handle double trailing space
302 parent = line[9:].lstrip() # handle double trailing space
302 elif line.startswith("# Branch "):
303 elif line.startswith("# Branch "):
303 branch = line[9:]
304 branch = line[9:]
304 elif line.startswith("# Node ID "):
305 elif line.startswith("# Node ID "):
305 nodeid = line[10:]
306 nodeid = line[10:]
306 elif not line.startswith("# ") and line:
307 elif not line.startswith("# ") and line:
307 message.append(line)
308 message.append(line)
308 format = None
309 format = None
309 elif line == '# HG changeset patch':
310 elif line == '# HG changeset patch':
310 message = []
311 message = []
311 format = "hgpatch"
312 format = "hgpatch"
312 elif (format != "tagdone" and (line.startswith("Subject: ") or
313 elif (format != "tagdone" and (line.startswith("Subject: ") or
313 line.startswith("subject: "))):
314 line.startswith("subject: "))):
314 subject = line[9:]
315 subject = line[9:]
315 format = "tag"
316 format = "tag"
316 elif (format != "tagdone" and (line.startswith("From: ") or
317 elif (format != "tagdone" and (line.startswith("From: ") or
317 line.startswith("from: "))):
318 line.startswith("from: "))):
318 user = line[6:]
319 user = line[6:]
319 format = "tag"
320 format = "tag"
320 elif (format != "tagdone" and (line.startswith("Date: ") or
321 elif (format != "tagdone" and (line.startswith("Date: ") or
321 line.startswith("date: "))):
322 line.startswith("date: "))):
322 date = line[6:]
323 date = line[6:]
323 format = "tag"
324 format = "tag"
324 elif format == "tag" and line == "":
325 elif format == "tag" and line == "":
325 # when looking for tags (subject: from: etc) they
326 # when looking for tags (subject: from: etc) they
326 # end once you find a blank line in the source
327 # end once you find a blank line in the source
327 format = "tagdone"
328 format = "tagdone"
328 elif message or line:
329 elif message or line:
329 message.append(line)
330 message.append(line)
330 comments.append(line)
331 comments.append(line)
331
332
332 eatdiff(message)
333 eatdiff(message)
333 eatdiff(comments)
334 eatdiff(comments)
334 # Remember the exact starting line of the patch diffs before consuming
335 # Remember the exact starting line of the patch diffs before consuming
335 # empty lines, for external use by TortoiseHg and others
336 # empty lines, for external use by TortoiseHg and others
336 self.diffstartline = len(comments)
337 self.diffstartline = len(comments)
337 eatempty(message)
338 eatempty(message)
338 eatempty(comments)
339 eatempty(comments)
339
340
340 # make sure message isn't empty
341 # make sure message isn't empty
341 if format and format.startswith("tag") and subject:
342 if format and format.startswith("tag") and subject:
342 message.insert(0, subject)
343 message.insert(0, subject)
343
344
344 self.message = message
345 self.message = message
345 self.comments = comments
346 self.comments = comments
346 self.user = user
347 self.user = user
347 self.date = date
348 self.date = date
348 self.parent = parent
349 self.parent = parent
349 # nodeid and branch are for external use by TortoiseHg and others
350 # nodeid and branch are for external use by TortoiseHg and others
350 self.nodeid = nodeid
351 self.nodeid = nodeid
351 self.branch = branch
352 self.branch = branch
352 self.haspatch = diffstart > 1
353 self.haspatch = diffstart > 1
353 self.plainmode = (plainmode or
354 self.plainmode = (plainmode or
354 '# HG changeset patch' not in self.comments and
355 '# HG changeset patch' not in self.comments and
355 any(c.startswith('Date: ') or
356 any(c.startswith('Date: ') or
356 c.startswith('From: ')
357 c.startswith('From: ')
357 for c in self.comments))
358 for c in self.comments))
358
359
359 def setuser(self, user):
360 def setuser(self, user):
360 try:
361 try:
361 inserthgheader(self.comments, '# User ', user)
362 inserthgheader(self.comments, '# User ', user)
362 except ValueError:
363 except ValueError:
363 if self.plainmode:
364 if self.plainmode:
364 insertplainheader(self.comments, 'From', user)
365 insertplainheader(self.comments, 'From', user)
365 else:
366 else:
366 tmp = ['# HG changeset patch', '# User ' + user]
367 tmp = ['# HG changeset patch', '# User ' + user]
367 self.comments = tmp + self.comments
368 self.comments = tmp + self.comments
368 self.user = user
369 self.user = user
369
370
370 def setdate(self, date):
371 def setdate(self, date):
371 try:
372 try:
372 inserthgheader(self.comments, '# Date ', date)
373 inserthgheader(self.comments, '# Date ', date)
373 except ValueError:
374 except ValueError:
374 if self.plainmode:
375 if self.plainmode:
375 insertplainheader(self.comments, 'Date', date)
376 insertplainheader(self.comments, 'Date', date)
376 else:
377 else:
377 tmp = ['# HG changeset patch', '# Date ' + date]
378 tmp = ['# HG changeset patch', '# Date ' + date]
378 self.comments = tmp + self.comments
379 self.comments = tmp + self.comments
379 self.date = date
380 self.date = date
380
381
381 def setparent(self, parent):
382 def setparent(self, parent):
382 try:
383 try:
383 inserthgheader(self.comments, '# Parent ', parent)
384 inserthgheader(self.comments, '# Parent ', parent)
384 except ValueError:
385 except ValueError:
385 if not self.plainmode:
386 if not self.plainmode:
386 tmp = ['# HG changeset patch', '# Parent ' + parent]
387 tmp = ['# HG changeset patch', '# Parent ' + parent]
387 self.comments = tmp + self.comments
388 self.comments = tmp + self.comments
388 self.parent = parent
389 self.parent = parent
389
390
390 def setmessage(self, message):
391 def setmessage(self, message):
391 if self.comments:
392 if self.comments:
392 self._delmsg()
393 self._delmsg()
393 self.message = [message]
394 self.message = [message]
394 if message:
395 if message:
395 if self.plainmode and self.comments and self.comments[-1]:
396 if self.plainmode and self.comments and self.comments[-1]:
396 self.comments.append('')
397 self.comments.append('')
397 self.comments.append(message)
398 self.comments.append(message)
398
399
399 def __bytes__(self):
400 def __bytes__(self):
400 s = '\n'.join(self.comments).rstrip()
401 s = '\n'.join(self.comments).rstrip()
401 if not s:
402 if not s:
402 return ''
403 return ''
403 return s + '\n\n'
404 return s + '\n\n'
404
405
405 __str__ = encoding.strmethod(__bytes__)
406 __str__ = encoding.strmethod(__bytes__)
406
407
407 def _delmsg(self):
408 def _delmsg(self):
408 '''Remove existing message, keeping the rest of the comments fields.
409 '''Remove existing message, keeping the rest of the comments fields.
409 If comments contains 'subject: ', message will prepend
410 If comments contains 'subject: ', message will prepend
410 the field and a blank line.'''
411 the field and a blank line.'''
411 if self.message:
412 if self.message:
412 subj = 'subject: ' + self.message[0].lower()
413 subj = 'subject: ' + self.message[0].lower()
413 for i in xrange(len(self.comments)):
414 for i in xrange(len(self.comments)):
414 if subj == self.comments[i].lower():
415 if subj == self.comments[i].lower():
415 del self.comments[i]
416 del self.comments[i]
416 self.message = self.message[2:]
417 self.message = self.message[2:]
417 break
418 break
418 ci = 0
419 ci = 0
419 for mi in self.message:
420 for mi in self.message:
420 while mi != self.comments[ci]:
421 while mi != self.comments[ci]:
421 ci += 1
422 ci += 1
422 del self.comments[ci]
423 del self.comments[ci]
423
424
424 def newcommit(repo, phase, *args, **kwargs):
425 def newcommit(repo, phase, *args, **kwargs):
425 """helper dedicated to ensure a commit respect mq.secret setting
426 """helper dedicated to ensure a commit respect mq.secret setting
426
427
427 It should be used instead of repo.commit inside the mq source for operation
428 It should be used instead of repo.commit inside the mq source for operation
428 creating new changeset.
429 creating new changeset.
429 """
430 """
430 repo = repo.unfiltered()
431 repo = repo.unfiltered()
431 if phase is None:
432 if phase is None:
432 if repo.ui.configbool('mq', 'secret'):
433 if repo.ui.configbool('mq', 'secret'):
433 phase = phases.secret
434 phase = phases.secret
434 overrides = {('ui', 'allowemptycommit'): True}
435 overrides = {('ui', 'allowemptycommit'): True}
435 if phase is not None:
436 if phase is not None:
436 overrides[('phases', 'new-commit')] = phase
437 overrides[('phases', 'new-commit')] = phase
437 with repo.ui.configoverride(overrides, 'mq'):
438 with repo.ui.configoverride(overrides, 'mq'):
438 repo.ui.setconfig('ui', 'allowemptycommit', True)
439 repo.ui.setconfig('ui', 'allowemptycommit', True)
439 return repo.commit(*args, **kwargs)
440 return repo.commit(*args, **kwargs)
440
441
441 class AbortNoCleanup(error.Abort):
442 class AbortNoCleanup(error.Abort):
442 pass
443 pass
443
444
444 class queue(object):
445 class queue(object):
445 def __init__(self, ui, baseui, path, patchdir=None):
446 def __init__(self, ui, baseui, path, patchdir=None):
446 self.basepath = path
447 self.basepath = path
447 try:
448 try:
448 with open(os.path.join(path, 'patches.queue'), r'rb') as fh:
449 with open(os.path.join(path, 'patches.queue'), r'rb') as fh:
449 cur = fh.read().rstrip()
450 cur = fh.read().rstrip()
450
451
451 if not cur:
452 if not cur:
452 curpath = os.path.join(path, 'patches')
453 curpath = os.path.join(path, 'patches')
453 else:
454 else:
454 curpath = os.path.join(path, 'patches-' + cur)
455 curpath = os.path.join(path, 'patches-' + cur)
455 except IOError:
456 except IOError:
456 curpath = os.path.join(path, 'patches')
457 curpath = os.path.join(path, 'patches')
457 self.path = patchdir or curpath
458 self.path = patchdir or curpath
458 self.opener = vfsmod.vfs(self.path)
459 self.opener = vfsmod.vfs(self.path)
459 self.ui = ui
460 self.ui = ui
460 self.baseui = baseui
461 self.baseui = baseui
461 self.applieddirty = False
462 self.applieddirty = False
462 self.seriesdirty = False
463 self.seriesdirty = False
463 self.added = []
464 self.added = []
464 self.seriespath = "series"
465 self.seriespath = "series"
465 self.statuspath = "status"
466 self.statuspath = "status"
466 self.guardspath = "guards"
467 self.guardspath = "guards"
467 self.activeguards = None
468 self.activeguards = None
468 self.guardsdirty = False
469 self.guardsdirty = False
469 # Handle mq.git as a bool with extended values
470 # Handle mq.git as a bool with extended values
470 gitmode = ui.config('mq', 'git').lower()
471 gitmode = ui.config('mq', 'git').lower()
471 boolmode = util.parsebool(gitmode)
472 boolmode = util.parsebool(gitmode)
472 if boolmode is not None:
473 if boolmode is not None:
473 if boolmode:
474 if boolmode:
474 gitmode = 'yes'
475 gitmode = 'yes'
475 else:
476 else:
476 gitmode = 'no'
477 gitmode = 'no'
477 self.gitmode = gitmode
478 self.gitmode = gitmode
478 # deprecated config: mq.plain
479 # deprecated config: mq.plain
479 self.plainmode = ui.configbool('mq', 'plain')
480 self.plainmode = ui.configbool('mq', 'plain')
480 self.checkapplied = True
481 self.checkapplied = True
481
482
482 @util.propertycache
483 @util.propertycache
483 def applied(self):
484 def applied(self):
484 def parselines(lines):
485 def parselines(lines):
485 for l in lines:
486 for l in lines:
486 entry = l.split(':', 1)
487 entry = l.split(':', 1)
487 if len(entry) > 1:
488 if len(entry) > 1:
488 n, name = entry
489 n, name = entry
489 yield statusentry(bin(n), name)
490 yield statusentry(bin(n), name)
490 elif l.strip():
491 elif l.strip():
491 self.ui.warn(_('malformated mq status line: %s\n') % entry)
492 self.ui.warn(_('malformated mq status line: %s\n') % entry)
492 # else we ignore empty lines
493 # else we ignore empty lines
493 try:
494 try:
494 lines = self.opener.read(self.statuspath).splitlines()
495 lines = self.opener.read(self.statuspath).splitlines()
495 return list(parselines(lines))
496 return list(parselines(lines))
496 except IOError as e:
497 except IOError as e:
497 if e.errno == errno.ENOENT:
498 if e.errno == errno.ENOENT:
498 return []
499 return []
499 raise
500 raise
500
501
501 @util.propertycache
502 @util.propertycache
502 def fullseries(self):
503 def fullseries(self):
503 try:
504 try:
504 return self.opener.read(self.seriespath).splitlines()
505 return self.opener.read(self.seriespath).splitlines()
505 except IOError as e:
506 except IOError as e:
506 if e.errno == errno.ENOENT:
507 if e.errno == errno.ENOENT:
507 return []
508 return []
508 raise
509 raise
509
510
510 @util.propertycache
511 @util.propertycache
511 def series(self):
512 def series(self):
512 self.parseseries()
513 self.parseseries()
513 return self.series
514 return self.series
514
515
515 @util.propertycache
516 @util.propertycache
516 def seriesguards(self):
517 def seriesguards(self):
517 self.parseseries()
518 self.parseseries()
518 return self.seriesguards
519 return self.seriesguards
519
520
520 def invalidate(self):
521 def invalidate(self):
521 for a in 'applied fullseries series seriesguards'.split():
522 for a in 'applied fullseries series seriesguards'.split():
522 if a in self.__dict__:
523 if a in self.__dict__:
523 delattr(self, a)
524 delattr(self, a)
524 self.applieddirty = False
525 self.applieddirty = False
525 self.seriesdirty = False
526 self.seriesdirty = False
526 self.guardsdirty = False
527 self.guardsdirty = False
527 self.activeguards = None
528 self.activeguards = None
528
529
529 def diffopts(self, opts=None, patchfn=None, plain=False):
530 def diffopts(self, opts=None, patchfn=None, plain=False):
530 """Return diff options tweaked for this mq use, possibly upgrading to
531 """Return diff options tweaked for this mq use, possibly upgrading to
531 git format, and possibly plain and without lossy options."""
532 git format, and possibly plain and without lossy options."""
532 diffopts = patchmod.difffeatureopts(self.ui, opts,
533 diffopts = patchmod.difffeatureopts(self.ui, opts,
533 git=True, whitespace=not plain, formatchanging=not plain)
534 git=True, whitespace=not plain, formatchanging=not plain)
534 if self.gitmode == 'auto':
535 if self.gitmode == 'auto':
535 diffopts.upgrade = True
536 diffopts.upgrade = True
536 elif self.gitmode == 'keep':
537 elif self.gitmode == 'keep':
537 pass
538 pass
538 elif self.gitmode in ('yes', 'no'):
539 elif self.gitmode in ('yes', 'no'):
539 diffopts.git = self.gitmode == 'yes'
540 diffopts.git = self.gitmode == 'yes'
540 else:
541 else:
541 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
542 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
542 ' got %s') % self.gitmode)
543 ' got %s') % self.gitmode)
543 if patchfn:
544 if patchfn:
544 diffopts = self.patchopts(diffopts, patchfn)
545 diffopts = self.patchopts(diffopts, patchfn)
545 return diffopts
546 return diffopts
546
547
547 def patchopts(self, diffopts, *patches):
548 def patchopts(self, diffopts, *patches):
548 """Return a copy of input diff options with git set to true if
549 """Return a copy of input diff options with git set to true if
549 referenced patch is a git patch and should be preserved as such.
550 referenced patch is a git patch and should be preserved as such.
550 """
551 """
551 diffopts = diffopts.copy()
552 diffopts = diffopts.copy()
552 if not diffopts.git and self.gitmode == 'keep':
553 if not diffopts.git and self.gitmode == 'keep':
553 for patchfn in patches:
554 for patchfn in patches:
554 patchf = self.opener(patchfn, 'r')
555 patchf = self.opener(patchfn, 'r')
555 # if the patch was a git patch, refresh it as a git patch
556 # if the patch was a git patch, refresh it as a git patch
556 diffopts.git = any(line.startswith('diff --git')
557 diffopts.git = any(line.startswith('diff --git')
557 for line in patchf)
558 for line in patchf)
558 patchf.close()
559 patchf.close()
559 return diffopts
560 return diffopts
560
561
561 def join(self, *p):
562 def join(self, *p):
562 return os.path.join(self.path, *p)
563 return os.path.join(self.path, *p)
563
564
564 def findseries(self, patch):
565 def findseries(self, patch):
565 def matchpatch(l):
566 def matchpatch(l):
566 l = l.split('#', 1)[0]
567 l = l.split('#', 1)[0]
567 return l.strip() == patch
568 return l.strip() == patch
568 for index, l in enumerate(self.fullseries):
569 for index, l in enumerate(self.fullseries):
569 if matchpatch(l):
570 if matchpatch(l):
570 return index
571 return index
571 return None
572 return None
572
573
573 guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
574 guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
574
575
575 def parseseries(self):
576 def parseseries(self):
576 self.series = []
577 self.series = []
577 self.seriesguards = []
578 self.seriesguards = []
578 for l in self.fullseries:
579 for l in self.fullseries:
579 h = l.find('#')
580 h = l.find('#')
580 if h == -1:
581 if h == -1:
581 patch = l
582 patch = l
582 comment = ''
583 comment = ''
583 elif h == 0:
584 elif h == 0:
584 continue
585 continue
585 else:
586 else:
586 patch = l[:h]
587 patch = l[:h]
587 comment = l[h:]
588 comment = l[h:]
588 patch = patch.strip()
589 patch = patch.strip()
589 if patch:
590 if patch:
590 if patch in self.series:
591 if patch in self.series:
591 raise error.Abort(_('%s appears more than once in %s') %
592 raise error.Abort(_('%s appears more than once in %s') %
592 (patch, self.join(self.seriespath)))
593 (patch, self.join(self.seriespath)))
593 self.series.append(patch)
594 self.series.append(patch)
594 self.seriesguards.append(self.guard_re.findall(comment))
595 self.seriesguards.append(self.guard_re.findall(comment))
595
596
596 def checkguard(self, guard):
597 def checkguard(self, guard):
597 if not guard:
598 if not guard:
598 return _('guard cannot be an empty string')
599 return _('guard cannot be an empty string')
599 bad_chars = '# \t\r\n\f'
600 bad_chars = '# \t\r\n\f'
600 first = guard[0]
601 first = guard[0]
601 if first in '-+':
602 if first in '-+':
602 return (_('guard %r starts with invalid character: %r') %
603 return (_('guard %r starts with invalid character: %r') %
603 (guard, first))
604 (guard, first))
604 for c in bad_chars:
605 for c in bad_chars:
605 if c in guard:
606 if c in guard:
606 return _('invalid character in guard %r: %r') % (guard, c)
607 return _('invalid character in guard %r: %r') % (guard, c)
607
608
608 def setactive(self, guards):
609 def setactive(self, guards):
609 for guard in guards:
610 for guard in guards:
610 bad = self.checkguard(guard)
611 bad = self.checkguard(guard)
611 if bad:
612 if bad:
612 raise error.Abort(bad)
613 raise error.Abort(bad)
613 guards = sorted(set(guards))
614 guards = sorted(set(guards))
614 self.ui.debug('active guards: %s\n' % ' '.join(guards))
615 self.ui.debug('active guards: %s\n' % ' '.join(guards))
615 self.activeguards = guards
616 self.activeguards = guards
616 self.guardsdirty = True
617 self.guardsdirty = True
617
618
618 def active(self):
619 def active(self):
619 if self.activeguards is None:
620 if self.activeguards is None:
620 self.activeguards = []
621 self.activeguards = []
621 try:
622 try:
622 guards = self.opener.read(self.guardspath).split()
623 guards = self.opener.read(self.guardspath).split()
623 except IOError as err:
624 except IOError as err:
624 if err.errno != errno.ENOENT:
625 if err.errno != errno.ENOENT:
625 raise
626 raise
626 guards = []
627 guards = []
627 for i, guard in enumerate(guards):
628 for i, guard in enumerate(guards):
628 bad = self.checkguard(guard)
629 bad = self.checkguard(guard)
629 if bad:
630 if bad:
630 self.ui.warn('%s:%d: %s\n' %
631 self.ui.warn('%s:%d: %s\n' %
631 (self.join(self.guardspath), i + 1, bad))
632 (self.join(self.guardspath), i + 1, bad))
632 else:
633 else:
633 self.activeguards.append(guard)
634 self.activeguards.append(guard)
634 return self.activeguards
635 return self.activeguards
635
636
636 def setguards(self, idx, guards):
637 def setguards(self, idx, guards):
637 for g in guards:
638 for g in guards:
638 if len(g) < 2:
639 if len(g) < 2:
639 raise error.Abort(_('guard %r too short') % g)
640 raise error.Abort(_('guard %r too short') % g)
640 if g[0] not in '-+':
641 if g[0] not in '-+':
641 raise error.Abort(_('guard %r starts with invalid char') % g)
642 raise error.Abort(_('guard %r starts with invalid char') % g)
642 bad = self.checkguard(g[1:])
643 bad = self.checkguard(g[1:])
643 if bad:
644 if bad:
644 raise error.Abort(bad)
645 raise error.Abort(bad)
645 drop = self.guard_re.sub('', self.fullseries[idx])
646 drop = self.guard_re.sub('', self.fullseries[idx])
646 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
647 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
647 self.parseseries()
648 self.parseseries()
648 self.seriesdirty = True
649 self.seriesdirty = True
649
650
650 def pushable(self, idx):
651 def pushable(self, idx):
651 if isinstance(idx, bytes):
652 if isinstance(idx, bytes):
652 idx = self.series.index(idx)
653 idx = self.series.index(idx)
653 patchguards = self.seriesguards[idx]
654 patchguards = self.seriesguards[idx]
654 if not patchguards:
655 if not patchguards:
655 return True, None
656 return True, None
656 guards = self.active()
657 guards = self.active()
657 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
658 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
658 if exactneg:
659 if exactneg:
659 return False, repr(exactneg[0])
660 return False, repr(exactneg[0])
660 pos = [g for g in patchguards if g[0] == '+']
661 pos = [g for g in patchguards if g[0] == '+']
661 exactpos = [g for g in pos if g[1:] in guards]
662 exactpos = [g for g in pos if g[1:] in guards]
662 if pos:
663 if pos:
663 if exactpos:
664 if exactpos:
664 return True, repr(exactpos[0])
665 return True, repr(exactpos[0])
665 return False, ' '.join(map(repr, pos))
666 return False, ' '.join(map(repr, pos))
666 return True, ''
667 return True, ''
667
668
668 def explainpushable(self, idx, all_patches=False):
669 def explainpushable(self, idx, all_patches=False):
669 if all_patches:
670 if all_patches:
670 write = self.ui.write
671 write = self.ui.write
671 else:
672 else:
672 write = self.ui.warn
673 write = self.ui.warn
673
674
674 if all_patches or self.ui.verbose:
675 if all_patches or self.ui.verbose:
675 if isinstance(idx, str):
676 if isinstance(idx, str):
676 idx = self.series.index(idx)
677 idx = self.series.index(idx)
677 pushable, why = self.pushable(idx)
678 pushable, why = self.pushable(idx)
678 if all_patches and pushable:
679 if all_patches and pushable:
679 if why is None:
680 if why is None:
680 write(_('allowing %s - no guards in effect\n') %
681 write(_('allowing %s - no guards in effect\n') %
681 self.series[idx])
682 self.series[idx])
682 else:
683 else:
683 if not why:
684 if not why:
684 write(_('allowing %s - no matching negative guards\n') %
685 write(_('allowing %s - no matching negative guards\n') %
685 self.series[idx])
686 self.series[idx])
686 else:
687 else:
687 write(_('allowing %s - guarded by %s\n') %
688 write(_('allowing %s - guarded by %s\n') %
688 (self.series[idx], why))
689 (self.series[idx], why))
689 if not pushable:
690 if not pushable:
690 if why:
691 if why:
691 write(_('skipping %s - guarded by %s\n') %
692 write(_('skipping %s - guarded by %s\n') %
692 (self.series[idx], why))
693 (self.series[idx], why))
693 else:
694 else:
694 write(_('skipping %s - no matching guards\n') %
695 write(_('skipping %s - no matching guards\n') %
695 self.series[idx])
696 self.series[idx])
696
697
697 def savedirty(self):
698 def savedirty(self):
698 def writelist(items, path):
699 def writelist(items, path):
699 fp = self.opener(path, 'wb')
700 fp = self.opener(path, 'wb')
700 for i in items:
701 for i in items:
701 fp.write("%s\n" % i)
702 fp.write("%s\n" % i)
702 fp.close()
703 fp.close()
703 if self.applieddirty:
704 if self.applieddirty:
704 writelist(map(bytes, self.applied), self.statuspath)
705 writelist(map(bytes, self.applied), self.statuspath)
705 self.applieddirty = False
706 self.applieddirty = False
706 if self.seriesdirty:
707 if self.seriesdirty:
707 writelist(self.fullseries, self.seriespath)
708 writelist(self.fullseries, self.seriespath)
708 self.seriesdirty = False
709 self.seriesdirty = False
709 if self.guardsdirty:
710 if self.guardsdirty:
710 writelist(self.activeguards, self.guardspath)
711 writelist(self.activeguards, self.guardspath)
711 self.guardsdirty = False
712 self.guardsdirty = False
712 if self.added:
713 if self.added:
713 qrepo = self.qrepo()
714 qrepo = self.qrepo()
714 if qrepo:
715 if qrepo:
715 qrepo[None].add(f for f in self.added if f not in qrepo[None])
716 qrepo[None].add(f for f in self.added if f not in qrepo[None])
716 self.added = []
717 self.added = []
717
718
718 def removeundo(self, repo):
719 def removeundo(self, repo):
719 undo = repo.sjoin('undo')
720 undo = repo.sjoin('undo')
720 if not os.path.exists(undo):
721 if not os.path.exists(undo):
721 return
722 return
722 try:
723 try:
723 os.unlink(undo)
724 os.unlink(undo)
724 except OSError as inst:
725 except OSError as inst:
725 self.ui.warn(_('error removing undo: %s\n') % str(inst))
726 self.ui.warn(_('error removing undo: %s\n') % str(inst))
726
727
727 def backup(self, repo, files, copy=False):
728 def backup(self, repo, files, copy=False):
728 # backup local changes in --force case
729 # backup local changes in --force case
729 for f in sorted(files):
730 for f in sorted(files):
730 absf = repo.wjoin(f)
731 absf = repo.wjoin(f)
731 if os.path.lexists(absf):
732 if os.path.lexists(absf):
732 self.ui.note(_('saving current version of %s as %s\n') %
733 self.ui.note(_('saving current version of %s as %s\n') %
733 (f, scmutil.origpath(self.ui, repo, f)))
734 (f, scmutil.origpath(self.ui, repo, f)))
734
735
735 absorig = scmutil.origpath(self.ui, repo, absf)
736 absorig = scmutil.origpath(self.ui, repo, absf)
736 if copy:
737 if copy:
737 util.copyfile(absf, absorig)
738 util.copyfile(absf, absorig)
738 else:
739 else:
739 util.rename(absf, absorig)
740 util.rename(absf, absorig)
740
741
741 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
742 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
742 fp=None, changes=None, opts=None):
743 fp=None, changes=None, opts=None):
743 if opts is None:
744 if opts is None:
744 opts = {}
745 opts = {}
745 stat = opts.get('stat')
746 stat = opts.get('stat')
746 m = scmutil.match(repo[node1], files, opts)
747 m = scmutil.match(repo[node1], files, opts)
747 logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
748 logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
748 changes, stat, fp)
749 changes, stat, fp)
749
750
750 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
751 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
751 # first try just applying the patch
752 # first try just applying the patch
752 (err, n) = self.apply(repo, [patch], update_status=False,
753 (err, n) = self.apply(repo, [patch], update_status=False,
753 strict=True, merge=rev)
754 strict=True, merge=rev)
754
755
755 if err == 0:
756 if err == 0:
756 return (err, n)
757 return (err, n)
757
758
758 if n is None:
759 if n is None:
759 raise error.Abort(_("apply failed for patch %s") % patch)
760 raise error.Abort(_("apply failed for patch %s") % patch)
760
761
761 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
762 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
762
763
763 # apply failed, strip away that rev and merge.
764 # apply failed, strip away that rev and merge.
764 hg.clean(repo, head)
765 hg.clean(repo, head)
765 strip(self.ui, repo, [n], update=False, backup=False)
766 strip(self.ui, repo, [n], update=False, backup=False)
766
767
767 ctx = repo[rev]
768 ctx = repo[rev]
768 ret = hg.merge(repo, rev)
769 ret = hg.merge(repo, rev)
769 if ret:
770 if ret:
770 raise error.Abort(_("update returned %d") % ret)
771 raise error.Abort(_("update returned %d") % ret)
771 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
772 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
772 if n is None:
773 if n is None:
773 raise error.Abort(_("repo commit failed"))
774 raise error.Abort(_("repo commit failed"))
774 try:
775 try:
775 ph = patchheader(mergeq.join(patch), self.plainmode)
776 ph = patchheader(mergeq.join(patch), self.plainmode)
776 except Exception:
777 except Exception:
777 raise error.Abort(_("unable to read %s") % patch)
778 raise error.Abort(_("unable to read %s") % patch)
778
779
779 diffopts = self.patchopts(diffopts, patch)
780 diffopts = self.patchopts(diffopts, patch)
780 patchf = self.opener(patch, "w")
781 patchf = self.opener(patch, "w")
781 comments = str(ph)
782 comments = str(ph)
782 if comments:
783 if comments:
783 patchf.write(comments)
784 patchf.write(comments)
784 self.printdiff(repo, diffopts, head, n, fp=patchf)
785 self.printdiff(repo, diffopts, head, n, fp=patchf)
785 patchf.close()
786 patchf.close()
786 self.removeundo(repo)
787 self.removeundo(repo)
787 return (0, n)
788 return (0, n)
788
789
789 def qparents(self, repo, rev=None):
790 def qparents(self, repo, rev=None):
790 """return the mq handled parent or p1
791 """return the mq handled parent or p1
791
792
792 In some case where mq get himself in being the parent of a merge the
793 In some case where mq get himself in being the parent of a merge the
793 appropriate parent may be p2.
794 appropriate parent may be p2.
794 (eg: an in progress merge started with mq disabled)
795 (eg: an in progress merge started with mq disabled)
795
796
796 If no parent are managed by mq, p1 is returned.
797 If no parent are managed by mq, p1 is returned.
797 """
798 """
798 if rev is None:
799 if rev is None:
799 (p1, p2) = repo.dirstate.parents()
800 (p1, p2) = repo.dirstate.parents()
800 if p2 == nullid:
801 if p2 == nullid:
801 return p1
802 return p1
802 if not self.applied:
803 if not self.applied:
803 return None
804 return None
804 return self.applied[-1].node
805 return self.applied[-1].node
805 p1, p2 = repo.changelog.parents(rev)
806 p1, p2 = repo.changelog.parents(rev)
806 if p2 != nullid and p2 in [x.node for x in self.applied]:
807 if p2 != nullid and p2 in [x.node for x in self.applied]:
807 return p2
808 return p2
808 return p1
809 return p1
809
810
810 def mergepatch(self, repo, mergeq, series, diffopts):
811 def mergepatch(self, repo, mergeq, series, diffopts):
811 if not self.applied:
812 if not self.applied:
812 # each of the patches merged in will have two parents. This
813 # each of the patches merged in will have two parents. This
813 # can confuse the qrefresh, qdiff, and strip code because it
814 # can confuse the qrefresh, qdiff, and strip code because it
814 # needs to know which parent is actually in the patch queue.
815 # needs to know which parent is actually in the patch queue.
815 # so, we insert a merge marker with only one parent. This way
816 # so, we insert a merge marker with only one parent. This way
816 # the first patch in the queue is never a merge patch
817 # the first patch in the queue is never a merge patch
817 #
818 #
818 pname = ".hg.patches.merge.marker"
819 pname = ".hg.patches.merge.marker"
819 n = newcommit(repo, None, '[mq]: merge marker', force=True)
820 n = newcommit(repo, None, '[mq]: merge marker', force=True)
820 self.removeundo(repo)
821 self.removeundo(repo)
821 self.applied.append(statusentry(n, pname))
822 self.applied.append(statusentry(n, pname))
822 self.applieddirty = True
823 self.applieddirty = True
823
824
824 head = self.qparents(repo)
825 head = self.qparents(repo)
825
826
826 for patch in series:
827 for patch in series:
827 patch = mergeq.lookup(patch, strict=True)
828 patch = mergeq.lookup(patch, strict=True)
828 if not patch:
829 if not patch:
829 self.ui.warn(_("patch %s does not exist\n") % patch)
830 self.ui.warn(_("patch %s does not exist\n") % patch)
830 return (1, None)
831 return (1, None)
831 pushable, reason = self.pushable(patch)
832 pushable, reason = self.pushable(patch)
832 if not pushable:
833 if not pushable:
833 self.explainpushable(patch, all_patches=True)
834 self.explainpushable(patch, all_patches=True)
834 continue
835 continue
835 info = mergeq.isapplied(patch)
836 info = mergeq.isapplied(patch)
836 if not info:
837 if not info:
837 self.ui.warn(_("patch %s is not applied\n") % patch)
838 self.ui.warn(_("patch %s is not applied\n") % patch)
838 return (1, None)
839 return (1, None)
839 rev = info[1]
840 rev = info[1]
840 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
841 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
841 if head:
842 if head:
842 self.applied.append(statusentry(head, patch))
843 self.applied.append(statusentry(head, patch))
843 self.applieddirty = True
844 self.applieddirty = True
844 if err:
845 if err:
845 return (err, head)
846 return (err, head)
846 self.savedirty()
847 self.savedirty()
847 return (0, head)
848 return (0, head)
848
849
849 def patch(self, repo, patchfile):
850 def patch(self, repo, patchfile):
850 '''Apply patchfile to the working directory.
851 '''Apply patchfile to the working directory.
851 patchfile: name of patch file'''
852 patchfile: name of patch file'''
852 files = set()
853 files = set()
853 try:
854 try:
854 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
855 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
855 files=files, eolmode=None)
856 files=files, eolmode=None)
856 return (True, list(files), fuzz)
857 return (True, list(files), fuzz)
857 except Exception as inst:
858 except Exception as inst:
858 self.ui.note(util.forcebytestr(inst) + '\n')
859 self.ui.note(util.forcebytestr(inst) + '\n')
859 if not self.ui.verbose:
860 if not self.ui.verbose:
860 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
861 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
861 self.ui.traceback()
862 self.ui.traceback()
862 return (False, list(files), False)
863 return (False, list(files), False)
863
864
864 def apply(self, repo, series, list=False, update_status=True,
865 def apply(self, repo, series, list=False, update_status=True,
865 strict=False, patchdir=None, merge=None, all_files=None,
866 strict=False, patchdir=None, merge=None, all_files=None,
866 tobackup=None, keepchanges=False):
867 tobackup=None, keepchanges=False):
867 wlock = lock = tr = None
868 wlock = lock = tr = None
868 try:
869 try:
869 wlock = repo.wlock()
870 wlock = repo.wlock()
870 lock = repo.lock()
871 lock = repo.lock()
871 tr = repo.transaction("qpush")
872 tr = repo.transaction("qpush")
872 try:
873 try:
873 ret = self._apply(repo, series, list, update_status,
874 ret = self._apply(repo, series, list, update_status,
874 strict, patchdir, merge, all_files=all_files,
875 strict, patchdir, merge, all_files=all_files,
875 tobackup=tobackup, keepchanges=keepchanges)
876 tobackup=tobackup, keepchanges=keepchanges)
876 tr.close()
877 tr.close()
877 self.savedirty()
878 self.savedirty()
878 return ret
879 return ret
879 except AbortNoCleanup:
880 except AbortNoCleanup:
880 tr.close()
881 tr.close()
881 self.savedirty()
882 self.savedirty()
882 raise
883 raise
883 except: # re-raises
884 except: # re-raises
884 try:
885 try:
885 tr.abort()
886 tr.abort()
886 finally:
887 finally:
887 self.invalidate()
888 self.invalidate()
888 raise
889 raise
889 finally:
890 finally:
890 release(tr, lock, wlock)
891 release(tr, lock, wlock)
891 self.removeundo(repo)
892 self.removeundo(repo)
892
893
893 def _apply(self, repo, series, list=False, update_status=True,
894 def _apply(self, repo, series, list=False, update_status=True,
894 strict=False, patchdir=None, merge=None, all_files=None,
895 strict=False, patchdir=None, merge=None, all_files=None,
895 tobackup=None, keepchanges=False):
896 tobackup=None, keepchanges=False):
896 """returns (error, hash)
897 """returns (error, hash)
897
898
898 error = 1 for unable to read, 2 for patch failed, 3 for patch
899 error = 1 for unable to read, 2 for patch failed, 3 for patch
899 fuzz. tobackup is None or a set of files to backup before they
900 fuzz. tobackup is None or a set of files to backup before they
900 are modified by a patch.
901 are modified by a patch.
901 """
902 """
902 # TODO unify with commands.py
903 # TODO unify with commands.py
903 if not patchdir:
904 if not patchdir:
904 patchdir = self.path
905 patchdir = self.path
905 err = 0
906 err = 0
906 n = None
907 n = None
907 for patchname in series:
908 for patchname in series:
908 pushable, reason = self.pushable(patchname)
909 pushable, reason = self.pushable(patchname)
909 if not pushable:
910 if not pushable:
910 self.explainpushable(patchname, all_patches=True)
911 self.explainpushable(patchname, all_patches=True)
911 continue
912 continue
912 self.ui.status(_("applying %s\n") % patchname)
913 self.ui.status(_("applying %s\n") % patchname)
913 pf = os.path.join(patchdir, patchname)
914 pf = os.path.join(patchdir, patchname)
914
915
915 try:
916 try:
916 ph = patchheader(self.join(patchname), self.plainmode)
917 ph = patchheader(self.join(patchname), self.plainmode)
917 except IOError:
918 except IOError:
918 self.ui.warn(_("unable to read %s\n") % patchname)
919 self.ui.warn(_("unable to read %s\n") % patchname)
919 err = 1
920 err = 1
920 break
921 break
921
922
922 message = ph.message
923 message = ph.message
923 if not message:
924 if not message:
924 # The commit message should not be translated
925 # The commit message should not be translated
925 message = "imported patch %s\n" % patchname
926 message = "imported patch %s\n" % patchname
926 else:
927 else:
927 if list:
928 if list:
928 # The commit message should not be translated
929 # The commit message should not be translated
929 message.append("\nimported patch %s" % patchname)
930 message.append("\nimported patch %s" % patchname)
930 message = '\n'.join(message)
931 message = '\n'.join(message)
931
932
932 if ph.haspatch:
933 if ph.haspatch:
933 if tobackup:
934 if tobackup:
934 touched = patchmod.changedfiles(self.ui, repo, pf)
935 touched = patchmod.changedfiles(self.ui, repo, pf)
935 touched = set(touched) & tobackup
936 touched = set(touched) & tobackup
936 if touched and keepchanges:
937 if touched and keepchanges:
937 raise AbortNoCleanup(
938 raise AbortNoCleanup(
938 _("conflicting local changes found"),
939 _("conflicting local changes found"),
939 hint=_("did you forget to qrefresh?"))
940 hint=_("did you forget to qrefresh?"))
940 self.backup(repo, touched, copy=True)
941 self.backup(repo, touched, copy=True)
941 tobackup = tobackup - touched
942 tobackup = tobackup - touched
942 (patcherr, files, fuzz) = self.patch(repo, pf)
943 (patcherr, files, fuzz) = self.patch(repo, pf)
943 if all_files is not None:
944 if all_files is not None:
944 all_files.update(files)
945 all_files.update(files)
945 patcherr = not patcherr
946 patcherr = not patcherr
946 else:
947 else:
947 self.ui.warn(_("patch %s is empty\n") % patchname)
948 self.ui.warn(_("patch %s is empty\n") % patchname)
948 patcherr, files, fuzz = 0, [], 0
949 patcherr, files, fuzz = 0, [], 0
949
950
950 if merge and files:
951 if merge and files:
951 # Mark as removed/merged and update dirstate parent info
952 # Mark as removed/merged and update dirstate parent info
952 removed = []
953 removed = []
953 merged = []
954 merged = []
954 for f in files:
955 for f in files:
955 if os.path.lexists(repo.wjoin(f)):
956 if os.path.lexists(repo.wjoin(f)):
956 merged.append(f)
957 merged.append(f)
957 else:
958 else:
958 removed.append(f)
959 removed.append(f)
959 with repo.dirstate.parentchange():
960 with repo.dirstate.parentchange():
960 for f in removed:
961 for f in removed:
961 repo.dirstate.remove(f)
962 repo.dirstate.remove(f)
962 for f in merged:
963 for f in merged:
963 repo.dirstate.merge(f)
964 repo.dirstate.merge(f)
964 p1, p2 = repo.dirstate.parents()
965 p1, p2 = repo.dirstate.parents()
965 repo.setparents(p1, merge)
966 repo.setparents(p1, merge)
966
967
967 if all_files and '.hgsubstate' in all_files:
968 if all_files and '.hgsubstate' in all_files:
968 wctx = repo[None]
969 wctx = repo[None]
969 pctx = repo['.']
970 pctx = repo['.']
970 overwrite = False
971 overwrite = False
971 mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
972 mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
972 overwrite)
973 overwrite)
973 files += mergedsubstate.keys()
974 files += mergedsubstate.keys()
974
975
975 match = scmutil.matchfiles(repo, files or [])
976 match = scmutil.matchfiles(repo, files or [])
976 oldtip = repo['tip']
977 oldtip = repo['tip']
977 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
978 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
978 force=True)
979 force=True)
979 if repo['tip'] == oldtip:
980 if repo['tip'] == oldtip:
980 raise error.Abort(_("qpush exactly duplicates child changeset"))
981 raise error.Abort(_("qpush exactly duplicates child changeset"))
981 if n is None:
982 if n is None:
982 raise error.Abort(_("repository commit failed"))
983 raise error.Abort(_("repository commit failed"))
983
984
984 if update_status:
985 if update_status:
985 self.applied.append(statusentry(n, patchname))
986 self.applied.append(statusentry(n, patchname))
986
987
987 if patcherr:
988 if patcherr:
988 self.ui.warn(_("patch failed, rejects left in working "
989 self.ui.warn(_("patch failed, rejects left in working "
989 "directory\n"))
990 "directory\n"))
990 err = 2
991 err = 2
991 break
992 break
992
993
993 if fuzz and strict:
994 if fuzz and strict:
994 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
995 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
995 err = 3
996 err = 3
996 break
997 break
997 return (err, n)
998 return (err, n)
998
999
999 def _cleanup(self, patches, numrevs, keep=False):
1000 def _cleanup(self, patches, numrevs, keep=False):
1000 if not keep:
1001 if not keep:
1001 r = self.qrepo()
1002 r = self.qrepo()
1002 if r:
1003 if r:
1003 r[None].forget(patches)
1004 r[None].forget(patches)
1004 for p in patches:
1005 for p in patches:
1005 try:
1006 try:
1006 os.unlink(self.join(p))
1007 os.unlink(self.join(p))
1007 except OSError as inst:
1008 except OSError as inst:
1008 if inst.errno != errno.ENOENT:
1009 if inst.errno != errno.ENOENT:
1009 raise
1010 raise
1010
1011
1011 qfinished = []
1012 qfinished = []
1012 if numrevs:
1013 if numrevs:
1013 qfinished = self.applied[:numrevs]
1014 qfinished = self.applied[:numrevs]
1014 del self.applied[:numrevs]
1015 del self.applied[:numrevs]
1015 self.applieddirty = True
1016 self.applieddirty = True
1016
1017
1017 unknown = []
1018 unknown = []
1018
1019
1019 for (i, p) in sorted([(self.findseries(p), p) for p in patches],
1020 for (i, p) in sorted([(self.findseries(p), p) for p in patches],
1020 reverse=True):
1021 reverse=True):
1021 if i is not None:
1022 if i is not None:
1022 del self.fullseries[i]
1023 del self.fullseries[i]
1023 else:
1024 else:
1024 unknown.append(p)
1025 unknown.append(p)
1025
1026
1026 if unknown:
1027 if unknown:
1027 if numrevs:
1028 if numrevs:
1028 rev = dict((entry.name, entry.node) for entry in qfinished)
1029 rev = dict((entry.name, entry.node) for entry in qfinished)
1029 for p in unknown:
1030 for p in unknown:
1030 msg = _('revision %s refers to unknown patches: %s\n')
1031 msg = _('revision %s refers to unknown patches: %s\n')
1031 self.ui.warn(msg % (short(rev[p]), p))
1032 self.ui.warn(msg % (short(rev[p]), p))
1032 else:
1033 else:
1033 msg = _('unknown patches: %s\n')
1034 msg = _('unknown patches: %s\n')
1034 raise error.Abort(''.join(msg % p for p in unknown))
1035 raise error.Abort(''.join(msg % p for p in unknown))
1035
1036
1036 self.parseseries()
1037 self.parseseries()
1037 self.seriesdirty = True
1038 self.seriesdirty = True
1038 return [entry.node for entry in qfinished]
1039 return [entry.node for entry in qfinished]
1039
1040
1040 def _revpatches(self, repo, revs):
1041 def _revpatches(self, repo, revs):
1041 firstrev = repo[self.applied[0].node].rev()
1042 firstrev = repo[self.applied[0].node].rev()
1042 patches = []
1043 patches = []
1043 for i, rev in enumerate(revs):
1044 for i, rev in enumerate(revs):
1044
1045
1045 if rev < firstrev:
1046 if rev < firstrev:
1046 raise error.Abort(_('revision %d is not managed') % rev)
1047 raise error.Abort(_('revision %d is not managed') % rev)
1047
1048
1048 ctx = repo[rev]
1049 ctx = repo[rev]
1049 base = self.applied[i].node
1050 base = self.applied[i].node
1050 if ctx.node() != base:
1051 if ctx.node() != base:
1051 msg = _('cannot delete revision %d above applied patches')
1052 msg = _('cannot delete revision %d above applied patches')
1052 raise error.Abort(msg % rev)
1053 raise error.Abort(msg % rev)
1053
1054
1054 patch = self.applied[i].name
1055 patch = self.applied[i].name
1055 for fmt in ('[mq]: %s', 'imported patch %s'):
1056 for fmt in ('[mq]: %s', 'imported patch %s'):
1056 if ctx.description() == fmt % patch:
1057 if ctx.description() == fmt % patch:
1057 msg = _('patch %s finalized without changeset message\n')
1058 msg = _('patch %s finalized without changeset message\n')
1058 repo.ui.status(msg % patch)
1059 repo.ui.status(msg % patch)
1059 break
1060 break
1060
1061
1061 patches.append(patch)
1062 patches.append(patch)
1062 return patches
1063 return patches
1063
1064
1064 def finish(self, repo, revs):
1065 def finish(self, repo, revs):
1065 # Manually trigger phase computation to ensure phasedefaults is
1066 # Manually trigger phase computation to ensure phasedefaults is
1066 # executed before we remove the patches.
1067 # executed before we remove the patches.
1067 repo._phasecache
1068 repo._phasecache
1068 patches = self._revpatches(repo, sorted(revs))
1069 patches = self._revpatches(repo, sorted(revs))
1069 qfinished = self._cleanup(patches, len(patches))
1070 qfinished = self._cleanup(patches, len(patches))
1070 if qfinished and repo.ui.configbool('mq', 'secret'):
1071 if qfinished and repo.ui.configbool('mq', 'secret'):
1071 # only use this logic when the secret option is added
1072 # only use this logic when the secret option is added
1072 oldqbase = repo[qfinished[0]]
1073 oldqbase = repo[qfinished[0]]
1073 tphase = phases.newcommitphase(repo.ui)
1074 tphase = phases.newcommitphase(repo.ui)
1074 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
1075 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
1075 with repo.transaction('qfinish') as tr:
1076 with repo.transaction('qfinish') as tr:
1076 phases.advanceboundary(repo, tr, tphase, qfinished)
1077 phases.advanceboundary(repo, tr, tphase, qfinished)
1077
1078
1078 def delete(self, repo, patches, opts):
1079 def delete(self, repo, patches, opts):
1079 if not patches and not opts.get('rev'):
1080 if not patches and not opts.get('rev'):
1080 raise error.Abort(_('qdelete requires at least one revision or '
1081 raise error.Abort(_('qdelete requires at least one revision or '
1081 'patch name'))
1082 'patch name'))
1082
1083
1083 realpatches = []
1084 realpatches = []
1084 for patch in patches:
1085 for patch in patches:
1085 patch = self.lookup(patch, strict=True)
1086 patch = self.lookup(patch, strict=True)
1086 info = self.isapplied(patch)
1087 info = self.isapplied(patch)
1087 if info:
1088 if info:
1088 raise error.Abort(_("cannot delete applied patch %s") % patch)
1089 raise error.Abort(_("cannot delete applied patch %s") % patch)
1089 if patch not in self.series:
1090 if patch not in self.series:
1090 raise error.Abort(_("patch %s not in series file") % patch)
1091 raise error.Abort(_("patch %s not in series file") % patch)
1091 if patch not in realpatches:
1092 if patch not in realpatches:
1092 realpatches.append(patch)
1093 realpatches.append(patch)
1093
1094
1094 numrevs = 0
1095 numrevs = 0
1095 if opts.get('rev'):
1096 if opts.get('rev'):
1096 if not self.applied:
1097 if not self.applied:
1097 raise error.Abort(_('no patches applied'))
1098 raise error.Abort(_('no patches applied'))
1098 revs = scmutil.revrange(repo, opts.get('rev'))
1099 revs = scmutil.revrange(repo, opts.get('rev'))
1099 revs.sort()
1100 revs.sort()
1100 revpatches = self._revpatches(repo, revs)
1101 revpatches = self._revpatches(repo, revs)
1101 realpatches += revpatches
1102 realpatches += revpatches
1102 numrevs = len(revpatches)
1103 numrevs = len(revpatches)
1103
1104
1104 self._cleanup(realpatches, numrevs, opts.get('keep'))
1105 self._cleanup(realpatches, numrevs, opts.get('keep'))
1105
1106
1106 def checktoppatch(self, repo):
1107 def checktoppatch(self, repo):
1107 '''check that working directory is at qtip'''
1108 '''check that working directory is at qtip'''
1108 if self.applied:
1109 if self.applied:
1109 top = self.applied[-1].node
1110 top = self.applied[-1].node
1110 patch = self.applied[-1].name
1111 patch = self.applied[-1].name
1111 if repo.dirstate.p1() != top:
1112 if repo.dirstate.p1() != top:
1112 raise error.Abort(_("working directory revision is not qtip"))
1113 raise error.Abort(_("working directory revision is not qtip"))
1113 return top, patch
1114 return top, patch
1114 return None, None
1115 return None, None
1115
1116
1116 def putsubstate2changes(self, substatestate, changes):
1117 def putsubstate2changes(self, substatestate, changes):
1117 for files in changes[:3]:
1118 for files in changes[:3]:
1118 if '.hgsubstate' in files:
1119 if '.hgsubstate' in files:
1119 return # already listed up
1120 return # already listed up
1120 # not yet listed up
1121 # not yet listed up
1121 if substatestate in 'a?':
1122 if substatestate in 'a?':
1122 changes[1].append('.hgsubstate')
1123 changes[1].append('.hgsubstate')
1123 elif substatestate in 'r':
1124 elif substatestate in 'r':
1124 changes[2].append('.hgsubstate')
1125 changes[2].append('.hgsubstate')
1125 else: # modified
1126 else: # modified
1126 changes[0].append('.hgsubstate')
1127 changes[0].append('.hgsubstate')
1127
1128
1128 def checklocalchanges(self, repo, force=False, refresh=True):
1129 def checklocalchanges(self, repo, force=False, refresh=True):
1129 excsuffix = ''
1130 excsuffix = ''
1130 if refresh:
1131 if refresh:
1131 excsuffix = ', qrefresh first'
1132 excsuffix = ', qrefresh first'
1132 # plain versions for i18n tool to detect them
1133 # plain versions for i18n tool to detect them
1133 _("local changes found, qrefresh first")
1134 _("local changes found, qrefresh first")
1134 _("local changed subrepos found, qrefresh first")
1135 _("local changed subrepos found, qrefresh first")
1135 return checklocalchanges(repo, force, excsuffix)
1136 return checklocalchanges(repo, force, excsuffix)
1136
1137
1137 _reserved = ('series', 'status', 'guards', '.', '..')
1138 _reserved = ('series', 'status', 'guards', '.', '..')
1138 def checkreservedname(self, name):
1139 def checkreservedname(self, name):
1139 if name in self._reserved:
1140 if name in self._reserved:
1140 raise error.Abort(_('"%s" cannot be used as the name of a patch')
1141 raise error.Abort(_('"%s" cannot be used as the name of a patch')
1141 % name)
1142 % name)
1142 if name != name.strip():
1143 if name != name.strip():
1143 # whitespace is stripped by parseseries()
1144 # whitespace is stripped by parseseries()
1144 raise error.Abort(_('patch name cannot begin or end with '
1145 raise error.Abort(_('patch name cannot begin or end with '
1145 'whitespace'))
1146 'whitespace'))
1146 for prefix in ('.hg', '.mq'):
1147 for prefix in ('.hg', '.mq'):
1147 if name.startswith(prefix):
1148 if name.startswith(prefix):
1148 raise error.Abort(_('patch name cannot begin with "%s"')
1149 raise error.Abort(_('patch name cannot begin with "%s"')
1149 % prefix)
1150 % prefix)
1150 for c in ('#', ':', '\r', '\n'):
1151 for c in ('#', ':', '\r', '\n'):
1151 if c in name:
1152 if c in name:
1152 raise error.Abort(_('%r cannot be used in the name of a patch')
1153 raise error.Abort(_('%r cannot be used in the name of a patch')
1153 % c)
1154 % c)
1154
1155
1155 def checkpatchname(self, name, force=False):
1156 def checkpatchname(self, name, force=False):
1156 self.checkreservedname(name)
1157 self.checkreservedname(name)
1157 if not force and os.path.exists(self.join(name)):
1158 if not force and os.path.exists(self.join(name)):
1158 if os.path.isdir(self.join(name)):
1159 if os.path.isdir(self.join(name)):
1159 raise error.Abort(_('"%s" already exists as a directory')
1160 raise error.Abort(_('"%s" already exists as a directory')
1160 % name)
1161 % name)
1161 else:
1162 else:
1162 raise error.Abort(_('patch "%s" already exists') % name)
1163 raise error.Abort(_('patch "%s" already exists') % name)
1163
1164
1164 def makepatchname(self, title, fallbackname):
1165 def makepatchname(self, title, fallbackname):
1165 """Return a suitable filename for title, adding a suffix to make
1166 """Return a suitable filename for title, adding a suffix to make
1166 it unique in the existing list"""
1167 it unique in the existing list"""
1167 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1168 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1168 namebase = namebase[:75] # avoid too long name (issue5117)
1169 namebase = namebase[:75] # avoid too long name (issue5117)
1169 if namebase:
1170 if namebase:
1170 try:
1171 try:
1171 self.checkreservedname(namebase)
1172 self.checkreservedname(namebase)
1172 except error.Abort:
1173 except error.Abort:
1173 namebase = fallbackname
1174 namebase = fallbackname
1174 else:
1175 else:
1175 namebase = fallbackname
1176 namebase = fallbackname
1176 name = namebase
1177 name = namebase
1177 i = 0
1178 i = 0
1178 while True:
1179 while True:
1179 if name not in self.fullseries:
1180 if name not in self.fullseries:
1180 try:
1181 try:
1181 self.checkpatchname(name)
1182 self.checkpatchname(name)
1182 break
1183 break
1183 except error.Abort:
1184 except error.Abort:
1184 pass
1185 pass
1185 i += 1
1186 i += 1
1186 name = '%s__%d' % (namebase, i)
1187 name = '%s__%d' % (namebase, i)
1187 return name
1188 return name
1188
1189
1189 def checkkeepchanges(self, keepchanges, force):
1190 def checkkeepchanges(self, keepchanges, force):
1190 if force and keepchanges:
1191 if force and keepchanges:
1191 raise error.Abort(_('cannot use both --force and --keep-changes'))
1192 raise error.Abort(_('cannot use both --force and --keep-changes'))
1192
1193
1193 def new(self, repo, patchfn, *pats, **opts):
1194 def new(self, repo, patchfn, *pats, **opts):
1194 """options:
1195 """options:
1195 msg: a string or a no-argument function returning a string
1196 msg: a string or a no-argument function returning a string
1196 """
1197 """
1197 opts = pycompat.byteskwargs(opts)
1198 opts = pycompat.byteskwargs(opts)
1198 msg = opts.get('msg')
1199 msg = opts.get('msg')
1199 edit = opts.get('edit')
1200 edit = opts.get('edit')
1200 editform = opts.get('editform', 'mq.qnew')
1201 editform = opts.get('editform', 'mq.qnew')
1201 user = opts.get('user')
1202 user = opts.get('user')
1202 date = opts.get('date')
1203 date = opts.get('date')
1203 if date:
1204 if date:
1204 date = util.parsedate(date)
1205 date = dateutil.parsedate(date)
1205 diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
1206 diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
1206 if opts.get('checkname', True):
1207 if opts.get('checkname', True):
1207 self.checkpatchname(patchfn)
1208 self.checkpatchname(patchfn)
1208 inclsubs = checksubstate(repo)
1209 inclsubs = checksubstate(repo)
1209 if inclsubs:
1210 if inclsubs:
1210 substatestate = repo.dirstate['.hgsubstate']
1211 substatestate = repo.dirstate['.hgsubstate']
1211 if opts.get('include') or opts.get('exclude') or pats:
1212 if opts.get('include') or opts.get('exclude') or pats:
1212 # detect missing files in pats
1213 # detect missing files in pats
1213 def badfn(f, msg):
1214 def badfn(f, msg):
1214 if f != '.hgsubstate': # .hgsubstate is auto-created
1215 if f != '.hgsubstate': # .hgsubstate is auto-created
1215 raise error.Abort('%s: %s' % (f, msg))
1216 raise error.Abort('%s: %s' % (f, msg))
1216 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1217 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1217 changes = repo.status(match=match)
1218 changes = repo.status(match=match)
1218 else:
1219 else:
1219 changes = self.checklocalchanges(repo, force=True)
1220 changes = self.checklocalchanges(repo, force=True)
1220 commitfiles = list(inclsubs)
1221 commitfiles = list(inclsubs)
1221 for files in changes[:3]:
1222 for files in changes[:3]:
1222 commitfiles.extend(files)
1223 commitfiles.extend(files)
1223 match = scmutil.matchfiles(repo, commitfiles)
1224 match = scmutil.matchfiles(repo, commitfiles)
1224 if len(repo[None].parents()) > 1:
1225 if len(repo[None].parents()) > 1:
1225 raise error.Abort(_('cannot manage merge changesets'))
1226 raise error.Abort(_('cannot manage merge changesets'))
1226 self.checktoppatch(repo)
1227 self.checktoppatch(repo)
1227 insert = self.fullseriesend()
1228 insert = self.fullseriesend()
1228 with repo.wlock():
1229 with repo.wlock():
1229 try:
1230 try:
1230 # if patch file write fails, abort early
1231 # if patch file write fails, abort early
1231 p = self.opener(patchfn, "w")
1232 p = self.opener(patchfn, "w")
1232 except IOError as e:
1233 except IOError as e:
1233 raise error.Abort(_('cannot write patch "%s": %s')
1234 raise error.Abort(_('cannot write patch "%s": %s')
1234 % (patchfn, encoding.strtolocal(e.strerror)))
1235 % (patchfn, encoding.strtolocal(e.strerror)))
1235 try:
1236 try:
1236 defaultmsg = "[mq]: %s" % patchfn
1237 defaultmsg = "[mq]: %s" % patchfn
1237 editor = cmdutil.getcommiteditor(editform=editform)
1238 editor = cmdutil.getcommiteditor(editform=editform)
1238 if edit:
1239 if edit:
1239 def finishdesc(desc):
1240 def finishdesc(desc):
1240 if desc.rstrip():
1241 if desc.rstrip():
1241 return desc
1242 return desc
1242 else:
1243 else:
1243 return defaultmsg
1244 return defaultmsg
1244 # i18n: this message is shown in editor with "HG: " prefix
1245 # i18n: this message is shown in editor with "HG: " prefix
1245 extramsg = _('Leave message empty to use default message.')
1246 extramsg = _('Leave message empty to use default message.')
1246 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1247 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1247 extramsg=extramsg,
1248 extramsg=extramsg,
1248 editform=editform)
1249 editform=editform)
1249 commitmsg = msg
1250 commitmsg = msg
1250 else:
1251 else:
1251 commitmsg = msg or defaultmsg
1252 commitmsg = msg or defaultmsg
1252
1253
1253 n = newcommit(repo, None, commitmsg, user, date, match=match,
1254 n = newcommit(repo, None, commitmsg, user, date, match=match,
1254 force=True, editor=editor)
1255 force=True, editor=editor)
1255 if n is None:
1256 if n is None:
1256 raise error.Abort(_("repo commit failed"))
1257 raise error.Abort(_("repo commit failed"))
1257 try:
1258 try:
1258 self.fullseries[insert:insert] = [patchfn]
1259 self.fullseries[insert:insert] = [patchfn]
1259 self.applied.append(statusentry(n, patchfn))
1260 self.applied.append(statusentry(n, patchfn))
1260 self.parseseries()
1261 self.parseseries()
1261 self.seriesdirty = True
1262 self.seriesdirty = True
1262 self.applieddirty = True
1263 self.applieddirty = True
1263 nctx = repo[n]
1264 nctx = repo[n]
1264 ph = patchheader(self.join(patchfn), self.plainmode)
1265 ph = patchheader(self.join(patchfn), self.plainmode)
1265 if user:
1266 if user:
1266 ph.setuser(user)
1267 ph.setuser(user)
1267 if date:
1268 if date:
1268 ph.setdate('%d %d' % date)
1269 ph.setdate('%d %d' % date)
1269 ph.setparent(hex(nctx.p1().node()))
1270 ph.setparent(hex(nctx.p1().node()))
1270 msg = nctx.description().strip()
1271 msg = nctx.description().strip()
1271 if msg == defaultmsg.strip():
1272 if msg == defaultmsg.strip():
1272 msg = ''
1273 msg = ''
1273 ph.setmessage(msg)
1274 ph.setmessage(msg)
1274 p.write(bytes(ph))
1275 p.write(bytes(ph))
1275 if commitfiles:
1276 if commitfiles:
1276 parent = self.qparents(repo, n)
1277 parent = self.qparents(repo, n)
1277 if inclsubs:
1278 if inclsubs:
1278 self.putsubstate2changes(substatestate, changes)
1279 self.putsubstate2changes(substatestate, changes)
1279 chunks = patchmod.diff(repo, node1=parent, node2=n,
1280 chunks = patchmod.diff(repo, node1=parent, node2=n,
1280 changes=changes, opts=diffopts)
1281 changes=changes, opts=diffopts)
1281 for chunk in chunks:
1282 for chunk in chunks:
1282 p.write(chunk)
1283 p.write(chunk)
1283 p.close()
1284 p.close()
1284 r = self.qrepo()
1285 r = self.qrepo()
1285 if r:
1286 if r:
1286 r[None].add([patchfn])
1287 r[None].add([patchfn])
1287 except: # re-raises
1288 except: # re-raises
1288 repo.rollback()
1289 repo.rollback()
1289 raise
1290 raise
1290 except Exception:
1291 except Exception:
1291 patchpath = self.join(patchfn)
1292 patchpath = self.join(patchfn)
1292 try:
1293 try:
1293 os.unlink(patchpath)
1294 os.unlink(patchpath)
1294 except OSError:
1295 except OSError:
1295 self.ui.warn(_('error unlinking %s\n') % patchpath)
1296 self.ui.warn(_('error unlinking %s\n') % patchpath)
1296 raise
1297 raise
1297 self.removeundo(repo)
1298 self.removeundo(repo)
1298
1299
1299 def isapplied(self, patch):
1300 def isapplied(self, patch):
1300 """returns (index, rev, patch)"""
1301 """returns (index, rev, patch)"""
1301 for i, a in enumerate(self.applied):
1302 for i, a in enumerate(self.applied):
1302 if a.name == patch:
1303 if a.name == patch:
1303 return (i, a.node, a.name)
1304 return (i, a.node, a.name)
1304 return None
1305 return None
1305
1306
1306 # if the exact patch name does not exist, we try a few
1307 # if the exact patch name does not exist, we try a few
1307 # variations. If strict is passed, we try only #1
1308 # variations. If strict is passed, we try only #1
1308 #
1309 #
1309 # 1) a number (as string) to indicate an offset in the series file
1310 # 1) a number (as string) to indicate an offset in the series file
1310 # 2) a unique substring of the patch name was given
1311 # 2) a unique substring of the patch name was given
1311 # 3) patchname[-+]num to indicate an offset in the series file
1312 # 3) patchname[-+]num to indicate an offset in the series file
1312 def lookup(self, patch, strict=False):
1313 def lookup(self, patch, strict=False):
1313 def partialname(s):
1314 def partialname(s):
1314 if s in self.series:
1315 if s in self.series:
1315 return s
1316 return s
1316 matches = [x for x in self.series if s in x]
1317 matches = [x for x in self.series if s in x]
1317 if len(matches) > 1:
1318 if len(matches) > 1:
1318 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1319 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1319 for m in matches:
1320 for m in matches:
1320 self.ui.warn(' %s\n' % m)
1321 self.ui.warn(' %s\n' % m)
1321 return None
1322 return None
1322 if matches:
1323 if matches:
1323 return matches[0]
1324 return matches[0]
1324 if self.series and self.applied:
1325 if self.series and self.applied:
1325 if s == 'qtip':
1326 if s == 'qtip':
1326 return self.series[self.seriesend(True) - 1]
1327 return self.series[self.seriesend(True) - 1]
1327 if s == 'qbase':
1328 if s == 'qbase':
1328 return self.series[0]
1329 return self.series[0]
1329 return None
1330 return None
1330
1331
1331 if patch in self.series:
1332 if patch in self.series:
1332 return patch
1333 return patch
1333
1334
1334 if not os.path.isfile(self.join(patch)):
1335 if not os.path.isfile(self.join(patch)):
1335 try:
1336 try:
1336 sno = int(patch)
1337 sno = int(patch)
1337 except (ValueError, OverflowError):
1338 except (ValueError, OverflowError):
1338 pass
1339 pass
1339 else:
1340 else:
1340 if -len(self.series) <= sno < len(self.series):
1341 if -len(self.series) <= sno < len(self.series):
1341 return self.series[sno]
1342 return self.series[sno]
1342
1343
1343 if not strict:
1344 if not strict:
1344 res = partialname(patch)
1345 res = partialname(patch)
1345 if res:
1346 if res:
1346 return res
1347 return res
1347 minus = patch.rfind('-')
1348 minus = patch.rfind('-')
1348 if minus >= 0:
1349 if minus >= 0:
1349 res = partialname(patch[:minus])
1350 res = partialname(patch[:minus])
1350 if res:
1351 if res:
1351 i = self.series.index(res)
1352 i = self.series.index(res)
1352 try:
1353 try:
1353 off = int(patch[minus + 1:] or 1)
1354 off = int(patch[minus + 1:] or 1)
1354 except (ValueError, OverflowError):
1355 except (ValueError, OverflowError):
1355 pass
1356 pass
1356 else:
1357 else:
1357 if i - off >= 0:
1358 if i - off >= 0:
1358 return self.series[i - off]
1359 return self.series[i - off]
1359 plus = patch.rfind('+')
1360 plus = patch.rfind('+')
1360 if plus >= 0:
1361 if plus >= 0:
1361 res = partialname(patch[:plus])
1362 res = partialname(patch[:plus])
1362 if res:
1363 if res:
1363 i = self.series.index(res)
1364 i = self.series.index(res)
1364 try:
1365 try:
1365 off = int(patch[plus + 1:] or 1)
1366 off = int(patch[plus + 1:] or 1)
1366 except (ValueError, OverflowError):
1367 except (ValueError, OverflowError):
1367 pass
1368 pass
1368 else:
1369 else:
1369 if i + off < len(self.series):
1370 if i + off < len(self.series):
1370 return self.series[i + off]
1371 return self.series[i + off]
1371 raise error.Abort(_("patch %s not in series") % patch)
1372 raise error.Abort(_("patch %s not in series") % patch)
1372
1373
1373 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1374 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1374 all=False, move=False, exact=False, nobackup=False,
1375 all=False, move=False, exact=False, nobackup=False,
1375 keepchanges=False):
1376 keepchanges=False):
1376 self.checkkeepchanges(keepchanges, force)
1377 self.checkkeepchanges(keepchanges, force)
1377 diffopts = self.diffopts()
1378 diffopts = self.diffopts()
1378 with repo.wlock():
1379 with repo.wlock():
1379 heads = []
1380 heads = []
1380 for hs in repo.branchmap().itervalues():
1381 for hs in repo.branchmap().itervalues():
1381 heads.extend(hs)
1382 heads.extend(hs)
1382 if not heads:
1383 if not heads:
1383 heads = [nullid]
1384 heads = [nullid]
1384 if repo.dirstate.p1() not in heads and not exact:
1385 if repo.dirstate.p1() not in heads and not exact:
1385 self.ui.status(_("(working directory not at a head)\n"))
1386 self.ui.status(_("(working directory not at a head)\n"))
1386
1387
1387 if not self.series:
1388 if not self.series:
1388 self.ui.warn(_('no patches in series\n'))
1389 self.ui.warn(_('no patches in series\n'))
1389 return 0
1390 return 0
1390
1391
1391 # Suppose our series file is: A B C and the current 'top'
1392 # Suppose our series file is: A B C and the current 'top'
1392 # patch is B. qpush C should be performed (moving forward)
1393 # patch is B. qpush C should be performed (moving forward)
1393 # qpush B is a NOP (no change) qpush A is an error (can't
1394 # qpush B is a NOP (no change) qpush A is an error (can't
1394 # go backwards with qpush)
1395 # go backwards with qpush)
1395 if patch:
1396 if patch:
1396 patch = self.lookup(patch)
1397 patch = self.lookup(patch)
1397 info = self.isapplied(patch)
1398 info = self.isapplied(patch)
1398 if info and info[0] >= len(self.applied) - 1:
1399 if info and info[0] >= len(self.applied) - 1:
1399 self.ui.warn(
1400 self.ui.warn(
1400 _('qpush: %s is already at the top\n') % patch)
1401 _('qpush: %s is already at the top\n') % patch)
1401 return 0
1402 return 0
1402
1403
1403 pushable, reason = self.pushable(patch)
1404 pushable, reason = self.pushable(patch)
1404 if pushable:
1405 if pushable:
1405 if self.series.index(patch) < self.seriesend():
1406 if self.series.index(patch) < self.seriesend():
1406 raise error.Abort(
1407 raise error.Abort(
1407 _("cannot push to a previous patch: %s") % patch)
1408 _("cannot push to a previous patch: %s") % patch)
1408 else:
1409 else:
1409 if reason:
1410 if reason:
1410 reason = _('guarded by %s') % reason
1411 reason = _('guarded by %s') % reason
1411 else:
1412 else:
1412 reason = _('no matching guards')
1413 reason = _('no matching guards')
1413 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1414 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1414 return 1
1415 return 1
1415 elif all:
1416 elif all:
1416 patch = self.series[-1]
1417 patch = self.series[-1]
1417 if self.isapplied(patch):
1418 if self.isapplied(patch):
1418 self.ui.warn(_('all patches are currently applied\n'))
1419 self.ui.warn(_('all patches are currently applied\n'))
1419 return 0
1420 return 0
1420
1421
1421 # Following the above example, starting at 'top' of B:
1422 # Following the above example, starting at 'top' of B:
1422 # qpush should be performed (pushes C), but a subsequent
1423 # qpush should be performed (pushes C), but a subsequent
1423 # qpush without an argument is an error (nothing to
1424 # qpush without an argument is an error (nothing to
1424 # apply). This allows a loop of "...while hg qpush..." to
1425 # apply). This allows a loop of "...while hg qpush..." to
1425 # work as it detects an error when done
1426 # work as it detects an error when done
1426 start = self.seriesend()
1427 start = self.seriesend()
1427 if start == len(self.series):
1428 if start == len(self.series):
1428 self.ui.warn(_('patch series already fully applied\n'))
1429 self.ui.warn(_('patch series already fully applied\n'))
1429 return 1
1430 return 1
1430 if not force and not keepchanges:
1431 if not force and not keepchanges:
1431 self.checklocalchanges(repo, refresh=self.applied)
1432 self.checklocalchanges(repo, refresh=self.applied)
1432
1433
1433 if exact:
1434 if exact:
1434 if keepchanges:
1435 if keepchanges:
1435 raise error.Abort(
1436 raise error.Abort(
1436 _("cannot use --exact and --keep-changes together"))
1437 _("cannot use --exact and --keep-changes together"))
1437 if move:
1438 if move:
1438 raise error.Abort(_('cannot use --exact and --move '
1439 raise error.Abort(_('cannot use --exact and --move '
1439 'together'))
1440 'together'))
1440 if self.applied:
1441 if self.applied:
1441 raise error.Abort(_('cannot push --exact with applied '
1442 raise error.Abort(_('cannot push --exact with applied '
1442 'patches'))
1443 'patches'))
1443 root = self.series[start]
1444 root = self.series[start]
1444 target = patchheader(self.join(root), self.plainmode).parent
1445 target = patchheader(self.join(root), self.plainmode).parent
1445 if not target:
1446 if not target:
1446 raise error.Abort(
1447 raise error.Abort(
1447 _("%s does not have a parent recorded") % root)
1448 _("%s does not have a parent recorded") % root)
1448 if not repo[target] == repo['.']:
1449 if not repo[target] == repo['.']:
1449 hg.update(repo, target)
1450 hg.update(repo, target)
1450
1451
1451 if move:
1452 if move:
1452 if not patch:
1453 if not patch:
1453 raise error.Abort(_("please specify the patch to move"))
1454 raise error.Abort(_("please specify the patch to move"))
1454 for fullstart, rpn in enumerate(self.fullseries):
1455 for fullstart, rpn in enumerate(self.fullseries):
1455 # strip markers for patch guards
1456 # strip markers for patch guards
1456 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1457 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1457 break
1458 break
1458 for i, rpn in enumerate(self.fullseries[fullstart:]):
1459 for i, rpn in enumerate(self.fullseries[fullstart:]):
1459 # strip markers for patch guards
1460 # strip markers for patch guards
1460 if self.guard_re.split(rpn, 1)[0] == patch:
1461 if self.guard_re.split(rpn, 1)[0] == patch:
1461 break
1462 break
1462 index = fullstart + i
1463 index = fullstart + i
1463 assert index < len(self.fullseries)
1464 assert index < len(self.fullseries)
1464 fullpatch = self.fullseries[index]
1465 fullpatch = self.fullseries[index]
1465 del self.fullseries[index]
1466 del self.fullseries[index]
1466 self.fullseries.insert(fullstart, fullpatch)
1467 self.fullseries.insert(fullstart, fullpatch)
1467 self.parseseries()
1468 self.parseseries()
1468 self.seriesdirty = True
1469 self.seriesdirty = True
1469
1470
1470 self.applieddirty = True
1471 self.applieddirty = True
1471 if start > 0:
1472 if start > 0:
1472 self.checktoppatch(repo)
1473 self.checktoppatch(repo)
1473 if not patch:
1474 if not patch:
1474 patch = self.series[start]
1475 patch = self.series[start]
1475 end = start + 1
1476 end = start + 1
1476 else:
1477 else:
1477 end = self.series.index(patch, start) + 1
1478 end = self.series.index(patch, start) + 1
1478
1479
1479 tobackup = set()
1480 tobackup = set()
1480 if (not nobackup and force) or keepchanges:
1481 if (not nobackup and force) or keepchanges:
1481 status = self.checklocalchanges(repo, force=True)
1482 status = self.checklocalchanges(repo, force=True)
1482 if keepchanges:
1483 if keepchanges:
1483 tobackup.update(status.modified + status.added +
1484 tobackup.update(status.modified + status.added +
1484 status.removed + status.deleted)
1485 status.removed + status.deleted)
1485 else:
1486 else:
1486 tobackup.update(status.modified + status.added)
1487 tobackup.update(status.modified + status.added)
1487
1488
1488 s = self.series[start:end]
1489 s = self.series[start:end]
1489 all_files = set()
1490 all_files = set()
1490 try:
1491 try:
1491 if mergeq:
1492 if mergeq:
1492 ret = self.mergepatch(repo, mergeq, s, diffopts)
1493 ret = self.mergepatch(repo, mergeq, s, diffopts)
1493 else:
1494 else:
1494 ret = self.apply(repo, s, list, all_files=all_files,
1495 ret = self.apply(repo, s, list, all_files=all_files,
1495 tobackup=tobackup, keepchanges=keepchanges)
1496 tobackup=tobackup, keepchanges=keepchanges)
1496 except AbortNoCleanup:
1497 except AbortNoCleanup:
1497 raise
1498 raise
1498 except: # re-raises
1499 except: # re-raises
1499 self.ui.warn(_('cleaning up working directory...\n'))
1500 self.ui.warn(_('cleaning up working directory...\n'))
1500 cmdutil.revert(self.ui, repo, repo['.'],
1501 cmdutil.revert(self.ui, repo, repo['.'],
1501 repo.dirstate.parents(), no_backup=True)
1502 repo.dirstate.parents(), no_backup=True)
1502 # only remove unknown files that we know we touched or
1503 # only remove unknown files that we know we touched or
1503 # created while patching
1504 # created while patching
1504 for f in all_files:
1505 for f in all_files:
1505 if f not in repo.dirstate:
1506 if f not in repo.dirstate:
1506 repo.wvfs.unlinkpath(f, ignoremissing=True)
1507 repo.wvfs.unlinkpath(f, ignoremissing=True)
1507 self.ui.warn(_('done\n'))
1508 self.ui.warn(_('done\n'))
1508 raise
1509 raise
1509
1510
1510 if not self.applied:
1511 if not self.applied:
1511 return ret[0]
1512 return ret[0]
1512 top = self.applied[-1].name
1513 top = self.applied[-1].name
1513 if ret[0] and ret[0] > 1:
1514 if ret[0] and ret[0] > 1:
1514 msg = _("errors during apply, please fix and qrefresh %s\n")
1515 msg = _("errors during apply, please fix and qrefresh %s\n")
1515 self.ui.write(msg % top)
1516 self.ui.write(msg % top)
1516 else:
1517 else:
1517 self.ui.write(_("now at: %s\n") % top)
1518 self.ui.write(_("now at: %s\n") % top)
1518 return ret[0]
1519 return ret[0]
1519
1520
1520 def pop(self, repo, patch=None, force=False, update=True, all=False,
1521 def pop(self, repo, patch=None, force=False, update=True, all=False,
1521 nobackup=False, keepchanges=False):
1522 nobackup=False, keepchanges=False):
1522 self.checkkeepchanges(keepchanges, force)
1523 self.checkkeepchanges(keepchanges, force)
1523 with repo.wlock():
1524 with repo.wlock():
1524 if patch:
1525 if patch:
1525 # index, rev, patch
1526 # index, rev, patch
1526 info = self.isapplied(patch)
1527 info = self.isapplied(patch)
1527 if not info:
1528 if not info:
1528 patch = self.lookup(patch)
1529 patch = self.lookup(patch)
1529 info = self.isapplied(patch)
1530 info = self.isapplied(patch)
1530 if not info:
1531 if not info:
1531 raise error.Abort(_("patch %s is not applied") % patch)
1532 raise error.Abort(_("patch %s is not applied") % patch)
1532
1533
1533 if not self.applied:
1534 if not self.applied:
1534 # Allow qpop -a to work repeatedly,
1535 # Allow qpop -a to work repeatedly,
1535 # but not qpop without an argument
1536 # but not qpop without an argument
1536 self.ui.warn(_("no patches applied\n"))
1537 self.ui.warn(_("no patches applied\n"))
1537 return not all
1538 return not all
1538
1539
1539 if all:
1540 if all:
1540 start = 0
1541 start = 0
1541 elif patch:
1542 elif patch:
1542 start = info[0] + 1
1543 start = info[0] + 1
1543 else:
1544 else:
1544 start = len(self.applied) - 1
1545 start = len(self.applied) - 1
1545
1546
1546 if start >= len(self.applied):
1547 if start >= len(self.applied):
1547 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1548 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1548 return
1549 return
1549
1550
1550 if not update:
1551 if not update:
1551 parents = repo.dirstate.parents()
1552 parents = repo.dirstate.parents()
1552 rr = [x.node for x in self.applied]
1553 rr = [x.node for x in self.applied]
1553 for p in parents:
1554 for p in parents:
1554 if p in rr:
1555 if p in rr:
1555 self.ui.warn(_("qpop: forcing dirstate update\n"))
1556 self.ui.warn(_("qpop: forcing dirstate update\n"))
1556 update = True
1557 update = True
1557 else:
1558 else:
1558 parents = [p.node() for p in repo[None].parents()]
1559 parents = [p.node() for p in repo[None].parents()]
1559 update = any(entry.node in parents
1560 update = any(entry.node in parents
1560 for entry in self.applied[start:])
1561 for entry in self.applied[start:])
1561
1562
1562 tobackup = set()
1563 tobackup = set()
1563 if update:
1564 if update:
1564 s = self.checklocalchanges(repo, force=force or keepchanges)
1565 s = self.checklocalchanges(repo, force=force or keepchanges)
1565 if force:
1566 if force:
1566 if not nobackup:
1567 if not nobackup:
1567 tobackup.update(s.modified + s.added)
1568 tobackup.update(s.modified + s.added)
1568 elif keepchanges:
1569 elif keepchanges:
1569 tobackup.update(s.modified + s.added +
1570 tobackup.update(s.modified + s.added +
1570 s.removed + s.deleted)
1571 s.removed + s.deleted)
1571
1572
1572 self.applieddirty = True
1573 self.applieddirty = True
1573 end = len(self.applied)
1574 end = len(self.applied)
1574 rev = self.applied[start].node
1575 rev = self.applied[start].node
1575
1576
1576 try:
1577 try:
1577 heads = repo.changelog.heads(rev)
1578 heads = repo.changelog.heads(rev)
1578 except error.LookupError:
1579 except error.LookupError:
1579 node = short(rev)
1580 node = short(rev)
1580 raise error.Abort(_('trying to pop unknown node %s') % node)
1581 raise error.Abort(_('trying to pop unknown node %s') % node)
1581
1582
1582 if heads != [self.applied[-1].node]:
1583 if heads != [self.applied[-1].node]:
1583 raise error.Abort(_("popping would remove a revision not "
1584 raise error.Abort(_("popping would remove a revision not "
1584 "managed by this patch queue"))
1585 "managed by this patch queue"))
1585 if not repo[self.applied[-1].node].mutable():
1586 if not repo[self.applied[-1].node].mutable():
1586 raise error.Abort(
1587 raise error.Abort(
1587 _("popping would remove a public revision"),
1588 _("popping would remove a public revision"),
1588 hint=_("see 'hg help phases' for details"))
1589 hint=_("see 'hg help phases' for details"))
1589
1590
1590 # we know there are no local changes, so we can make a simplified
1591 # we know there are no local changes, so we can make a simplified
1591 # form of hg.update.
1592 # form of hg.update.
1592 if update:
1593 if update:
1593 qp = self.qparents(repo, rev)
1594 qp = self.qparents(repo, rev)
1594 ctx = repo[qp]
1595 ctx = repo[qp]
1595 m, a, r, d = repo.status(qp, '.')[:4]
1596 m, a, r, d = repo.status(qp, '.')[:4]
1596 if d:
1597 if d:
1597 raise error.Abort(_("deletions found between repo revs"))
1598 raise error.Abort(_("deletions found between repo revs"))
1598
1599
1599 tobackup = set(a + m + r) & tobackup
1600 tobackup = set(a + m + r) & tobackup
1600 if keepchanges and tobackup:
1601 if keepchanges and tobackup:
1601 raise error.Abort(_("local changes found, qrefresh first"))
1602 raise error.Abort(_("local changes found, qrefresh first"))
1602 self.backup(repo, tobackup)
1603 self.backup(repo, tobackup)
1603 with repo.dirstate.parentchange():
1604 with repo.dirstate.parentchange():
1604 for f in a:
1605 for f in a:
1605 repo.wvfs.unlinkpath(f, ignoremissing=True)
1606 repo.wvfs.unlinkpath(f, ignoremissing=True)
1606 repo.dirstate.drop(f)
1607 repo.dirstate.drop(f)
1607 for f in m + r:
1608 for f in m + r:
1608 fctx = ctx[f]
1609 fctx = ctx[f]
1609 repo.wwrite(f, fctx.data(), fctx.flags())
1610 repo.wwrite(f, fctx.data(), fctx.flags())
1610 repo.dirstate.normal(f)
1611 repo.dirstate.normal(f)
1611 repo.setparents(qp, nullid)
1612 repo.setparents(qp, nullid)
1612 for patch in reversed(self.applied[start:end]):
1613 for patch in reversed(self.applied[start:end]):
1613 self.ui.status(_("popping %s\n") % patch.name)
1614 self.ui.status(_("popping %s\n") % patch.name)
1614 del self.applied[start:end]
1615 del self.applied[start:end]
1615 strip(self.ui, repo, [rev], update=False, backup=False)
1616 strip(self.ui, repo, [rev], update=False, backup=False)
1616 for s, state in repo['.'].substate.items():
1617 for s, state in repo['.'].substate.items():
1617 repo['.'].sub(s).get(state)
1618 repo['.'].sub(s).get(state)
1618 if self.applied:
1619 if self.applied:
1619 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1620 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1620 else:
1621 else:
1621 self.ui.write(_("patch queue now empty\n"))
1622 self.ui.write(_("patch queue now empty\n"))
1622
1623
1623 def diff(self, repo, pats, opts):
1624 def diff(self, repo, pats, opts):
1624 top, patch = self.checktoppatch(repo)
1625 top, patch = self.checktoppatch(repo)
1625 if not top:
1626 if not top:
1626 self.ui.write(_("no patches applied\n"))
1627 self.ui.write(_("no patches applied\n"))
1627 return
1628 return
1628 qp = self.qparents(repo, top)
1629 qp = self.qparents(repo, top)
1629 if opts.get('reverse'):
1630 if opts.get('reverse'):
1630 node1, node2 = None, qp
1631 node1, node2 = None, qp
1631 else:
1632 else:
1632 node1, node2 = qp, None
1633 node1, node2 = qp, None
1633 diffopts = self.diffopts(opts, patch)
1634 diffopts = self.diffopts(opts, patch)
1634 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1635 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1635
1636
1636 def refresh(self, repo, pats=None, **opts):
1637 def refresh(self, repo, pats=None, **opts):
1637 opts = pycompat.byteskwargs(opts)
1638 opts = pycompat.byteskwargs(opts)
1638 if not self.applied:
1639 if not self.applied:
1639 self.ui.write(_("no patches applied\n"))
1640 self.ui.write(_("no patches applied\n"))
1640 return 1
1641 return 1
1641 msg = opts.get('msg', '').rstrip()
1642 msg = opts.get('msg', '').rstrip()
1642 edit = opts.get('edit')
1643 edit = opts.get('edit')
1643 editform = opts.get('editform', 'mq.qrefresh')
1644 editform = opts.get('editform', 'mq.qrefresh')
1644 newuser = opts.get('user')
1645 newuser = opts.get('user')
1645 newdate = opts.get('date')
1646 newdate = opts.get('date')
1646 if newdate:
1647 if newdate:
1647 newdate = '%d %d' % util.parsedate(newdate)
1648 newdate = '%d %d' % dateutil.parsedate(newdate)
1648 wlock = repo.wlock()
1649 wlock = repo.wlock()
1649
1650
1650 try:
1651 try:
1651 self.checktoppatch(repo)
1652 self.checktoppatch(repo)
1652 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1653 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1653 if repo.changelog.heads(top) != [top]:
1654 if repo.changelog.heads(top) != [top]:
1654 raise error.Abort(_("cannot qrefresh a revision with children"))
1655 raise error.Abort(_("cannot qrefresh a revision with children"))
1655 if not repo[top].mutable():
1656 if not repo[top].mutable():
1656 raise error.Abort(_("cannot qrefresh public revision"),
1657 raise error.Abort(_("cannot qrefresh public revision"),
1657 hint=_("see 'hg help phases' for details"))
1658 hint=_("see 'hg help phases' for details"))
1658
1659
1659 cparents = repo.changelog.parents(top)
1660 cparents = repo.changelog.parents(top)
1660 patchparent = self.qparents(repo, top)
1661 patchparent = self.qparents(repo, top)
1661
1662
1662 inclsubs = checksubstate(repo, hex(patchparent))
1663 inclsubs = checksubstate(repo, hex(patchparent))
1663 if inclsubs:
1664 if inclsubs:
1664 substatestate = repo.dirstate['.hgsubstate']
1665 substatestate = repo.dirstate['.hgsubstate']
1665
1666
1666 ph = patchheader(self.join(patchfn), self.plainmode)
1667 ph = patchheader(self.join(patchfn), self.plainmode)
1667 diffopts = self.diffopts({'git': opts.get('git')}, patchfn,
1668 diffopts = self.diffopts({'git': opts.get('git')}, patchfn,
1668 plain=True)
1669 plain=True)
1669 if newuser:
1670 if newuser:
1670 ph.setuser(newuser)
1671 ph.setuser(newuser)
1671 if newdate:
1672 if newdate:
1672 ph.setdate(newdate)
1673 ph.setdate(newdate)
1673 ph.setparent(hex(patchparent))
1674 ph.setparent(hex(patchparent))
1674
1675
1675 # only commit new patch when write is complete
1676 # only commit new patch when write is complete
1676 patchf = self.opener(patchfn, 'w', atomictemp=True)
1677 patchf = self.opener(patchfn, 'w', atomictemp=True)
1677
1678
1678 # update the dirstate in place, strip off the qtip commit
1679 # update the dirstate in place, strip off the qtip commit
1679 # and then commit.
1680 # and then commit.
1680 #
1681 #
1681 # this should really read:
1682 # this should really read:
1682 # mm, dd, aa = repo.status(top, patchparent)[:3]
1683 # mm, dd, aa = repo.status(top, patchparent)[:3]
1683 # but we do it backwards to take advantage of manifest/changelog
1684 # but we do it backwards to take advantage of manifest/changelog
1684 # caching against the next repo.status call
1685 # caching against the next repo.status call
1685 mm, aa, dd = repo.status(patchparent, top)[:3]
1686 mm, aa, dd = repo.status(patchparent, top)[:3]
1686 changes = repo.changelog.read(top)
1687 changes = repo.changelog.read(top)
1687 man = repo.manifestlog[changes[0]].read()
1688 man = repo.manifestlog[changes[0]].read()
1688 aaa = aa[:]
1689 aaa = aa[:]
1689 match1 = scmutil.match(repo[None], pats, opts)
1690 match1 = scmutil.match(repo[None], pats, opts)
1690 # in short mode, we only diff the files included in the
1691 # in short mode, we only diff the files included in the
1691 # patch already plus specified files
1692 # patch already plus specified files
1692 if opts.get('short'):
1693 if opts.get('short'):
1693 # if amending a patch, we start with existing
1694 # if amending a patch, we start with existing
1694 # files plus specified files - unfiltered
1695 # files plus specified files - unfiltered
1695 match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
1696 match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
1696 # filter with include/exclude options
1697 # filter with include/exclude options
1697 match1 = scmutil.match(repo[None], opts=opts)
1698 match1 = scmutil.match(repo[None], opts=opts)
1698 else:
1699 else:
1699 match = scmutil.matchall(repo)
1700 match = scmutil.matchall(repo)
1700 m, a, r, d = repo.status(match=match)[:4]
1701 m, a, r, d = repo.status(match=match)[:4]
1701 mm = set(mm)
1702 mm = set(mm)
1702 aa = set(aa)
1703 aa = set(aa)
1703 dd = set(dd)
1704 dd = set(dd)
1704
1705
1705 # we might end up with files that were added between
1706 # we might end up with files that were added between
1706 # qtip and the dirstate parent, but then changed in the
1707 # qtip and the dirstate parent, but then changed in the
1707 # local dirstate. in this case, we want them to only
1708 # local dirstate. in this case, we want them to only
1708 # show up in the added section
1709 # show up in the added section
1709 for x in m:
1710 for x in m:
1710 if x not in aa:
1711 if x not in aa:
1711 mm.add(x)
1712 mm.add(x)
1712 # we might end up with files added by the local dirstate that
1713 # we might end up with files added by the local dirstate that
1713 # were deleted by the patch. In this case, they should only
1714 # were deleted by the patch. In this case, they should only
1714 # show up in the changed section.
1715 # show up in the changed section.
1715 for x in a:
1716 for x in a:
1716 if x in dd:
1717 if x in dd:
1717 dd.remove(x)
1718 dd.remove(x)
1718 mm.add(x)
1719 mm.add(x)
1719 else:
1720 else:
1720 aa.add(x)
1721 aa.add(x)
1721 # make sure any files deleted in the local dirstate
1722 # make sure any files deleted in the local dirstate
1722 # are not in the add or change column of the patch
1723 # are not in the add or change column of the patch
1723 forget = []
1724 forget = []
1724 for x in d + r:
1725 for x in d + r:
1725 if x in aa:
1726 if x in aa:
1726 aa.remove(x)
1727 aa.remove(x)
1727 forget.append(x)
1728 forget.append(x)
1728 continue
1729 continue
1729 else:
1730 else:
1730 mm.discard(x)
1731 mm.discard(x)
1731 dd.add(x)
1732 dd.add(x)
1732
1733
1733 m = list(mm)
1734 m = list(mm)
1734 r = list(dd)
1735 r = list(dd)
1735 a = list(aa)
1736 a = list(aa)
1736
1737
1737 # create 'match' that includes the files to be recommitted.
1738 # create 'match' that includes the files to be recommitted.
1738 # apply match1 via repo.status to ensure correct case handling.
1739 # apply match1 via repo.status to ensure correct case handling.
1739 cm, ca, cr, cd = repo.status(patchparent, match=match1)[:4]
1740 cm, ca, cr, cd = repo.status(patchparent, match=match1)[:4]
1740 allmatches = set(cm + ca + cr + cd)
1741 allmatches = set(cm + ca + cr + cd)
1741 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1742 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1742
1743
1743 files = set(inclsubs)
1744 files = set(inclsubs)
1744 for x in refreshchanges:
1745 for x in refreshchanges:
1745 files.update(x)
1746 files.update(x)
1746 match = scmutil.matchfiles(repo, files)
1747 match = scmutil.matchfiles(repo, files)
1747
1748
1748 bmlist = repo[top].bookmarks()
1749 bmlist = repo[top].bookmarks()
1749
1750
1750 dsguard = None
1751 dsguard = None
1751 try:
1752 try:
1752 dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
1753 dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
1753 if diffopts.git or diffopts.upgrade:
1754 if diffopts.git or diffopts.upgrade:
1754 copies = {}
1755 copies = {}
1755 for dst in a:
1756 for dst in a:
1756 src = repo.dirstate.copied(dst)
1757 src = repo.dirstate.copied(dst)
1757 # during qfold, the source file for copies may
1758 # during qfold, the source file for copies may
1758 # be removed. Treat this as a simple add.
1759 # be removed. Treat this as a simple add.
1759 if src is not None and src in repo.dirstate:
1760 if src is not None and src in repo.dirstate:
1760 copies.setdefault(src, []).append(dst)
1761 copies.setdefault(src, []).append(dst)
1761 repo.dirstate.add(dst)
1762 repo.dirstate.add(dst)
1762 # remember the copies between patchparent and qtip
1763 # remember the copies between patchparent and qtip
1763 for dst in aaa:
1764 for dst in aaa:
1764 f = repo.file(dst)
1765 f = repo.file(dst)
1765 src = f.renamed(man[dst])
1766 src = f.renamed(man[dst])
1766 if src:
1767 if src:
1767 copies.setdefault(src[0], []).extend(
1768 copies.setdefault(src[0], []).extend(
1768 copies.get(dst, []))
1769 copies.get(dst, []))
1769 if dst in a:
1770 if dst in a:
1770 copies[src[0]].append(dst)
1771 copies[src[0]].append(dst)
1771 # we can't copy a file created by the patch itself
1772 # we can't copy a file created by the patch itself
1772 if dst in copies:
1773 if dst in copies:
1773 del copies[dst]
1774 del copies[dst]
1774 for src, dsts in copies.iteritems():
1775 for src, dsts in copies.iteritems():
1775 for dst in dsts:
1776 for dst in dsts:
1776 repo.dirstate.copy(src, dst)
1777 repo.dirstate.copy(src, dst)
1777 else:
1778 else:
1778 for dst in a:
1779 for dst in a:
1779 repo.dirstate.add(dst)
1780 repo.dirstate.add(dst)
1780 # Drop useless copy information
1781 # Drop useless copy information
1781 for f in list(repo.dirstate.copies()):
1782 for f in list(repo.dirstate.copies()):
1782 repo.dirstate.copy(None, f)
1783 repo.dirstate.copy(None, f)
1783 for f in r:
1784 for f in r:
1784 repo.dirstate.remove(f)
1785 repo.dirstate.remove(f)
1785 # if the patch excludes a modified file, mark that
1786 # if the patch excludes a modified file, mark that
1786 # file with mtime=0 so status can see it.
1787 # file with mtime=0 so status can see it.
1787 mm = []
1788 mm = []
1788 for i in xrange(len(m) - 1, -1, -1):
1789 for i in xrange(len(m) - 1, -1, -1):
1789 if not match1(m[i]):
1790 if not match1(m[i]):
1790 mm.append(m[i])
1791 mm.append(m[i])
1791 del m[i]
1792 del m[i]
1792 for f in m:
1793 for f in m:
1793 repo.dirstate.normal(f)
1794 repo.dirstate.normal(f)
1794 for f in mm:
1795 for f in mm:
1795 repo.dirstate.normallookup(f)
1796 repo.dirstate.normallookup(f)
1796 for f in forget:
1797 for f in forget:
1797 repo.dirstate.drop(f)
1798 repo.dirstate.drop(f)
1798
1799
1799 user = ph.user or changes[1]
1800 user = ph.user or changes[1]
1800
1801
1801 oldphase = repo[top].phase()
1802 oldphase = repo[top].phase()
1802
1803
1803 # assumes strip can roll itself back if interrupted
1804 # assumes strip can roll itself back if interrupted
1804 repo.setparents(*cparents)
1805 repo.setparents(*cparents)
1805 self.applied.pop()
1806 self.applied.pop()
1806 self.applieddirty = True
1807 self.applieddirty = True
1807 strip(self.ui, repo, [top], update=False, backup=False)
1808 strip(self.ui, repo, [top], update=False, backup=False)
1808 dsguard.close()
1809 dsguard.close()
1809 finally:
1810 finally:
1810 release(dsguard)
1811 release(dsguard)
1811
1812
1812 try:
1813 try:
1813 # might be nice to attempt to roll back strip after this
1814 # might be nice to attempt to roll back strip after this
1814
1815
1815 defaultmsg = "[mq]: %s" % patchfn
1816 defaultmsg = "[mq]: %s" % patchfn
1816 editor = cmdutil.getcommiteditor(editform=editform)
1817 editor = cmdutil.getcommiteditor(editform=editform)
1817 if edit:
1818 if edit:
1818 def finishdesc(desc):
1819 def finishdesc(desc):
1819 if desc.rstrip():
1820 if desc.rstrip():
1820 ph.setmessage(desc)
1821 ph.setmessage(desc)
1821 return desc
1822 return desc
1822 return defaultmsg
1823 return defaultmsg
1823 # i18n: this message is shown in editor with "HG: " prefix
1824 # i18n: this message is shown in editor with "HG: " prefix
1824 extramsg = _('Leave message empty to use default message.')
1825 extramsg = _('Leave message empty to use default message.')
1825 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1826 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1826 extramsg=extramsg,
1827 extramsg=extramsg,
1827 editform=editform)
1828 editform=editform)
1828 message = msg or "\n".join(ph.message)
1829 message = msg or "\n".join(ph.message)
1829 elif not msg:
1830 elif not msg:
1830 if not ph.message:
1831 if not ph.message:
1831 message = defaultmsg
1832 message = defaultmsg
1832 else:
1833 else:
1833 message = "\n".join(ph.message)
1834 message = "\n".join(ph.message)
1834 else:
1835 else:
1835 message = msg
1836 message = msg
1836 ph.setmessage(msg)
1837 ph.setmessage(msg)
1837
1838
1838 # Ensure we create a new changeset in the same phase than
1839 # Ensure we create a new changeset in the same phase than
1839 # the old one.
1840 # the old one.
1840 lock = tr = None
1841 lock = tr = None
1841 try:
1842 try:
1842 lock = repo.lock()
1843 lock = repo.lock()
1843 tr = repo.transaction('mq')
1844 tr = repo.transaction('mq')
1844 n = newcommit(repo, oldphase, message, user, ph.date,
1845 n = newcommit(repo, oldphase, message, user, ph.date,
1845 match=match, force=True, editor=editor)
1846 match=match, force=True, editor=editor)
1846 # only write patch after a successful commit
1847 # only write patch after a successful commit
1847 c = [list(x) for x in refreshchanges]
1848 c = [list(x) for x in refreshchanges]
1848 if inclsubs:
1849 if inclsubs:
1849 self.putsubstate2changes(substatestate, c)
1850 self.putsubstate2changes(substatestate, c)
1850 chunks = patchmod.diff(repo, patchparent,
1851 chunks = patchmod.diff(repo, patchparent,
1851 changes=c, opts=diffopts)
1852 changes=c, opts=diffopts)
1852 comments = bytes(ph)
1853 comments = bytes(ph)
1853 if comments:
1854 if comments:
1854 patchf.write(comments)
1855 patchf.write(comments)
1855 for chunk in chunks:
1856 for chunk in chunks:
1856 patchf.write(chunk)
1857 patchf.write(chunk)
1857 patchf.close()
1858 patchf.close()
1858
1859
1859 marks = repo._bookmarks
1860 marks = repo._bookmarks
1860 marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
1861 marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
1861 tr.close()
1862 tr.close()
1862
1863
1863 self.applied.append(statusentry(n, patchfn))
1864 self.applied.append(statusentry(n, patchfn))
1864 finally:
1865 finally:
1865 lockmod.release(tr, lock)
1866 lockmod.release(tr, lock)
1866 except: # re-raises
1867 except: # re-raises
1867 ctx = repo[cparents[0]]
1868 ctx = repo[cparents[0]]
1868 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1869 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1869 self.savedirty()
1870 self.savedirty()
1870 self.ui.warn(_('qrefresh interrupted while patch was popped! '
1871 self.ui.warn(_('qrefresh interrupted while patch was popped! '
1871 '(revert --all, qpush to recover)\n'))
1872 '(revert --all, qpush to recover)\n'))
1872 raise
1873 raise
1873 finally:
1874 finally:
1874 wlock.release()
1875 wlock.release()
1875 self.removeundo(repo)
1876 self.removeundo(repo)
1876
1877
1877 def init(self, repo, create=False):
1878 def init(self, repo, create=False):
1878 if not create and os.path.isdir(self.path):
1879 if not create and os.path.isdir(self.path):
1879 raise error.Abort(_("patch queue directory already exists"))
1880 raise error.Abort(_("patch queue directory already exists"))
1880 try:
1881 try:
1881 os.mkdir(self.path)
1882 os.mkdir(self.path)
1882 except OSError as inst:
1883 except OSError as inst:
1883 if inst.errno != errno.EEXIST or not create:
1884 if inst.errno != errno.EEXIST or not create:
1884 raise
1885 raise
1885 if create:
1886 if create:
1886 return self.qrepo(create=True)
1887 return self.qrepo(create=True)
1887
1888
1888 def unapplied(self, repo, patch=None):
1889 def unapplied(self, repo, patch=None):
1889 if patch and patch not in self.series:
1890 if patch and patch not in self.series:
1890 raise error.Abort(_("patch %s is not in series file") % patch)
1891 raise error.Abort(_("patch %s is not in series file") % patch)
1891 if not patch:
1892 if not patch:
1892 start = self.seriesend()
1893 start = self.seriesend()
1893 else:
1894 else:
1894 start = self.series.index(patch) + 1
1895 start = self.series.index(patch) + 1
1895 unapplied = []
1896 unapplied = []
1896 for i in xrange(start, len(self.series)):
1897 for i in xrange(start, len(self.series)):
1897 pushable, reason = self.pushable(i)
1898 pushable, reason = self.pushable(i)
1898 if pushable:
1899 if pushable:
1899 unapplied.append((i, self.series[i]))
1900 unapplied.append((i, self.series[i]))
1900 self.explainpushable(i)
1901 self.explainpushable(i)
1901 return unapplied
1902 return unapplied
1902
1903
1903 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1904 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1904 summary=False):
1905 summary=False):
1905 def displayname(pfx, patchname, state):
1906 def displayname(pfx, patchname, state):
1906 if pfx:
1907 if pfx:
1907 self.ui.write(pfx)
1908 self.ui.write(pfx)
1908 if summary:
1909 if summary:
1909 ph = patchheader(self.join(patchname), self.plainmode)
1910 ph = patchheader(self.join(patchname), self.plainmode)
1910 if ph.message:
1911 if ph.message:
1911 msg = ph.message[0]
1912 msg = ph.message[0]
1912 else:
1913 else:
1913 msg = ''
1914 msg = ''
1914
1915
1915 if self.ui.formatted():
1916 if self.ui.formatted():
1916 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1917 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1917 if width > 0:
1918 if width > 0:
1918 msg = util.ellipsis(msg, width)
1919 msg = util.ellipsis(msg, width)
1919 else:
1920 else:
1920 msg = ''
1921 msg = ''
1921 self.ui.write(patchname, label='qseries.' + state)
1922 self.ui.write(patchname, label='qseries.' + state)
1922 self.ui.write(': ')
1923 self.ui.write(': ')
1923 self.ui.write(msg, label='qseries.message.' + state)
1924 self.ui.write(msg, label='qseries.message.' + state)
1924 else:
1925 else:
1925 self.ui.write(patchname, label='qseries.' + state)
1926 self.ui.write(patchname, label='qseries.' + state)
1926 self.ui.write('\n')
1927 self.ui.write('\n')
1927
1928
1928 applied = set([p.name for p in self.applied])
1929 applied = set([p.name for p in self.applied])
1929 if length is None:
1930 if length is None:
1930 length = len(self.series) - start
1931 length = len(self.series) - start
1931 if not missing:
1932 if not missing:
1932 if self.ui.verbose:
1933 if self.ui.verbose:
1933 idxwidth = len(str(start + length - 1))
1934 idxwidth = len(str(start + length - 1))
1934 for i in xrange(start, start + length):
1935 for i in xrange(start, start + length):
1935 patch = self.series[i]
1936 patch = self.series[i]
1936 if patch in applied:
1937 if patch in applied:
1937 char, state = 'A', 'applied'
1938 char, state = 'A', 'applied'
1938 elif self.pushable(i)[0]:
1939 elif self.pushable(i)[0]:
1939 char, state = 'U', 'unapplied'
1940 char, state = 'U', 'unapplied'
1940 else:
1941 else:
1941 char, state = 'G', 'guarded'
1942 char, state = 'G', 'guarded'
1942 pfx = ''
1943 pfx = ''
1943 if self.ui.verbose:
1944 if self.ui.verbose:
1944 pfx = '%*d %s ' % (idxwidth, i, char)
1945 pfx = '%*d %s ' % (idxwidth, i, char)
1945 elif status and status != char:
1946 elif status and status != char:
1946 continue
1947 continue
1947 displayname(pfx, patch, state)
1948 displayname(pfx, patch, state)
1948 else:
1949 else:
1949 msng_list = []
1950 msng_list = []
1950 for root, dirs, files in os.walk(self.path):
1951 for root, dirs, files in os.walk(self.path):
1951 d = root[len(self.path) + 1:]
1952 d = root[len(self.path) + 1:]
1952 for f in files:
1953 for f in files:
1953 fl = os.path.join(d, f)
1954 fl = os.path.join(d, f)
1954 if (fl not in self.series and
1955 if (fl not in self.series and
1955 fl not in (self.statuspath, self.seriespath,
1956 fl not in (self.statuspath, self.seriespath,
1956 self.guardspath)
1957 self.guardspath)
1957 and not fl.startswith('.')):
1958 and not fl.startswith('.')):
1958 msng_list.append(fl)
1959 msng_list.append(fl)
1959 for x in sorted(msng_list):
1960 for x in sorted(msng_list):
1960 pfx = self.ui.verbose and ('D ') or ''
1961 pfx = self.ui.verbose and ('D ') or ''
1961 displayname(pfx, x, 'missing')
1962 displayname(pfx, x, 'missing')
1962
1963
1963 def issaveline(self, l):
1964 def issaveline(self, l):
1964 if l.name == '.hg.patches.save.line':
1965 if l.name == '.hg.patches.save.line':
1965 return True
1966 return True
1966
1967
1967 def qrepo(self, create=False):
1968 def qrepo(self, create=False):
1968 ui = self.baseui.copy()
1969 ui = self.baseui.copy()
1969 # copy back attributes set by ui.pager()
1970 # copy back attributes set by ui.pager()
1970 if self.ui.pageractive and not ui.pageractive:
1971 if self.ui.pageractive and not ui.pageractive:
1971 ui.pageractive = self.ui.pageractive
1972 ui.pageractive = self.ui.pageractive
1972 # internal config: ui.formatted
1973 # internal config: ui.formatted
1973 ui.setconfig('ui', 'formatted',
1974 ui.setconfig('ui', 'formatted',
1974 self.ui.config('ui', 'formatted'), 'mqpager')
1975 self.ui.config('ui', 'formatted'), 'mqpager')
1975 ui.setconfig('ui', 'interactive',
1976 ui.setconfig('ui', 'interactive',
1976 self.ui.config('ui', 'interactive'), 'mqpager')
1977 self.ui.config('ui', 'interactive'), 'mqpager')
1977 if create or os.path.isdir(self.join(".hg")):
1978 if create or os.path.isdir(self.join(".hg")):
1978 return hg.repository(ui, path=self.path, create=create)
1979 return hg.repository(ui, path=self.path, create=create)
1979
1980
1980 def restore(self, repo, rev, delete=None, qupdate=None):
1981 def restore(self, repo, rev, delete=None, qupdate=None):
1981 desc = repo[rev].description().strip()
1982 desc = repo[rev].description().strip()
1982 lines = desc.splitlines()
1983 lines = desc.splitlines()
1983 i = 0
1984 i = 0
1984 datastart = None
1985 datastart = None
1985 series = []
1986 series = []
1986 applied = []
1987 applied = []
1987 qpp = None
1988 qpp = None
1988 for i, line in enumerate(lines):
1989 for i, line in enumerate(lines):
1989 if line == 'Patch Data:':
1990 if line == 'Patch Data:':
1990 datastart = i + 1
1991 datastart = i + 1
1991 elif line.startswith('Dirstate:'):
1992 elif line.startswith('Dirstate:'):
1992 l = line.rstrip()
1993 l = line.rstrip()
1993 l = l[10:].split(' ')
1994 l = l[10:].split(' ')
1994 qpp = [bin(x) for x in l]
1995 qpp = [bin(x) for x in l]
1995 elif datastart is not None:
1996 elif datastart is not None:
1996 l = line.rstrip()
1997 l = line.rstrip()
1997 n, name = l.split(':', 1)
1998 n, name = l.split(':', 1)
1998 if n:
1999 if n:
1999 applied.append(statusentry(bin(n), name))
2000 applied.append(statusentry(bin(n), name))
2000 else:
2001 else:
2001 series.append(l)
2002 series.append(l)
2002 if datastart is None:
2003 if datastart is None:
2003 self.ui.warn(_("no saved patch data found\n"))
2004 self.ui.warn(_("no saved patch data found\n"))
2004 return 1
2005 return 1
2005 self.ui.warn(_("restoring status: %s\n") % lines[0])
2006 self.ui.warn(_("restoring status: %s\n") % lines[0])
2006 self.fullseries = series
2007 self.fullseries = series
2007 self.applied = applied
2008 self.applied = applied
2008 self.parseseries()
2009 self.parseseries()
2009 self.seriesdirty = True
2010 self.seriesdirty = True
2010 self.applieddirty = True
2011 self.applieddirty = True
2011 heads = repo.changelog.heads()
2012 heads = repo.changelog.heads()
2012 if delete:
2013 if delete:
2013 if rev not in heads:
2014 if rev not in heads:
2014 self.ui.warn(_("save entry has children, leaving it alone\n"))
2015 self.ui.warn(_("save entry has children, leaving it alone\n"))
2015 else:
2016 else:
2016 self.ui.warn(_("removing save entry %s\n") % short(rev))
2017 self.ui.warn(_("removing save entry %s\n") % short(rev))
2017 pp = repo.dirstate.parents()
2018 pp = repo.dirstate.parents()
2018 if rev in pp:
2019 if rev in pp:
2019 update = True
2020 update = True
2020 else:
2021 else:
2021 update = False
2022 update = False
2022 strip(self.ui, repo, [rev], update=update, backup=False)
2023 strip(self.ui, repo, [rev], update=update, backup=False)
2023 if qpp:
2024 if qpp:
2024 self.ui.warn(_("saved queue repository parents: %s %s\n") %
2025 self.ui.warn(_("saved queue repository parents: %s %s\n") %
2025 (short(qpp[0]), short(qpp[1])))
2026 (short(qpp[0]), short(qpp[1])))
2026 if qupdate:
2027 if qupdate:
2027 self.ui.status(_("updating queue directory\n"))
2028 self.ui.status(_("updating queue directory\n"))
2028 r = self.qrepo()
2029 r = self.qrepo()
2029 if not r:
2030 if not r:
2030 self.ui.warn(_("unable to load queue repository\n"))
2031 self.ui.warn(_("unable to load queue repository\n"))
2031 return 1
2032 return 1
2032 hg.clean(r, qpp[0])
2033 hg.clean(r, qpp[0])
2033
2034
2034 def save(self, repo, msg=None):
2035 def save(self, repo, msg=None):
2035 if not self.applied:
2036 if not self.applied:
2036 self.ui.warn(_("save: no patches applied, exiting\n"))
2037 self.ui.warn(_("save: no patches applied, exiting\n"))
2037 return 1
2038 return 1
2038 if self.issaveline(self.applied[-1]):
2039 if self.issaveline(self.applied[-1]):
2039 self.ui.warn(_("status is already saved\n"))
2040 self.ui.warn(_("status is already saved\n"))
2040 return 1
2041 return 1
2041
2042
2042 if not msg:
2043 if not msg:
2043 msg = _("hg patches saved state")
2044 msg = _("hg patches saved state")
2044 else:
2045 else:
2045 msg = "hg patches: " + msg.rstrip('\r\n')
2046 msg = "hg patches: " + msg.rstrip('\r\n')
2046 r = self.qrepo()
2047 r = self.qrepo()
2047 if r:
2048 if r:
2048 pp = r.dirstate.parents()
2049 pp = r.dirstate.parents()
2049 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2050 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2050 msg += "\n\nPatch Data:\n"
2051 msg += "\n\nPatch Data:\n"
2051 msg += ''.join('%s\n' % x for x in self.applied)
2052 msg += ''.join('%s\n' % x for x in self.applied)
2052 msg += ''.join(':%s\n' % x for x in self.fullseries)
2053 msg += ''.join(':%s\n' % x for x in self.fullseries)
2053 n = repo.commit(msg, force=True)
2054 n = repo.commit(msg, force=True)
2054 if not n:
2055 if not n:
2055 self.ui.warn(_("repo commit failed\n"))
2056 self.ui.warn(_("repo commit failed\n"))
2056 return 1
2057 return 1
2057 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2058 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2058 self.applieddirty = True
2059 self.applieddirty = True
2059 self.removeundo(repo)
2060 self.removeundo(repo)
2060
2061
2061 def fullseriesend(self):
2062 def fullseriesend(self):
2062 if self.applied:
2063 if self.applied:
2063 p = self.applied[-1].name
2064 p = self.applied[-1].name
2064 end = self.findseries(p)
2065 end = self.findseries(p)
2065 if end is None:
2066 if end is None:
2066 return len(self.fullseries)
2067 return len(self.fullseries)
2067 return end + 1
2068 return end + 1
2068 return 0
2069 return 0
2069
2070
2070 def seriesend(self, all_patches=False):
2071 def seriesend(self, all_patches=False):
2071 """If all_patches is False, return the index of the next pushable patch
2072 """If all_patches is False, return the index of the next pushable patch
2072 in the series, or the series length. If all_patches is True, return the
2073 in the series, or the series length. If all_patches is True, return the
2073 index of the first patch past the last applied one.
2074 index of the first patch past the last applied one.
2074 """
2075 """
2075 end = 0
2076 end = 0
2076 def nextpatch(start):
2077 def nextpatch(start):
2077 if all_patches or start >= len(self.series):
2078 if all_patches or start >= len(self.series):
2078 return start
2079 return start
2079 for i in xrange(start, len(self.series)):
2080 for i in xrange(start, len(self.series)):
2080 p, reason = self.pushable(i)
2081 p, reason = self.pushable(i)
2081 if p:
2082 if p:
2082 return i
2083 return i
2083 self.explainpushable(i)
2084 self.explainpushable(i)
2084 return len(self.series)
2085 return len(self.series)
2085 if self.applied:
2086 if self.applied:
2086 p = self.applied[-1].name
2087 p = self.applied[-1].name
2087 try:
2088 try:
2088 end = self.series.index(p)
2089 end = self.series.index(p)
2089 except ValueError:
2090 except ValueError:
2090 return 0
2091 return 0
2091 return nextpatch(end + 1)
2092 return nextpatch(end + 1)
2092 return nextpatch(end)
2093 return nextpatch(end)
2093
2094
2094 def appliedname(self, index):
2095 def appliedname(self, index):
2095 pname = self.applied[index].name
2096 pname = self.applied[index].name
2096 if not self.ui.verbose:
2097 if not self.ui.verbose:
2097 p = pname
2098 p = pname
2098 else:
2099 else:
2099 p = str(self.series.index(pname)) + " " + pname
2100 p = str(self.series.index(pname)) + " " + pname
2100 return p
2101 return p
2101
2102
2102 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2103 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2103 force=None, git=False):
2104 force=None, git=False):
2104 def checkseries(patchname):
2105 def checkseries(patchname):
2105 if patchname in self.series:
2106 if patchname in self.series:
2106 raise error.Abort(_('patch %s is already in the series file')
2107 raise error.Abort(_('patch %s is already in the series file')
2107 % patchname)
2108 % patchname)
2108
2109
2109 if rev:
2110 if rev:
2110 if files:
2111 if files:
2111 raise error.Abort(_('option "-r" not valid when importing '
2112 raise error.Abort(_('option "-r" not valid when importing '
2112 'files'))
2113 'files'))
2113 rev = scmutil.revrange(repo, rev)
2114 rev = scmutil.revrange(repo, rev)
2114 rev.sort(reverse=True)
2115 rev.sort(reverse=True)
2115 elif not files:
2116 elif not files:
2116 raise error.Abort(_('no files or revisions specified'))
2117 raise error.Abort(_('no files or revisions specified'))
2117 if (len(files) > 1 or len(rev) > 1) and patchname:
2118 if (len(files) > 1 or len(rev) > 1) and patchname:
2118 raise error.Abort(_('option "-n" not valid when importing multiple '
2119 raise error.Abort(_('option "-n" not valid when importing multiple '
2119 'patches'))
2120 'patches'))
2120 imported = []
2121 imported = []
2121 if rev:
2122 if rev:
2122 # If mq patches are applied, we can only import revisions
2123 # If mq patches are applied, we can only import revisions
2123 # that form a linear path to qbase.
2124 # that form a linear path to qbase.
2124 # Otherwise, they should form a linear path to a head.
2125 # Otherwise, they should form a linear path to a head.
2125 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2126 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2126 if len(heads) > 1:
2127 if len(heads) > 1:
2127 raise error.Abort(_('revision %d is the root of more than one '
2128 raise error.Abort(_('revision %d is the root of more than one '
2128 'branch') % rev.last())
2129 'branch') % rev.last())
2129 if self.applied:
2130 if self.applied:
2130 base = repo.changelog.node(rev.first())
2131 base = repo.changelog.node(rev.first())
2131 if base in [n.node for n in self.applied]:
2132 if base in [n.node for n in self.applied]:
2132 raise error.Abort(_('revision %d is already managed')
2133 raise error.Abort(_('revision %d is already managed')
2133 % rev.first())
2134 % rev.first())
2134 if heads != [self.applied[-1].node]:
2135 if heads != [self.applied[-1].node]:
2135 raise error.Abort(_('revision %d is not the parent of '
2136 raise error.Abort(_('revision %d is not the parent of '
2136 'the queue') % rev.first())
2137 'the queue') % rev.first())
2137 base = repo.changelog.rev(self.applied[0].node)
2138 base = repo.changelog.rev(self.applied[0].node)
2138 lastparent = repo.changelog.parentrevs(base)[0]
2139 lastparent = repo.changelog.parentrevs(base)[0]
2139 else:
2140 else:
2140 if heads != [repo.changelog.node(rev.first())]:
2141 if heads != [repo.changelog.node(rev.first())]:
2141 raise error.Abort(_('revision %d has unmanaged children')
2142 raise error.Abort(_('revision %d has unmanaged children')
2142 % rev.first())
2143 % rev.first())
2143 lastparent = None
2144 lastparent = None
2144
2145
2145 diffopts = self.diffopts({'git': git})
2146 diffopts = self.diffopts({'git': git})
2146 with repo.transaction('qimport') as tr:
2147 with repo.transaction('qimport') as tr:
2147 for r in rev:
2148 for r in rev:
2148 if not repo[r].mutable():
2149 if not repo[r].mutable():
2149 raise error.Abort(_('revision %d is not mutable') % r,
2150 raise error.Abort(_('revision %d is not mutable') % r,
2150 hint=_("see 'hg help phases' "
2151 hint=_("see 'hg help phases' "
2151 'for details'))
2152 'for details'))
2152 p1, p2 = repo.changelog.parentrevs(r)
2153 p1, p2 = repo.changelog.parentrevs(r)
2153 n = repo.changelog.node(r)
2154 n = repo.changelog.node(r)
2154 if p2 != nullrev:
2155 if p2 != nullrev:
2155 raise error.Abort(_('cannot import merge revision %d')
2156 raise error.Abort(_('cannot import merge revision %d')
2156 % r)
2157 % r)
2157 if lastparent and lastparent != r:
2158 if lastparent and lastparent != r:
2158 raise error.Abort(_('revision %d is not the parent of '
2159 raise error.Abort(_('revision %d is not the parent of '
2159 '%d')
2160 '%d')
2160 % (r, lastparent))
2161 % (r, lastparent))
2161 lastparent = p1
2162 lastparent = p1
2162
2163
2163 if not patchname:
2164 if not patchname:
2164 patchname = self.makepatchname(
2165 patchname = self.makepatchname(
2165 repo[r].description().split('\n', 1)[0],
2166 repo[r].description().split('\n', 1)[0],
2166 '%d.diff' % r)
2167 '%d.diff' % r)
2167 checkseries(patchname)
2168 checkseries(patchname)
2168 self.checkpatchname(patchname, force)
2169 self.checkpatchname(patchname, force)
2169 self.fullseries.insert(0, patchname)
2170 self.fullseries.insert(0, patchname)
2170
2171
2171 patchf = self.opener(patchname, "w")
2172 patchf = self.opener(patchname, "w")
2172 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2173 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2173 patchf.close()
2174 patchf.close()
2174
2175
2175 se = statusentry(n, patchname)
2176 se = statusentry(n, patchname)
2176 self.applied.insert(0, se)
2177 self.applied.insert(0, se)
2177
2178
2178 self.added.append(patchname)
2179 self.added.append(patchname)
2179 imported.append(patchname)
2180 imported.append(patchname)
2180 patchname = None
2181 patchname = None
2181 if rev and repo.ui.configbool('mq', 'secret'):
2182 if rev and repo.ui.configbool('mq', 'secret'):
2182 # if we added anything with --rev, move the secret root
2183 # if we added anything with --rev, move the secret root
2183 phases.retractboundary(repo, tr, phases.secret, [n])
2184 phases.retractboundary(repo, tr, phases.secret, [n])
2184 self.parseseries()
2185 self.parseseries()
2185 self.applieddirty = True
2186 self.applieddirty = True
2186 self.seriesdirty = True
2187 self.seriesdirty = True
2187
2188
2188 for i, filename in enumerate(files):
2189 for i, filename in enumerate(files):
2189 if existing:
2190 if existing:
2190 if filename == '-':
2191 if filename == '-':
2191 raise error.Abort(_('-e is incompatible with import from -')
2192 raise error.Abort(_('-e is incompatible with import from -')
2192 )
2193 )
2193 filename = normname(filename)
2194 filename = normname(filename)
2194 self.checkreservedname(filename)
2195 self.checkreservedname(filename)
2195 if util.url(filename).islocal():
2196 if util.url(filename).islocal():
2196 originpath = self.join(filename)
2197 originpath = self.join(filename)
2197 if not os.path.isfile(originpath):
2198 if not os.path.isfile(originpath):
2198 raise error.Abort(
2199 raise error.Abort(
2199 _("patch %s does not exist") % filename)
2200 _("patch %s does not exist") % filename)
2200
2201
2201 if patchname:
2202 if patchname:
2202 self.checkpatchname(patchname, force)
2203 self.checkpatchname(patchname, force)
2203
2204
2204 self.ui.write(_('renaming %s to %s\n')
2205 self.ui.write(_('renaming %s to %s\n')
2205 % (filename, patchname))
2206 % (filename, patchname))
2206 util.rename(originpath, self.join(patchname))
2207 util.rename(originpath, self.join(patchname))
2207 else:
2208 else:
2208 patchname = filename
2209 patchname = filename
2209
2210
2210 else:
2211 else:
2211 if filename == '-' and not patchname:
2212 if filename == '-' and not patchname:
2212 raise error.Abort(_('need --name to import a patch from -'))
2213 raise error.Abort(_('need --name to import a patch from -'))
2213 elif not patchname:
2214 elif not patchname:
2214 patchname = normname(os.path.basename(filename.rstrip('/')))
2215 patchname = normname(os.path.basename(filename.rstrip('/')))
2215 self.checkpatchname(patchname, force)
2216 self.checkpatchname(patchname, force)
2216 try:
2217 try:
2217 if filename == '-':
2218 if filename == '-':
2218 text = self.ui.fin.read()
2219 text = self.ui.fin.read()
2219 else:
2220 else:
2220 fp = hg.openpath(self.ui, filename)
2221 fp = hg.openpath(self.ui, filename)
2221 text = fp.read()
2222 text = fp.read()
2222 fp.close()
2223 fp.close()
2223 except (OSError, IOError):
2224 except (OSError, IOError):
2224 raise error.Abort(_("unable to read file %s") % filename)
2225 raise error.Abort(_("unable to read file %s") % filename)
2225 patchf = self.opener(patchname, "w")
2226 patchf = self.opener(patchname, "w")
2226 patchf.write(text)
2227 patchf.write(text)
2227 patchf.close()
2228 patchf.close()
2228 if not force:
2229 if not force:
2229 checkseries(patchname)
2230 checkseries(patchname)
2230 if patchname not in self.series:
2231 if patchname not in self.series:
2231 index = self.fullseriesend() + i
2232 index = self.fullseriesend() + i
2232 self.fullseries[index:index] = [patchname]
2233 self.fullseries[index:index] = [patchname]
2233 self.parseseries()
2234 self.parseseries()
2234 self.seriesdirty = True
2235 self.seriesdirty = True
2235 self.ui.warn(_("adding %s to series file\n") % patchname)
2236 self.ui.warn(_("adding %s to series file\n") % patchname)
2236 self.added.append(patchname)
2237 self.added.append(patchname)
2237 imported.append(patchname)
2238 imported.append(patchname)
2238 patchname = None
2239 patchname = None
2239
2240
2240 self.removeundo(repo)
2241 self.removeundo(repo)
2241 return imported
2242 return imported
2242
2243
2243 def fixkeepchangesopts(ui, opts):
2244 def fixkeepchangesopts(ui, opts):
2244 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2245 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2245 or opts.get('exact')):
2246 or opts.get('exact')):
2246 return opts
2247 return opts
2247 opts = dict(opts)
2248 opts = dict(opts)
2248 opts['keep_changes'] = True
2249 opts['keep_changes'] = True
2249 return opts
2250 return opts
2250
2251
2251 @command("qdelete|qremove|qrm",
2252 @command("qdelete|qremove|qrm",
2252 [('k', 'keep', None, _('keep patch file')),
2253 [('k', 'keep', None, _('keep patch file')),
2253 ('r', 'rev', [],
2254 ('r', 'rev', [],
2254 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2255 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2255 _('hg qdelete [-k] [PATCH]...'))
2256 _('hg qdelete [-k] [PATCH]...'))
2256 def delete(ui, repo, *patches, **opts):
2257 def delete(ui, repo, *patches, **opts):
2257 """remove patches from queue
2258 """remove patches from queue
2258
2259
2259 The patches must not be applied, and at least one patch is required. Exact
2260 The patches must not be applied, and at least one patch is required. Exact
2260 patch identifiers must be given. With -k/--keep, the patch files are
2261 patch identifiers must be given. With -k/--keep, the patch files are
2261 preserved in the patch directory.
2262 preserved in the patch directory.
2262
2263
2263 To stop managing a patch and move it into permanent history,
2264 To stop managing a patch and move it into permanent history,
2264 use the :hg:`qfinish` command."""
2265 use the :hg:`qfinish` command."""
2265 q = repo.mq
2266 q = repo.mq
2266 q.delete(repo, patches, pycompat.byteskwargs(opts))
2267 q.delete(repo, patches, pycompat.byteskwargs(opts))
2267 q.savedirty()
2268 q.savedirty()
2268 return 0
2269 return 0
2269
2270
2270 @command("qapplied",
2271 @command("qapplied",
2271 [('1', 'last', None, _('show only the preceding applied patch'))
2272 [('1', 'last', None, _('show only the preceding applied patch'))
2272 ] + seriesopts,
2273 ] + seriesopts,
2273 _('hg qapplied [-1] [-s] [PATCH]'))
2274 _('hg qapplied [-1] [-s] [PATCH]'))
2274 def applied(ui, repo, patch=None, **opts):
2275 def applied(ui, repo, patch=None, **opts):
2275 """print the patches already applied
2276 """print the patches already applied
2276
2277
2277 Returns 0 on success."""
2278 Returns 0 on success."""
2278
2279
2279 q = repo.mq
2280 q = repo.mq
2280 opts = pycompat.byteskwargs(opts)
2281 opts = pycompat.byteskwargs(opts)
2281
2282
2282 if patch:
2283 if patch:
2283 if patch not in q.series:
2284 if patch not in q.series:
2284 raise error.Abort(_("patch %s is not in series file") % patch)
2285 raise error.Abort(_("patch %s is not in series file") % patch)
2285 end = q.series.index(patch) + 1
2286 end = q.series.index(patch) + 1
2286 else:
2287 else:
2287 end = q.seriesend(True)
2288 end = q.seriesend(True)
2288
2289
2289 if opts.get('last') and not end:
2290 if opts.get('last') and not end:
2290 ui.write(_("no patches applied\n"))
2291 ui.write(_("no patches applied\n"))
2291 return 1
2292 return 1
2292 elif opts.get('last') and end == 1:
2293 elif opts.get('last') and end == 1:
2293 ui.write(_("only one patch applied\n"))
2294 ui.write(_("only one patch applied\n"))
2294 return 1
2295 return 1
2295 elif opts.get('last'):
2296 elif opts.get('last'):
2296 start = end - 2
2297 start = end - 2
2297 end = 1
2298 end = 1
2298 else:
2299 else:
2299 start = 0
2300 start = 0
2300
2301
2301 q.qseries(repo, length=end, start=start, status='A',
2302 q.qseries(repo, length=end, start=start, status='A',
2302 summary=opts.get('summary'))
2303 summary=opts.get('summary'))
2303
2304
2304
2305
2305 @command("qunapplied",
2306 @command("qunapplied",
2306 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2307 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2307 _('hg qunapplied [-1] [-s] [PATCH]'))
2308 _('hg qunapplied [-1] [-s] [PATCH]'))
2308 def unapplied(ui, repo, patch=None, **opts):
2309 def unapplied(ui, repo, patch=None, **opts):
2309 """print the patches not yet applied
2310 """print the patches not yet applied
2310
2311
2311 Returns 0 on success."""
2312 Returns 0 on success."""
2312
2313
2313 q = repo.mq
2314 q = repo.mq
2314 opts = pycompat.byteskwargs(opts)
2315 opts = pycompat.byteskwargs(opts)
2315 if patch:
2316 if patch:
2316 if patch not in q.series:
2317 if patch not in q.series:
2317 raise error.Abort(_("patch %s is not in series file") % patch)
2318 raise error.Abort(_("patch %s is not in series file") % patch)
2318 start = q.series.index(patch) + 1
2319 start = q.series.index(patch) + 1
2319 else:
2320 else:
2320 start = q.seriesend(True)
2321 start = q.seriesend(True)
2321
2322
2322 if start == len(q.series) and opts.get('first'):
2323 if start == len(q.series) and opts.get('first'):
2323 ui.write(_("all patches applied\n"))
2324 ui.write(_("all patches applied\n"))
2324 return 1
2325 return 1
2325
2326
2326 if opts.get('first'):
2327 if opts.get('first'):
2327 length = 1
2328 length = 1
2328 else:
2329 else:
2329 length = None
2330 length = None
2330 q.qseries(repo, start=start, length=length, status='U',
2331 q.qseries(repo, start=start, length=length, status='U',
2331 summary=opts.get('summary'))
2332 summary=opts.get('summary'))
2332
2333
2333 @command("qimport",
2334 @command("qimport",
2334 [('e', 'existing', None, _('import file in patch directory')),
2335 [('e', 'existing', None, _('import file in patch directory')),
2335 ('n', 'name', '',
2336 ('n', 'name', '',
2336 _('name of patch file'), _('NAME')),
2337 _('name of patch file'), _('NAME')),
2337 ('f', 'force', None, _('overwrite existing files')),
2338 ('f', 'force', None, _('overwrite existing files')),
2338 ('r', 'rev', [],
2339 ('r', 'rev', [],
2339 _('place existing revisions under mq control'), _('REV')),
2340 _('place existing revisions under mq control'), _('REV')),
2340 ('g', 'git', None, _('use git extended diff format')),
2341 ('g', 'git', None, _('use git extended diff format')),
2341 ('P', 'push', None, _('qpush after importing'))],
2342 ('P', 'push', None, _('qpush after importing'))],
2342 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2343 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2343 def qimport(ui, repo, *filename, **opts):
2344 def qimport(ui, repo, *filename, **opts):
2344 """import a patch or existing changeset
2345 """import a patch or existing changeset
2345
2346
2346 The patch is inserted into the series after the last applied
2347 The patch is inserted into the series after the last applied
2347 patch. If no patches have been applied, qimport prepends the patch
2348 patch. If no patches have been applied, qimport prepends the patch
2348 to the series.
2349 to the series.
2349
2350
2350 The patch will have the same name as its source file unless you
2351 The patch will have the same name as its source file unless you
2351 give it a new one with -n/--name.
2352 give it a new one with -n/--name.
2352
2353
2353 You can register an existing patch inside the patch directory with
2354 You can register an existing patch inside the patch directory with
2354 the -e/--existing flag.
2355 the -e/--existing flag.
2355
2356
2356 With -f/--force, an existing patch of the same name will be
2357 With -f/--force, an existing patch of the same name will be
2357 overwritten.
2358 overwritten.
2358
2359
2359 An existing changeset may be placed under mq control with -r/--rev
2360 An existing changeset may be placed under mq control with -r/--rev
2360 (e.g. qimport --rev . -n patch will place the current revision
2361 (e.g. qimport --rev . -n patch will place the current revision
2361 under mq control). With -g/--git, patches imported with --rev will
2362 under mq control). With -g/--git, patches imported with --rev will
2362 use the git diff format. See the diffs help topic for information
2363 use the git diff format. See the diffs help topic for information
2363 on why this is important for preserving rename/copy information
2364 on why this is important for preserving rename/copy information
2364 and permission changes. Use :hg:`qfinish` to remove changesets
2365 and permission changes. Use :hg:`qfinish` to remove changesets
2365 from mq control.
2366 from mq control.
2366
2367
2367 To import a patch from standard input, pass - as the patch file.
2368 To import a patch from standard input, pass - as the patch file.
2368 When importing from standard input, a patch name must be specified
2369 When importing from standard input, a patch name must be specified
2369 using the --name flag.
2370 using the --name flag.
2370
2371
2371 To import an existing patch while renaming it::
2372 To import an existing patch while renaming it::
2372
2373
2373 hg qimport -e existing-patch -n new-name
2374 hg qimport -e existing-patch -n new-name
2374
2375
2375 Returns 0 if import succeeded.
2376 Returns 0 if import succeeded.
2376 """
2377 """
2377 opts = pycompat.byteskwargs(opts)
2378 opts = pycompat.byteskwargs(opts)
2378 with repo.lock(): # cause this may move phase
2379 with repo.lock(): # cause this may move phase
2379 q = repo.mq
2380 q = repo.mq
2380 try:
2381 try:
2381 imported = q.qimport(
2382 imported = q.qimport(
2382 repo, filename, patchname=opts.get('name'),
2383 repo, filename, patchname=opts.get('name'),
2383 existing=opts.get('existing'), force=opts.get('force'),
2384 existing=opts.get('existing'), force=opts.get('force'),
2384 rev=opts.get('rev'), git=opts.get('git'))
2385 rev=opts.get('rev'), git=opts.get('git'))
2385 finally:
2386 finally:
2386 q.savedirty()
2387 q.savedirty()
2387
2388
2388 if imported and opts.get('push') and not opts.get('rev'):
2389 if imported and opts.get('push') and not opts.get('rev'):
2389 return q.push(repo, imported[-1])
2390 return q.push(repo, imported[-1])
2390 return 0
2391 return 0
2391
2392
2392 def qinit(ui, repo, create):
2393 def qinit(ui, repo, create):
2393 """initialize a new queue repository
2394 """initialize a new queue repository
2394
2395
2395 This command also creates a series file for ordering patches, and
2396 This command also creates a series file for ordering patches, and
2396 an mq-specific .hgignore file in the queue repository, to exclude
2397 an mq-specific .hgignore file in the queue repository, to exclude
2397 the status and guards files (these contain mostly transient state).
2398 the status and guards files (these contain mostly transient state).
2398
2399
2399 Returns 0 if initialization succeeded."""
2400 Returns 0 if initialization succeeded."""
2400 q = repo.mq
2401 q = repo.mq
2401 r = q.init(repo, create)
2402 r = q.init(repo, create)
2402 q.savedirty()
2403 q.savedirty()
2403 if r:
2404 if r:
2404 if not os.path.exists(r.wjoin('.hgignore')):
2405 if not os.path.exists(r.wjoin('.hgignore')):
2405 fp = r.wvfs('.hgignore', 'w')
2406 fp = r.wvfs('.hgignore', 'w')
2406 fp.write('^\\.hg\n')
2407 fp.write('^\\.hg\n')
2407 fp.write('^\\.mq\n')
2408 fp.write('^\\.mq\n')
2408 fp.write('syntax: glob\n')
2409 fp.write('syntax: glob\n')
2409 fp.write('status\n')
2410 fp.write('status\n')
2410 fp.write('guards\n')
2411 fp.write('guards\n')
2411 fp.close()
2412 fp.close()
2412 if not os.path.exists(r.wjoin('series')):
2413 if not os.path.exists(r.wjoin('series')):
2413 r.wvfs('series', 'w').close()
2414 r.wvfs('series', 'w').close()
2414 r[None].add(['.hgignore', 'series'])
2415 r[None].add(['.hgignore', 'series'])
2415 commands.add(ui, r)
2416 commands.add(ui, r)
2416 return 0
2417 return 0
2417
2418
2418 @command("^qinit",
2419 @command("^qinit",
2419 [('c', 'create-repo', None, _('create queue repository'))],
2420 [('c', 'create-repo', None, _('create queue repository'))],
2420 _('hg qinit [-c]'))
2421 _('hg qinit [-c]'))
2421 def init(ui, repo, **opts):
2422 def init(ui, repo, **opts):
2422 """init a new queue repository (DEPRECATED)
2423 """init a new queue repository (DEPRECATED)
2423
2424
2424 The queue repository is unversioned by default. If
2425 The queue repository is unversioned by default. If
2425 -c/--create-repo is specified, qinit will create a separate nested
2426 -c/--create-repo is specified, qinit will create a separate nested
2426 repository for patches (qinit -c may also be run later to convert
2427 repository for patches (qinit -c may also be run later to convert
2427 an unversioned patch repository into a versioned one). You can use
2428 an unversioned patch repository into a versioned one). You can use
2428 qcommit to commit changes to this queue repository.
2429 qcommit to commit changes to this queue repository.
2429
2430
2430 This command is deprecated. Without -c, it's implied by other relevant
2431 This command is deprecated. Without -c, it's implied by other relevant
2431 commands. With -c, use :hg:`init --mq` instead."""
2432 commands. With -c, use :hg:`init --mq` instead."""
2432 return qinit(ui, repo, create=opts.get(r'create_repo'))
2433 return qinit(ui, repo, create=opts.get(r'create_repo'))
2433
2434
2434 @command("qclone",
2435 @command("qclone",
2435 [('', 'pull', None, _('use pull protocol to copy metadata')),
2436 [('', 'pull', None, _('use pull protocol to copy metadata')),
2436 ('U', 'noupdate', None,
2437 ('U', 'noupdate', None,
2437 _('do not update the new working directories')),
2438 _('do not update the new working directories')),
2438 ('', 'uncompressed', None,
2439 ('', 'uncompressed', None,
2439 _('use uncompressed transfer (fast over LAN)')),
2440 _('use uncompressed transfer (fast over LAN)')),
2440 ('p', 'patches', '',
2441 ('p', 'patches', '',
2441 _('location of source patch repository'), _('REPO')),
2442 _('location of source patch repository'), _('REPO')),
2442 ] + cmdutil.remoteopts,
2443 ] + cmdutil.remoteopts,
2443 _('hg qclone [OPTION]... SOURCE [DEST]'),
2444 _('hg qclone [OPTION]... SOURCE [DEST]'),
2444 norepo=True)
2445 norepo=True)
2445 def clone(ui, source, dest=None, **opts):
2446 def clone(ui, source, dest=None, **opts):
2446 '''clone main and patch repository at same time
2447 '''clone main and patch repository at same time
2447
2448
2448 If source is local, destination will have no patches applied. If
2449 If source is local, destination will have no patches applied. If
2449 source is remote, this command can not check if patches are
2450 source is remote, this command can not check if patches are
2450 applied in source, so cannot guarantee that patches are not
2451 applied in source, so cannot guarantee that patches are not
2451 applied in destination. If you clone remote repository, be sure
2452 applied in destination. If you clone remote repository, be sure
2452 before that it has no patches applied.
2453 before that it has no patches applied.
2453
2454
2454 Source patch repository is looked for in <src>/.hg/patches by
2455 Source patch repository is looked for in <src>/.hg/patches by
2455 default. Use -p <url> to change.
2456 default. Use -p <url> to change.
2456
2457
2457 The patch directory must be a nested Mercurial repository, as
2458 The patch directory must be a nested Mercurial repository, as
2458 would be created by :hg:`init --mq`.
2459 would be created by :hg:`init --mq`.
2459
2460
2460 Return 0 on success.
2461 Return 0 on success.
2461 '''
2462 '''
2462 opts = pycompat.byteskwargs(opts)
2463 opts = pycompat.byteskwargs(opts)
2463 def patchdir(repo):
2464 def patchdir(repo):
2464 """compute a patch repo url from a repo object"""
2465 """compute a patch repo url from a repo object"""
2465 url = repo.url()
2466 url = repo.url()
2466 if url.endswith('/'):
2467 if url.endswith('/'):
2467 url = url[:-1]
2468 url = url[:-1]
2468 return url + '/.hg/patches'
2469 return url + '/.hg/patches'
2469
2470
2470 # main repo (destination and sources)
2471 # main repo (destination and sources)
2471 if dest is None:
2472 if dest is None:
2472 dest = hg.defaultdest(source)
2473 dest = hg.defaultdest(source)
2473 sr = hg.peer(ui, opts, ui.expandpath(source))
2474 sr = hg.peer(ui, opts, ui.expandpath(source))
2474
2475
2475 # patches repo (source only)
2476 # patches repo (source only)
2476 if opts.get('patches'):
2477 if opts.get('patches'):
2477 patchespath = ui.expandpath(opts.get('patches'))
2478 patchespath = ui.expandpath(opts.get('patches'))
2478 else:
2479 else:
2479 patchespath = patchdir(sr)
2480 patchespath = patchdir(sr)
2480 try:
2481 try:
2481 hg.peer(ui, opts, patchespath)
2482 hg.peer(ui, opts, patchespath)
2482 except error.RepoError:
2483 except error.RepoError:
2483 raise error.Abort(_('versioned patch repository not found'
2484 raise error.Abort(_('versioned patch repository not found'
2484 ' (see init --mq)'))
2485 ' (see init --mq)'))
2485 qbase, destrev = None, None
2486 qbase, destrev = None, None
2486 if sr.local():
2487 if sr.local():
2487 repo = sr.local()
2488 repo = sr.local()
2488 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2489 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2489 qbase = repo.mq.applied[0].node
2490 qbase = repo.mq.applied[0].node
2490 if not hg.islocal(dest):
2491 if not hg.islocal(dest):
2491 heads = set(repo.heads())
2492 heads = set(repo.heads())
2492 destrev = list(heads.difference(repo.heads(qbase)))
2493 destrev = list(heads.difference(repo.heads(qbase)))
2493 destrev.append(repo.changelog.parents(qbase)[0])
2494 destrev.append(repo.changelog.parents(qbase)[0])
2494 elif sr.capable('lookup'):
2495 elif sr.capable('lookup'):
2495 try:
2496 try:
2496 qbase = sr.lookup('qbase')
2497 qbase = sr.lookup('qbase')
2497 except error.RepoError:
2498 except error.RepoError:
2498 pass
2499 pass
2499
2500
2500 ui.note(_('cloning main repository\n'))
2501 ui.note(_('cloning main repository\n'))
2501 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2502 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2502 pull=opts.get('pull'),
2503 pull=opts.get('pull'),
2503 rev=destrev,
2504 rev=destrev,
2504 update=False,
2505 update=False,
2505 stream=opts.get('uncompressed'))
2506 stream=opts.get('uncompressed'))
2506
2507
2507 ui.note(_('cloning patch repository\n'))
2508 ui.note(_('cloning patch repository\n'))
2508 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2509 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2509 pull=opts.get('pull'), update=not opts.get('noupdate'),
2510 pull=opts.get('pull'), update=not opts.get('noupdate'),
2510 stream=opts.get('uncompressed'))
2511 stream=opts.get('uncompressed'))
2511
2512
2512 if dr.local():
2513 if dr.local():
2513 repo = dr.local()
2514 repo = dr.local()
2514 if qbase:
2515 if qbase:
2515 ui.note(_('stripping applied patches from destination '
2516 ui.note(_('stripping applied patches from destination '
2516 'repository\n'))
2517 'repository\n'))
2517 strip(ui, repo, [qbase], update=False, backup=None)
2518 strip(ui, repo, [qbase], update=False, backup=None)
2518 if not opts.get('noupdate'):
2519 if not opts.get('noupdate'):
2519 ui.note(_('updating destination repository\n'))
2520 ui.note(_('updating destination repository\n'))
2520 hg.update(repo, repo.changelog.tip())
2521 hg.update(repo, repo.changelog.tip())
2521
2522
2522 @command("qcommit|qci",
2523 @command("qcommit|qci",
2523 commands.table["^commit|ci"][1],
2524 commands.table["^commit|ci"][1],
2524 _('hg qcommit [OPTION]... [FILE]...'),
2525 _('hg qcommit [OPTION]... [FILE]...'),
2525 inferrepo=True)
2526 inferrepo=True)
2526 def commit(ui, repo, *pats, **opts):
2527 def commit(ui, repo, *pats, **opts):
2527 """commit changes in the queue repository (DEPRECATED)
2528 """commit changes in the queue repository (DEPRECATED)
2528
2529
2529 This command is deprecated; use :hg:`commit --mq` instead."""
2530 This command is deprecated; use :hg:`commit --mq` instead."""
2530 q = repo.mq
2531 q = repo.mq
2531 r = q.qrepo()
2532 r = q.qrepo()
2532 if not r:
2533 if not r:
2533 raise error.Abort('no queue repository')
2534 raise error.Abort('no queue repository')
2534 commands.commit(r.ui, r, *pats, **opts)
2535 commands.commit(r.ui, r, *pats, **opts)
2535
2536
2536 @command("qseries",
2537 @command("qseries",
2537 [('m', 'missing', None, _('print patches not in series')),
2538 [('m', 'missing', None, _('print patches not in series')),
2538 ] + seriesopts,
2539 ] + seriesopts,
2539 _('hg qseries [-ms]'))
2540 _('hg qseries [-ms]'))
2540 def series(ui, repo, **opts):
2541 def series(ui, repo, **opts):
2541 """print the entire series file
2542 """print the entire series file
2542
2543
2543 Returns 0 on success."""
2544 Returns 0 on success."""
2544 repo.mq.qseries(repo, missing=opts.get(r'missing'),
2545 repo.mq.qseries(repo, missing=opts.get(r'missing'),
2545 summary=opts.get(r'summary'))
2546 summary=opts.get(r'summary'))
2546 return 0
2547 return 0
2547
2548
2548 @command("qtop", seriesopts, _('hg qtop [-s]'))
2549 @command("qtop", seriesopts, _('hg qtop [-s]'))
2549 def top(ui, repo, **opts):
2550 def top(ui, repo, **opts):
2550 """print the name of the current patch
2551 """print the name of the current patch
2551
2552
2552 Returns 0 on success."""
2553 Returns 0 on success."""
2553 q = repo.mq
2554 q = repo.mq
2554 if q.applied:
2555 if q.applied:
2555 t = q.seriesend(True)
2556 t = q.seriesend(True)
2556 else:
2557 else:
2557 t = 0
2558 t = 0
2558
2559
2559 if t:
2560 if t:
2560 q.qseries(repo, start=t - 1, length=1, status='A',
2561 q.qseries(repo, start=t - 1, length=1, status='A',
2561 summary=opts.get(r'summary'))
2562 summary=opts.get(r'summary'))
2562 else:
2563 else:
2563 ui.write(_("no patches applied\n"))
2564 ui.write(_("no patches applied\n"))
2564 return 1
2565 return 1
2565
2566
2566 @command("qnext", seriesopts, _('hg qnext [-s]'))
2567 @command("qnext", seriesopts, _('hg qnext [-s]'))
2567 def next(ui, repo, **opts):
2568 def next(ui, repo, **opts):
2568 """print the name of the next pushable patch
2569 """print the name of the next pushable patch
2569
2570
2570 Returns 0 on success."""
2571 Returns 0 on success."""
2571 q = repo.mq
2572 q = repo.mq
2572 end = q.seriesend()
2573 end = q.seriesend()
2573 if end == len(q.series):
2574 if end == len(q.series):
2574 ui.write(_("all patches applied\n"))
2575 ui.write(_("all patches applied\n"))
2575 return 1
2576 return 1
2576 q.qseries(repo, start=end, length=1, summary=opts.get(r'summary'))
2577 q.qseries(repo, start=end, length=1, summary=opts.get(r'summary'))
2577
2578
2578 @command("qprev", seriesopts, _('hg qprev [-s]'))
2579 @command("qprev", seriesopts, _('hg qprev [-s]'))
2579 def prev(ui, repo, **opts):
2580 def prev(ui, repo, **opts):
2580 """print the name of the preceding applied patch
2581 """print the name of the preceding applied patch
2581
2582
2582 Returns 0 on success."""
2583 Returns 0 on success."""
2583 q = repo.mq
2584 q = repo.mq
2584 l = len(q.applied)
2585 l = len(q.applied)
2585 if l == 1:
2586 if l == 1:
2586 ui.write(_("only one patch applied\n"))
2587 ui.write(_("only one patch applied\n"))
2587 return 1
2588 return 1
2588 if not l:
2589 if not l:
2589 ui.write(_("no patches applied\n"))
2590 ui.write(_("no patches applied\n"))
2590 return 1
2591 return 1
2591 idx = q.series.index(q.applied[-2].name)
2592 idx = q.series.index(q.applied[-2].name)
2592 q.qseries(repo, start=idx, length=1, status='A',
2593 q.qseries(repo, start=idx, length=1, status='A',
2593 summary=opts.get(r'summary'))
2594 summary=opts.get(r'summary'))
2594
2595
2595 def setupheaderopts(ui, opts):
2596 def setupheaderopts(ui, opts):
2596 if not opts.get('user') and opts.get('currentuser'):
2597 if not opts.get('user') and opts.get('currentuser'):
2597 opts['user'] = ui.username()
2598 opts['user'] = ui.username()
2598 if not opts.get('date') and opts.get('currentdate'):
2599 if not opts.get('date') and opts.get('currentdate'):
2599 opts['date'] = "%d %d" % util.makedate()
2600 opts['date'] = "%d %d" % dateutil.makedate()
2600
2601
2601 @command("^qnew",
2602 @command("^qnew",
2602 [('e', 'edit', None, _('invoke editor on commit messages')),
2603 [('e', 'edit', None, _('invoke editor on commit messages')),
2603 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2604 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2604 ('g', 'git', None, _('use git extended diff format')),
2605 ('g', 'git', None, _('use git extended diff format')),
2605 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2606 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2606 ('u', 'user', '',
2607 ('u', 'user', '',
2607 _('add "From: <USER>" to patch'), _('USER')),
2608 _('add "From: <USER>" to patch'), _('USER')),
2608 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2609 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2609 ('d', 'date', '',
2610 ('d', 'date', '',
2610 _('add "Date: <DATE>" to patch'), _('DATE'))
2611 _('add "Date: <DATE>" to patch'), _('DATE'))
2611 ] + cmdutil.walkopts + cmdutil.commitopts,
2612 ] + cmdutil.walkopts + cmdutil.commitopts,
2612 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2613 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2613 inferrepo=True)
2614 inferrepo=True)
2614 def new(ui, repo, patch, *args, **opts):
2615 def new(ui, repo, patch, *args, **opts):
2615 """create a new patch
2616 """create a new patch
2616
2617
2617 qnew creates a new patch on top of the currently-applied patch (if
2618 qnew creates a new patch on top of the currently-applied patch (if
2618 any). The patch will be initialized with any outstanding changes
2619 any). The patch will be initialized with any outstanding changes
2619 in the working directory. You may also use -I/--include,
2620 in the working directory. You may also use -I/--include,
2620 -X/--exclude, and/or a list of files after the patch name to add
2621 -X/--exclude, and/or a list of files after the patch name to add
2621 only changes to matching files to the new patch, leaving the rest
2622 only changes to matching files to the new patch, leaving the rest
2622 as uncommitted modifications.
2623 as uncommitted modifications.
2623
2624
2624 -u/--user and -d/--date can be used to set the (given) user and
2625 -u/--user and -d/--date can be used to set the (given) user and
2625 date, respectively. -U/--currentuser and -D/--currentdate set user
2626 date, respectively. -U/--currentuser and -D/--currentdate set user
2626 to current user and date to current date.
2627 to current user and date to current date.
2627
2628
2628 -e/--edit, -m/--message or -l/--logfile set the patch header as
2629 -e/--edit, -m/--message or -l/--logfile set the patch header as
2629 well as the commit message. If none is specified, the header is
2630 well as the commit message. If none is specified, the header is
2630 empty and the commit message is '[mq]: PATCH'.
2631 empty and the commit message is '[mq]: PATCH'.
2631
2632
2632 Use the -g/--git option to keep the patch in the git extended diff
2633 Use the -g/--git option to keep the patch in the git extended diff
2633 format. Read the diffs help topic for more information on why this
2634 format. Read the diffs help topic for more information on why this
2634 is important for preserving permission changes and copy/rename
2635 is important for preserving permission changes and copy/rename
2635 information.
2636 information.
2636
2637
2637 Returns 0 on successful creation of a new patch.
2638 Returns 0 on successful creation of a new patch.
2638 """
2639 """
2639 opts = pycompat.byteskwargs(opts)
2640 opts = pycompat.byteskwargs(opts)
2640 msg = cmdutil.logmessage(ui, opts)
2641 msg = cmdutil.logmessage(ui, opts)
2641 q = repo.mq
2642 q = repo.mq
2642 opts['msg'] = msg
2643 opts['msg'] = msg
2643 setupheaderopts(ui, opts)
2644 setupheaderopts(ui, opts)
2644 q.new(repo, patch, *args, **pycompat.strkwargs(opts))
2645 q.new(repo, patch, *args, **pycompat.strkwargs(opts))
2645 q.savedirty()
2646 q.savedirty()
2646 return 0
2647 return 0
2647
2648
2648 @command("^qrefresh",
2649 @command("^qrefresh",
2649 [('e', 'edit', None, _('invoke editor on commit messages')),
2650 [('e', 'edit', None, _('invoke editor on commit messages')),
2650 ('g', 'git', None, _('use git extended diff format')),
2651 ('g', 'git', None, _('use git extended diff format')),
2651 ('s', 'short', None,
2652 ('s', 'short', None,
2652 _('refresh only files already in the patch and specified files')),
2653 _('refresh only files already in the patch and specified files')),
2653 ('U', 'currentuser', None,
2654 ('U', 'currentuser', None,
2654 _('add/update author field in patch with current user')),
2655 _('add/update author field in patch with current user')),
2655 ('u', 'user', '',
2656 ('u', 'user', '',
2656 _('add/update author field in patch with given user'), _('USER')),
2657 _('add/update author field in patch with given user'), _('USER')),
2657 ('D', 'currentdate', None,
2658 ('D', 'currentdate', None,
2658 _('add/update date field in patch with current date')),
2659 _('add/update date field in patch with current date')),
2659 ('d', 'date', '',
2660 ('d', 'date', '',
2660 _('add/update date field in patch with given date'), _('DATE'))
2661 _('add/update date field in patch with given date'), _('DATE'))
2661 ] + cmdutil.walkopts + cmdutil.commitopts,
2662 ] + cmdutil.walkopts + cmdutil.commitopts,
2662 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2663 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2663 inferrepo=True)
2664 inferrepo=True)
2664 def refresh(ui, repo, *pats, **opts):
2665 def refresh(ui, repo, *pats, **opts):
2665 """update the current patch
2666 """update the current patch
2666
2667
2667 If any file patterns are provided, the refreshed patch will
2668 If any file patterns are provided, the refreshed patch will
2668 contain only the modifications that match those patterns; the
2669 contain only the modifications that match those patterns; the
2669 remaining modifications will remain in the working directory.
2670 remaining modifications will remain in the working directory.
2670
2671
2671 If -s/--short is specified, files currently included in the patch
2672 If -s/--short is specified, files currently included in the patch
2672 will be refreshed just like matched files and remain in the patch.
2673 will be refreshed just like matched files and remain in the patch.
2673
2674
2674 If -e/--edit is specified, Mercurial will start your configured editor for
2675 If -e/--edit is specified, Mercurial will start your configured editor for
2675 you to enter a message. In case qrefresh fails, you will find a backup of
2676 you to enter a message. In case qrefresh fails, you will find a backup of
2676 your message in ``.hg/last-message.txt``.
2677 your message in ``.hg/last-message.txt``.
2677
2678
2678 hg add/remove/copy/rename work as usual, though you might want to
2679 hg add/remove/copy/rename work as usual, though you might want to
2679 use git-style patches (-g/--git or [diff] git=1) to track copies
2680 use git-style patches (-g/--git or [diff] git=1) to track copies
2680 and renames. See the diffs help topic for more information on the
2681 and renames. See the diffs help topic for more information on the
2681 git diff format.
2682 git diff format.
2682
2683
2683 Returns 0 on success.
2684 Returns 0 on success.
2684 """
2685 """
2685 opts = pycompat.byteskwargs(opts)
2686 opts = pycompat.byteskwargs(opts)
2686 q = repo.mq
2687 q = repo.mq
2687 message = cmdutil.logmessage(ui, opts)
2688 message = cmdutil.logmessage(ui, opts)
2688 setupheaderopts(ui, opts)
2689 setupheaderopts(ui, opts)
2689 with repo.wlock():
2690 with repo.wlock():
2690 ret = q.refresh(repo, pats, msg=message, **pycompat.strkwargs(opts))
2691 ret = q.refresh(repo, pats, msg=message, **pycompat.strkwargs(opts))
2691 q.savedirty()
2692 q.savedirty()
2692 return ret
2693 return ret
2693
2694
2694 @command("^qdiff",
2695 @command("^qdiff",
2695 cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
2696 cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
2696 _('hg qdiff [OPTION]... [FILE]...'),
2697 _('hg qdiff [OPTION]... [FILE]...'),
2697 inferrepo=True)
2698 inferrepo=True)
2698 def diff(ui, repo, *pats, **opts):
2699 def diff(ui, repo, *pats, **opts):
2699 """diff of the current patch and subsequent modifications
2700 """diff of the current patch and subsequent modifications
2700
2701
2701 Shows a diff which includes the current patch as well as any
2702 Shows a diff which includes the current patch as well as any
2702 changes which have been made in the working directory since the
2703 changes which have been made in the working directory since the
2703 last refresh (thus showing what the current patch would become
2704 last refresh (thus showing what the current patch would become
2704 after a qrefresh).
2705 after a qrefresh).
2705
2706
2706 Use :hg:`diff` if you only want to see the changes made since the
2707 Use :hg:`diff` if you only want to see the changes made since the
2707 last qrefresh, or :hg:`export qtip` if you want to see changes
2708 last qrefresh, or :hg:`export qtip` if you want to see changes
2708 made by the current patch without including changes made since the
2709 made by the current patch without including changes made since the
2709 qrefresh.
2710 qrefresh.
2710
2711
2711 Returns 0 on success.
2712 Returns 0 on success.
2712 """
2713 """
2713 ui.pager('qdiff')
2714 ui.pager('qdiff')
2714 repo.mq.diff(repo, pats, pycompat.byteskwargs(opts))
2715 repo.mq.diff(repo, pats, pycompat.byteskwargs(opts))
2715 return 0
2716 return 0
2716
2717
2717 @command('qfold',
2718 @command('qfold',
2718 [('e', 'edit', None, _('invoke editor on commit messages')),
2719 [('e', 'edit', None, _('invoke editor on commit messages')),
2719 ('k', 'keep', None, _('keep folded patch files')),
2720 ('k', 'keep', None, _('keep folded patch files')),
2720 ] + cmdutil.commitopts,
2721 ] + cmdutil.commitopts,
2721 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2722 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2722 def fold(ui, repo, *files, **opts):
2723 def fold(ui, repo, *files, **opts):
2723 """fold the named patches into the current patch
2724 """fold the named patches into the current patch
2724
2725
2725 Patches must not yet be applied. Each patch will be successively
2726 Patches must not yet be applied. Each patch will be successively
2726 applied to the current patch in the order given. If all the
2727 applied to the current patch in the order given. If all the
2727 patches apply successfully, the current patch will be refreshed
2728 patches apply successfully, the current patch will be refreshed
2728 with the new cumulative patch, and the folded patches will be
2729 with the new cumulative patch, and the folded patches will be
2729 deleted. With -k/--keep, the folded patch files will not be
2730 deleted. With -k/--keep, the folded patch files will not be
2730 removed afterwards.
2731 removed afterwards.
2731
2732
2732 The header for each folded patch will be concatenated with the
2733 The header for each folded patch will be concatenated with the
2733 current patch header, separated by a line of ``* * *``.
2734 current patch header, separated by a line of ``* * *``.
2734
2735
2735 Returns 0 on success."""
2736 Returns 0 on success."""
2736 opts = pycompat.byteskwargs(opts)
2737 opts = pycompat.byteskwargs(opts)
2737 q = repo.mq
2738 q = repo.mq
2738 if not files:
2739 if not files:
2739 raise error.Abort(_('qfold requires at least one patch name'))
2740 raise error.Abort(_('qfold requires at least one patch name'))
2740 if not q.checktoppatch(repo)[0]:
2741 if not q.checktoppatch(repo)[0]:
2741 raise error.Abort(_('no patches applied'))
2742 raise error.Abort(_('no patches applied'))
2742 q.checklocalchanges(repo)
2743 q.checklocalchanges(repo)
2743
2744
2744 message = cmdutil.logmessage(ui, opts)
2745 message = cmdutil.logmessage(ui, opts)
2745
2746
2746 parent = q.lookup('qtip')
2747 parent = q.lookup('qtip')
2747 patches = []
2748 patches = []
2748 messages = []
2749 messages = []
2749 for f in files:
2750 for f in files:
2750 p = q.lookup(f)
2751 p = q.lookup(f)
2751 if p in patches or p == parent:
2752 if p in patches or p == parent:
2752 ui.warn(_('skipping already folded patch %s\n') % p)
2753 ui.warn(_('skipping already folded patch %s\n') % p)
2753 if q.isapplied(p):
2754 if q.isapplied(p):
2754 raise error.Abort(_('qfold cannot fold already applied patch %s')
2755 raise error.Abort(_('qfold cannot fold already applied patch %s')
2755 % p)
2756 % p)
2756 patches.append(p)
2757 patches.append(p)
2757
2758
2758 for p in patches:
2759 for p in patches:
2759 if not message:
2760 if not message:
2760 ph = patchheader(q.join(p), q.plainmode)
2761 ph = patchheader(q.join(p), q.plainmode)
2761 if ph.message:
2762 if ph.message:
2762 messages.append(ph.message)
2763 messages.append(ph.message)
2763 pf = q.join(p)
2764 pf = q.join(p)
2764 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2765 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2765 if not patchsuccess:
2766 if not patchsuccess:
2766 raise error.Abort(_('error folding patch %s') % p)
2767 raise error.Abort(_('error folding patch %s') % p)
2767
2768
2768 if not message:
2769 if not message:
2769 ph = patchheader(q.join(parent), q.plainmode)
2770 ph = patchheader(q.join(parent), q.plainmode)
2770 message = ph.message
2771 message = ph.message
2771 for msg in messages:
2772 for msg in messages:
2772 if msg:
2773 if msg:
2773 if message:
2774 if message:
2774 message.append('* * *')
2775 message.append('* * *')
2775 message.extend(msg)
2776 message.extend(msg)
2776 message = '\n'.join(message)
2777 message = '\n'.join(message)
2777
2778
2778 diffopts = q.patchopts(q.diffopts(), *patches)
2779 diffopts = q.patchopts(q.diffopts(), *patches)
2779 with repo.wlock():
2780 with repo.wlock():
2780 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2781 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2781 editform='mq.qfold')
2782 editform='mq.qfold')
2782 q.delete(repo, patches, opts)
2783 q.delete(repo, patches, opts)
2783 q.savedirty()
2784 q.savedirty()
2784
2785
2785 @command("qgoto",
2786 @command("qgoto",
2786 [('', 'keep-changes', None,
2787 [('', 'keep-changes', None,
2787 _('tolerate non-conflicting local changes')),
2788 _('tolerate non-conflicting local changes')),
2788 ('f', 'force', None, _('overwrite any local changes')),
2789 ('f', 'force', None, _('overwrite any local changes')),
2789 ('', 'no-backup', None, _('do not save backup copies of files'))],
2790 ('', 'no-backup', None, _('do not save backup copies of files'))],
2790 _('hg qgoto [OPTION]... PATCH'))
2791 _('hg qgoto [OPTION]... PATCH'))
2791 def goto(ui, repo, patch, **opts):
2792 def goto(ui, repo, patch, **opts):
2792 '''push or pop patches until named patch is at top of stack
2793 '''push or pop patches until named patch is at top of stack
2793
2794
2794 Returns 0 on success.'''
2795 Returns 0 on success.'''
2795 opts = pycompat.byteskwargs(opts)
2796 opts = pycompat.byteskwargs(opts)
2796 opts = fixkeepchangesopts(ui, opts)
2797 opts = fixkeepchangesopts(ui, opts)
2797 q = repo.mq
2798 q = repo.mq
2798 patch = q.lookup(patch)
2799 patch = q.lookup(patch)
2799 nobackup = opts.get('no_backup')
2800 nobackup = opts.get('no_backup')
2800 keepchanges = opts.get('keep_changes')
2801 keepchanges = opts.get('keep_changes')
2801 if q.isapplied(patch):
2802 if q.isapplied(patch):
2802 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2803 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2803 keepchanges=keepchanges)
2804 keepchanges=keepchanges)
2804 else:
2805 else:
2805 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2806 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2806 keepchanges=keepchanges)
2807 keepchanges=keepchanges)
2807 q.savedirty()
2808 q.savedirty()
2808 return ret
2809 return ret
2809
2810
2810 @command("qguard",
2811 @command("qguard",
2811 [('l', 'list', None, _('list all patches and guards')),
2812 [('l', 'list', None, _('list all patches and guards')),
2812 ('n', 'none', None, _('drop all guards'))],
2813 ('n', 'none', None, _('drop all guards'))],
2813 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2814 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2814 def guard(ui, repo, *args, **opts):
2815 def guard(ui, repo, *args, **opts):
2815 '''set or print guards for a patch
2816 '''set or print guards for a patch
2816
2817
2817 Guards control whether a patch can be pushed. A patch with no
2818 Guards control whether a patch can be pushed. A patch with no
2818 guards is always pushed. A patch with a positive guard ("+foo") is
2819 guards is always pushed. A patch with a positive guard ("+foo") is
2819 pushed only if the :hg:`qselect` command has activated it. A patch with
2820 pushed only if the :hg:`qselect` command has activated it. A patch with
2820 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2821 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2821 has activated it.
2822 has activated it.
2822
2823
2823 With no arguments, print the currently active guards.
2824 With no arguments, print the currently active guards.
2824 With arguments, set guards for the named patch.
2825 With arguments, set guards for the named patch.
2825
2826
2826 .. note::
2827 .. note::
2827
2828
2828 Specifying negative guards now requires '--'.
2829 Specifying negative guards now requires '--'.
2829
2830
2830 To set guards on another patch::
2831 To set guards on another patch::
2831
2832
2832 hg qguard other.patch -- +2.6.17 -stable
2833 hg qguard other.patch -- +2.6.17 -stable
2833
2834
2834 Returns 0 on success.
2835 Returns 0 on success.
2835 '''
2836 '''
2836 def status(idx):
2837 def status(idx):
2837 guards = q.seriesguards[idx] or ['unguarded']
2838 guards = q.seriesguards[idx] or ['unguarded']
2838 if q.series[idx] in applied:
2839 if q.series[idx] in applied:
2839 state = 'applied'
2840 state = 'applied'
2840 elif q.pushable(idx)[0]:
2841 elif q.pushable(idx)[0]:
2841 state = 'unapplied'
2842 state = 'unapplied'
2842 else:
2843 else:
2843 state = 'guarded'
2844 state = 'guarded'
2844 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2845 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2845 ui.write('%s: ' % ui.label(q.series[idx], label))
2846 ui.write('%s: ' % ui.label(q.series[idx], label))
2846
2847
2847 for i, guard in enumerate(guards):
2848 for i, guard in enumerate(guards):
2848 if guard.startswith('+'):
2849 if guard.startswith('+'):
2849 ui.write(guard, label='qguard.positive')
2850 ui.write(guard, label='qguard.positive')
2850 elif guard.startswith('-'):
2851 elif guard.startswith('-'):
2851 ui.write(guard, label='qguard.negative')
2852 ui.write(guard, label='qguard.negative')
2852 else:
2853 else:
2853 ui.write(guard, label='qguard.unguarded')
2854 ui.write(guard, label='qguard.unguarded')
2854 if i != len(guards) - 1:
2855 if i != len(guards) - 1:
2855 ui.write(' ')
2856 ui.write(' ')
2856 ui.write('\n')
2857 ui.write('\n')
2857 q = repo.mq
2858 q = repo.mq
2858 applied = set(p.name for p in q.applied)
2859 applied = set(p.name for p in q.applied)
2859 patch = None
2860 patch = None
2860 args = list(args)
2861 args = list(args)
2861 if opts.get(r'list'):
2862 if opts.get(r'list'):
2862 if args or opts.get('none'):
2863 if args or opts.get('none'):
2863 raise error.Abort(_('cannot mix -l/--list with options or '
2864 raise error.Abort(_('cannot mix -l/--list with options or '
2864 'arguments'))
2865 'arguments'))
2865 for i in xrange(len(q.series)):
2866 for i in xrange(len(q.series)):
2866 status(i)
2867 status(i)
2867 return
2868 return
2868 if not args or args[0][0:1] in '-+':
2869 if not args or args[0][0:1] in '-+':
2869 if not q.applied:
2870 if not q.applied:
2870 raise error.Abort(_('no patches applied'))
2871 raise error.Abort(_('no patches applied'))
2871 patch = q.applied[-1].name
2872 patch = q.applied[-1].name
2872 if patch is None and args[0][0:1] not in '-+':
2873 if patch is None and args[0][0:1] not in '-+':
2873 patch = args.pop(0)
2874 patch = args.pop(0)
2874 if patch is None:
2875 if patch is None:
2875 raise error.Abort(_('no patch to work with'))
2876 raise error.Abort(_('no patch to work with'))
2876 if args or opts.get('none'):
2877 if args or opts.get('none'):
2877 idx = q.findseries(patch)
2878 idx = q.findseries(patch)
2878 if idx is None:
2879 if idx is None:
2879 raise error.Abort(_('no patch named %s') % patch)
2880 raise error.Abort(_('no patch named %s') % patch)
2880 q.setguards(idx, args)
2881 q.setguards(idx, args)
2881 q.savedirty()
2882 q.savedirty()
2882 else:
2883 else:
2883 status(q.series.index(q.lookup(patch)))
2884 status(q.series.index(q.lookup(patch)))
2884
2885
2885 @command("qheader", [], _('hg qheader [PATCH]'))
2886 @command("qheader", [], _('hg qheader [PATCH]'))
2886 def header(ui, repo, patch=None):
2887 def header(ui, repo, patch=None):
2887 """print the header of the topmost or specified patch
2888 """print the header of the topmost or specified patch
2888
2889
2889 Returns 0 on success."""
2890 Returns 0 on success."""
2890 q = repo.mq
2891 q = repo.mq
2891
2892
2892 if patch:
2893 if patch:
2893 patch = q.lookup(patch)
2894 patch = q.lookup(patch)
2894 else:
2895 else:
2895 if not q.applied:
2896 if not q.applied:
2896 ui.write(_('no patches applied\n'))
2897 ui.write(_('no patches applied\n'))
2897 return 1
2898 return 1
2898 patch = q.lookup('qtip')
2899 patch = q.lookup('qtip')
2899 ph = patchheader(q.join(patch), q.plainmode)
2900 ph = patchheader(q.join(patch), q.plainmode)
2900
2901
2901 ui.write('\n'.join(ph.message) + '\n')
2902 ui.write('\n'.join(ph.message) + '\n')
2902
2903
2903 def lastsavename(path):
2904 def lastsavename(path):
2904 (directory, base) = os.path.split(path)
2905 (directory, base) = os.path.split(path)
2905 names = os.listdir(directory)
2906 names = os.listdir(directory)
2906 namere = re.compile("%s.([0-9]+)" % base)
2907 namere = re.compile("%s.([0-9]+)" % base)
2907 maxindex = None
2908 maxindex = None
2908 maxname = None
2909 maxname = None
2909 for f in names:
2910 for f in names:
2910 m = namere.match(f)
2911 m = namere.match(f)
2911 if m:
2912 if m:
2912 index = int(m.group(1))
2913 index = int(m.group(1))
2913 if maxindex is None or index > maxindex:
2914 if maxindex is None or index > maxindex:
2914 maxindex = index
2915 maxindex = index
2915 maxname = f
2916 maxname = f
2916 if maxname:
2917 if maxname:
2917 return (os.path.join(directory, maxname), maxindex)
2918 return (os.path.join(directory, maxname), maxindex)
2918 return (None, None)
2919 return (None, None)
2919
2920
2920 def savename(path):
2921 def savename(path):
2921 (last, index) = lastsavename(path)
2922 (last, index) = lastsavename(path)
2922 if last is None:
2923 if last is None:
2923 index = 0
2924 index = 0
2924 newpath = path + ".%d" % (index + 1)
2925 newpath = path + ".%d" % (index + 1)
2925 return newpath
2926 return newpath
2926
2927
2927 @command("^qpush",
2928 @command("^qpush",
2928 [('', 'keep-changes', None,
2929 [('', 'keep-changes', None,
2929 _('tolerate non-conflicting local changes')),
2930 _('tolerate non-conflicting local changes')),
2930 ('f', 'force', None, _('apply on top of local changes')),
2931 ('f', 'force', None, _('apply on top of local changes')),
2931 ('e', 'exact', None,
2932 ('e', 'exact', None,
2932 _('apply the target patch to its recorded parent')),
2933 _('apply the target patch to its recorded parent')),
2933 ('l', 'list', None, _('list patch name in commit text')),
2934 ('l', 'list', None, _('list patch name in commit text')),
2934 ('a', 'all', None, _('apply all patches')),
2935 ('a', 'all', None, _('apply all patches')),
2935 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2936 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2936 ('n', 'name', '',
2937 ('n', 'name', '',
2937 _('merge queue name (DEPRECATED)'), _('NAME')),
2938 _('merge queue name (DEPRECATED)'), _('NAME')),
2938 ('', 'move', None,
2939 ('', 'move', None,
2939 _('reorder patch series and apply only the patch')),
2940 _('reorder patch series and apply only the patch')),
2940 ('', 'no-backup', None, _('do not save backup copies of files'))],
2941 ('', 'no-backup', None, _('do not save backup copies of files'))],
2941 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2942 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2942 def push(ui, repo, patch=None, **opts):
2943 def push(ui, repo, patch=None, **opts):
2943 """push the next patch onto the stack
2944 """push the next patch onto the stack
2944
2945
2945 By default, abort if the working directory contains uncommitted
2946 By default, abort if the working directory contains uncommitted
2946 changes. With --keep-changes, abort only if the uncommitted files
2947 changes. With --keep-changes, abort only if the uncommitted files
2947 overlap with patched files. With -f/--force, backup and patch over
2948 overlap with patched files. With -f/--force, backup and patch over
2948 uncommitted changes.
2949 uncommitted changes.
2949
2950
2950 Return 0 on success.
2951 Return 0 on success.
2951 """
2952 """
2952 q = repo.mq
2953 q = repo.mq
2953 mergeq = None
2954 mergeq = None
2954
2955
2955 opts = pycompat.byteskwargs(opts)
2956 opts = pycompat.byteskwargs(opts)
2956 opts = fixkeepchangesopts(ui, opts)
2957 opts = fixkeepchangesopts(ui, opts)
2957 if opts.get('merge'):
2958 if opts.get('merge'):
2958 if opts.get('name'):
2959 if opts.get('name'):
2959 newpath = repo.vfs.join(opts.get('name'))
2960 newpath = repo.vfs.join(opts.get('name'))
2960 else:
2961 else:
2961 newpath, i = lastsavename(q.path)
2962 newpath, i = lastsavename(q.path)
2962 if not newpath:
2963 if not newpath:
2963 ui.warn(_("no saved queues found, please use -n\n"))
2964 ui.warn(_("no saved queues found, please use -n\n"))
2964 return 1
2965 return 1
2965 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2966 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2966 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2967 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2967 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2968 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2968 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2969 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2969 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2970 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2970 keepchanges=opts.get('keep_changes'))
2971 keepchanges=opts.get('keep_changes'))
2971 return ret
2972 return ret
2972
2973
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    # Normalize keyword args to bytes and reconcile --keep-changes/--force.
    opts = fixkeepchangesopts(ui, pycompat.byteskwargs(opts))
    qname = opts.get('name')
    if qname:
        # Deprecated: pop from an explicitly named queue. The working
        # directory is left alone in that case.
        q = queue(ui, repo.baseui, repo.path, repo.vfs.join(qname))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch,
                force=opts.get('force'),
                update=localupdate,
                all=opts.get('all'),
                nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    # Persist queue state (applied/series files) before returning.
    q.savedirty()
    return ret
3010
3011
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    # With a single argument the sole name is the *destination*; the
    # source defaults to the currently applied top patch (qtip).
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # Renaming "into" an existing directory keeps the patch's basename.
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    # Reject invalid or already-used destination names before mutating state.
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # Rewrite the series entry, preserving any '#guard' annotations.
    i = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # If the patch is applied, update the status entry to the new name.
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # If the patch queue is itself a versioned repository, record the
    # rename there too: a freshly-added file is simply dropped/re-added,
    # otherwise it is recorded as a copy so history is preserved.
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        with r.wlock():
            if r.dirstate[patch] == 'a':
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                wctx.copy(patch, name)
                wctx.forget([patch])

    q.savedirty()
3065
3066
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    # Resolve the revision, replay the saved queue state, then persist it.
    mq = repo.mq
    mq.restore(repo, repo.lookup(rev),
               delete=opts.get(r'delete'),
               qupdate=opts.get(r'update'))
    mq.savedirty()
    return 0
3080
3081
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    opts = pycompat.byteskwargs(opts)
    message = cmdutil.logmessage(ui, opts)
    # Record the queue state in a changeset; a non-zero return means the
    # save itself failed and we must not touch anything else.
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        path = q.path
        if opts.get('name'):
            # Explicit destination directory for the copy.
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise error.Abort(_('destination %s exists and is not '
                                        'a directory') % newpath)
                if not opts.get('force'):
                    raise error.Abort(_('destination %s exists, '
                                        'use -f to force') % newpath)
        else:
            # No name given: pick the next free "<path>.N" backup name.
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        # Clear the applied-patches status file, leaving the series intact.
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
3119
3120
3120
3121
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

      qguard foo.patch -- -stable    (negative guard)
      qguard bar.patch    +stable    (positive guard)
      qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    opts = pycompat.byteskwargs(opts)
    guards = q.active()
    # pushable(i) -> would applied patch #i be pushable under current guards?
    pushable = lambda i: q.pushable(q.applied[i].name)[0]
    if args or opts.get('none'):
        # Setting (or clearing) the active guards: snapshot the counts of
        # unapplied/guarded patches first so we can report how they changed.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: tally every guard used in the series file.
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = list(guards.items())
        # Sort by guard name ignoring the leading '+'/'-' sign character.
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No args and no mode flag: just print the active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # Remember the current top patch so --reapply can push back to it.
    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop down to just below the first applied patch that is now guarded.
        for i in xrange(len(q.applied)):
            if not pushable(i):
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, q.applied[i - 1].name)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # Persist queue state even if the re-push fails midway.
            q.savedirty()
3230
3231
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'))
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    if not opts.get(r'applied') and not revrange:
        raise error.Abort(_('no revisions specified'))
    elif opts.get(r'applied'):
        # --applied: finish the whole applied stack, plus any extra revs.
        revrange = ('qbase::qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may change phases but leaves it to the caller to take
    # the repo lock (to avoid a deadlock with wlock); this command code is
    # responsible for that locking.
    with repo.lock():
        q.finish(repo, revs)
        q.savedirty()
    return 0
3272
3273
@command("qqueue",
         [('l', 'list', False, _('list all available queues')),
          ('', 'active', False, _('print name of active queue')),
          ('c', 'create', False, _('create new queue')),
          ('', 'rename', False, _('rename active queue')),
          ('', 'delete', False, _('delete reference to queue')),
          ('', 'purge', False, _('delete queue, and remove patch dir')),
         ],
         _('[OPTION] [QUEUE]'))
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)". Specifying --active will print
    only the name of the active queue.

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''
    q = repo.mq
    # Bookkeeping files under .hg/: the default queue directory name, the
    # registry of all queues, and the marker naming the active queue.
    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        # Active queue name, derived from the queue directory basename
        # ('patches' or 'patches-<name>').
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        # True if the queue registry file does not exist yet.
        try:
            fh = repo.vfs(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        # Sorted list of registered queue names, always including the
        # currently active one.
        current = _getcurrent()

        try:
            fh = repo.vfs(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        # Switch to queue `name`, refusing while patches are applied.
        if q.applied:
            raise error.Abort(_('new queue created, but cannot make active '
                                'as patches are applied'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        # Write the active-queue marker; an empty file means the default
        # 'patches' queue.
        fh = repo.vfs(_activequeue, 'w')
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        # Append `name` to the queue registry.
        fh = repo.vfs(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        # Absolute path of the patch directory for queue `name`.
        if name == 'patches':
            return repo.vfs.join('patches')
        else:
            return repo.vfs.join('patches-' + name)

    def _validname(name):
        # Queue names may not contain path or guard separator characters.
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        # Remove `name` from the registry (uses `existing` from the
        # enclosing scope); never deletes the active queue.
        if name not in existing:
            raise error.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise error.Abort(_('cannot delete currently active queue'))

        # Rewrite the registry atomically via a temp file + rename.
        fh = repo.vfs('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        repo.vfs.rename('patches.queues.new', _allqueues)

    opts = pycompat.byteskwargs(opts)
    if not name or opts.get('list') or opts.get('active'):
        # Query modes need no name argument and take no lock.
        current = _getcurrent()
        if opts.get('active'):
            ui.write('%s\n' % (current,))
            return
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise error.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    with repo.wlock():
        existing = _getqueues()

        if opts.get('create'):
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)
            if _noqueues():
                # First explicit queue: register the default one too.
                _addqueue(_defaultqueue)
            _addqueue(name)
            _setactive(name)
        elif opts.get('rename'):
            current = _getcurrent()
            if name == current:
                raise error.Abort(_('can\'t rename "%s" to its current name')
                                  % name)
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)

            olddir = _queuedir(current)
            newdir = _queuedir(name)

            if os.path.exists(newdir):
                raise error.Abort(_('non-queue directory "%s" already exists') %
                                  newdir)

            # Rewrite the registry with the new name, moving the patch
            # directory when we hit the entry being renamed.
            fh = repo.vfs('patches.queues.new', 'w')
            for queue in existing:
                if queue == current:
                    fh.write('%s\n' % (name,))
                    if os.path.exists(olddir):
                        util.rename(olddir, newdir)
                else:
                    fh.write('%s\n' % (queue,))
            fh.close()
            repo.vfs.rename('patches.queues.new', _allqueues)
            _setactivenocheck(name)
        elif opts.get('delete'):
            _delete(name)
        elif opts.get('purge'):
            # --purge also removes the on-disk patch directory, and works
            # even for a queue that is no longer registered.
            if name in existing:
                _delete(name)
            qdir = _queuedir(name)
            if os.path.exists(qdir):
                shutil.rmtree(qdir)
        else:
            # Bare name: switch to an existing queue.
            if name not in existing:
                raise error.Abort(_('use --create to create a new queue'))
            _setactive(name)
3449
3450
def mqphasedefaults(repo, roots):
    """callback used to set mq changeset as secret when no phase data exists"""
    applied = repo.mq.applied
    if applied:
        # Applied mq patches default to secret (if mq.secret is set) or
        # draft; mark the base of the applied stack as a phase root.
        if repo.ui.configbool('mq', 'secret'):
            mqphase = phases.secret
        else:
            mqphase = phases.draft
        qbase = repo[applied[0].node]
        roots[mqphase].add(qbase.node())
    return roots
3460
3461
3461 def reposetup(ui, repo):
3462 def reposetup(ui, repo):
3462 class mqrepo(repo.__class__):
3463 class mqrepo(repo.__class__):
3463 @localrepo.unfilteredpropertycache
3464 @localrepo.unfilteredpropertycache
3464 def mq(self):
3465 def mq(self):
3465 return queue(self.ui, self.baseui, self.path)
3466 return queue(self.ui, self.baseui, self.path)
3466
3467
3467 def invalidateall(self):
3468 def invalidateall(self):
3468 super(mqrepo, self).invalidateall()
3469 super(mqrepo, self).invalidateall()
3469 if localrepo.hasunfilteredcache(self, 'mq'):
3470 if localrepo.hasunfilteredcache(self, 'mq'):
3470 # recreate mq in case queue path was changed
3471 # recreate mq in case queue path was changed
3471 delattr(self.unfiltered(), 'mq')
3472 delattr(self.unfiltered(), 'mq')
3472
3473
3473 def abortifwdirpatched(self, errmsg, force=False):
3474 def abortifwdirpatched(self, errmsg, force=False):
3474 if self.mq.applied and self.mq.checkapplied and not force:
3475 if self.mq.applied and self.mq.checkapplied and not force:
3475 parents = self.dirstate.parents()
3476 parents = self.dirstate.parents()
3476 patches = [s.node for s in self.mq.applied]
3477 patches = [s.node for s in self.mq.applied]
3477 if parents[0] in patches or parents[1] in patches:
3478 if parents[0] in patches or parents[1] in patches:
3478 raise error.Abort(errmsg)
3479 raise error.Abort(errmsg)
3479
3480
3480 def commit(self, text="", user=None, date=None, match=None,
3481 def commit(self, text="", user=None, date=None, match=None,
3481 force=False, editor=False, extra=None):
3482 force=False, editor=False, extra=None):
3482 if extra is None:
3483 if extra is None:
3483 extra = {}
3484 extra = {}
3484 self.abortifwdirpatched(
3485 self.abortifwdirpatched(
3485 _('cannot commit over an applied mq patch'),
3486 _('cannot commit over an applied mq patch'),
3486 force)
3487 force)
3487
3488
3488 return super(mqrepo, self).commit(text, user, date, match, force,
3489 return super(mqrepo, self).commit(text, user, date, match, force,
3489 editor, extra)
3490 editor, extra)
3490
3491
3491 def checkpush(self, pushop):
3492 def checkpush(self, pushop):
3492 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3493 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3493 outapplied = [e.node for e in self.mq.applied]
3494 outapplied = [e.node for e in self.mq.applied]
3494 if pushop.revs:
3495 if pushop.revs:
3495 # Assume applied patches have no non-patch descendants and
3496 # Assume applied patches have no non-patch descendants and
3496 # are not on remote already. Filtering any changeset not
3497 # are not on remote already. Filtering any changeset not
3497 # pushed.
3498 # pushed.
3498 heads = set(pushop.revs)
3499 heads = set(pushop.revs)
3499 for node in reversed(outapplied):
3500 for node in reversed(outapplied):
3500 if node in heads:
3501 if node in heads:
3501 break
3502 break
3502 else:
3503 else:
3503 outapplied.pop()
3504 outapplied.pop()
3504 # looking for pushed and shared changeset
3505 # looking for pushed and shared changeset
3505 for node in outapplied:
3506 for node in outapplied:
3506 if self[node].phase() < phases.secret:
3507 if self[node].phase() < phases.secret:
3507 raise error.Abort(_('source has mq patches applied'))
3508 raise error.Abort(_('source has mq patches applied'))
3508 # no non-secret patches pushed
3509 # no non-secret patches pushed
3509 super(mqrepo, self).checkpush(pushop)
3510 super(mqrepo, self).checkpush(pushop)
3510
3511
3511 def _findtags(self):
3512 def _findtags(self):
3512 '''augment tags from base class with patch tags'''
3513 '''augment tags from base class with patch tags'''
3513 result = super(mqrepo, self)._findtags()
3514 result = super(mqrepo, self)._findtags()
3514
3515
3515 q = self.mq
3516 q = self.mq
3516 if not q.applied:
3517 if not q.applied:
3517 return result
3518 return result
3518
3519
3519 mqtags = [(patch.node, patch.name) for patch in q.applied]
3520 mqtags = [(patch.node, patch.name) for patch in q.applied]
3520
3521
3521 try:
3522 try:
3522 # for now ignore filtering business
3523 # for now ignore filtering business
3523 self.unfiltered().changelog.rev(mqtags[-1][0])
3524 self.unfiltered().changelog.rev(mqtags[-1][0])
3524 except error.LookupError:
3525 except error.LookupError:
3525 self.ui.warn(_('mq status file refers to unknown node %s\n')
3526 self.ui.warn(_('mq status file refers to unknown node %s\n')
3526 % short(mqtags[-1][0]))
3527 % short(mqtags[-1][0]))
3527 return result
3528 return result
3528
3529
3529 # do not add fake tags for filtered revisions
3530 # do not add fake tags for filtered revisions
3530 included = self.changelog.hasnode
3531 included = self.changelog.hasnode
3531 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3532 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3532 if not mqtags:
3533 if not mqtags:
3533 return result
3534 return result
3534
3535
3535 mqtags.append((mqtags[-1][0], 'qtip'))
3536 mqtags.append((mqtags[-1][0], 'qtip'))
3536 mqtags.append((mqtags[0][0], 'qbase'))
3537 mqtags.append((mqtags[0][0], 'qbase'))
3537 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3538 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3538 tags = result[0]
3539 tags = result[0]
3539 for patch in mqtags:
3540 for patch in mqtags:
3540 if patch[1] in tags:
3541 if patch[1] in tags:
3541 self.ui.warn(_('tag %s overrides mq patch of the same '
3542 self.ui.warn(_('tag %s overrides mq patch of the same '
3542 'name\n') % patch[1])
3543 'name\n') % patch[1])
3543 else:
3544 else:
3544 tags[patch[1]] = patch[0]
3545 tags[patch[1]] = patch[0]
3545
3546
3546 return result
3547 return result
3547
3548
3548 if repo.local():
3549 if repo.local():
3549 repo.__class__ = mqrepo
3550 repo.__class__ = mqrepo
3550
3551
3551 repo._phasedefaults.append(mqphasedefaults)
3552 repo._phasedefaults.append(mqphasedefaults)
3552
3553
3553 def mqimport(orig, ui, repo, *args, **kwargs):
3554 def mqimport(orig, ui, repo, *args, **kwargs):
3554 if (util.safehasattr(repo, 'abortifwdirpatched')
3555 if (util.safehasattr(repo, 'abortifwdirpatched')
3555 and not kwargs.get(r'no_commit', False)):
3556 and not kwargs.get(r'no_commit', False)):
3556 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3557 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3557 kwargs.get(r'force'))
3558 kwargs.get(r'force'))
3558 return orig(ui, repo, *args, **kwargs)
3559 return orig(ui, repo, *args, **kwargs)
3559
3560
3560 def mqinit(orig, ui, *args, **kwargs):
3561 def mqinit(orig, ui, *args, **kwargs):
3561 mq = kwargs.pop(r'mq', None)
3562 mq = kwargs.pop(r'mq', None)
3562
3563
3563 if not mq:
3564 if not mq:
3564 return orig(ui, *args, **kwargs)
3565 return orig(ui, *args, **kwargs)
3565
3566
3566 if args:
3567 if args:
3567 repopath = args[0]
3568 repopath = args[0]
3568 if not hg.islocal(repopath):
3569 if not hg.islocal(repopath):
3569 raise error.Abort(_('only a local queue repository '
3570 raise error.Abort(_('only a local queue repository '
3570 'may be initialized'))
3571 'may be initialized'))
3571 else:
3572 else:
3572 repopath = cmdutil.findrepo(pycompat.getcwd())
3573 repopath = cmdutil.findrepo(pycompat.getcwd())
3573 if not repopath:
3574 if not repopath:
3574 raise error.Abort(_('there is no Mercurial repository here '
3575 raise error.Abort(_('there is no Mercurial repository here '
3575 '(.hg not found)'))
3576 '(.hg not found)'))
3576 repo = hg.repository(ui, repopath)
3577 repo = hg.repository(ui, repopath)
3577 return qinit(ui, repo, True)
3578 return qinit(ui, repo, True)
3578
3579
3579 def mqcommand(orig, ui, repo, *args, **kwargs):
3580 def mqcommand(orig, ui, repo, *args, **kwargs):
3580 """Add --mq option to operate on patch repository instead of main"""
3581 """Add --mq option to operate on patch repository instead of main"""
3581
3582
3582 # some commands do not like getting unknown options
3583 # some commands do not like getting unknown options
3583 mq = kwargs.pop(r'mq', None)
3584 mq = kwargs.pop(r'mq', None)
3584
3585
3585 if not mq:
3586 if not mq:
3586 return orig(ui, repo, *args, **kwargs)
3587 return orig(ui, repo, *args, **kwargs)
3587
3588
3588 q = repo.mq
3589 q = repo.mq
3589 r = q.qrepo()
3590 r = q.qrepo()
3590 if not r:
3591 if not r:
3591 raise error.Abort(_('no queue repository'))
3592 raise error.Abort(_('no queue repository'))
3592 return orig(r.ui, r, *args, **kwargs)
3593 return orig(r.ui, r, *args, **kwargs)
3593
3594
3594 def summaryhook(ui, repo):
3595 def summaryhook(ui, repo):
3595 q = repo.mq
3596 q = repo.mq
3596 m = []
3597 m = []
3597 a, u = len(q.applied), len(q.unapplied(repo))
3598 a, u = len(q.applied), len(q.unapplied(repo))
3598 if a:
3599 if a:
3599 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3600 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3600 if u:
3601 if u:
3601 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3602 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3602 if m:
3603 if m:
3603 # i18n: column positioning for "hg summary"
3604 # i18n: column positioning for "hg summary"
3604 ui.write(_("mq: %s\n") % ', '.join(m))
3605 ui.write(_("mq: %s\n") % ', '.join(m))
3605 else:
3606 else:
3606 # i18n: column positioning for "hg summary"
3607 # i18n: column positioning for "hg summary"
3607 ui.note(_("mq: (empty queue)\n"))
3608 ui.note(_("mq: (empty queue)\n"))
3608
3609
3609 revsetpredicate = registrar.revsetpredicate()
3610 revsetpredicate = registrar.revsetpredicate()
3610
3611
3611 @revsetpredicate('mq()')
3612 @revsetpredicate('mq()')
3612 def revsetmq(repo, subset, x):
3613 def revsetmq(repo, subset, x):
3613 """Changesets managed by MQ.
3614 """Changesets managed by MQ.
3614 """
3615 """
3615 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3616 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3616 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3617 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3617 return smartset.baseset([r for r in subset if r in applied])
3618 return smartset.baseset([r for r in subset if r in applied])
3618
3619
3619 # tell hggettext to extract docstrings from these functions:
3620 # tell hggettext to extract docstrings from these functions:
3620 i18nfunctions = [revsetmq]
3621 i18nfunctions = [revsetmq]
3621
3622
3622 def extsetup(ui):
3623 def extsetup(ui):
3623 # Ensure mq wrappers are called first, regardless of extension load order by
3624 # Ensure mq wrappers are called first, regardless of extension load order by
3624 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3625 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3625 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3626 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3626
3627
3627 extensions.wrapcommand(commands.table, 'import', mqimport)
3628 extensions.wrapcommand(commands.table, 'import', mqimport)
3628 cmdutil.summaryhooks.add('mq', summaryhook)
3629 cmdutil.summaryhooks.add('mq', summaryhook)
3629
3630
3630 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3631 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3631 entry[1].extend(mqopt)
3632 entry[1].extend(mqopt)
3632
3633
3633 def dotable(cmdtable):
3634 def dotable(cmdtable):
3634 for cmd, entry in cmdtable.iteritems():
3635 for cmd, entry in cmdtable.iteritems():
3635 cmd = cmdutil.parsealiases(cmd)[0]
3636 cmd = cmdutil.parsealiases(cmd)[0]
3636 func = entry[0]
3637 func = entry[0]
3637 if func.norepo:
3638 if func.norepo:
3638 continue
3639 continue
3639 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3640 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3640 entry[1].extend(mqopt)
3641 entry[1].extend(mqopt)
3641
3642
3642 dotable(commands.table)
3643 dotable(commands.table)
3643
3644
3644 for extname, extmodule in extensions.extensions():
3645 for extname, extmodule in extensions.extensions():
3645 if extmodule.__file__ != __file__:
3646 if extmodule.__file__ != __file__:
3646 dotable(getattr(extmodule, 'cmdtable', {}))
3647 dotable(getattr(extmodule, 'cmdtable', {}))
3647
3648
3648 colortable = {'qguard.negative': 'red',
3649 colortable = {'qguard.negative': 'red',
3649 'qguard.positive': 'yellow',
3650 'qguard.positive': 'yellow',
3650 'qguard.unguarded': 'green',
3651 'qguard.unguarded': 'green',
3651 'qseries.applied': 'blue bold underline',
3652 'qseries.applied': 'blue bold underline',
3652 'qseries.guarded': 'black bold',
3653 'qseries.guarded': 'black bold',
3653 'qseries.missing': 'red bold',
3654 'qseries.missing': 'red bold',
3654 'qseries.unapplied': 'black bold'}
3655 'qseries.unapplied': 'black bold'}
@@ -1,484 +1,485 b''
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''hooks for sending email push notifications
8 '''hooks for sending email push notifications
9
9
10 This extension implements hooks to send email notifications when
10 This extension implements hooks to send email notifications when
11 changesets are sent from or received by the local repository.
11 changesets are sent from or received by the local repository.
12
12
13 First, enable the extension as explained in :hg:`help extensions`, and
13 First, enable the extension as explained in :hg:`help extensions`, and
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 are run when changesets are received, while ``outgoing`` hooks are for
15 are run when changesets are received, while ``outgoing`` hooks are for
16 changesets sent to another repository::
16 changesets sent to another repository::
17
17
18 [hooks]
18 [hooks]
19 # one email for each incoming changeset
19 # one email for each incoming changeset
20 incoming.notify = python:hgext.notify.hook
20 incoming.notify = python:hgext.notify.hook
21 # one email for all incoming changesets
21 # one email for all incoming changesets
22 changegroup.notify = python:hgext.notify.hook
22 changegroup.notify = python:hgext.notify.hook
23
23
24 # one email for all outgoing changesets
24 # one email for all outgoing changesets
25 outgoing.notify = python:hgext.notify.hook
25 outgoing.notify = python:hgext.notify.hook
26
26
27 This registers the hooks. To enable notification, subscribers must
27 This registers the hooks. To enable notification, subscribers must
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
29 repositories to a given recipient. The ``[reposubs]`` section maps
29 repositories to a given recipient. The ``[reposubs]`` section maps
30 multiple recipients to a single repository::
30 multiple recipients to a single repository::
31
31
32 [usersubs]
32 [usersubs]
33 # key is subscriber email, value is a comma-separated list of repo patterns
33 # key is subscriber email, value is a comma-separated list of repo patterns
34 user@host = pattern
34 user@host = pattern
35
35
36 [reposubs]
36 [reposubs]
37 # key is repo pattern, value is a comma-separated list of subscriber emails
37 # key is repo pattern, value is a comma-separated list of subscriber emails
38 pattern = user@host
38 pattern = user@host
39
39
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
41 optionally combined with a revset expression. A revset expression, if
41 optionally combined with a revset expression. A revset expression, if
42 present, is separated from the glob by a hash. Example::
42 present, is separated from the glob by a hash. Example::
43
43
44 [reposubs]
44 [reposubs]
45 */widgets#branch(release) = qa-team@example.com
45 */widgets#branch(release) = qa-team@example.com
46
46
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
48 branch triggers a notification in any repository ending in ``widgets``.
48 branch triggers a notification in any repository ending in ``widgets``.
49
49
50 In order to place them under direct user management, ``[usersubs]`` and
50 In order to place them under direct user management, ``[usersubs]`` and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
52 incorporated by reference::
52 incorporated by reference::
53
53
54 [notify]
54 [notify]
55 config = /path/to/subscriptionsfile
55 config = /path/to/subscriptionsfile
56
56
57 Notifications will not be sent until the ``notify.test`` value is set
57 Notifications will not be sent until the ``notify.test`` value is set
58 to ``False``; see below.
58 to ``False``; see below.
59
59
60 Notifications content can be tweaked with the following configuration entries:
60 Notifications content can be tweaked with the following configuration entries:
61
61
62 notify.test
62 notify.test
63 If ``True``, print messages to stdout instead of sending them. Default: True.
63 If ``True``, print messages to stdout instead of sending them. Default: True.
64
64
65 notify.sources
65 notify.sources
66 Space-separated list of change sources. Notifications are activated only
66 Space-separated list of change sources. Notifications are activated only
67 when a changeset's source is in this list. Sources may be:
67 when a changeset's source is in this list. Sources may be:
68
68
69 :``serve``: changesets received via http or ssh
69 :``serve``: changesets received via http or ssh
70 :``pull``: changesets received via ``hg pull``
70 :``pull``: changesets received via ``hg pull``
71 :``unbundle``: changesets received via ``hg unbundle``
71 :``unbundle``: changesets received via ``hg unbundle``
72 :``push``: changesets sent or received via ``hg push``
72 :``push``: changesets sent or received via ``hg push``
73 :``bundle``: changesets sent via ``hg unbundle``
73 :``bundle``: changesets sent via ``hg unbundle``
74
74
75 Default: serve.
75 Default: serve.
76
76
77 notify.strip
77 notify.strip
78 Number of leading slashes to strip from url paths. By default, notifications
78 Number of leading slashes to strip from url paths. By default, notifications
79 reference repositories with their absolute path. ``notify.strip`` lets you
79 reference repositories with their absolute path. ``notify.strip`` lets you
80 turn them into relative paths. For example, ``notify.strip=3`` will change
80 turn them into relative paths. For example, ``notify.strip=3`` will change
81 ``/long/path/repository`` into ``repository``. Default: 0.
81 ``/long/path/repository`` into ``repository``. Default: 0.
82
82
83 notify.domain
83 notify.domain
84 Default email domain for sender or recipients with no explicit domain.
84 Default email domain for sender or recipients with no explicit domain.
85
85
86 notify.style
86 notify.style
87 Style file to use when formatting emails.
87 Style file to use when formatting emails.
88
88
89 notify.template
89 notify.template
90 Template to use when formatting emails.
90 Template to use when formatting emails.
91
91
92 notify.incoming
92 notify.incoming
93 Template to use when run as an incoming hook, overriding ``notify.template``.
93 Template to use when run as an incoming hook, overriding ``notify.template``.
94
94
95 notify.outgoing
95 notify.outgoing
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
97
97
98 notify.changegroup
98 notify.changegroup
99 Template to use when running as a changegroup hook, overriding
99 Template to use when running as a changegroup hook, overriding
100 ``notify.template``.
100 ``notify.template``.
101
101
102 notify.maxdiff
102 notify.maxdiff
103 Maximum number of diff lines to include in notification email. Set to 0
103 Maximum number of diff lines to include in notification email. Set to 0
104 to disable the diff, or -1 to include all of it. Default: 300.
104 to disable the diff, or -1 to include all of it. Default: 300.
105
105
106 notify.maxsubject
106 notify.maxsubject
107 Maximum number of characters in email's subject line. Default: 67.
107 Maximum number of characters in email's subject line. Default: 67.
108
108
109 notify.diffstat
109 notify.diffstat
110 Set to True to include a diffstat before diff content. Default: True.
110 Set to True to include a diffstat before diff content. Default: True.
111
111
112 notify.merge
112 notify.merge
113 If True, send notifications for merge changesets. Default: True.
113 If True, send notifications for merge changesets. Default: True.
114
114
115 notify.mbox
115 notify.mbox
116 If set, append mails to this mbox file instead of sending. Default: None.
116 If set, append mails to this mbox file instead of sending. Default: None.
117
117
118 notify.fromauthor
118 notify.fromauthor
119 If set, use the committer of the first changeset in a changegroup for
119 If set, use the committer of the first changeset in a changegroup for
120 the "From" field of the notification mail. If not set, take the user
120 the "From" field of the notification mail. If not set, take the user
121 from the pushing repo. Default: False.
121 from the pushing repo. Default: False.
122
122
123 If set, the following entries will also be used to customize the
123 If set, the following entries will also be used to customize the
124 notifications:
124 notifications:
125
125
126 email.from
126 email.from
127 Email ``From`` address to use if none can be found in the generated
127 Email ``From`` address to use if none can be found in the generated
128 email content.
128 email content.
129
129
130 web.baseurl
130 web.baseurl
131 Root repository URL to combine with repository paths when making
131 Root repository URL to combine with repository paths when making
132 references. See also ``notify.strip``.
132 references. See also ``notify.strip``.
133
133
134 '''
134 '''
135 from __future__ import absolute_import
135 from __future__ import absolute_import
136
136
137 import email
137 import email
138 import email.parser as emailparser
138 import email.parser as emailparser
139 import fnmatch
139 import fnmatch
140 import socket
140 import socket
141 import time
141 import time
142
142
143 from mercurial.i18n import _
143 from mercurial.i18n import _
144 from mercurial import (
144 from mercurial import (
145 error,
145 error,
146 logcmdutil,
146 logcmdutil,
147 mail,
147 mail,
148 patch,
148 patch,
149 registrar,
149 registrar,
150 util,
150 util,
151 )
151 )
152 from mercurial.utils import dateutil
152
153
153 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
154 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
154 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
155 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
155 # be specifying the version(s) of Mercurial they are tested with, or
156 # be specifying the version(s) of Mercurial they are tested with, or
156 # leave the attribute unspecified.
157 # leave the attribute unspecified.
157 testedwith = 'ships-with-hg-core'
158 testedwith = 'ships-with-hg-core'
158
159
159 configtable = {}
160 configtable = {}
160 configitem = registrar.configitem(configtable)
161 configitem = registrar.configitem(configtable)
161
162
162 configitem('notify', 'changegroup',
163 configitem('notify', 'changegroup',
163 default=None,
164 default=None,
164 )
165 )
165 configitem('notify', 'config',
166 configitem('notify', 'config',
166 default=None,
167 default=None,
167 )
168 )
168 configitem('notify', 'diffstat',
169 configitem('notify', 'diffstat',
169 default=True,
170 default=True,
170 )
171 )
171 configitem('notify', 'domain',
172 configitem('notify', 'domain',
172 default=None,
173 default=None,
173 )
174 )
174 configitem('notify', 'fromauthor',
175 configitem('notify', 'fromauthor',
175 default=None,
176 default=None,
176 )
177 )
177 configitem('notify', 'incoming',
178 configitem('notify', 'incoming',
178 default=None,
179 default=None,
179 )
180 )
180 configitem('notify', 'maxdiff',
181 configitem('notify', 'maxdiff',
181 default=300,
182 default=300,
182 )
183 )
183 configitem('notify', 'maxsubject',
184 configitem('notify', 'maxsubject',
184 default=67,
185 default=67,
185 )
186 )
186 configitem('notify', 'mbox',
187 configitem('notify', 'mbox',
187 default=None,
188 default=None,
188 )
189 )
189 configitem('notify', 'merge',
190 configitem('notify', 'merge',
190 default=True,
191 default=True,
191 )
192 )
192 configitem('notify', 'outgoing',
193 configitem('notify', 'outgoing',
193 default=None,
194 default=None,
194 )
195 )
195 configitem('notify', 'sources',
196 configitem('notify', 'sources',
196 default='serve',
197 default='serve',
197 )
198 )
198 configitem('notify', 'strip',
199 configitem('notify', 'strip',
199 default=0,
200 default=0,
200 )
201 )
201 configitem('notify', 'style',
202 configitem('notify', 'style',
202 default=None,
203 default=None,
203 )
204 )
204 configitem('notify', 'template',
205 configitem('notify', 'template',
205 default=None,
206 default=None,
206 )
207 )
207 configitem('notify', 'test',
208 configitem('notify', 'test',
208 default=True,
209 default=True,
209 )
210 )
210
211
211 # template for single changeset can include email headers.
212 # template for single changeset can include email headers.
212 single_template = '''
213 single_template = '''
213 Subject: changeset in {webroot}: {desc|firstline|strip}
214 Subject: changeset in {webroot}: {desc|firstline|strip}
214 From: {author}
215 From: {author}
215
216
216 changeset {node|short} in {root}
217 changeset {node|short} in {root}
217 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
218 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
218 description:
219 description:
219 \t{desc|tabindent|strip}
220 \t{desc|tabindent|strip}
220 '''.lstrip()
221 '''.lstrip()
221
222
222 # template for multiple changesets should not contain email headers,
223 # template for multiple changesets should not contain email headers,
223 # because only first set of headers will be used and result will look
224 # because only first set of headers will be used and result will look
224 # strange.
225 # strange.
225 multiple_template = '''
226 multiple_template = '''
226 changeset {node|short} in {root}
227 changeset {node|short} in {root}
227 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
228 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
228 summary: {desc|firstline}
229 summary: {desc|firstline}
229 '''
230 '''
230
231
231 deftemplates = {
232 deftemplates = {
232 'changegroup': multiple_template,
233 'changegroup': multiple_template,
233 }
234 }
234
235
235 class notifier(object):
236 class notifier(object):
236 '''email notification class.'''
237 '''email notification class.'''
237
238
238 def __init__(self, ui, repo, hooktype):
239 def __init__(self, ui, repo, hooktype):
239 self.ui = ui
240 self.ui = ui
240 cfg = self.ui.config('notify', 'config')
241 cfg = self.ui.config('notify', 'config')
241 if cfg:
242 if cfg:
242 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
243 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
243 self.repo = repo
244 self.repo = repo
244 self.stripcount = int(self.ui.config('notify', 'strip'))
245 self.stripcount = int(self.ui.config('notify', 'strip'))
245 self.root = self.strip(self.repo.root)
246 self.root = self.strip(self.repo.root)
246 self.domain = self.ui.config('notify', 'domain')
247 self.domain = self.ui.config('notify', 'domain')
247 self.mbox = self.ui.config('notify', 'mbox')
248 self.mbox = self.ui.config('notify', 'mbox')
248 self.test = self.ui.configbool('notify', 'test')
249 self.test = self.ui.configbool('notify', 'test')
249 self.charsets = mail._charsets(self.ui)
250 self.charsets = mail._charsets(self.ui)
250 self.subs = self.subscribers()
251 self.subs = self.subscribers()
251 self.merge = self.ui.configbool('notify', 'merge')
252 self.merge = self.ui.configbool('notify', 'merge')
252
253
253 mapfile = None
254 mapfile = None
254 template = (self.ui.config('notify', hooktype) or
255 template = (self.ui.config('notify', hooktype) or
255 self.ui.config('notify', 'template'))
256 self.ui.config('notify', 'template'))
256 if not template:
257 if not template:
257 mapfile = self.ui.config('notify', 'style')
258 mapfile = self.ui.config('notify', 'style')
258 if not mapfile and not template:
259 if not mapfile and not template:
259 template = deftemplates.get(hooktype) or single_template
260 template = deftemplates.get(hooktype) or single_template
260 spec = logcmdutil.templatespec(template, mapfile)
261 spec = logcmdutil.templatespec(template, mapfile)
261 self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
262 self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
262
263
263 def strip(self, path):
264 def strip(self, path):
264 '''strip leading slashes from local path, turn into web-safe path.'''
265 '''strip leading slashes from local path, turn into web-safe path.'''
265
266
266 path = util.pconvert(path)
267 path = util.pconvert(path)
267 count = self.stripcount
268 count = self.stripcount
268 while count > 0:
269 while count > 0:
269 c = path.find('/')
270 c = path.find('/')
270 if c == -1:
271 if c == -1:
271 break
272 break
272 path = path[c + 1:]
273 path = path[c + 1:]
273 count -= 1
274 count -= 1
274 return path
275 return path
275
276
276 def fixmail(self, addr):
277 def fixmail(self, addr):
277 '''try to clean up email addresses.'''
278 '''try to clean up email addresses.'''
278
279
279 addr = util.email(addr.strip())
280 addr = util.email(addr.strip())
280 if self.domain:
281 if self.domain:
281 a = addr.find('@localhost')
282 a = addr.find('@localhost')
282 if a != -1:
283 if a != -1:
283 addr = addr[:a]
284 addr = addr[:a]
284 if '@' not in addr:
285 if '@' not in addr:
285 return addr + '@' + self.domain
286 return addr + '@' + self.domain
286 return addr
287 return addr
287
288
288 def subscribers(self):
289 def subscribers(self):
289 '''return list of email addresses of subscribers to this repo.'''
290 '''return list of email addresses of subscribers to this repo.'''
290 subs = set()
291 subs = set()
291 for user, pats in self.ui.configitems('usersubs'):
292 for user, pats in self.ui.configitems('usersubs'):
292 for pat in pats.split(','):
293 for pat in pats.split(','):
293 if '#' in pat:
294 if '#' in pat:
294 pat, revs = pat.split('#', 1)
295 pat, revs = pat.split('#', 1)
295 else:
296 else:
296 revs = None
297 revs = None
297 if fnmatch.fnmatch(self.repo.root, pat.strip()):
298 if fnmatch.fnmatch(self.repo.root, pat.strip()):
298 subs.add((self.fixmail(user), revs))
299 subs.add((self.fixmail(user), revs))
299 for pat, users in self.ui.configitems('reposubs'):
300 for pat, users in self.ui.configitems('reposubs'):
300 if '#' in pat:
301 if '#' in pat:
301 pat, revs = pat.split('#', 1)
302 pat, revs = pat.split('#', 1)
302 else:
303 else:
303 revs = None
304 revs = None
304 if fnmatch.fnmatch(self.repo.root, pat):
305 if fnmatch.fnmatch(self.repo.root, pat):
305 for user in users.split(','):
306 for user in users.split(','):
306 subs.add((self.fixmail(user), revs))
307 subs.add((self.fixmail(user), revs))
307 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
308 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
308 for s, r in sorted(subs)]
309 for s, r in sorted(subs)]
309
310
310 def node(self, ctx, **props):
311 def node(self, ctx, **props):
311 '''format one changeset, unless it is a suppressed merge.'''
312 '''format one changeset, unless it is a suppressed merge.'''
312 if not self.merge and len(ctx.parents()) > 1:
313 if not self.merge and len(ctx.parents()) > 1:
313 return False
314 return False
314 self.t.show(ctx, changes=ctx.changeset(),
315 self.t.show(ctx, changes=ctx.changeset(),
315 baseurl=self.ui.config('web', 'baseurl'),
316 baseurl=self.ui.config('web', 'baseurl'),
316 root=self.repo.root, webroot=self.root, **props)
317 root=self.repo.root, webroot=self.root, **props)
317 return True
318 return True
318
319
319 def skipsource(self, source):
320 def skipsource(self, source):
320 '''true if incoming changes from this source should be skipped.'''
321 '''true if incoming changes from this source should be skipped.'''
321 ok_sources = self.ui.config('notify', 'sources').split()
322 ok_sources = self.ui.config('notify', 'sources').split()
322 return source not in ok_sources
323 return source not in ok_sources
323
324
324 def send(self, ctx, count, data):
325 def send(self, ctx, count, data):
325 '''send message.'''
326 '''send message.'''
326
327
327 # Select subscribers by revset
328 # Select subscribers by revset
328 subs = set()
329 subs = set()
329 for sub, spec in self.subs:
330 for sub, spec in self.subs:
330 if spec is None:
331 if spec is None:
331 subs.add(sub)
332 subs.add(sub)
332 continue
333 continue
333 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
334 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
334 if len(revs):
335 if len(revs):
335 subs.add(sub)
336 subs.add(sub)
336 continue
337 continue
337 if len(subs) == 0:
338 if len(subs) == 0:
338 self.ui.debug('notify: no subscribers to selected repo '
339 self.ui.debug('notify: no subscribers to selected repo '
339 'and revset\n')
340 'and revset\n')
340 return
341 return
341
342
342 p = emailparser.Parser()
343 p = emailparser.Parser()
343 try:
344 try:
344 msg = p.parsestr(data)
345 msg = p.parsestr(data)
345 except email.Errors.MessageParseError as inst:
346 except email.Errors.MessageParseError as inst:
346 raise error.Abort(inst)
347 raise error.Abort(inst)
347
348
348 # store sender and subject
349 # store sender and subject
349 sender, subject = msg['From'], msg['Subject']
350 sender, subject = msg['From'], msg['Subject']
350 del msg['From'], msg['Subject']
351 del msg['From'], msg['Subject']
351
352
352 if not msg.is_multipart():
353 if not msg.is_multipart():
353 # create fresh mime message from scratch
354 # create fresh mime message from scratch
354 # (multipart templates must take care of this themselves)
355 # (multipart templates must take care of this themselves)
355 headers = msg.items()
356 headers = msg.items()
356 payload = msg.get_payload()
357 payload = msg.get_payload()
357 # for notification prefer readability over data precision
358 # for notification prefer readability over data precision
358 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
359 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
359 # reinstate custom headers
360 # reinstate custom headers
360 for k, v in headers:
361 for k, v in headers:
361 msg[k] = v
362 msg[k] = v
362
363
363 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
364 msg['Date'] = dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
364
365
365 # try to make subject line exist and be useful
366 # try to make subject line exist and be useful
366 if not subject:
367 if not subject:
367 if count > 1:
368 if count > 1:
368 subject = _('%s: %d new changesets') % (self.root, count)
369 subject = _('%s: %d new changesets') % (self.root, count)
369 else:
370 else:
370 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
371 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
371 subject = '%s: %s' % (self.root, s)
372 subject = '%s: %s' % (self.root, s)
372 maxsubject = int(self.ui.config('notify', 'maxsubject'))
373 maxsubject = int(self.ui.config('notify', 'maxsubject'))
373 if maxsubject:
374 if maxsubject:
374 subject = util.ellipsis(subject, maxsubject)
375 subject = util.ellipsis(subject, maxsubject)
375 msg['Subject'] = mail.headencode(self.ui, subject,
376 msg['Subject'] = mail.headencode(self.ui, subject,
376 self.charsets, self.test)
377 self.charsets, self.test)
377
378
378 # try to make message have proper sender
379 # try to make message have proper sender
379 if not sender:
380 if not sender:
380 sender = self.ui.config('email', 'from') or self.ui.username()
381 sender = self.ui.config('email', 'from') or self.ui.username()
381 if '@' not in sender or '@localhost' in sender:
382 if '@' not in sender or '@localhost' in sender:
382 sender = self.fixmail(sender)
383 sender = self.fixmail(sender)
383 msg['From'] = mail.addressencode(self.ui, sender,
384 msg['From'] = mail.addressencode(self.ui, sender,
384 self.charsets, self.test)
385 self.charsets, self.test)
385
386
386 msg['X-Hg-Notification'] = 'changeset %s' % ctx
387 msg['X-Hg-Notification'] = 'changeset %s' % ctx
387 if not msg['Message-Id']:
388 if not msg['Message-Id']:
388 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
389 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
389 (ctx, int(time.time()),
390 (ctx, int(time.time()),
390 hash(self.repo.root), socket.getfqdn()))
391 hash(self.repo.root), socket.getfqdn()))
391 msg['To'] = ', '.join(sorted(subs))
392 msg['To'] = ', '.join(sorted(subs))
392
393
393 msgtext = msg.as_string()
394 msgtext = msg.as_string()
394 if self.test:
395 if self.test:
395 self.ui.write(msgtext)
396 self.ui.write(msgtext)
396 if not msgtext.endswith('\n'):
397 if not msgtext.endswith('\n'):
397 self.ui.write('\n')
398 self.ui.write('\n')
398 else:
399 else:
399 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
400 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
400 (len(subs), count))
401 (len(subs), count))
401 mail.sendmail(self.ui, util.email(msg['From']),
402 mail.sendmail(self.ui, util.email(msg['From']),
402 subs, msgtext, mbox=self.mbox)
403 subs, msgtext, mbox=self.mbox)
403
404
404 def diff(self, ctx, ref=None):
405 def diff(self, ctx, ref=None):
405
406
406 maxdiff = int(self.ui.config('notify', 'maxdiff'))
407 maxdiff = int(self.ui.config('notify', 'maxdiff'))
407 prev = ctx.p1().node()
408 prev = ctx.p1().node()
408 if ref:
409 if ref:
409 ref = ref.node()
410 ref = ref.node()
410 else:
411 else:
411 ref = ctx.node()
412 ref = ctx.node()
412 chunks = patch.diff(self.repo, prev, ref,
413 chunks = patch.diff(self.repo, prev, ref,
413 opts=patch.diffallopts(self.ui))
414 opts=patch.diffallopts(self.ui))
414 difflines = ''.join(chunks).splitlines()
415 difflines = ''.join(chunks).splitlines()
415
416
416 if self.ui.configbool('notify', 'diffstat'):
417 if self.ui.configbool('notify', 'diffstat'):
417 s = patch.diffstat(difflines)
418 s = patch.diffstat(difflines)
418 # s may be nil, don't include the header if it is
419 # s may be nil, don't include the header if it is
419 if s:
420 if s:
420 self.ui.write(_('\ndiffstat:\n\n%s') % s)
421 self.ui.write(_('\ndiffstat:\n\n%s') % s)
421
422
422 if maxdiff == 0:
423 if maxdiff == 0:
423 return
424 return
424 elif maxdiff > 0 and len(difflines) > maxdiff:
425 elif maxdiff > 0 and len(difflines) > maxdiff:
425 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
426 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
426 self.ui.write(msg % (len(difflines), maxdiff))
427 self.ui.write(msg % (len(difflines), maxdiff))
427 difflines = difflines[:maxdiff]
428 difflines = difflines[:maxdiff]
428 elif difflines:
429 elif difflines:
429 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
430 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
430
431
431 self.ui.write("\n".join(difflines))
432 self.ui.write("\n".join(difflines))
432
433
433 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
434 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
434 '''send email notifications to interested subscribers.
435 '''send email notifications to interested subscribers.
435
436
436 if used as changegroup hook, send one email for all changesets in
437 if used as changegroup hook, send one email for all changesets in
437 changegroup. else send one email per changeset.'''
438 changegroup. else send one email per changeset.'''
438
439
439 n = notifier(ui, repo, hooktype)
440 n = notifier(ui, repo, hooktype)
440 ctx = repo[node]
441 ctx = repo[node]
441
442
442 if not n.subs:
443 if not n.subs:
443 ui.debug('notify: no subscribers to repository %s\n' % n.root)
444 ui.debug('notify: no subscribers to repository %s\n' % n.root)
444 return
445 return
445 if n.skipsource(source):
446 if n.skipsource(source):
446 ui.debug('notify: changes have source "%s" - skipping\n' % source)
447 ui.debug('notify: changes have source "%s" - skipping\n' % source)
447 return
448 return
448
449
449 ui.pushbuffer()
450 ui.pushbuffer()
450 data = ''
451 data = ''
451 count = 0
452 count = 0
452 author = ''
453 author = ''
453 if hooktype == 'changegroup' or hooktype == 'outgoing':
454 if hooktype == 'changegroup' or hooktype == 'outgoing':
454 start, end = ctx.rev(), len(repo)
455 start, end = ctx.rev(), len(repo)
455 for rev in xrange(start, end):
456 for rev in xrange(start, end):
456 if n.node(repo[rev]):
457 if n.node(repo[rev]):
457 count += 1
458 count += 1
458 if not author:
459 if not author:
459 author = repo[rev].user()
460 author = repo[rev].user()
460 else:
461 else:
461 data += ui.popbuffer()
462 data += ui.popbuffer()
462 ui.note(_('notify: suppressing notification for merge %d:%s\n')
463 ui.note(_('notify: suppressing notification for merge %d:%s\n')
463 % (rev, repo[rev].hex()[:12]))
464 % (rev, repo[rev].hex()[:12]))
464 ui.pushbuffer()
465 ui.pushbuffer()
465 if count:
466 if count:
466 n.diff(ctx, repo['tip'])
467 n.diff(ctx, repo['tip'])
467 else:
468 else:
468 if not n.node(ctx):
469 if not n.node(ctx):
469 ui.popbuffer()
470 ui.popbuffer()
470 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
471 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
471 (ctx.rev(), ctx.hex()[:12]))
472 (ctx.rev(), ctx.hex()[:12]))
472 return
473 return
473 count += 1
474 count += 1
474 n.diff(ctx)
475 n.diff(ctx)
475 if not author:
476 if not author:
476 author = ctx.user()
477 author = ctx.user()
477
478
478 data += ui.popbuffer()
479 data += ui.popbuffer()
479 fromauthor = ui.config('notify', 'fromauthor')
480 fromauthor = ui.config('notify', 'fromauthor')
480 if author and fromauthor:
481 if author and fromauthor:
481 data = '\n'.join(['From: %s' % author, data])
482 data = '\n'.join(['From: %s' % author, data])
482
483
483 if count:
484 if count:
484 n.send(ctx, count, data)
485 n.send(ctx, count, data)
@@ -1,806 +1,807 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overwritten by command line flags like --intro and --desc::
56 overwritten by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can specify a template for flags to be added in subject prefixes. Flags
63 You can specify a template for flags to be added in subject prefixes. Flags
64 specified by --flag option are exported as ``{flags}`` keyword::
64 specified by --flag option are exported as ``{flags}`` keyword::
65
65
66 [patchbomb]
66 [patchbomb]
67 flagtemplate = "{separate(' ',
67 flagtemplate = "{separate(' ',
68 ifeq(branch, 'default', '', branch|upper),
68 ifeq(branch, 'default', '', branch|upper),
69 flags)}"
69 flags)}"
70
70
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 import email as emailmod
76 import email as emailmod
77 import email.generator as emailgen
77 import email.generator as emailgen
78 import email.utils as eutil
78 import email.utils as eutil
79 import errno
79 import errno
80 import os
80 import os
81 import socket
81 import socket
82 import tempfile
82 import tempfile
83
83
84 from mercurial.i18n import _
84 from mercurial.i18n import _
85 from mercurial import (
85 from mercurial import (
86 cmdutil,
86 cmdutil,
87 commands,
87 commands,
88 encoding,
88 encoding,
89 error,
89 error,
90 formatter,
90 formatter,
91 hg,
91 hg,
92 mail,
92 mail,
93 node as nodemod,
93 node as nodemod,
94 patch,
94 patch,
95 pycompat,
95 pycompat,
96 registrar,
96 registrar,
97 repair,
97 repair,
98 scmutil,
98 scmutil,
99 templater,
99 templater,
100 util,
100 util,
101 )
101 )
102 from mercurial.utils import dateutil
102 stringio = util.stringio
103 stringio = util.stringio
103
104
104 cmdtable = {}
105 cmdtable = {}
105 command = registrar.command(cmdtable)
106 command = registrar.command(cmdtable)
106
107
107 configtable = {}
108 configtable = {}
108 configitem = registrar.configitem(configtable)
109 configitem = registrar.configitem(configtable)
109
110
110 configitem('patchbomb', 'bundletype',
111 configitem('patchbomb', 'bundletype',
111 default=None,
112 default=None,
112 )
113 )
113 configitem('patchbomb', 'bcc',
114 configitem('patchbomb', 'bcc',
114 default=None,
115 default=None,
115 )
116 )
116 configitem('patchbomb', 'cc',
117 configitem('patchbomb', 'cc',
117 default=None,
118 default=None,
118 )
119 )
119 configitem('patchbomb', 'confirm',
120 configitem('patchbomb', 'confirm',
120 default=False,
121 default=False,
121 )
122 )
122 configitem('patchbomb', 'flagtemplate',
123 configitem('patchbomb', 'flagtemplate',
123 default=None,
124 default=None,
124 )
125 )
125 configitem('patchbomb', 'from',
126 configitem('patchbomb', 'from',
126 default=None,
127 default=None,
127 )
128 )
128 configitem('patchbomb', 'intro',
129 configitem('patchbomb', 'intro',
129 default='auto',
130 default='auto',
130 )
131 )
131 configitem('patchbomb', 'publicurl',
132 configitem('patchbomb', 'publicurl',
132 default=None,
133 default=None,
133 )
134 )
134 configitem('patchbomb', 'reply-to',
135 configitem('patchbomb', 'reply-to',
135 default=None,
136 default=None,
136 )
137 )
137 configitem('patchbomb', 'to',
138 configitem('patchbomb', 'to',
138 default=None,
139 default=None,
139 )
140 )
140
141
141 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
142 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
142 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
143 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
143 # be specifying the version(s) of Mercurial they are tested with, or
144 # be specifying the version(s) of Mercurial they are tested with, or
144 # leave the attribute unspecified.
145 # leave the attribute unspecified.
145 testedwith = 'ships-with-hg-core'
146 testedwith = 'ships-with-hg-core'
146
147
147 def _addpullheader(seq, ctx):
148 def _addpullheader(seq, ctx):
148 """Add a header pointing to a public URL where the changeset is available
149 """Add a header pointing to a public URL where the changeset is available
149 """
150 """
150 repo = ctx.repo()
151 repo = ctx.repo()
151 # experimental config: patchbomb.publicurl
152 # experimental config: patchbomb.publicurl
152 # waiting for some logic that check that the changeset are available on the
153 # waiting for some logic that check that the changeset are available on the
153 # destination before patchbombing anything.
154 # destination before patchbombing anything.
154 publicurl = repo.ui.config('patchbomb', 'publicurl')
155 publicurl = repo.ui.config('patchbomb', 'publicurl')
155 if publicurl:
156 if publicurl:
156 return ('Available At %s\n'
157 return ('Available At %s\n'
157 '# hg pull %s -r %s' % (publicurl, publicurl, ctx))
158 '# hg pull %s -r %s' % (publicurl, publicurl, ctx))
158 return None
159 return None
159
160
160 def uisetup(ui):
161 def uisetup(ui):
161 cmdutil.extraexport.append('pullurl')
162 cmdutil.extraexport.append('pullurl')
162 cmdutil.extraexportmap['pullurl'] = _addpullheader
163 cmdutil.extraexportmap['pullurl'] = _addpullheader
163
164
164 def reposetup(ui, repo):
165 def reposetup(ui, repo):
165 if not repo.local():
166 if not repo.local():
166 return
167 return
167 repo._wlockfreeprefix.add('last-email.txt')
168 repo._wlockfreeprefix.add('last-email.txt')
168
169
169 def prompt(ui, prompt, default=None, rest=':'):
170 def prompt(ui, prompt, default=None, rest=':'):
170 if default:
171 if default:
171 prompt += ' [%s]' % default
172 prompt += ' [%s]' % default
172 return ui.prompt(prompt + rest, default)
173 return ui.prompt(prompt + rest, default)
173
174
174 def introwanted(ui, opts, number):
175 def introwanted(ui, opts, number):
175 '''is an introductory message apparently wanted?'''
176 '''is an introductory message apparently wanted?'''
176 introconfig = ui.config('patchbomb', 'intro')
177 introconfig = ui.config('patchbomb', 'intro')
177 if opts.get('intro') or opts.get('desc'):
178 if opts.get('intro') or opts.get('desc'):
178 intro = True
179 intro = True
179 elif introconfig == 'always':
180 elif introconfig == 'always':
180 intro = True
181 intro = True
181 elif introconfig == 'never':
182 elif introconfig == 'never':
182 intro = False
183 intro = False
183 elif introconfig == 'auto':
184 elif introconfig == 'auto':
184 intro = 1 < number
185 intro = 1 < number
185 else:
186 else:
186 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
187 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
187 % introconfig)
188 % introconfig)
188 ui.write_err(_('(should be one of always, never, auto)\n'))
189 ui.write_err(_('(should be one of always, never, auto)\n'))
189 intro = 1 < number
190 intro = 1 < number
190 return intro
191 return intro
191
192
192 def _formatflags(ui, repo, rev, flags):
193 def _formatflags(ui, repo, rev, flags):
193 """build flag string optionally by template"""
194 """build flag string optionally by template"""
194 tmpl = ui.config('patchbomb', 'flagtemplate')
195 tmpl = ui.config('patchbomb', 'flagtemplate')
195 if not tmpl:
196 if not tmpl:
196 return ' '.join(flags)
197 return ' '.join(flags)
197 out = util.stringio()
198 out = util.stringio()
198 opts = {'template': templater.unquotestring(tmpl)}
199 opts = {'template': templater.unquotestring(tmpl)}
199 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
200 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
200 fm.startitem()
201 fm.startitem()
201 fm.context(ctx=repo[rev])
202 fm.context(ctx=repo[rev])
202 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
203 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
203 return out.getvalue()
204 return out.getvalue()
204
205
205 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
206 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
206 """build prefix to patch subject"""
207 """build prefix to patch subject"""
207 flag = _formatflags(ui, repo, rev, flags)
208 flag = _formatflags(ui, repo, rev, flags)
208 if flag:
209 if flag:
209 flag = ' ' + flag
210 flag = ' ' + flag
210
211
211 if not numbered:
212 if not numbered:
212 return '[PATCH%s]' % flag
213 return '[PATCH%s]' % flag
213 else:
214 else:
214 tlen = len(str(total))
215 tlen = len(str(total))
215 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
216 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
216
217
217 def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
218 def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
218 patchname=None):
219 patchname=None):
219
220
220 desc = []
221 desc = []
221 node = None
222 node = None
222 body = ''
223 body = ''
223
224
224 for line in patchlines:
225 for line in patchlines:
225 if line.startswith('#'):
226 if line.startswith('#'):
226 if line.startswith('# Node ID'):
227 if line.startswith('# Node ID'):
227 node = line.split()[-1]
228 node = line.split()[-1]
228 continue
229 continue
229 if line.startswith('diff -r') or line.startswith('diff --git'):
230 if line.startswith('diff -r') or line.startswith('diff --git'):
230 break
231 break
231 desc.append(line)
232 desc.append(line)
232
233
233 if not patchname and not node:
234 if not patchname and not node:
234 raise ValueError
235 raise ValueError
235
236
236 if opts.get('attach') and not opts.get('body'):
237 if opts.get('attach') and not opts.get('body'):
237 body = ('\n'.join(desc[1:]).strip() or
238 body = ('\n'.join(desc[1:]).strip() or
238 'Patch subject is complete summary.')
239 'Patch subject is complete summary.')
239 body += '\n\n\n'
240 body += '\n\n\n'
240
241
241 if opts.get('plain'):
242 if opts.get('plain'):
242 while patchlines and patchlines[0].startswith('# '):
243 while patchlines and patchlines[0].startswith('# '):
243 patchlines.pop(0)
244 patchlines.pop(0)
244 if patchlines:
245 if patchlines:
245 patchlines.pop(0)
246 patchlines.pop(0)
246 while patchlines and not patchlines[0].strip():
247 while patchlines and not patchlines[0].strip():
247 patchlines.pop(0)
248 patchlines.pop(0)
248
249
249 ds = patch.diffstat(patchlines)
250 ds = patch.diffstat(patchlines)
250 if opts.get('diffstat'):
251 if opts.get('diffstat'):
251 body += ds + '\n\n'
252 body += ds + '\n\n'
252
253
253 addattachment = opts.get('attach') or opts.get('inline')
254 addattachment = opts.get('attach') or opts.get('inline')
254 if not addattachment or opts.get('body'):
255 if not addattachment or opts.get('body'):
255 body += '\n'.join(patchlines)
256 body += '\n'.join(patchlines)
256
257
257 if addattachment:
258 if addattachment:
258 msg = emailmod.MIMEMultipart.MIMEMultipart()
259 msg = emailmod.MIMEMultipart.MIMEMultipart()
259 if body:
260 if body:
260 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
261 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
261 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
262 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
262 opts.get('test'))
263 opts.get('test'))
263 binnode = nodemod.bin(node)
264 binnode = nodemod.bin(node)
264 # if node is mq patch, it will have the patch file's name as a tag
265 # if node is mq patch, it will have the patch file's name as a tag
265 if not patchname:
266 if not patchname:
266 patchtags = [t for t in repo.nodetags(binnode)
267 patchtags = [t for t in repo.nodetags(binnode)
267 if t.endswith('.patch') or t.endswith('.diff')]
268 if t.endswith('.patch') or t.endswith('.diff')]
268 if patchtags:
269 if patchtags:
269 patchname = patchtags[0]
270 patchname = patchtags[0]
270 elif total > 1:
271 elif total > 1:
271 patchname = cmdutil.makefilename(repo[node], '%b-%n.patch',
272 patchname = cmdutil.makefilename(repo[node], '%b-%n.patch',
272 seqno=idx, total=total)
273 seqno=idx, total=total)
273 else:
274 else:
274 patchname = cmdutil.makefilename(repo[node], '%b.patch')
275 patchname = cmdutil.makefilename(repo[node], '%b.patch')
275 disposition = 'inline'
276 disposition = 'inline'
276 if opts.get('attach'):
277 if opts.get('attach'):
277 disposition = 'attachment'
278 disposition = 'attachment'
278 p['Content-Disposition'] = disposition + '; filename=' + patchname
279 p['Content-Disposition'] = disposition + '; filename=' + patchname
279 msg.attach(p)
280 msg.attach(p)
280 else:
281 else:
281 msg = mail.mimetextpatch(body, display=opts.get('test'))
282 msg = mail.mimetextpatch(body, display=opts.get('test'))
282
283
283 prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
284 prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
284 numbered)
285 numbered)
285 subj = desc[0].strip().rstrip('. ')
286 subj = desc[0].strip().rstrip('. ')
286 if not numbered:
287 if not numbered:
287 subj = ' '.join([prefix, opts.get('subject') or subj])
288 subj = ' '.join([prefix, opts.get('subject') or subj])
288 else:
289 else:
289 subj = ' '.join([prefix, subj])
290 subj = ' '.join([prefix, subj])
290 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
291 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
291 msg['X-Mercurial-Node'] = node
292 msg['X-Mercurial-Node'] = node
292 msg['X-Mercurial-Series-Index'] = '%i' % idx
293 msg['X-Mercurial-Series-Index'] = '%i' % idx
293 msg['X-Mercurial-Series-Total'] = '%i' % total
294 msg['X-Mercurial-Series-Total'] = '%i' % total
294 return msg, subj, ds
295 return msg, subj, ds
295
296
296 def _getpatches(repo, revs, **opts):
297 def _getpatches(repo, revs, **opts):
297 """return a list of patches for a list of revisions
298 """return a list of patches for a list of revisions
298
299
299 Each patch in the list is itself a list of lines.
300 Each patch in the list is itself a list of lines.
300 """
301 """
301 ui = repo.ui
302 ui = repo.ui
302 prev = repo['.'].rev()
303 prev = repo['.'].rev()
303 for r in revs:
304 for r in revs:
304 if r == prev and (repo[None].files() or repo[None].deleted()):
305 if r == prev and (repo[None].files() or repo[None].deleted()):
305 ui.warn(_('warning: working directory has '
306 ui.warn(_('warning: working directory has '
306 'uncommitted changes\n'))
307 'uncommitted changes\n'))
307 output = stringio()
308 output = stringio()
308 cmdutil.export(repo, [r], fp=output,
309 cmdutil.export(repo, [r], fp=output,
309 opts=patch.difffeatureopts(ui, opts, git=True))
310 opts=patch.difffeatureopts(ui, opts, git=True))
310 yield output.getvalue().split('\n')
311 yield output.getvalue().split('\n')
311 def _getbundle(repo, dest, **opts):
312 def _getbundle(repo, dest, **opts):
312 """return a bundle containing changesets missing in "dest"
313 """return a bundle containing changesets missing in "dest"
313
314
314 The `opts` keyword-arguments are the same as the one accepted by the
315 The `opts` keyword-arguments are the same as the one accepted by the
315 `bundle` command.
316 `bundle` command.
316
317
317 The bundle is a returned as a single in-memory binary blob.
318 The bundle is a returned as a single in-memory binary blob.
318 """
319 """
319 ui = repo.ui
320 ui = repo.ui
320 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
321 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
321 tmpfn = os.path.join(tmpdir, 'bundle')
322 tmpfn = os.path.join(tmpdir, 'bundle')
322 btype = ui.config('patchbomb', 'bundletype')
323 btype = ui.config('patchbomb', 'bundletype')
323 if btype:
324 if btype:
324 opts[r'type'] = btype
325 opts[r'type'] = btype
325 try:
326 try:
326 commands.bundle(ui, repo, tmpfn, dest, **opts)
327 commands.bundle(ui, repo, tmpfn, dest, **opts)
327 return util.readfile(tmpfn)
328 return util.readfile(tmpfn)
328 finally:
329 finally:
329 try:
330 try:
330 os.unlink(tmpfn)
331 os.unlink(tmpfn)
331 except OSError:
332 except OSError:
332 pass
333 pass
333 os.rmdir(tmpdir)
334 os.rmdir(tmpdir)
334
335
335 def _getdescription(repo, defaultbody, sender, **opts):
336 def _getdescription(repo, defaultbody, sender, **opts):
336 """obtain the body of the introduction message and return it
337 """obtain the body of the introduction message and return it
337
338
338 This is also used for the body of email with an attached bundle.
339 This is also used for the body of email with an attached bundle.
339
340
340 The body can be obtained either from the command line option or entered by
341 The body can be obtained either from the command line option or entered by
341 the user through the editor.
342 the user through the editor.
342 """
343 """
343 ui = repo.ui
344 ui = repo.ui
344 if opts.get(r'desc'):
345 if opts.get(r'desc'):
345 body = open(opts.get(r'desc')).read()
346 body = open(opts.get(r'desc')).read()
346 else:
347 else:
347 ui.write(_('\nWrite the introductory message for the '
348 ui.write(_('\nWrite the introductory message for the '
348 'patch series.\n\n'))
349 'patch series.\n\n'))
349 body = ui.edit(defaultbody, sender, repopath=repo.path,
350 body = ui.edit(defaultbody, sender, repopath=repo.path,
350 action='patchbombbody')
351 action='patchbombbody')
351 # Save series description in case sendmail fails
352 # Save series description in case sendmail fails
352 msgfile = repo.vfs('last-email.txt', 'wb')
353 msgfile = repo.vfs('last-email.txt', 'wb')
353 msgfile.write(body)
354 msgfile.write(body)
354 msgfile.close()
355 msgfile.close()
355 return body
356 return body
356
357
357 def _getbundlemsgs(repo, sender, bundle, **opts):
358 def _getbundlemsgs(repo, sender, bundle, **opts):
358 """Get the full email for sending a given bundle
359 """Get the full email for sending a given bundle
359
360
360 This function returns a list of "email" tuples (subject, content, None).
361 This function returns a list of "email" tuples (subject, content, None).
361 The list is always one message long in that case.
362 The list is always one message long in that case.
362 """
363 """
363 ui = repo.ui
364 ui = repo.ui
364 _charsets = mail._charsets(ui)
365 _charsets = mail._charsets(ui)
365 subj = (opts.get(r'subject')
366 subj = (opts.get(r'subject')
366 or prompt(ui, 'Subject:', 'A bundle for your repository'))
367 or prompt(ui, 'Subject:', 'A bundle for your repository'))
367
368
368 body = _getdescription(repo, '', sender, **opts)
369 body = _getdescription(repo, '', sender, **opts)
369 msg = emailmod.MIMEMultipart.MIMEMultipart()
370 msg = emailmod.MIMEMultipart.MIMEMultipart()
370 if body:
371 if body:
371 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
372 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
372 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
373 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
373 datapart.set_payload(bundle)
374 datapart.set_payload(bundle)
374 bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
375 bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
375 datapart.add_header('Content-Disposition', 'attachment',
376 datapart.add_header('Content-Disposition', 'attachment',
376 filename=bundlename)
377 filename=bundlename)
377 emailmod.Encoders.encode_base64(datapart)
378 emailmod.Encoders.encode_base64(datapart)
378 msg.attach(datapart)
379 msg.attach(datapart)
379 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
380 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
380 return [(msg, subj, None)]
381 return [(msg, subj, None)]
381
382
382 def _makeintro(repo, sender, revs, patches, **opts):
383 def _makeintro(repo, sender, revs, patches, **opts):
383 """make an introduction email, asking the user for content if needed
384 """make an introduction email, asking the user for content if needed
384
385
385 email is returned as (subject, body, cumulative-diffstat)"""
386 email is returned as (subject, body, cumulative-diffstat)"""
386 ui = repo.ui
387 ui = repo.ui
387 _charsets = mail._charsets(ui)
388 _charsets = mail._charsets(ui)
388
389
389 # use the last revision which is likely to be a bookmarked head
390 # use the last revision which is likely to be a bookmarked head
390 prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
391 prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
391 0, len(patches), numbered=True)
392 0, len(patches), numbered=True)
392 subj = (opts.get(r'subject') or
393 subj = (opts.get(r'subject') or
393 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
394 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
394 if not subj:
395 if not subj:
395 return None # skip intro if the user doesn't bother
396 return None # skip intro if the user doesn't bother
396
397
397 subj = prefix + ' ' + subj
398 subj = prefix + ' ' + subj
398
399
399 body = ''
400 body = ''
400 if opts.get(r'diffstat'):
401 if opts.get(r'diffstat'):
401 # generate a cumulative diffstat of the whole patch series
402 # generate a cumulative diffstat of the whole patch series
402 diffstat = patch.diffstat(sum(patches, []))
403 diffstat = patch.diffstat(sum(patches, []))
403 body = '\n' + diffstat
404 body = '\n' + diffstat
404 else:
405 else:
405 diffstat = None
406 diffstat = None
406
407
407 body = _getdescription(repo, body, sender, **opts)
408 body = _getdescription(repo, body, sender, **opts)
408 msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
409 msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
409 msg['Subject'] = mail.headencode(ui, subj, _charsets,
410 msg['Subject'] = mail.headencode(ui, subj, _charsets,
410 opts.get(r'test'))
411 opts.get(r'test'))
411 return (msg, subj, diffstat)
412 return (msg, subj, diffstat)
412
413
413 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
414 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
414 """return a list of emails from a list of patches
415 """return a list of emails from a list of patches
415
416
416 This involves introduction message creation if necessary.
417 This involves introduction message creation if necessary.
417
418
418 This function returns a list of "email" tuples (subject, content, None).
419 This function returns a list of "email" tuples (subject, content, None).
419 """
420 """
420 bytesopts = pycompat.byteskwargs(opts)
421 bytesopts = pycompat.byteskwargs(opts)
421 ui = repo.ui
422 ui = repo.ui
422 _charsets = mail._charsets(ui)
423 _charsets = mail._charsets(ui)
423 patches = list(_getpatches(repo, revs, **opts))
424 patches = list(_getpatches(repo, revs, **opts))
424 msgs = []
425 msgs = []
425
426
426 ui.write(_('this patch series consists of %d patches.\n\n')
427 ui.write(_('this patch series consists of %d patches.\n\n')
427 % len(patches))
428 % len(patches))
428
429
429 # build the intro message, or skip it if the user declines
430 # build the intro message, or skip it if the user declines
430 if introwanted(ui, bytesopts, len(patches)):
431 if introwanted(ui, bytesopts, len(patches)):
431 msg = _makeintro(repo, sender, revs, patches, **opts)
432 msg = _makeintro(repo, sender, revs, patches, **opts)
432 if msg:
433 if msg:
433 msgs.append(msg)
434 msgs.append(msg)
434
435
435 # are we going to send more than one message?
436 # are we going to send more than one message?
436 numbered = len(msgs) + len(patches) > 1
437 numbered = len(msgs) + len(patches) > 1
437
438
438 # now generate the actual patch messages
439 # now generate the actual patch messages
439 name = None
440 name = None
440 assert len(revs) == len(patches)
441 assert len(revs) == len(patches)
441 for i, (r, p) in enumerate(zip(revs, patches)):
442 for i, (r, p) in enumerate(zip(revs, patches)):
442 if patchnames:
443 if patchnames:
443 name = patchnames[i]
444 name = patchnames[i]
444 msg = makepatch(ui, repo, r, p, bytesopts, _charsets,
445 msg = makepatch(ui, repo, r, p, bytesopts, _charsets,
445 i + 1, len(patches), numbered, name)
446 i + 1, len(patches), numbered, name)
446 msgs.append(msg)
447 msgs.append(msg)
447
448
448 return msgs
449 return msgs
449
450
450 def _getoutgoing(repo, dest, revs):
451 def _getoutgoing(repo, dest, revs):
451 '''Return the revisions present locally but not in dest'''
452 '''Return the revisions present locally but not in dest'''
452 ui = repo.ui
453 ui = repo.ui
453 url = ui.expandpath(dest or 'default-push', dest or 'default')
454 url = ui.expandpath(dest or 'default-push', dest or 'default')
454 url = hg.parseurl(url)[0]
455 url = hg.parseurl(url)[0]
455 ui.status(_('comparing with %s\n') % util.hidepassword(url))
456 ui.status(_('comparing with %s\n') % util.hidepassword(url))
456
457
457 revs = [r for r in revs if r >= 0]
458 revs = [r for r in revs if r >= 0]
458 if not revs:
459 if not revs:
459 revs = [repo.changelog.tiprev()]
460 revs = [repo.changelog.tiprev()]
460 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
461 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
461 if not revs:
462 if not revs:
462 ui.status(_("no changes found\n"))
463 ui.status(_("no changes found\n"))
463 return revs
464 return revs
464
465
465 emailopts = [
466 emailopts = [
466 ('', 'body', None, _('send patches as inline message text (default)')),
467 ('', 'body', None, _('send patches as inline message text (default)')),
467 ('a', 'attach', None, _('send patches as attachments')),
468 ('a', 'attach', None, _('send patches as attachments')),
468 ('i', 'inline', None, _('send patches as inline attachments')),
469 ('i', 'inline', None, _('send patches as inline attachments')),
469 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
470 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
470 ('c', 'cc', [], _('email addresses of copy recipients')),
471 ('c', 'cc', [], _('email addresses of copy recipients')),
471 ('', 'confirm', None, _('ask for confirmation before sending')),
472 ('', 'confirm', None, _('ask for confirmation before sending')),
472 ('d', 'diffstat', None, _('add diffstat output to messages')),
473 ('d', 'diffstat', None, _('add diffstat output to messages')),
473 ('', 'date', '', _('use the given date as the sending date')),
474 ('', 'date', '', _('use the given date as the sending date')),
474 ('', 'desc', '', _('use the given file as the series description')),
475 ('', 'desc', '', _('use the given file as the series description')),
475 ('f', 'from', '', _('email address of sender')),
476 ('f', 'from', '', _('email address of sender')),
476 ('n', 'test', None, _('print messages that would be sent')),
477 ('n', 'test', None, _('print messages that would be sent')),
477 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
478 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
478 ('', 'reply-to', [], _('email addresses replies should be sent to')),
479 ('', 'reply-to', [], _('email addresses replies should be sent to')),
479 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
480 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
480 ('', 'in-reply-to', '', _('message identifier to reply to')),
481 ('', 'in-reply-to', '', _('message identifier to reply to')),
481 ('', 'flag', [], _('flags to add in subject prefixes')),
482 ('', 'flag', [], _('flags to add in subject prefixes')),
482 ('t', 'to', [], _('email addresses of recipients'))]
483 ('t', 'to', [], _('email addresses of recipients'))]
483
484
484 @command('email',
485 @command('email',
485 [('g', 'git', None, _('use git extended diff format')),
486 [('g', 'git', None, _('use git extended diff format')),
486 ('', 'plain', None, _('omit hg patch header')),
487 ('', 'plain', None, _('omit hg patch header')),
487 ('o', 'outgoing', None,
488 ('o', 'outgoing', None,
488 _('send changes not found in the target repository')),
489 _('send changes not found in the target repository')),
489 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
490 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
490 ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
491 ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
491 ('', 'bundlename', 'bundle',
492 ('', 'bundlename', 'bundle',
492 _('name of the bundle attachment file'), _('NAME')),
493 _('name of the bundle attachment file'), _('NAME')),
493 ('r', 'rev', [], _('a revision to send'), _('REV')),
494 ('r', 'rev', [], _('a revision to send'), _('REV')),
494 ('', 'force', None, _('run even when remote repository is unrelated '
495 ('', 'force', None, _('run even when remote repository is unrelated '
495 '(with -b/--bundle)')),
496 '(with -b/--bundle)')),
496 ('', 'base', [], _('a base changeset to specify instead of a destination '
497 ('', 'base', [], _('a base changeset to specify instead of a destination '
497 '(with -b/--bundle)'), _('REV')),
498 '(with -b/--bundle)'), _('REV')),
498 ('', 'intro', None, _('send an introduction email for a single patch')),
499 ('', 'intro', None, _('send an introduction email for a single patch')),
499 ] + emailopts + cmdutil.remoteopts,
500 ] + emailopts + cmdutil.remoteopts,
500 _('hg email [OPTION]... [DEST]...'))
501 _('hg email [OPTION]... [DEST]...'))
501 def email(ui, repo, *revs, **opts):
502 def email(ui, repo, *revs, **opts):
502 '''send changesets by email
503 '''send changesets by email
503
504
504 By default, diffs are sent in the format generated by
505 By default, diffs are sent in the format generated by
505 :hg:`export`, one per message. The series starts with a "[PATCH 0
506 :hg:`export`, one per message. The series starts with a "[PATCH 0
506 of N]" introduction, which describes the series as a whole.
507 of N]" introduction, which describes the series as a whole.
507
508
508 Each patch email has a Subject line of "[PATCH M of N] ...", using
509 Each patch email has a Subject line of "[PATCH M of N] ...", using
509 the first line of the changeset description as the subject text.
510 the first line of the changeset description as the subject text.
510 The message contains two or three parts. First, the changeset
511 The message contains two or three parts. First, the changeset
511 description.
512 description.
512
513
513 With the -d/--diffstat option, if the diffstat program is
514 With the -d/--diffstat option, if the diffstat program is
514 installed, the result of running diffstat on the patch is inserted.
515 installed, the result of running diffstat on the patch is inserted.
515
516
516 Finally, the patch itself, as generated by :hg:`export`.
517 Finally, the patch itself, as generated by :hg:`export`.
517
518
518 With the -d/--diffstat or --confirm options, you will be presented
519 With the -d/--diffstat or --confirm options, you will be presented
519 with a final summary of all messages and asked for confirmation before
520 with a final summary of all messages and asked for confirmation before
520 the messages are sent.
521 the messages are sent.
521
522
522 By default the patch is included as text in the email body for
523 By default the patch is included as text in the email body for
523 easy reviewing. Using the -a/--attach option will instead create
524 easy reviewing. Using the -a/--attach option will instead create
524 an attachment for the patch. With -i/--inline an inline attachment
525 an attachment for the patch. With -i/--inline an inline attachment
525 will be created. You can include a patch both as text in the email
526 will be created. You can include a patch both as text in the email
526 body and as a regular or an inline attachment by combining the
527 body and as a regular or an inline attachment by combining the
527 -a/--attach or -i/--inline with the --body option.
528 -a/--attach or -i/--inline with the --body option.
528
529
529 With -B/--bookmark changesets reachable by the given bookmark are
530 With -B/--bookmark changesets reachable by the given bookmark are
530 selected.
531 selected.
531
532
532 With -o/--outgoing, emails will be generated for patches not found
533 With -o/--outgoing, emails will be generated for patches not found
533 in the destination repository (or only those which are ancestors
534 in the destination repository (or only those which are ancestors
534 of the specified revisions if any are provided)
535 of the specified revisions if any are provided)
535
536
536 With -b/--bundle, changesets are selected as for --outgoing, but a
537 With -b/--bundle, changesets are selected as for --outgoing, but a
537 single email containing a binary Mercurial bundle as an attachment
538 single email containing a binary Mercurial bundle as an attachment
538 will be sent. Use the ``patchbomb.bundletype`` config option to
539 will be sent. Use the ``patchbomb.bundletype`` config option to
539 control the bundle type as with :hg:`bundle --type`.
540 control the bundle type as with :hg:`bundle --type`.
540
541
541 With -m/--mbox, instead of previewing each patchbomb message in a
542 With -m/--mbox, instead of previewing each patchbomb message in a
542 pager or sending the messages directly, it will create a UNIX
543 pager or sending the messages directly, it will create a UNIX
543 mailbox file with the patch emails. This mailbox file can be
544 mailbox file with the patch emails. This mailbox file can be
544 previewed with any mail user agent which supports UNIX mbox
545 previewed with any mail user agent which supports UNIX mbox
545 files.
546 files.
546
547
547 With -n/--test, all steps will run, but mail will not be sent.
548 With -n/--test, all steps will run, but mail will not be sent.
548 You will be prompted for an email recipient address, a subject and
549 You will be prompted for an email recipient address, a subject and
549 an introductory message describing the patches of your patchbomb.
550 an introductory message describing the patches of your patchbomb.
550 Then when all is done, patchbomb messages are displayed.
551 Then when all is done, patchbomb messages are displayed.
551
552
552 In case email sending fails, you will find a backup of your series
553 In case email sending fails, you will find a backup of your series
553 introductory message in ``.hg/last-email.txt``.
554 introductory message in ``.hg/last-email.txt``.
554
555
555 The default behavior of this command can be customized through
556 The default behavior of this command can be customized through
556 configuration. (See :hg:`help patchbomb` for details)
557 configuration. (See :hg:`help patchbomb` for details)
557
558
558 Examples::
559 Examples::
559
560
560 hg email -r 3000 # send patch 3000 only
561 hg email -r 3000 # send patch 3000 only
561 hg email -r 3000 -r 3001 # send patches 3000 and 3001
562 hg email -r 3000 -r 3001 # send patches 3000 and 3001
562 hg email -r 3000:3005 # send patches 3000 through 3005
563 hg email -r 3000:3005 # send patches 3000 through 3005
563 hg email 3000 # send patch 3000 (deprecated)
564 hg email 3000 # send patch 3000 (deprecated)
564
565
565 hg email -o # send all patches not in default
566 hg email -o # send all patches not in default
566 hg email -o DEST # send all patches not in DEST
567 hg email -o DEST # send all patches not in DEST
567 hg email -o -r 3000 # send all ancestors of 3000 not in default
568 hg email -o -r 3000 # send all ancestors of 3000 not in default
568 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
569 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
569
570
570 hg email -B feature # send all ancestors of feature bookmark
571 hg email -B feature # send all ancestors of feature bookmark
571
572
572 hg email -b # send bundle of all patches not in default
573 hg email -b # send bundle of all patches not in default
573 hg email -b DEST # send bundle of all patches not in DEST
574 hg email -b DEST # send bundle of all patches not in DEST
574 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
575 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
575 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
576 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
576
577
577 hg email -o -m mbox && # generate an mbox file...
578 hg email -o -m mbox && # generate an mbox file...
578 mutt -R -f mbox # ... and view it with mutt
579 mutt -R -f mbox # ... and view it with mutt
579 hg email -o -m mbox && # generate an mbox file ...
580 hg email -o -m mbox && # generate an mbox file ...
580 formail -s sendmail \\ # ... and use formail to send from the mbox
581 formail -s sendmail \\ # ... and use formail to send from the mbox
581 -bm -t < mbox # ... using sendmail
582 -bm -t < mbox # ... using sendmail
582
583
583 Before using this command, you will need to enable email in your
584 Before using this command, you will need to enable email in your
584 hgrc. See the [email] section in hgrc(5) for details.
585 hgrc. See the [email] section in hgrc(5) for details.
585 '''
586 '''
586 opts = pycompat.byteskwargs(opts)
587 opts = pycompat.byteskwargs(opts)
587
588
588 _charsets = mail._charsets(ui)
589 _charsets = mail._charsets(ui)
589
590
590 bundle = opts.get('bundle')
591 bundle = opts.get('bundle')
591 date = opts.get('date')
592 date = opts.get('date')
592 mbox = opts.get('mbox')
593 mbox = opts.get('mbox')
593 outgoing = opts.get('outgoing')
594 outgoing = opts.get('outgoing')
594 rev = opts.get('rev')
595 rev = opts.get('rev')
595 bookmark = opts.get('bookmark')
596 bookmark = opts.get('bookmark')
596
597
597 if not (opts.get('test') or mbox):
598 if not (opts.get('test') or mbox):
598 # really sending
599 # really sending
599 mail.validateconfig(ui)
600 mail.validateconfig(ui)
600
601
601 if not (revs or rev or outgoing or bundle or bookmark):
602 if not (revs or rev or outgoing or bundle or bookmark):
602 raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
603 raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
603
604
604 if outgoing and bundle:
605 if outgoing and bundle:
605 raise error.Abort(_("--outgoing mode always on with --bundle;"
606 raise error.Abort(_("--outgoing mode always on with --bundle;"
606 " do not re-specify --outgoing"))
607 " do not re-specify --outgoing"))
607 if rev and bookmark:
608 if rev and bookmark:
608 raise error.Abort(_("-r and -B are mutually exclusive"))
609 raise error.Abort(_("-r and -B are mutually exclusive"))
609
610
610 if outgoing or bundle:
611 if outgoing or bundle:
611 if len(revs) > 1:
612 if len(revs) > 1:
612 raise error.Abort(_("too many destinations"))
613 raise error.Abort(_("too many destinations"))
613 if revs:
614 if revs:
614 dest = revs[0]
615 dest = revs[0]
615 else:
616 else:
616 dest = None
617 dest = None
617 revs = []
618 revs = []
618
619
619 if rev:
620 if rev:
620 if revs:
621 if revs:
621 raise error.Abort(_('use only one form to specify the revision'))
622 raise error.Abort(_('use only one form to specify the revision'))
622 revs = rev
623 revs = rev
623 elif bookmark:
624 elif bookmark:
624 if bookmark not in repo._bookmarks:
625 if bookmark not in repo._bookmarks:
625 raise error.Abort(_("bookmark '%s' not found") % bookmark)
626 raise error.Abort(_("bookmark '%s' not found") % bookmark)
626 revs = repair.stripbmrevset(repo, bookmark)
627 revs = repair.stripbmrevset(repo, bookmark)
627
628
628 revs = scmutil.revrange(repo, revs)
629 revs = scmutil.revrange(repo, revs)
629 if outgoing:
630 if outgoing:
630 revs = _getoutgoing(repo, dest, revs)
631 revs = _getoutgoing(repo, dest, revs)
631 if bundle:
632 if bundle:
632 opts['revs'] = [str(r) for r in revs]
633 opts['revs'] = [str(r) for r in revs]
633
634
634 # check if revision exist on the public destination
635 # check if revision exist on the public destination
635 publicurl = repo.ui.config('patchbomb', 'publicurl')
636 publicurl = repo.ui.config('patchbomb', 'publicurl')
636 if publicurl:
637 if publicurl:
637 repo.ui.debug('checking that revision exist in the public repo\n')
638 repo.ui.debug('checking that revision exist in the public repo\n')
638 try:
639 try:
639 publicpeer = hg.peer(repo, {}, publicurl)
640 publicpeer = hg.peer(repo, {}, publicurl)
640 except error.RepoError:
641 except error.RepoError:
641 repo.ui.write_err(_('unable to access public repo: %s\n')
642 repo.ui.write_err(_('unable to access public repo: %s\n')
642 % publicurl)
643 % publicurl)
643 raise
644 raise
644 if not publicpeer.capable('known'):
645 if not publicpeer.capable('known'):
645 repo.ui.debug('skipping existence checks: public repo too old\n')
646 repo.ui.debug('skipping existence checks: public repo too old\n')
646 else:
647 else:
647 out = [repo[r] for r in revs]
648 out = [repo[r] for r in revs]
648 known = publicpeer.known(h.node() for h in out)
649 known = publicpeer.known(h.node() for h in out)
649 missing = []
650 missing = []
650 for idx, h in enumerate(out):
651 for idx, h in enumerate(out):
651 if not known[idx]:
652 if not known[idx]:
652 missing.append(h)
653 missing.append(h)
653 if missing:
654 if missing:
654 if 1 < len(missing):
655 if 1 < len(missing):
655 msg = _('public "%s" is missing %s and %i others')
656 msg = _('public "%s" is missing %s and %i others')
656 msg %= (publicurl, missing[0], len(missing) - 1)
657 msg %= (publicurl, missing[0], len(missing) - 1)
657 else:
658 else:
658 msg = _('public url %s is missing %s')
659 msg = _('public url %s is missing %s')
659 msg %= (publicurl, missing[0])
660 msg %= (publicurl, missing[0])
660 missingrevs = [ctx.rev() for ctx in missing]
661 missingrevs = [ctx.rev() for ctx in missing]
661 revhint = ' '.join('-r %s' % h
662 revhint = ' '.join('-r %s' % h
662 for h in repo.set('heads(%ld)', missingrevs))
663 for h in repo.set('heads(%ld)', missingrevs))
663 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
664 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
664 raise error.Abort(msg, hint=hint)
665 raise error.Abort(msg, hint=hint)
665
666
666 # start
667 # start
667 if date:
668 if date:
668 start_time = util.parsedate(date)
669 start_time = dateutil.parsedate(date)
669 else:
670 else:
670 start_time = util.makedate()
671 start_time = dateutil.makedate()
671
672
672 def genmsgid(id):
673 def genmsgid(id):
673 return '<%s.%d@%s>' % (id[:20], int(start_time[0]),
674 return '<%s.%d@%s>' % (id[:20], int(start_time[0]),
674 encoding.strtolocal(socket.getfqdn()))
675 encoding.strtolocal(socket.getfqdn()))
675
676
676 # deprecated config: patchbomb.from
677 # deprecated config: patchbomb.from
677 sender = (opts.get('from') or ui.config('email', 'from') or
678 sender = (opts.get('from') or ui.config('email', 'from') or
678 ui.config('patchbomb', 'from') or
679 ui.config('patchbomb', 'from') or
679 prompt(ui, 'From', ui.username()))
680 prompt(ui, 'From', ui.username()))
680
681
681 if bundle:
682 if bundle:
682 stropts = pycompat.strkwargs(opts)
683 stropts = pycompat.strkwargs(opts)
683 bundledata = _getbundle(repo, dest, **stropts)
684 bundledata = _getbundle(repo, dest, **stropts)
684 bundleopts = stropts.copy()
685 bundleopts = stropts.copy()
685 bundleopts.pop(r'bundle', None) # already processed
686 bundleopts.pop(r'bundle', None) # already processed
686 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
687 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
687 else:
688 else:
688 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
689 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
689
690
690 showaddrs = []
691 showaddrs = []
691
692
692 def getaddrs(header, ask=False, default=None):
693 def getaddrs(header, ask=False, default=None):
693 configkey = header.lower()
694 configkey = header.lower()
694 opt = header.replace('-', '_').lower()
695 opt = header.replace('-', '_').lower()
695 addrs = opts.get(opt)
696 addrs = opts.get(opt)
696 if addrs:
697 if addrs:
697 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
698 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
698 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
699 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
699
700
700 # not on the command line: fallback to config and then maybe ask
701 # not on the command line: fallback to config and then maybe ask
701 addr = (ui.config('email', configkey) or
702 addr = (ui.config('email', configkey) or
702 ui.config('patchbomb', configkey))
703 ui.config('patchbomb', configkey))
703 if not addr:
704 if not addr:
704 specified = (ui.hasconfig('email', configkey) or
705 specified = (ui.hasconfig('email', configkey) or
705 ui.hasconfig('patchbomb', configkey))
706 ui.hasconfig('patchbomb', configkey))
706 if not specified and ask:
707 if not specified and ask:
707 addr = prompt(ui, header, default=default)
708 addr = prompt(ui, header, default=default)
708 if addr:
709 if addr:
709 showaddrs.append('%s: %s' % (header, addr))
710 showaddrs.append('%s: %s' % (header, addr))
710 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
711 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
711 elif default:
712 elif default:
712 return mail.addrlistencode(
713 return mail.addrlistencode(
713 ui, [default], _charsets, opts.get('test'))
714 ui, [default], _charsets, opts.get('test'))
714 return []
715 return []
715
716
716 to = getaddrs('To', ask=True)
717 to = getaddrs('To', ask=True)
717 if not to:
718 if not to:
718 # we can get here in non-interactive mode
719 # we can get here in non-interactive mode
719 raise error.Abort(_('no recipient addresses provided'))
720 raise error.Abort(_('no recipient addresses provided'))
720 cc = getaddrs('Cc', ask=True, default='')
721 cc = getaddrs('Cc', ask=True, default='')
721 bcc = getaddrs('Bcc')
722 bcc = getaddrs('Bcc')
722 replyto = getaddrs('Reply-To')
723 replyto = getaddrs('Reply-To')
723
724
724 confirm = ui.configbool('patchbomb', 'confirm')
725 confirm = ui.configbool('patchbomb', 'confirm')
725 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
726 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
726
727
727 if confirm:
728 if confirm:
728 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
729 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
729 ui.write(('From: %s\n' % sender), label='patchbomb.from')
730 ui.write(('From: %s\n' % sender), label='patchbomb.from')
730 for addr in showaddrs:
731 for addr in showaddrs:
731 ui.write('%s\n' % addr, label='patchbomb.to')
732 ui.write('%s\n' % addr, label='patchbomb.to')
732 for m, subj, ds in msgs:
733 for m, subj, ds in msgs:
733 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
734 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
734 if ds:
735 if ds:
735 ui.write(ds, label='patchbomb.diffstats')
736 ui.write(ds, label='patchbomb.diffstats')
736 ui.write('\n')
737 ui.write('\n')
737 if ui.promptchoice(_('are you sure you want to send (yn)?'
738 if ui.promptchoice(_('are you sure you want to send (yn)?'
738 '$$ &Yes $$ &No')):
739 '$$ &Yes $$ &No')):
739 raise error.Abort(_('patchbomb canceled'))
740 raise error.Abort(_('patchbomb canceled'))
740
741
741 ui.write('\n')
742 ui.write('\n')
742
743
743 parent = opts.get('in_reply_to') or None
744 parent = opts.get('in_reply_to') or None
744 # angle brackets may be omitted, they're not semantically part of the msg-id
745 # angle brackets may be omitted, they're not semantically part of the msg-id
745 if parent is not None:
746 if parent is not None:
746 if not parent.startswith('<'):
747 if not parent.startswith('<'):
747 parent = '<' + parent
748 parent = '<' + parent
748 if not parent.endswith('>'):
749 if not parent.endswith('>'):
749 parent += '>'
750 parent += '>'
750
751
751 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
752 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
752 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
753 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
753 sendmail = None
754 sendmail = None
754 firstpatch = None
755 firstpatch = None
755 for i, (m, subj, ds) in enumerate(msgs):
756 for i, (m, subj, ds) in enumerate(msgs):
756 try:
757 try:
757 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
758 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
758 if not firstpatch:
759 if not firstpatch:
759 firstpatch = m['Message-Id']
760 firstpatch = m['Message-Id']
760 m['X-Mercurial-Series-Id'] = firstpatch
761 m['X-Mercurial-Series-Id'] = firstpatch
761 except TypeError:
762 except TypeError:
762 m['Message-Id'] = genmsgid('patchbomb')
763 m['Message-Id'] = genmsgid('patchbomb')
763 if parent:
764 if parent:
764 m['In-Reply-To'] = parent
765 m['In-Reply-To'] = parent
765 m['References'] = parent
766 m['References'] = parent
766 if not parent or 'X-Mercurial-Node' not in m:
767 if not parent or 'X-Mercurial-Node' not in m:
767 parent = m['Message-Id']
768 parent = m['Message-Id']
768
769
769 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
770 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
770 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
771 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
771
772
772 start_time = (start_time[0] + 1, start_time[1])
773 start_time = (start_time[0] + 1, start_time[1])
773 m['From'] = sender
774 m['From'] = sender
774 m['To'] = ', '.join(to)
775 m['To'] = ', '.join(to)
775 if cc:
776 if cc:
776 m['Cc'] = ', '.join(cc)
777 m['Cc'] = ', '.join(cc)
777 if bcc:
778 if bcc:
778 m['Bcc'] = ', '.join(bcc)
779 m['Bcc'] = ', '.join(bcc)
779 if replyto:
780 if replyto:
780 m['Reply-To'] = ', '.join(replyto)
781 m['Reply-To'] = ', '.join(replyto)
781 if opts.get('test'):
782 if opts.get('test'):
782 ui.status(_('displaying '), subj, ' ...\n')
783 ui.status(_('displaying '), subj, ' ...\n')
783 ui.pager('email')
784 ui.pager('email')
784 generator = emailgen.Generator(ui, mangle_from_=False)
785 generator = emailgen.Generator(ui, mangle_from_=False)
785 try:
786 try:
786 generator.flatten(m, 0)
787 generator.flatten(m, 0)
787 ui.write('\n')
788 ui.write('\n')
788 except IOError as inst:
789 except IOError as inst:
789 if inst.errno != errno.EPIPE:
790 if inst.errno != errno.EPIPE:
790 raise
791 raise
791 else:
792 else:
792 if not sendmail:
793 if not sendmail:
793 sendmail = mail.connect(ui, mbox=mbox)
794 sendmail = mail.connect(ui, mbox=mbox)
794 ui.status(_('sending '), subj, ' ...\n')
795 ui.status(_('sending '), subj, ' ...\n')
795 ui.progress(_('sending'), i, item=subj, total=len(msgs),
796 ui.progress(_('sending'), i, item=subj, total=len(msgs),
796 unit=_('emails'))
797 unit=_('emails'))
797 if not mbox:
798 if not mbox:
798 # Exim does not remove the Bcc field
799 # Exim does not remove the Bcc field
799 del m['Bcc']
800 del m['Bcc']
800 fp = stringio()
801 fp = stringio()
801 generator = emailgen.Generator(fp, mangle_from_=False)
802 generator = emailgen.Generator(fp, mangle_from_=False)
802 generator.flatten(m, 0)
803 generator.flatten(m, 0)
803 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
804 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
804
805
805 ui.progress(_('writing'), None)
806 ui.progress(_('writing'), None)
806 ui.progress(_('sending'), None)
807 ui.progress(_('sending'), None)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from mercurial/util.py to mercurial/utils/dateutil.py
NO CONTENT: file copied from mercurial/util.py to mercurial/utils/dateutil.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now