##// END OF EJS Templates
compat: use 'key' argument instead of 'cmp' when sorting a list
Alejandro Santos -
r9032:1fa80c54 default
parent child Browse files
Show More
@@ -1,174 +1,174 b''
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import patch, cmdutil, util, templater
12 from mercurial import patch, cmdutil, util, templater
13 import sys, os
13 import sys, os
14 import time, datetime
14 import time, datetime
15
15
16 def maketemplater(ui, repo, tmpl):
16 def maketemplater(ui, repo, tmpl):
17 tmpl = templater.parsestring(tmpl, quoted=False)
17 tmpl = templater.parsestring(tmpl, quoted=False)
18 try:
18 try:
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
20 except SyntaxError, inst:
20 except SyntaxError, inst:
21 raise util.Abort(inst.args[0])
21 raise util.Abort(inst.args[0])
22 t.use_template(tmpl)
22 t.use_template(tmpl)
23 return t
23 return t
24
24
25 def changedlines(ui, repo, ctx1, ctx2, fns):
25 def changedlines(ui, repo, ctx1, ctx2, fns):
26 lines = 0
26 lines = 0
27 fmatch = cmdutil.match(repo, pats=fns)
27 fmatch = cmdutil.match(repo, pats=fns)
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
29 for l in diff.split('\n'):
29 for l in diff.split('\n'):
30 if (l.startswith("+") and not l.startswith("+++ ") or
30 if (l.startswith("+") and not l.startswith("+++ ") or
31 l.startswith("-") and not l.startswith("--- ")):
31 l.startswith("-") and not l.startswith("--- ")):
32 lines += 1
32 lines += 1
33 return lines
33 return lines
34
34
35 def countrate(ui, repo, amap, *pats, **opts):
35 def countrate(ui, repo, amap, *pats, **opts):
36 """Calculate stats"""
36 """Calculate stats"""
37 if opts.get('dateformat'):
37 if opts.get('dateformat'):
38 def getkey(ctx):
38 def getkey(ctx):
39 t, tz = ctx.date()
39 t, tz = ctx.date()
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
41 return date.strftime(opts['dateformat'])
41 return date.strftime(opts['dateformat'])
42 else:
42 else:
43 tmpl = opts.get('template', '{author|email}')
43 tmpl = opts.get('template', '{author|email}')
44 tmpl = maketemplater(ui, repo, tmpl)
44 tmpl = maketemplater(ui, repo, tmpl)
45 def getkey(ctx):
45 def getkey(ctx):
46 ui.pushbuffer()
46 ui.pushbuffer()
47 tmpl.show(ctx)
47 tmpl.show(ctx)
48 return ui.popbuffer()
48 return ui.popbuffer()
49
49
50 count = pct = 0
50 count = pct = 0
51 rate = {}
51 rate = {}
52 df = False
52 df = False
53 if opts.get('date'):
53 if opts.get('date'):
54 df = util.matchdate(opts['date'])
54 df = util.matchdate(opts['date'])
55
55
56 get = util.cachefunc(lambda r: repo[r].changeset())
56 get = util.cachefunc(lambda r: repo[r].changeset())
57 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
57 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
58 for st, rev, fns in changeiter:
58 for st, rev, fns in changeiter:
59 if not st == 'add':
59 if not st == 'add':
60 continue
60 continue
61 if df and not df(get(rev)[2][0]): # doesn't match date format
61 if df and not df(get(rev)[2][0]): # doesn't match date format
62 continue
62 continue
63
63
64 ctx = repo[rev]
64 ctx = repo[rev]
65 key = getkey(ctx)
65 key = getkey(ctx)
66 key = amap.get(key, key) # alias remap
66 key = amap.get(key, key) # alias remap
67 if opts.get('changesets'):
67 if opts.get('changesets'):
68 rate[key] = rate.get(key, 0) + 1
68 rate[key] = rate.get(key, 0) + 1
69 else:
69 else:
70 parents = ctx.parents()
70 parents = ctx.parents()
71 if len(parents) > 1:
71 if len(parents) > 1:
72 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
72 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
73 continue
73 continue
74
74
75 ctx1 = parents[0]
75 ctx1 = parents[0]
76 lines = changedlines(ui, repo, ctx1, ctx, fns)
76 lines = changedlines(ui, repo, ctx1, ctx, fns)
77 rate[key] = rate.get(key, 0) + lines
77 rate[key] = rate.get(key, 0) + lines
78
78
79 if opts.get('progress'):
79 if opts.get('progress'):
80 count += 1
80 count += 1
81 newpct = int(100.0 * count / max(len(repo), 1))
81 newpct = int(100.0 * count / max(len(repo), 1))
82 if pct < newpct:
82 if pct < newpct:
83 pct = newpct
83 pct = newpct
84 ui.write("\r" + _("generating stats: %d%%") % pct)
84 ui.write("\r" + _("generating stats: %d%%") % pct)
85 sys.stdout.flush()
85 sys.stdout.flush()
86
86
87 if opts.get('progress'):
87 if opts.get('progress'):
88 ui.write("\r")
88 ui.write("\r")
89 sys.stdout.flush()
89 sys.stdout.flush()
90
90
91 return rate
91 return rate
92
92
93
93
94 def churn(ui, repo, *pats, **opts):
94 def churn(ui, repo, *pats, **opts):
95 '''histogram of changes to the repository
95 '''histogram of changes to the repository
96
96
97 This command will display a histogram representing the number
97 This command will display a histogram representing the number
98 of changed lines or revisions, grouped according to the given
98 of changed lines or revisions, grouped according to the given
99 template. The default template will group changes by author.
99 template. The default template will group changes by author.
100 The --dateformat option may be used to group the results by
100 The --dateformat option may be used to group the results by
101 date instead.
101 date instead.
102
102
103 Statistics are based on the number of changed lines, or
103 Statistics are based on the number of changed lines, or
104 alternatively the number of matching revisions if the
104 alternatively the number of matching revisions if the
105 --changesets option is specified.
105 --changesets option is specified.
106
106
107 Examples:
107 Examples:
108
108
109 # display count of changed lines for every committer
109 # display count of changed lines for every committer
110 hg churn -t '{author|email}'
110 hg churn -t '{author|email}'
111
111
112 # display daily activity graph
112 # display daily activity graph
113 hg churn -f '%H' -s -c
113 hg churn -f '%H' -s -c
114
114
115 # display activity of developers by month
115 # display activity of developers by month
116 hg churn -f '%Y-%m' -s -c
116 hg churn -f '%Y-%m' -s -c
117
117
118 # display count of lines changed in every year
118 # display count of lines changed in every year
119 hg churn -f '%Y' -s
119 hg churn -f '%Y' -s
120
120
121 It is possible to map alternate email addresses to a main address
121 It is possible to map alternate email addresses to a main address
122 by providing a file using the following format:
122 by providing a file using the following format:
123
123
124 <alias email> <actual email>
124 <alias email> <actual email>
125
125
126 Such a file may be specified with the --aliases option, otherwise a
126 Such a file may be specified with the --aliases option, otherwise a
127 .hgchurn file will be looked for in the working directory root.
127 .hgchurn file will be looked for in the working directory root.
128 '''
128 '''
129 def pad(s, l):
129 def pad(s, l):
130 return (s + " " * l)[:l]
130 return (s + " " * l)[:l]
131
131
132 amap = {}
132 amap = {}
133 aliases = opts.get('aliases')
133 aliases = opts.get('aliases')
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
135 aliases = repo.wjoin('.hgchurn')
135 aliases = repo.wjoin('.hgchurn')
136 if aliases:
136 if aliases:
137 for l in open(aliases, "r"):
137 for l in open(aliases, "r"):
138 l = l.strip()
138 l = l.strip()
139 alias, actual = l.split()
139 alias, actual = l.split()
140 amap[alias] = actual
140 amap[alias] = actual
141
141
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
143 if not rate:
143 if not rate:
144 return
144 return
145
145
146 sortfn = ((not opts.get('sort')) and (lambda a, b: cmp(b[1], a[1])) or None)
146 sortkey = ((not opts.get('sort')) and (lambda x: -x[1]) or None)
147 rate.sort(sortfn)
147 rate.sort(key=sortkey)
148
148
149 maxcount = float(max([v for k, v in rate]))
149 maxcount = float(max([v for k, v in rate]))
150 maxname = max([len(k) for k, v in rate])
150 maxname = max([len(k) for k, v in rate])
151
151
152 ttywidth = util.termwidth()
152 ttywidth = util.termwidth()
153 ui.debug(_("assuming %i character terminal\n") % ttywidth)
153 ui.debug(_("assuming %i character terminal\n") % ttywidth)
154 width = ttywidth - maxname - 2 - 6 - 2 - 2
154 width = ttywidth - maxname - 2 - 6 - 2 - 2
155
155
156 for date, count in rate:
156 for date, count in rate:
157 print "%s %6d %s" % (pad(date, maxname), count,
157 print "%s %6d %s" % (pad(date, maxname), count,
158 "*" * int(count * width / maxcount))
158 "*" * int(count * width / maxcount))
159
159
160
160
161 cmdtable = {
161 cmdtable = {
162 "churn":
162 "churn":
163 (churn,
163 (churn,
164 [('r', 'rev', [], _('count rate for the specified revision or range')),
164 [('r', 'rev', [], _('count rate for the specified revision or range')),
165 ('d', 'date', '', _('count rate for revisions matching date spec')),
165 ('d', 'date', '', _('count rate for revisions matching date spec')),
166 ('t', 'template', '{author|email}', _('template to group changesets')),
166 ('t', 'template', '{author|email}', _('template to group changesets')),
167 ('f', 'dateformat', '',
167 ('f', 'dateformat', '',
168 _('strftime-compatible format for grouping by date')),
168 _('strftime-compatible format for grouping by date')),
169 ('c', 'changesets', False, _('count rate by number of changesets')),
169 ('c', 'changesets', False, _('count rate by number of changesets')),
170 ('s', 'sort', False, _('sort by key (default: sort by count)')),
170 ('s', 'sort', False, _('sort by key (default: sort by count)')),
171 ('', 'aliases', '', _('file with email aliases')),
171 ('', 'aliases', '', _('file with email aliases')),
172 ('', 'progress', None, _('show progress'))],
172 ('', 'progress', None, _('show progress'))],
173 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
173 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
174 }
174 }
@@ -1,838 +1,831 b''
1 #
1 #
2 # Mercurial built-in replacement for cvsps.
2 # Mercurial built-in replacement for cvsps.
3 #
3 #
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import os
9 import os
10 import re
10 import re
11 import cPickle as pickle
11 import cPickle as pickle
12 from mercurial import util
12 from mercurial import util
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
15 def listsort(list, key):
16 "helper to sort by key in Python 2.3"
17 try:
18 list.sort(key=key)
19 except TypeError:
20 list.sort(lambda l, r: cmp(key(l), key(r)))
21
22 class logentry(object):
15 class logentry(object):
23 '''Class logentry has the following attributes:
16 '''Class logentry has the following attributes:
24 .author - author name as CVS knows it
17 .author - author name as CVS knows it
25 .branch - name of branch this revision is on
18 .branch - name of branch this revision is on
26 .branches - revision tuple of branches starting at this revision
19 .branches - revision tuple of branches starting at this revision
27 .comment - commit message
20 .comment - commit message
28 .date - the commit date as a (time, tz) tuple
21 .date - the commit date as a (time, tz) tuple
29 .dead - true if file revision is dead
22 .dead - true if file revision is dead
30 .file - Name of file
23 .file - Name of file
31 .lines - a tuple (+lines, -lines) or None
24 .lines - a tuple (+lines, -lines) or None
32 .parent - Previous revision of this entry
25 .parent - Previous revision of this entry
33 .rcs - name of file as returned from CVS
26 .rcs - name of file as returned from CVS
34 .revision - revision number as tuple
27 .revision - revision number as tuple
35 .tags - list of tags on the file
28 .tags - list of tags on the file
36 .synthetic - is this a synthetic "file ... added on ..." revision?
29 .synthetic - is this a synthetic "file ... added on ..." revision?
37 .mergepoint- the branch that has been merged from
30 .mergepoint- the branch that has been merged from
38 (if present in rlog output)
31 (if present in rlog output)
39 .branchpoints- the branches that start at the current entry
32 .branchpoints- the branches that start at the current entry
40 '''
33 '''
41 def __init__(self, **entries):
34 def __init__(self, **entries):
42 self.__dict__.update(entries)
35 self.__dict__.update(entries)
43
36
44 def __repr__(self):
37 def __repr__(self):
45 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
38 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
46 id(self),
39 id(self),
47 self.file,
40 self.file,
48 ".".join(map(str, self.revision)))
41 ".".join(map(str, self.revision)))
49
42
50 class logerror(Exception):
43 class logerror(Exception):
51 pass
44 pass
52
45
53 def getrepopath(cvspath):
46 def getrepopath(cvspath):
54 """Return the repository path from a CVS path.
47 """Return the repository path from a CVS path.
55
48
56 >>> getrepopath('/foo/bar')
49 >>> getrepopath('/foo/bar')
57 '/foo/bar'
50 '/foo/bar'
58 >>> getrepopath('c:/foo/bar')
51 >>> getrepopath('c:/foo/bar')
59 'c:/foo/bar'
52 'c:/foo/bar'
60 >>> getrepopath(':pserver:10/foo/bar')
53 >>> getrepopath(':pserver:10/foo/bar')
61 '/foo/bar'
54 '/foo/bar'
62 >>> getrepopath(':pserver:10c:/foo/bar')
55 >>> getrepopath(':pserver:10c:/foo/bar')
63 '/foo/bar'
56 '/foo/bar'
64 >>> getrepopath(':pserver:/foo/bar')
57 >>> getrepopath(':pserver:/foo/bar')
65 '/foo/bar'
58 '/foo/bar'
66 >>> getrepopath(':pserver:c:/foo/bar')
59 >>> getrepopath(':pserver:c:/foo/bar')
67 'c:/foo/bar'
60 'c:/foo/bar'
68 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
61 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
69 '/foo/bar'
62 '/foo/bar'
70 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
63 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
71 'c:/foo/bar'
64 'c:/foo/bar'
72 """
65 """
73 # According to CVS manual, CVS paths are expressed like:
66 # According to CVS manual, CVS paths are expressed like:
74 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
67 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
75 #
68 #
76 # Unfortunately, Windows absolute paths start with a drive letter
69 # Unfortunately, Windows absolute paths start with a drive letter
77 # like 'c:' making it harder to parse. Here we assume that drive
70 # like 'c:' making it harder to parse. Here we assume that drive
78 # letters are only one character long and any CVS component before
71 # letters are only one character long and any CVS component before
79 # the repository path is at least 2 characters long, and use this
72 # the repository path is at least 2 characters long, and use this
80 # to disambiguate.
73 # to disambiguate.
81 parts = cvspath.split(':')
74 parts = cvspath.split(':')
82 if len(parts) == 1:
75 if len(parts) == 1:
83 return parts[0]
76 return parts[0]
84 # Here there is an ambiguous case if we have a port number
77 # Here there is an ambiguous case if we have a port number
85 # immediately followed by a Windows driver letter. We assume this
78 # immediately followed by a Windows driver letter. We assume this
86 # never happens and decide it must be CVS path component,
79 # never happens and decide it must be CVS path component,
87 # therefore ignoring it.
80 # therefore ignoring it.
88 if len(parts[-2]) > 1:
81 if len(parts[-2]) > 1:
89 return parts[-1].lstrip('0123456789')
82 return parts[-1].lstrip('0123456789')
90 return parts[-2] + ':' + parts[-1]
83 return parts[-2] + ':' + parts[-1]
91
84
92 def createlog(ui, directory=None, root="", rlog=True, cache=None):
85 def createlog(ui, directory=None, root="", rlog=True, cache=None):
93 '''Collect the CVS rlog'''
86 '''Collect the CVS rlog'''
94
87
95 # Because we store many duplicate commit log messages, reusing strings
88 # Because we store many duplicate commit log messages, reusing strings
96 # saves a lot of memory and pickle storage space.
89 # saves a lot of memory and pickle storage space.
97 _scache = {}
90 _scache = {}
98 def scache(s):
91 def scache(s):
99 "return a shared version of a string"
92 "return a shared version of a string"
100 return _scache.setdefault(s, s)
93 return _scache.setdefault(s, s)
101
94
102 ui.status(_('collecting CVS rlog\n'))
95 ui.status(_('collecting CVS rlog\n'))
103
96
104 log = [] # list of logentry objects containing the CVS state
97 log = [] # list of logentry objects containing the CVS state
105
98
106 # patterns to match in CVS (r)log output, by state of use
99 # patterns to match in CVS (r)log output, by state of use
107 re_00 = re.compile('RCS file: (.+)$')
100 re_00 = re.compile('RCS file: (.+)$')
108 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
101 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
109 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
102 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
110 re_03 = re.compile("(Cannot access.+CVSROOT)|"
103 re_03 = re.compile("(Cannot access.+CVSROOT)|"
111 "(can't create temporary directory.+)$")
104 "(can't create temporary directory.+)$")
112 re_10 = re.compile('Working file: (.+)$')
105 re_10 = re.compile('Working file: (.+)$')
113 re_20 = re.compile('symbolic names:')
106 re_20 = re.compile('symbolic names:')
114 re_30 = re.compile('\t(.+): ([\\d.]+)$')
107 re_30 = re.compile('\t(.+): ([\\d.]+)$')
115 re_31 = re.compile('----------------------------$')
108 re_31 = re.compile('----------------------------$')
116 re_32 = re.compile('======================================='
109 re_32 = re.compile('======================================='
117 '======================================$')
110 '======================================$')
118 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
111 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
119 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
112 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
120 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
113 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
121 r'(.*mergepoint:\s+([^;]+);)?')
114 r'(.*mergepoint:\s+([^;]+);)?')
122 re_70 = re.compile('branches: (.+);$')
115 re_70 = re.compile('branches: (.+);$')
123
116
124 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
117 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
125
118
126 prefix = '' # leading path to strip of what we get from CVS
119 prefix = '' # leading path to strip of what we get from CVS
127
120
128 if directory is None:
121 if directory is None:
129 # Current working directory
122 # Current working directory
130
123
131 # Get the real directory in the repository
124 # Get the real directory in the repository
132 try:
125 try:
133 prefix = open(os.path.join('CVS','Repository')).read().strip()
126 prefix = open(os.path.join('CVS','Repository')).read().strip()
134 if prefix == ".":
127 if prefix == ".":
135 prefix = ""
128 prefix = ""
136 directory = prefix
129 directory = prefix
137 except IOError:
130 except IOError:
138 raise logerror('Not a CVS sandbox')
131 raise logerror('Not a CVS sandbox')
139
132
140 if prefix and not prefix.endswith(os.sep):
133 if prefix and not prefix.endswith(os.sep):
141 prefix += os.sep
134 prefix += os.sep
142
135
143 # Use the Root file in the sandbox, if it exists
136 # Use the Root file in the sandbox, if it exists
144 try:
137 try:
145 root = open(os.path.join('CVS','Root')).read().strip()
138 root = open(os.path.join('CVS','Root')).read().strip()
146 except IOError:
139 except IOError:
147 pass
140 pass
148
141
149 if not root:
142 if not root:
150 root = os.environ.get('CVSROOT', '')
143 root = os.environ.get('CVSROOT', '')
151
144
152 # read log cache if one exists
145 # read log cache if one exists
153 oldlog = []
146 oldlog = []
154 date = None
147 date = None
155
148
156 if cache:
149 if cache:
157 cachedir = os.path.expanduser('~/.hg.cvsps')
150 cachedir = os.path.expanduser('~/.hg.cvsps')
158 if not os.path.exists(cachedir):
151 if not os.path.exists(cachedir):
159 os.mkdir(cachedir)
152 os.mkdir(cachedir)
160
153
161 # The cvsps cache pickle needs a uniquified name, based on the
154 # The cvsps cache pickle needs a uniquified name, based on the
162 # repository location. The address may have all sort of nasties
155 # repository location. The address may have all sort of nasties
163 # in it, slashes, colons and such. So here we take just the
156 # in it, slashes, colons and such. So here we take just the
164 # alphanumerics, concatenated in a way that does not mix up the
157 # alphanumerics, concatenated in a way that does not mix up the
165 # various components, so that
158 # various components, so that
166 # :pserver:user@server:/path
159 # :pserver:user@server:/path
167 # and
160 # and
168 # /pserver/user/server/path
161 # /pserver/user/server/path
169 # are mapped to different cache file names.
162 # are mapped to different cache file names.
170 cachefile = root.split(":") + [directory, "cache"]
163 cachefile = root.split(":") + [directory, "cache"]
171 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
164 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
172 cachefile = os.path.join(cachedir,
165 cachefile = os.path.join(cachedir,
173 '.'.join([s for s in cachefile if s]))
166 '.'.join([s for s in cachefile if s]))
174
167
175 if cache == 'update':
168 if cache == 'update':
176 try:
169 try:
177 ui.note(_('reading cvs log cache %s\n') % cachefile)
170 ui.note(_('reading cvs log cache %s\n') % cachefile)
178 oldlog = pickle.load(open(cachefile))
171 oldlog = pickle.load(open(cachefile))
179 ui.note(_('cache has %d log entries\n') % len(oldlog))
172 ui.note(_('cache has %d log entries\n') % len(oldlog))
180 except Exception, e:
173 except Exception, e:
181 ui.note(_('error reading cache: %r\n') % e)
174 ui.note(_('error reading cache: %r\n') % e)
182
175
183 if oldlog:
176 if oldlog:
184 date = oldlog[-1].date # last commit date as a (time,tz) tuple
177 date = oldlog[-1].date # last commit date as a (time,tz) tuple
185 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
178 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
186
179
187 # build the CVS commandline
180 # build the CVS commandline
188 cmd = ['cvs', '-q']
181 cmd = ['cvs', '-q']
189 if root:
182 if root:
190 cmd.append('-d%s' % root)
183 cmd.append('-d%s' % root)
191 p = util.normpath(getrepopath(root))
184 p = util.normpath(getrepopath(root))
192 if not p.endswith('/'):
185 if not p.endswith('/'):
193 p += '/'
186 p += '/'
194 prefix = p + util.normpath(prefix)
187 prefix = p + util.normpath(prefix)
195 cmd.append(['log', 'rlog'][rlog])
188 cmd.append(['log', 'rlog'][rlog])
196 if date:
189 if date:
197 # no space between option and date string
190 # no space between option and date string
198 cmd.append('-d>%s' % date)
191 cmd.append('-d>%s' % date)
199 cmd.append(directory)
192 cmd.append(directory)
200
193
201 # state machine begins here
194 # state machine begins here
202 tags = {} # dictionary of revisions on current file with their tags
195 tags = {} # dictionary of revisions on current file with their tags
203 branchmap = {} # mapping between branch names and revision numbers
196 branchmap = {} # mapping between branch names and revision numbers
204 state = 0
197 state = 0
205 store = False # set when a new record can be appended
198 store = False # set when a new record can be appended
206
199
207 cmd = [util.shellquote(arg) for arg in cmd]
200 cmd = [util.shellquote(arg) for arg in cmd]
208 ui.note(_("running %s\n") % (' '.join(cmd)))
201 ui.note(_("running %s\n") % (' '.join(cmd)))
209 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
202 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
210
203
211 pfp = util.popen(' '.join(cmd))
204 pfp = util.popen(' '.join(cmd))
212 peek = pfp.readline()
205 peek = pfp.readline()
213 while True:
206 while True:
214 line = peek
207 line = peek
215 if line == '':
208 if line == '':
216 break
209 break
217 peek = pfp.readline()
210 peek = pfp.readline()
218 if line.endswith('\n'):
211 if line.endswith('\n'):
219 line = line[:-1]
212 line = line[:-1]
220 #ui.debug('state=%d line=%r\n' % (state, line))
213 #ui.debug('state=%d line=%r\n' % (state, line))
221
214
222 if state == 0:
215 if state == 0:
223 # initial state, consume input until we see 'RCS file'
216 # initial state, consume input until we see 'RCS file'
224 match = re_00.match(line)
217 match = re_00.match(line)
225 if match:
218 if match:
226 rcs = match.group(1)
219 rcs = match.group(1)
227 tags = {}
220 tags = {}
228 if rlog:
221 if rlog:
229 filename = util.normpath(rcs[:-2])
222 filename = util.normpath(rcs[:-2])
230 if filename.startswith(prefix):
223 if filename.startswith(prefix):
231 filename = filename[len(prefix):]
224 filename = filename[len(prefix):]
232 if filename.startswith('/'):
225 if filename.startswith('/'):
233 filename = filename[1:]
226 filename = filename[1:]
234 if filename.startswith('Attic/'):
227 if filename.startswith('Attic/'):
235 filename = filename[6:]
228 filename = filename[6:]
236 else:
229 else:
237 filename = filename.replace('/Attic/', '/')
230 filename = filename.replace('/Attic/', '/')
238 state = 2
231 state = 2
239 continue
232 continue
240 state = 1
233 state = 1
241 continue
234 continue
242 match = re_01.match(line)
235 match = re_01.match(line)
243 if match:
236 if match:
244 raise Exception(match.group(1))
237 raise Exception(match.group(1))
245 match = re_02.match(line)
238 match = re_02.match(line)
246 if match:
239 if match:
247 raise Exception(match.group(2))
240 raise Exception(match.group(2))
248 if re_03.match(line):
241 if re_03.match(line):
249 raise Exception(line)
242 raise Exception(line)
250
243
251 elif state == 1:
244 elif state == 1:
252 # expect 'Working file' (only when using log instead of rlog)
245 # expect 'Working file' (only when using log instead of rlog)
253 match = re_10.match(line)
246 match = re_10.match(line)
254 assert match, _('RCS file must be followed by working file')
247 assert match, _('RCS file must be followed by working file')
255 filename = util.normpath(match.group(1))
248 filename = util.normpath(match.group(1))
256 state = 2
249 state = 2
257
250
258 elif state == 2:
251 elif state == 2:
259 # expect 'symbolic names'
252 # expect 'symbolic names'
260 if re_20.match(line):
253 if re_20.match(line):
261 branchmap = {}
254 branchmap = {}
262 state = 3
255 state = 3
263
256
264 elif state == 3:
257 elif state == 3:
265 # read the symbolic names and store as tags
258 # read the symbolic names and store as tags
266 match = re_30.match(line)
259 match = re_30.match(line)
267 if match:
260 if match:
268 rev = [int(x) for x in match.group(2).split('.')]
261 rev = [int(x) for x in match.group(2).split('.')]
269
262
270 # Convert magic branch number to an odd-numbered one
263 # Convert magic branch number to an odd-numbered one
271 revn = len(rev)
264 revn = len(rev)
272 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
265 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
273 rev = rev[:-2] + rev[-1:]
266 rev = rev[:-2] + rev[-1:]
274 rev = tuple(rev)
267 rev = tuple(rev)
275
268
276 if rev not in tags:
269 if rev not in tags:
277 tags[rev] = []
270 tags[rev] = []
278 tags[rev].append(match.group(1))
271 tags[rev].append(match.group(1))
279 branchmap[match.group(1)] = match.group(2)
272 branchmap[match.group(1)] = match.group(2)
280
273
281 elif re_31.match(line):
274 elif re_31.match(line):
282 state = 5
275 state = 5
283 elif re_32.match(line):
276 elif re_32.match(line):
284 state = 0
277 state = 0
285
278
286 elif state == 4:
279 elif state == 4:
287 # expecting '------' separator before first revision
280 # expecting '------' separator before first revision
288 if re_31.match(line):
281 if re_31.match(line):
289 state = 5
282 state = 5
290 else:
283 else:
291 assert not re_32.match(line), _('must have at least '
284 assert not re_32.match(line), _('must have at least '
292 'some revisions')
285 'some revisions')
293
286
294 elif state == 5:
287 elif state == 5:
295 # expecting revision number and possibly (ignored) lock indication
288 # expecting revision number and possibly (ignored) lock indication
296 # we create the logentry here from values stored in states 0 to 4,
289 # we create the logentry here from values stored in states 0 to 4,
297 # as this state is re-entered for subsequent revisions of a file.
290 # as this state is re-entered for subsequent revisions of a file.
298 match = re_50.match(line)
291 match = re_50.match(line)
299 assert match, _('expected revision number')
292 assert match, _('expected revision number')
300 e = logentry(rcs=scache(rcs), file=scache(filename),
293 e = logentry(rcs=scache(rcs), file=scache(filename),
301 revision=tuple([int(x) for x in match.group(1).split('.')]),
294 revision=tuple([int(x) for x in match.group(1).split('.')]),
302 branches=[], parent=None,
295 branches=[], parent=None,
303 synthetic=False)
296 synthetic=False)
304 state = 6
297 state = 6
305
298
306 elif state == 6:
299 elif state == 6:
307 # expecting date, author, state, lines changed
300 # expecting date, author, state, lines changed
308 match = re_60.match(line)
301 match = re_60.match(line)
309 assert match, _('revision must be followed by date line')
302 assert match, _('revision must be followed by date line')
310 d = match.group(1)
303 d = match.group(1)
311 if d[2] == '/':
304 if d[2] == '/':
312 # Y2K
305 # Y2K
313 d = '19' + d
306 d = '19' + d
314
307
315 if len(d.split()) != 3:
308 if len(d.split()) != 3:
316 # cvs log dates always in GMT
309 # cvs log dates always in GMT
317 d = d + ' UTC'
310 d = d + ' UTC'
318 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
311 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
319 '%Y/%m/%d %H:%M:%S',
312 '%Y/%m/%d %H:%M:%S',
320 '%Y-%m-%d %H:%M:%S'])
313 '%Y-%m-%d %H:%M:%S'])
321 e.author = scache(match.group(2))
314 e.author = scache(match.group(2))
322 e.dead = match.group(3).lower() == 'dead'
315 e.dead = match.group(3).lower() == 'dead'
323
316
324 if match.group(5):
317 if match.group(5):
325 if match.group(6):
318 if match.group(6):
326 e.lines = (int(match.group(5)), int(match.group(6)))
319 e.lines = (int(match.group(5)), int(match.group(6)))
327 else:
320 else:
328 e.lines = (int(match.group(5)), 0)
321 e.lines = (int(match.group(5)), 0)
329 elif match.group(6):
322 elif match.group(6):
330 e.lines = (0, int(match.group(6)))
323 e.lines = (0, int(match.group(6)))
331 else:
324 else:
332 e.lines = None
325 e.lines = None
333
326
334 if match.group(7): # cvsnt mergepoint
327 if match.group(7): # cvsnt mergepoint
335 myrev = match.group(8).split('.')
328 myrev = match.group(8).split('.')
336 if len(myrev) == 2: # head
329 if len(myrev) == 2: # head
337 e.mergepoint = 'HEAD'
330 e.mergepoint = 'HEAD'
338 else:
331 else:
339 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
332 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
340 branches = [b for b in branchmap if branchmap[b] == myrev]
333 branches = [b for b in branchmap if branchmap[b] == myrev]
341 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
334 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
342 e.mergepoint = branches[0]
335 e.mergepoint = branches[0]
343 else:
336 else:
344 e.mergepoint = None
337 e.mergepoint = None
345 e.comment = []
338 e.comment = []
346 state = 7
339 state = 7
347
340
348 elif state == 7:
341 elif state == 7:
349 # read the revision numbers of branches that start at this revision
342 # read the revision numbers of branches that start at this revision
350 # or store the commit log message otherwise
343 # or store the commit log message otherwise
351 m = re_70.match(line)
344 m = re_70.match(line)
352 if m:
345 if m:
353 e.branches = [tuple([int(y) for y in x.strip().split('.')])
346 e.branches = [tuple([int(y) for y in x.strip().split('.')])
354 for x in m.group(1).split(';')]
347 for x in m.group(1).split(';')]
355 state = 8
348 state = 8
356 elif re_31.match(line) and re_50.match(peek):
349 elif re_31.match(line) and re_50.match(peek):
357 state = 5
350 state = 5
358 store = True
351 store = True
359 elif re_32.match(line):
352 elif re_32.match(line):
360 state = 0
353 state = 0
361 store = True
354 store = True
362 else:
355 else:
363 e.comment.append(line)
356 e.comment.append(line)
364
357
365 elif state == 8:
358 elif state == 8:
366 # store commit log message
359 # store commit log message
367 if re_31.match(line):
360 if re_31.match(line):
368 state = 5
361 state = 5
369 store = True
362 store = True
370 elif re_32.match(line):
363 elif re_32.match(line):
371 state = 0
364 state = 0
372 store = True
365 store = True
373 else:
366 else:
374 e.comment.append(line)
367 e.comment.append(line)
375
368
376 # When a file is added on a branch B1, CVS creates a synthetic
369 # When a file is added on a branch B1, CVS creates a synthetic
377 # dead trunk revision 1.1 so that the branch has a root.
370 # dead trunk revision 1.1 so that the branch has a root.
378 # Likewise, if you merge such a file to a later branch B2 (one
371 # Likewise, if you merge such a file to a later branch B2 (one
379 # that already existed when the file was added on B1), CVS
372 # that already existed when the file was added on B1), CVS
380 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
373 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
381 # these revisions now, but mark them synthetic so
374 # these revisions now, but mark them synthetic so
382 # createchangeset() can take care of them.
375 # createchangeset() can take care of them.
383 if (store and
376 if (store and
384 e.dead and
377 e.dead and
385 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
378 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
386 len(e.comment) == 1 and
379 len(e.comment) == 1 and
387 file_added_re.match(e.comment[0])):
380 file_added_re.match(e.comment[0])):
388 ui.debug(_('found synthetic revision in %s: %r\n')
381 ui.debug(_('found synthetic revision in %s: %r\n')
389 % (e.rcs, e.comment[0]))
382 % (e.rcs, e.comment[0]))
390 e.synthetic = True
383 e.synthetic = True
391
384
392 if store:
385 if store:
393 # clean up the results and save in the log.
386 # clean up the results and save in the log.
394 store = False
387 store = False
395 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
388 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
396 e.comment = scache('\n'.join(e.comment))
389 e.comment = scache('\n'.join(e.comment))
397
390
398 revn = len(e.revision)
391 revn = len(e.revision)
399 if revn > 3 and (revn % 2) == 0:
392 if revn > 3 and (revn % 2) == 0:
400 e.branch = tags.get(e.revision[:-1], [None])[0]
393 e.branch = tags.get(e.revision[:-1], [None])[0]
401 else:
394 else:
402 e.branch = None
395 e.branch = None
403
396
404 # find the branches starting from this revision
397 # find the branches starting from this revision
405 branchpoints = set()
398 branchpoints = set()
406 for branch, revision in branchmap.iteritems():
399 for branch, revision in branchmap.iteritems():
407 revparts = tuple([int(i) for i in revision.split('.')])
400 revparts = tuple([int(i) for i in revision.split('.')])
408 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
401 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
409 # normal branch
402 # normal branch
410 if revparts[:-2] == e.revision:
403 if revparts[:-2] == e.revision:
411 branchpoints.add(branch)
404 branchpoints.add(branch)
412 elif revparts == (1,1,1): # vendor branch
405 elif revparts == (1,1,1): # vendor branch
413 if revparts in e.branches:
406 if revparts in e.branches:
414 branchpoints.add(branch)
407 branchpoints.add(branch)
415 e.branchpoints = branchpoints
408 e.branchpoints = branchpoints
416
409
417 log.append(e)
410 log.append(e)
418
411
419 if len(log) % 100 == 0:
412 if len(log) % 100 == 0:
420 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
413 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
421
414
422 listsort(log, key=lambda x:(x.rcs, x.revision))
415 log.sort(key=lambda x: (x.rcs, x.revision))
423
416
424 # find parent revisions of individual files
417 # find parent revisions of individual files
425 versions = {}
418 versions = {}
426 for e in log:
419 for e in log:
427 branch = e.revision[:-1]
420 branch = e.revision[:-1]
428 p = versions.get((e.rcs, branch), None)
421 p = versions.get((e.rcs, branch), None)
429 if p is None:
422 if p is None:
430 p = e.revision[:-2]
423 p = e.revision[:-2]
431 e.parent = p
424 e.parent = p
432 versions[(e.rcs, branch)] = e.revision
425 versions[(e.rcs, branch)] = e.revision
433
426
434 # update the log cache
427 # update the log cache
435 if cache:
428 if cache:
436 if log:
429 if log:
437 # join up the old and new logs
430 # join up the old and new logs
438 listsort(log, key=lambda x:x.date)
431 log.sort(key=lambda x: x.date)
439
432
440 if oldlog and oldlog[-1].date >= log[0].date:
433 if oldlog and oldlog[-1].date >= log[0].date:
441 raise logerror('Log cache overlaps with new log entries,'
434 raise logerror('Log cache overlaps with new log entries,'
442 ' re-run without cache.')
435 ' re-run without cache.')
443
436
444 log = oldlog + log
437 log = oldlog + log
445
438
446 # write the new cachefile
439 # write the new cachefile
447 ui.note(_('writing cvs log cache %s\n') % cachefile)
440 ui.note(_('writing cvs log cache %s\n') % cachefile)
448 pickle.dump(log, open(cachefile, 'w'))
441 pickle.dump(log, open(cachefile, 'w'))
449 else:
442 else:
450 log = oldlog
443 log = oldlog
451
444
452 ui.status(_('%d log entries\n') % len(log))
445 ui.status(_('%d log entries\n') % len(log))
453
446
454 return log
447 return log
455
448
456
449
457 class changeset(object):
450 class changeset(object):
458 '''Class changeset has the following attributes:
451 '''Class changeset has the following attributes:
459 .id - integer identifying this changeset (list index)
452 .id - integer identifying this changeset (list index)
460 .author - author name as CVS knows it
453 .author - author name as CVS knows it
461 .branch - name of branch this changeset is on, or None
454 .branch - name of branch this changeset is on, or None
462 .comment - commit message
455 .comment - commit message
463 .date - the commit date as a (time,tz) tuple
456 .date - the commit date as a (time,tz) tuple
464 .entries - list of logentry objects in this changeset
457 .entries - list of logentry objects in this changeset
465 .parents - list of one or two parent changesets
458 .parents - list of one or two parent changesets
466 .tags - list of tags on this changeset
459 .tags - list of tags on this changeset
467 .synthetic - from synthetic revision "file ... added on branch ..."
460 .synthetic - from synthetic revision "file ... added on branch ..."
468 .mergepoint- the branch that has been merged from
461 .mergepoint- the branch that has been merged from
469 (if present in rlog output)
462 (if present in rlog output)
470 .branchpoints- the branches that start at the current entry
463 .branchpoints- the branches that start at the current entry
471 '''
464 '''
472 def __init__(self, **entries):
465 def __init__(self, **entries):
473 self.__dict__.update(entries)
466 self.__dict__.update(entries)
474
467
475 def __repr__(self):
468 def __repr__(self):
476 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
469 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
477 id(self),
470 id(self),
478 getattr(self, 'id', "(no id)"))
471 getattr(self, 'id', "(no id)"))
479
472
480 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
473 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
481 '''Convert log into changesets.'''
474 '''Convert log into changesets.'''
482
475
483 ui.status(_('creating changesets\n'))
476 ui.status(_('creating changesets\n'))
484
477
485 # Merge changesets
478 # Merge changesets
486
479
487 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
480 log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))
488
481
489 changesets = []
482 changesets = []
490 files = set()
483 files = set()
491 c = None
484 c = None
492 for i, e in enumerate(log):
485 for i, e in enumerate(log):
493
486
494 # Check if log entry belongs to the current changeset or not.
487 # Check if log entry belongs to the current changeset or not.
495
488
496 # Since CVS is file centric, two different file revisions with
489 # Since CVS is file centric, two different file revisions with
497 # different branchpoints should be treated as belonging to two
490 # different branchpoints should be treated as belonging to two
498 # different changesets (and the ordering is important and not
491 # different changesets (and the ordering is important and not
499 # honoured by cvsps at this point).
492 # honoured by cvsps at this point).
500 #
493 #
501 # Consider the following case:
494 # Consider the following case:
502 # foo 1.1 branchpoints: [MYBRANCH]
495 # foo 1.1 branchpoints: [MYBRANCH]
503 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
496 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
504 #
497 #
505 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
498 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
506 # later version of foo may be in MYBRANCH2, so foo should be the
499 # later version of foo may be in MYBRANCH2, so foo should be the
507 # first changeset and bar the next and MYBRANCH and MYBRANCH2
500 # first changeset and bar the next and MYBRANCH and MYBRANCH2
508 # should both start off of the bar changeset. No provisions are
501 # should both start off of the bar changeset. No provisions are
509 # made to ensure that this is, in fact, what happens.
502 # made to ensure that this is, in fact, what happens.
510 if not (c and
503 if not (c and
511 e.comment == c.comment and
504 e.comment == c.comment and
512 e.author == c.author and
505 e.author == c.author and
513 e.branch == c.branch and
506 e.branch == c.branch and
514 (not hasattr(e, 'branchpoints') or
507 (not hasattr(e, 'branchpoints') or
515 not hasattr (c, 'branchpoints') or
508 not hasattr (c, 'branchpoints') or
516 e.branchpoints == c.branchpoints) and
509 e.branchpoints == c.branchpoints) and
517 ((c.date[0] + c.date[1]) <=
510 ((c.date[0] + c.date[1]) <=
518 (e.date[0] + e.date[1]) <=
511 (e.date[0] + e.date[1]) <=
519 (c.date[0] + c.date[1]) + fuzz) and
512 (c.date[0] + c.date[1]) + fuzz) and
520 e.file not in files):
513 e.file not in files):
521 c = changeset(comment=e.comment, author=e.author,
514 c = changeset(comment=e.comment, author=e.author,
522 branch=e.branch, date=e.date, entries=[],
515 branch=e.branch, date=e.date, entries=[],
523 mergepoint=getattr(e, 'mergepoint', None),
516 mergepoint=getattr(e, 'mergepoint', None),
524 branchpoints=getattr(e, 'branchpoints', set()))
517 branchpoints=getattr(e, 'branchpoints', set()))
525 changesets.append(c)
518 changesets.append(c)
526 files = set()
519 files = set()
527 if len(changesets) % 100 == 0:
520 if len(changesets) % 100 == 0:
528 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
521 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
529 ui.status(util.ellipsis(t, 80) + '\n')
522 ui.status(util.ellipsis(t, 80) + '\n')
530
523
531 c.entries.append(e)
524 c.entries.append(e)
532 files.add(e.file)
525 files.add(e.file)
533 c.date = e.date # changeset date is date of latest commit in it
526 c.date = e.date # changeset date is date of latest commit in it
534
527
535 # Mark synthetic changesets
528 # Mark synthetic changesets
536
529
537 for c in changesets:
530 for c in changesets:
538 # Synthetic revisions always get their own changeset, because
531 # Synthetic revisions always get their own changeset, because
539 # the log message includes the filename. E.g. if you add file3
532 # the log message includes the filename. E.g. if you add file3
540 # and file4 on a branch, you get four log entries and three
533 # and file4 on a branch, you get four log entries and three
541 # changesets:
534 # changesets:
542 # "File file3 was added on branch ..." (synthetic, 1 entry)
535 # "File file3 was added on branch ..." (synthetic, 1 entry)
543 # "File file4 was added on branch ..." (synthetic, 1 entry)
536 # "File file4 was added on branch ..." (synthetic, 1 entry)
544 # "Add file3 and file4 to fix ..." (real, 2 entries)
537 # "Add file3 and file4 to fix ..." (real, 2 entries)
545 # Hence the check for 1 entry here.
538 # Hence the check for 1 entry here.
546 synth = getattr(c.entries[0], 'synthetic', None)
539 synth = getattr(c.entries[0], 'synthetic', None)
547 c.synthetic = (len(c.entries) == 1 and synth)
540 c.synthetic = (len(c.entries) == 1 and synth)
548
541
549 # Sort files in each changeset
542 # Sort files in each changeset
550
543
551 for c in changesets:
544 for c in changesets:
552 def pathcompare(l, r):
545 def pathcompare(l, r):
553 'Mimic cvsps sorting order'
546 'Mimic cvsps sorting order'
554 l = l.split('/')
547 l = l.split('/')
555 r = r.split('/')
548 r = r.split('/')
556 nl = len(l)
549 nl = len(l)
557 nr = len(r)
550 nr = len(r)
558 n = min(nl, nr)
551 n = min(nl, nr)
559 for i in range(n):
552 for i in range(n):
560 if i + 1 == nl and nl < nr:
553 if i + 1 == nl and nl < nr:
561 return -1
554 return -1
562 elif i + 1 == nr and nl > nr:
555 elif i + 1 == nr and nl > nr:
563 return +1
556 return +1
564 elif l[i] < r[i]:
557 elif l[i] < r[i]:
565 return -1
558 return -1
566 elif l[i] > r[i]:
559 elif l[i] > r[i]:
567 return +1
560 return +1
568 return 0
561 return 0
569 def entitycompare(l, r):
562 def entitycompare(l, r):
570 return pathcompare(l.file, r.file)
563 return pathcompare(l.file, r.file)
571
564
572 c.entries.sort(entitycompare)
565 c.entries.sort(entitycompare)
573
566
574 # Sort changesets by date
567 # Sort changesets by date
575
568
576 def cscmp(l, r):
569 def cscmp(l, r):
577 d = sum(l.date) - sum(r.date)
570 d = sum(l.date) - sum(r.date)
578 if d:
571 if d:
579 return d
572 return d
580
573
581 # detect vendor branches and initial commits on a branch
574 # detect vendor branches and initial commits on a branch
582 le = {}
575 le = {}
583 for e in l.entries:
576 for e in l.entries:
584 le[e.rcs] = e.revision
577 le[e.rcs] = e.revision
585 re = {}
578 re = {}
586 for e in r.entries:
579 for e in r.entries:
587 re[e.rcs] = e.revision
580 re[e.rcs] = e.revision
588
581
589 d = 0
582 d = 0
590 for e in l.entries:
583 for e in l.entries:
591 if re.get(e.rcs, None) == e.parent:
584 if re.get(e.rcs, None) == e.parent:
592 assert not d
585 assert not d
593 d = 1
586 d = 1
594 break
587 break
595
588
596 for e in r.entries:
589 for e in r.entries:
597 if le.get(e.rcs, None) == e.parent:
590 if le.get(e.rcs, None) == e.parent:
598 assert not d
591 assert not d
599 d = -1
592 d = -1
600 break
593 break
601
594
602 return d
595 return d
603
596
604 changesets.sort(cscmp)
597 changesets.sort(cscmp)
605
598
606 # Collect tags
599 # Collect tags
607
600
608 globaltags = {}
601 globaltags = {}
609 for c in changesets:
602 for c in changesets:
610 for e in c.entries:
603 for e in c.entries:
611 for tag in e.tags:
604 for tag in e.tags:
612 # remember which is the latest changeset to have this tag
605 # remember which is the latest changeset to have this tag
613 globaltags[tag] = c
606 globaltags[tag] = c
614
607
615 for c in changesets:
608 for c in changesets:
616 tags = set()
609 tags = set()
617 for e in c.entries:
610 for e in c.entries:
618 tags.update(e.tags)
611 tags.update(e.tags)
619 # remember tags only if this is the latest changeset to have it
612 # remember tags only if this is the latest changeset to have it
620 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
613 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
621
614
622 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
615 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
623 # by inserting dummy changesets with two parents, and handle
616 # by inserting dummy changesets with two parents, and handle
624 # {{mergefrombranch BRANCHNAME}} by setting two parents.
617 # {{mergefrombranch BRANCHNAME}} by setting two parents.
625
618
626 if mergeto is None:
619 if mergeto is None:
627 mergeto = r'{{mergetobranch ([-\w]+)}}'
620 mergeto = r'{{mergetobranch ([-\w]+)}}'
628 if mergeto:
621 if mergeto:
629 mergeto = re.compile(mergeto)
622 mergeto = re.compile(mergeto)
630
623
631 if mergefrom is None:
624 if mergefrom is None:
632 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
625 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
633 if mergefrom:
626 if mergefrom:
634 mergefrom = re.compile(mergefrom)
627 mergefrom = re.compile(mergefrom)
635
628
636 versions = {} # changeset index where we saw any particular file version
629 versions = {} # changeset index where we saw any particular file version
637 branches = {} # changeset index where we saw a branch
630 branches = {} # changeset index where we saw a branch
638 n = len(changesets)
631 n = len(changesets)
639 i = 0
632 i = 0
640 while i<n:
633 while i<n:
641 c = changesets[i]
634 c = changesets[i]
642
635
643 for f in c.entries:
636 for f in c.entries:
644 versions[(f.rcs, f.revision)] = i
637 versions[(f.rcs, f.revision)] = i
645
638
646 p = None
639 p = None
647 if c.branch in branches:
640 if c.branch in branches:
648 p = branches[c.branch]
641 p = branches[c.branch]
649 else:
642 else:
650 # first changeset on a new branch
643 # first changeset on a new branch
651 # the parent is a changeset with the branch in its
644 # the parent is a changeset with the branch in its
652 # branchpoints such that it is the latest possible
645 # branchpoints such that it is the latest possible
653 # commit without any intervening, unrelated commits.
646 # commit without any intervening, unrelated commits.
654
647
655 for candidate in xrange(i):
648 for candidate in xrange(i):
656 if c.branch not in changesets[candidate].branchpoints:
649 if c.branch not in changesets[candidate].branchpoints:
657 if p is not None:
650 if p is not None:
658 break
651 break
659 continue
652 continue
660 p = candidate
653 p = candidate
661
654
662 c.parents = []
655 c.parents = []
663 if p is not None:
656 if p is not None:
664 p = changesets[p]
657 p = changesets[p]
665
658
666 # Ensure no changeset has a synthetic changeset as a parent.
659 # Ensure no changeset has a synthetic changeset as a parent.
667 while p.synthetic:
660 while p.synthetic:
668 assert len(p.parents) <= 1, \
661 assert len(p.parents) <= 1, \
669 _('synthetic changeset cannot have multiple parents')
662 _('synthetic changeset cannot have multiple parents')
670 if p.parents:
663 if p.parents:
671 p = p.parents[0]
664 p = p.parents[0]
672 else:
665 else:
673 p = None
666 p = None
674 break
667 break
675
668
676 if p is not None:
669 if p is not None:
677 c.parents.append(p)
670 c.parents.append(p)
678
671
679 if c.mergepoint:
672 if c.mergepoint:
680 if c.mergepoint == 'HEAD':
673 if c.mergepoint == 'HEAD':
681 c.mergepoint = None
674 c.mergepoint = None
682 c.parents.append(changesets[branches[c.mergepoint]])
675 c.parents.append(changesets[branches[c.mergepoint]])
683
676
684 if mergefrom:
677 if mergefrom:
685 m = mergefrom.search(c.comment)
678 m = mergefrom.search(c.comment)
686 if m:
679 if m:
687 m = m.group(1)
680 m = m.group(1)
688 if m == 'HEAD':
681 if m == 'HEAD':
689 m = None
682 m = None
690 try:
683 try:
691 candidate = changesets[branches[m]]
684 candidate = changesets[branches[m]]
692 except KeyError:
685 except KeyError:
693 ui.warn(_("warning: CVS commit message references "
686 ui.warn(_("warning: CVS commit message references "
694 "non-existent branch %r:\n%s\n")
687 "non-existent branch %r:\n%s\n")
695 % (m, c.comment))
688 % (m, c.comment))
696 if m in branches and c.branch != m and not candidate.synthetic:
689 if m in branches and c.branch != m and not candidate.synthetic:
697 c.parents.append(candidate)
690 c.parents.append(candidate)
698
691
699 if mergeto:
692 if mergeto:
700 m = mergeto.search(c.comment)
693 m = mergeto.search(c.comment)
701 if m:
694 if m:
702 try:
695 try:
703 m = m.group(1)
696 m = m.group(1)
704 if m == 'HEAD':
697 if m == 'HEAD':
705 m = None
698 m = None
706 except:
699 except:
707 m = None # if no group found then merge to HEAD
700 m = None # if no group found then merge to HEAD
708 if m in branches and c.branch != m:
701 if m in branches and c.branch != m:
709 # insert empty changeset for merge
702 # insert empty changeset for merge
710 cc = changeset(author=c.author, branch=m, date=c.date,
703 cc = changeset(author=c.author, branch=m, date=c.date,
711 comment='convert-repo: CVS merge from branch %s' % c.branch,
704 comment='convert-repo: CVS merge from branch %s' % c.branch,
712 entries=[], tags=[], parents=[changesets[branches[m]], c])
705 entries=[], tags=[], parents=[changesets[branches[m]], c])
713 changesets.insert(i + 1, cc)
706 changesets.insert(i + 1, cc)
714 branches[m] = i + 1
707 branches[m] = i + 1
715
708
716 # adjust our loop counters now we have inserted a new entry
709 # adjust our loop counters now we have inserted a new entry
717 n += 1
710 n += 1
718 i += 2
711 i += 2
719 continue
712 continue
720
713
721 branches[c.branch] = i
714 branches[c.branch] = i
722 i += 1
715 i += 1
723
716
724 # Drop synthetic changesets (safe now that we have ensured no other
717 # Drop synthetic changesets (safe now that we have ensured no other
725 # changesets can have them as parents).
718 # changesets can have them as parents).
726 i = 0
719 i = 0
727 while i < len(changesets):
720 while i < len(changesets):
728 if changesets[i].synthetic:
721 if changesets[i].synthetic:
729 del changesets[i]
722 del changesets[i]
730 else:
723 else:
731 i += 1
724 i += 1
732
725
733 # Number changesets
726 # Number changesets
734
727
735 for i, c in enumerate(changesets):
728 for i, c in enumerate(changesets):
736 c.id = i + 1
729 c.id = i + 1
737
730
738 ui.status(_('%d changeset entries\n') % len(changesets))
731 ui.status(_('%d changeset entries\n') % len(changesets))
739
732
740 return changesets
733 return changesets
741
734
742
735
743 def debugcvsps(ui, *args, **opts):
736 def debugcvsps(ui, *args, **opts):
744 '''Read CVS rlog for current directory or named path in
737 '''Read CVS rlog for current directory or named path in
745 repository, and convert the log to changesets based on matching
738 repository, and convert the log to changesets based on matching
746 commit log entries and dates.
739 commit log entries and dates.
747 '''
740 '''
748 if opts["new_cache"]:
741 if opts["new_cache"]:
749 cache = "write"
742 cache = "write"
750 elif opts["update_cache"]:
743 elif opts["update_cache"]:
751 cache = "update"
744 cache = "update"
752 else:
745 else:
753 cache = None
746 cache = None
754
747
755 revisions = opts["revisions"]
748 revisions = opts["revisions"]
756
749
757 try:
750 try:
758 if args:
751 if args:
759 log = []
752 log = []
760 for d in args:
753 for d in args:
761 log += createlog(ui, d, root=opts["root"], cache=cache)
754 log += createlog(ui, d, root=opts["root"], cache=cache)
762 else:
755 else:
763 log = createlog(ui, root=opts["root"], cache=cache)
756 log = createlog(ui, root=opts["root"], cache=cache)
764 except logerror, e:
757 except logerror, e:
765 ui.write("%r\n"%e)
758 ui.write("%r\n"%e)
766 return
759 return
767
760
768 changesets = createchangeset(ui, log, opts["fuzz"])
761 changesets = createchangeset(ui, log, opts["fuzz"])
769 del log
762 del log
770
763
771 # Print changesets (optionally filtered)
764 # Print changesets (optionally filtered)
772
765
773 off = len(revisions)
766 off = len(revisions)
774 branches = {} # latest version number in each branch
767 branches = {} # latest version number in each branch
775 ancestors = {} # parent branch
768 ancestors = {} # parent branch
776 for cs in changesets:
769 for cs in changesets:
777
770
778 if opts["ancestors"]:
771 if opts["ancestors"]:
779 if cs.branch not in branches and cs.parents and cs.parents[0].id:
772 if cs.branch not in branches and cs.parents and cs.parents[0].id:
780 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
773 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
781 cs.parents[0].id)
774 cs.parents[0].id)
782 branches[cs.branch] = cs.id
775 branches[cs.branch] = cs.id
783
776
784 # limit by branches
777 # limit by branches
785 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
778 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
786 continue
779 continue
787
780
788 if not off:
781 if not off:
789 # Note: trailing spaces on several lines here are needed to have
782 # Note: trailing spaces on several lines here are needed to have
790 # bug-for-bug compatibility with cvsps.
783 # bug-for-bug compatibility with cvsps.
791 ui.write('---------------------\n')
784 ui.write('---------------------\n')
792 ui.write('PatchSet %d \n' % cs.id)
785 ui.write('PatchSet %d \n' % cs.id)
793 ui.write('Date: %s\n' % util.datestr(cs.date,
786 ui.write('Date: %s\n' % util.datestr(cs.date,
794 '%Y/%m/%d %H:%M:%S %1%2'))
787 '%Y/%m/%d %H:%M:%S %1%2'))
795 ui.write('Author: %s\n' % cs.author)
788 ui.write('Author: %s\n' % cs.author)
796 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
789 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
797 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
790 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
798 ','.join(cs.tags) or '(none)'))
791 ','.join(cs.tags) or '(none)'))
799 branchpoints = getattr(cs, 'branchpoints', None)
792 branchpoints = getattr(cs, 'branchpoints', None)
800 if branchpoints:
793 if branchpoints:
801 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
794 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
802 if opts["parents"] and cs.parents:
795 if opts["parents"] and cs.parents:
803 if len(cs.parents)>1:
796 if len(cs.parents)>1:
804 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
797 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
805 else:
798 else:
806 ui.write('Parent: %d\n' % cs.parents[0].id)
799 ui.write('Parent: %d\n' % cs.parents[0].id)
807
800
808 if opts["ancestors"]:
801 if opts["ancestors"]:
809 b = cs.branch
802 b = cs.branch
810 r = []
803 r = []
811 while b:
804 while b:
812 b, c = ancestors[b]
805 b, c = ancestors[b]
813 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
806 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
814 if r:
807 if r:
815 ui.write('Ancestors: %s\n' % (','.join(r)))
808 ui.write('Ancestors: %s\n' % (','.join(r)))
816
809
817 ui.write('Log:\n')
810 ui.write('Log:\n')
818 ui.write('%s\n\n' % cs.comment)
811 ui.write('%s\n\n' % cs.comment)
819 ui.write('Members: \n')
812 ui.write('Members: \n')
820 for f in cs.entries:
813 for f in cs.entries:
821 fn = f.file
814 fn = f.file
822 if fn.startswith(opts["prefix"]):
815 if fn.startswith(opts["prefix"]):
823 fn = fn[len(opts["prefix"]):]
816 fn = fn[len(opts["prefix"]):]
824 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
817 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
825 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
818 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
826 ui.write('\n')
819 ui.write('\n')
827
820
828 # have we seen the start tag?
821 # have we seen the start tag?
829 if revisions and off:
822 if revisions and off:
830 if revisions[0] == str(cs.id) or \
823 if revisions[0] == str(cs.id) or \
831 revisions[0] in cs.tags:
824 revisions[0] in cs.tags:
832 off = False
825 off = False
833
826
834 # see if we reached the end tag
827 # see if we reached the end tag
835 if len(revisions)>1 and not off:
828 if len(revisions)>1 and not off:
836 if revisions[1] == str(cs.id) or \
829 if revisions[1] == str(cs.id) or \
837 revisions[1] in cs.tags:
830 revisions[1] in cs.tags:
838 break
831 break
# mq.py - patch queues for mercurial
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

'''manage a stack of patches

This extension lets you work with a stack of patches in a Mercurial
repository. It manages two stacks of patches - all known patches, and
applied patches (subset of known patches).

Known patches are represented as patch files in the .hg/patches
directory. Applied patches are both patch files and changesets.

Common tasks (use "hg help command" for more details):

prepare repository to work with patches   qinit
create new patch                          qnew
import existing patch                     qimport

print patch series                        qseries
print applied patches                     qapplied
print name of top applied patch           qtop

add known patch to applied stack          qpush
remove patch from applied stack           qpop
refresh contents of top applied patch     qrefresh
'''
31
31
from mercurial.i18n import _
from mercurial.node import bin, hex, short, nullid, nullrev
from mercurial.lock import release
from mercurial import commands, cmdutil, hg, patch, util
from mercurial import repair, extensions, url, error
import os, sys, re, errno

# qclone makes sense outside a repository (it creates one).
commands.norepo += " qclone"

# Patch names looks like unix-file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath
44
44
class statusentry(object):
    """One applied-patch record from the .hg/patches/status file.

    Each entry maps a changeset hash (rev) to a patch name and is
    serialized as a "rev:name" line.
    """
    def __init__(self, rev, name=None):
        if not name:
            # parse a serialized "rev:name" line; split only on the
            # first ':' so patch names may contain colons
            fields = rev.split(':', 1)
            if len(fields) == 2:
                self.rev, self.name = fields
            else:
                # malformed line: leave both fields unset
                self.rev, self.name = None, None
        else:
            self.rev, self.name = rev, name

    def __str__(self):
        # inverse of the parsing constructor above
        return self.rev + ':' + self.name
58
58
class patchheader(object):
    """Parsed header of an mq patch file.

    Extracts the free-form comments, user, date and commit message from
    the text preceding the diff, understanding both 'hg export' style
    ('# HG changeset patch') and mail-style ('From:'/'Subject:') headers.
    """
    def __init__(self, pf):
        def eatdiff(lines):
            # drop trailing diff leader lines left above the patch body
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            # drop trailing blank lines
            while lines:
                l = lines[-1]
                if re.match('\s*$', l):
                    del lines[-1]
                else:
                    break

        message = []
        comments = []
        user = None
        date = None
        format = None
        subject = None
        diffstart = 0

        for line in file(pf):  # NOTE: py2 builtin file()
            line = line.rstrip()
            if line.startswith('diff --git'):
                diffstart = 2
                break
            if diffstart:
                # after a '--- ' line, a '+++ ' confirms a real diff
                if line.startswith('+++ '):
                    diffstart = 2
                break
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
                comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)

        self.message = message      # commit message lines
        self.comments = comments    # full header text lines
        self.user = user
        self.date = date
        self.haspatch = diffstart > 1   # True when an actual diff follows

    def setuser(self, user):
        """Set the patch author, rewriting or inserting the header line."""
        if not self.updateheader(['From: ', '# User '], user):
            try:
                # insert right after the '# HG changeset patch' marker
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# User ' + user)
            except ValueError:
                self.comments = ['From: ' + user, ''] + self.comments
        self.user = user

    def setdate(self, date):
        """Set the patch date; only takes effect if a '# Date' header exists."""
        if self.updateheader(['# Date '], date):
            self.date = date

    def setmessage(self, message):
        """Replace the commit message, keeping other header fields."""
        if self.comments:
            self._delmsg()
        self.message = [message]
        self.comments += self.message

    def updateheader(self, prefixes, new):
        '''Update all references to a field in the patch header.
        Return whether the field is present.'''
        res = False
        for prefix in prefixes:
            for i in xrange(len(self.comments)):
                if self.comments[i].startswith(prefix):
                    self.comments[i] = prefix + new
                    res = True
                    break
        return res

    def __str__(self):
        if not self.comments:
            return ''
        return '\n'.join(self.comments) + '\n\n'

    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    self.message = self.message[2:]
                    break
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]
193
193
194 class queue(object):
194 class queue(object):
195 def __init__(self, ui, path, patchdir=None):
195 def __init__(self, ui, path, patchdir=None):
196 self.basepath = path
196 self.basepath = path
197 self.path = patchdir or os.path.join(path, "patches")
197 self.path = patchdir or os.path.join(path, "patches")
198 self.opener = util.opener(self.path)
198 self.opener = util.opener(self.path)
199 self.ui = ui
199 self.ui = ui
200 self.applied_dirty = 0
200 self.applied_dirty = 0
201 self.series_dirty = 0
201 self.series_dirty = 0
202 self.series_path = "series"
202 self.series_path = "series"
203 self.status_path = "status"
203 self.status_path = "status"
204 self.guards_path = "guards"
204 self.guards_path = "guards"
205 self.active_guards = None
205 self.active_guards = None
206 self.guards_dirty = False
206 self.guards_dirty = False
207 self._diffopts = None
207 self._diffopts = None
208
208
209 @util.propertycache
209 @util.propertycache
210 def applied(self):
210 def applied(self):
211 if os.path.exists(self.join(self.status_path)):
211 if os.path.exists(self.join(self.status_path)):
212 lines = self.opener(self.status_path).read().splitlines()
212 lines = self.opener(self.status_path).read().splitlines()
213 return [statusentry(l) for l in lines]
213 return [statusentry(l) for l in lines]
214 return []
214 return []
215
215
216 @util.propertycache
216 @util.propertycache
217 def full_series(self):
217 def full_series(self):
218 if os.path.exists(self.join(self.series_path)):
218 if os.path.exists(self.join(self.series_path)):
219 return self.opener(self.series_path).read().splitlines()
219 return self.opener(self.series_path).read().splitlines()
220 return []
220 return []
221
221
222 @util.propertycache
222 @util.propertycache
223 def series(self):
223 def series(self):
224 self.parse_series()
224 self.parse_series()
225 return self.series
225 return self.series
226
226
227 @util.propertycache
227 @util.propertycache
228 def series_guards(self):
228 def series_guards(self):
229 self.parse_series()
229 self.parse_series()
230 return self.series_guards
230 return self.series_guards
231
231
232 def invalidate(self):
232 def invalidate(self):
233 for a in 'applied full_series series series_guards'.split():
233 for a in 'applied full_series series series_guards'.split():
234 if a in self.__dict__:
234 if a in self.__dict__:
235 delattr(self, a)
235 delattr(self, a)
236 self.applied_dirty = 0
236 self.applied_dirty = 0
237 self.series_dirty = 0
237 self.series_dirty = 0
238 self.guards_dirty = False
238 self.guards_dirty = False
239 self.active_guards = None
239 self.active_guards = None
240
240
241 def diffopts(self):
241 def diffopts(self):
242 if self._diffopts is None:
242 if self._diffopts is None:
243 self._diffopts = patch.diffopts(self.ui)
243 self._diffopts = patch.diffopts(self.ui)
244 return self._diffopts
244 return self._diffopts
245
245
246 def join(self, *p):
246 def join(self, *p):
247 return os.path.join(self.path, *p)
247 return os.path.join(self.path, *p)
248
248
249 def find_series(self, patch):
249 def find_series(self, patch):
250 pre = re.compile("(\s*)([^#]+)")
250 pre = re.compile("(\s*)([^#]+)")
251 index = 0
251 index = 0
252 for l in self.full_series:
252 for l in self.full_series:
253 m = pre.match(l)
253 m = pre.match(l)
254 if m:
254 if m:
255 s = m.group(2)
255 s = m.group(2)
256 s = s.rstrip()
256 s = s.rstrip()
257 if s == patch:
257 if s == patch:
258 return index
258 return index
259 index += 1
259 index += 1
260 return None
260 return None
261
261
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
263
263
264 def parse_series(self):
264 def parse_series(self):
265 self.series = []
265 self.series = []
266 self.series_guards = []
266 self.series_guards = []
267 for l in self.full_series:
267 for l in self.full_series:
268 h = l.find('#')
268 h = l.find('#')
269 if h == -1:
269 if h == -1:
270 patch = l
270 patch = l
271 comment = ''
271 comment = ''
272 elif h == 0:
272 elif h == 0:
273 continue
273 continue
274 else:
274 else:
275 patch = l[:h]
275 patch = l[:h]
276 comment = l[h:]
276 comment = l[h:]
277 patch = patch.strip()
277 patch = patch.strip()
278 if patch:
278 if patch:
279 if patch in self.series:
279 if patch in self.series:
280 raise util.Abort(_('%s appears more than once in %s') %
280 raise util.Abort(_('%s appears more than once in %s') %
281 (patch, self.join(self.series_path)))
281 (patch, self.join(self.series_path)))
282 self.series.append(patch)
282 self.series.append(patch)
283 self.series_guards.append(self.guard_re.findall(comment))
283 self.series_guards.append(self.guard_re.findall(comment))
284
284
285 def check_guard(self, guard):
285 def check_guard(self, guard):
286 if not guard:
286 if not guard:
287 return _('guard cannot be an empty string')
287 return _('guard cannot be an empty string')
288 bad_chars = '# \t\r\n\f'
288 bad_chars = '# \t\r\n\f'
289 first = guard[0]
289 first = guard[0]
290 if first in '-+':
290 if first in '-+':
291 return (_('guard %r starts with invalid character: %r') %
291 return (_('guard %r starts with invalid character: %r') %
292 (guard, first))
292 (guard, first))
293 for c in bad_chars:
293 for c in bad_chars:
294 if c in guard:
294 if c in guard:
295 return _('invalid character in guard %r: %r') % (guard, c)
295 return _('invalid character in guard %r: %r') % (guard, c)
296
296
297 def set_active(self, guards):
297 def set_active(self, guards):
298 for guard in guards:
298 for guard in guards:
299 bad = self.check_guard(guard)
299 bad = self.check_guard(guard)
300 if bad:
300 if bad:
301 raise util.Abort(bad)
301 raise util.Abort(bad)
302 guards = sorted(set(guards))
302 guards = sorted(set(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
304 self.active_guards = guards
304 self.active_guards = guards
305 self.guards_dirty = True
305 self.guards_dirty = True
306
306
307 def active(self):
307 def active(self):
308 if self.active_guards is None:
308 if self.active_guards is None:
309 self.active_guards = []
309 self.active_guards = []
310 try:
310 try:
311 guards = self.opener(self.guards_path).read().split()
311 guards = self.opener(self.guards_path).read().split()
312 except IOError, err:
312 except IOError, err:
313 if err.errno != errno.ENOENT: raise
313 if err.errno != errno.ENOENT: raise
314 guards = []
314 guards = []
315 for i, guard in enumerate(guards):
315 for i, guard in enumerate(guards):
316 bad = self.check_guard(guard)
316 bad = self.check_guard(guard)
317 if bad:
317 if bad:
318 self.ui.warn('%s:%d: %s\n' %
318 self.ui.warn('%s:%d: %s\n' %
319 (self.join(self.guards_path), i + 1, bad))
319 (self.join(self.guards_path), i + 1, bad))
320 else:
320 else:
321 self.active_guards.append(guard)
321 self.active_guards.append(guard)
322 return self.active_guards
322 return self.active_guards
323
323
324 def set_guards(self, idx, guards):
324 def set_guards(self, idx, guards):
325 for g in guards:
325 for g in guards:
326 if len(g) < 2:
326 if len(g) < 2:
327 raise util.Abort(_('guard %r too short') % g)
327 raise util.Abort(_('guard %r too short') % g)
328 if g[0] not in '-+':
328 if g[0] not in '-+':
329 raise util.Abort(_('guard %r starts with invalid char') % g)
329 raise util.Abort(_('guard %r starts with invalid char') % g)
330 bad = self.check_guard(g[1:])
330 bad = self.check_guard(g[1:])
331 if bad:
331 if bad:
332 raise util.Abort(bad)
332 raise util.Abort(bad)
333 drop = self.guard_re.sub('', self.full_series[idx])
333 drop = self.guard_re.sub('', self.full_series[idx])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
335 self.parse_series()
335 self.parse_series()
336 self.series_dirty = True
336 self.series_dirty = True
337
337
338 def pushable(self, idx):
338 def pushable(self, idx):
339 if isinstance(idx, str):
339 if isinstance(idx, str):
340 idx = self.series.index(idx)
340 idx = self.series.index(idx)
341 patchguards = self.series_guards[idx]
341 patchguards = self.series_guards[idx]
342 if not patchguards:
342 if not patchguards:
343 return True, None
343 return True, None
344 guards = self.active()
344 guards = self.active()
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
346 if exactneg:
346 if exactneg:
347 return False, exactneg[0]
347 return False, exactneg[0]
348 pos = [g for g in patchguards if g[0] == '+']
348 pos = [g for g in patchguards if g[0] == '+']
349 exactpos = [g for g in pos if g[1:] in guards]
349 exactpos = [g for g in pos if g[1:] in guards]
350 if pos:
350 if pos:
351 if exactpos:
351 if exactpos:
352 return True, exactpos[0]
352 return True, exactpos[0]
353 return False, pos
353 return False, pos
354 return True, ''
354 return True, ''
355
355
356 def explain_pushable(self, idx, all_patches=False):
356 def explain_pushable(self, idx, all_patches=False):
357 write = all_patches and self.ui.write or self.ui.warn
357 write = all_patches and self.ui.write or self.ui.warn
358 if all_patches or self.ui.verbose:
358 if all_patches or self.ui.verbose:
359 if isinstance(idx, str):
359 if isinstance(idx, str):
360 idx = self.series.index(idx)
360 idx = self.series.index(idx)
361 pushable, why = self.pushable(idx)
361 pushable, why = self.pushable(idx)
362 if all_patches and pushable:
362 if all_patches and pushable:
363 if why is None:
363 if why is None:
364 write(_('allowing %s - no guards in effect\n') %
364 write(_('allowing %s - no guards in effect\n') %
365 self.series[idx])
365 self.series[idx])
366 else:
366 else:
367 if not why:
367 if not why:
368 write(_('allowing %s - no matching negative guards\n') %
368 write(_('allowing %s - no matching negative guards\n') %
369 self.series[idx])
369 self.series[idx])
370 else:
370 else:
371 write(_('allowing %s - guarded by %r\n') %
371 write(_('allowing %s - guarded by %r\n') %
372 (self.series[idx], why))
372 (self.series[idx], why))
373 if not pushable:
373 if not pushable:
374 if why:
374 if why:
375 write(_('skipping %s - guarded by %r\n') %
375 write(_('skipping %s - guarded by %r\n') %
376 (self.series[idx], why))
376 (self.series[idx], why))
377 else:
377 else:
378 write(_('skipping %s - no matching guards\n') %
378 write(_('skipping %s - no matching guards\n') %
379 self.series[idx])
379 self.series[idx])
380
380
381 def save_dirty(self):
381 def save_dirty(self):
382 def write_list(items, path):
382 def write_list(items, path):
383 fp = self.opener(path, 'w')
383 fp = self.opener(path, 'w')
384 for i in items:
384 for i in items:
385 fp.write("%s\n" % i)
385 fp.write("%s\n" % i)
386 fp.close()
386 fp.close()
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
390
390
391 def removeundo(self, repo):
391 def removeundo(self, repo):
392 undo = repo.sjoin('undo')
392 undo = repo.sjoin('undo')
393 if not os.path.exists(undo):
393 if not os.path.exists(undo):
394 return
394 return
395 try:
395 try:
396 os.unlink(undo)
396 os.unlink(undo)
397 except OSError, inst:
397 except OSError, inst:
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
399
399
400 def printdiff(self, repo, node1, node2=None, files=None,
400 def printdiff(self, repo, node1, node2=None, files=None,
401 fp=None, changes=None, opts={}):
401 fp=None, changes=None, opts={}):
402 m = cmdutil.match(repo, files, opts)
402 m = cmdutil.match(repo, files, opts)
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
404 write = fp is None and repo.ui.write or fp.write
404 write = fp is None and repo.ui.write or fp.write
405 for chunk in chunks:
405 for chunk in chunks:
406 write(chunk)
406 write(chunk)
407
407
408 def mergeone(self, repo, mergeq, head, patch, rev):
408 def mergeone(self, repo, mergeq, head, patch, rev):
409 # first try just applying the patch
409 # first try just applying the patch
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
411 strict=True, merge=rev)
411 strict=True, merge=rev)
412
412
413 if err == 0:
413 if err == 0:
414 return (err, n)
414 return (err, n)
415
415
416 if n is None:
416 if n is None:
417 raise util.Abort(_("apply failed for patch %s") % patch)
417 raise util.Abort(_("apply failed for patch %s") % patch)
418
418
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
420
420
421 # apply failed, strip away that rev and merge.
421 # apply failed, strip away that rev and merge.
422 hg.clean(repo, head)
422 hg.clean(repo, head)
423 self.strip(repo, n, update=False, backup='strip')
423 self.strip(repo, n, update=False, backup='strip')
424
424
425 ctx = repo[rev]
425 ctx = repo[rev]
426 ret = hg.merge(repo, rev)
426 ret = hg.merge(repo, rev)
427 if ret:
427 if ret:
428 raise util.Abort(_("update returned %d") % ret)
428 raise util.Abort(_("update returned %d") % ret)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
430 if n is None:
430 if n is None:
431 raise util.Abort(_("repo commit failed"))
431 raise util.Abort(_("repo commit failed"))
432 try:
432 try:
433 ph = patchheader(mergeq.join(patch))
433 ph = patchheader(mergeq.join(patch))
434 except:
434 except:
435 raise util.Abort(_("unable to read %s") % patch)
435 raise util.Abort(_("unable to read %s") % patch)
436
436
437 patchf = self.opener(patch, "w")
437 patchf = self.opener(patch, "w")
438 comments = str(ph)
438 comments = str(ph)
439 if comments:
439 if comments:
440 patchf.write(comments)
440 patchf.write(comments)
441 self.printdiff(repo, head, n, fp=patchf)
441 self.printdiff(repo, head, n, fp=patchf)
442 patchf.close()
442 patchf.close()
443 self.removeundo(repo)
443 self.removeundo(repo)
444 return (0, n)
444 return (0, n)
445
445
446 def qparents(self, repo, rev=None):
446 def qparents(self, repo, rev=None):
447 if rev is None:
447 if rev is None:
448 (p1, p2) = repo.dirstate.parents()
448 (p1, p2) = repo.dirstate.parents()
449 if p2 == nullid:
449 if p2 == nullid:
450 return p1
450 return p1
451 if len(self.applied) == 0:
451 if len(self.applied) == 0:
452 return None
452 return None
453 return bin(self.applied[-1].rev)
453 return bin(self.applied[-1].rev)
454 pp = repo.changelog.parents(rev)
454 pp = repo.changelog.parents(rev)
455 if pp[1] != nullid:
455 if pp[1] != nullid:
456 arevs = [ x.rev for x in self.applied ]
456 arevs = [ x.rev for x in self.applied ]
457 p0 = hex(pp[0])
457 p0 = hex(pp[0])
458 p1 = hex(pp[1])
458 p1 = hex(pp[1])
459 if p0 in arevs:
459 if p0 in arevs:
460 return pp[0]
460 return pp[0]
461 if p1 in arevs:
461 if p1 in arevs:
462 return pp[1]
462 return pp[1]
463 return pp[0]
463 return pp[0]
464
464
465 def mergepatch(self, repo, mergeq, series):
465 def mergepatch(self, repo, mergeq, series):
466 if len(self.applied) == 0:
466 if len(self.applied) == 0:
467 # each of the patches merged in will have two parents. This
467 # each of the patches merged in will have two parents. This
468 # can confuse the qrefresh, qdiff, and strip code because it
468 # can confuse the qrefresh, qdiff, and strip code because it
469 # needs to know which parent is actually in the patch queue.
469 # needs to know which parent is actually in the patch queue.
470 # so, we insert a merge marker with only one parent. This way
470 # so, we insert a merge marker with only one parent. This way
471 # the first patch in the queue is never a merge patch
471 # the first patch in the queue is never a merge patch
472 #
472 #
473 pname = ".hg.patches.merge.marker"
473 pname = ".hg.patches.merge.marker"
474 n = repo.commit('[mq]: merge marker', force=True)
474 n = repo.commit('[mq]: merge marker', force=True)
475 self.removeundo(repo)
475 self.removeundo(repo)
476 self.applied.append(statusentry(hex(n), pname))
476 self.applied.append(statusentry(hex(n), pname))
477 self.applied_dirty = 1
477 self.applied_dirty = 1
478
478
479 head = self.qparents(repo)
479 head = self.qparents(repo)
480
480
481 for patch in series:
481 for patch in series:
482 patch = mergeq.lookup(patch, strict=True)
482 patch = mergeq.lookup(patch, strict=True)
483 if not patch:
483 if not patch:
484 self.ui.warn(_("patch %s does not exist\n") % patch)
484 self.ui.warn(_("patch %s does not exist\n") % patch)
485 return (1, None)
485 return (1, None)
486 pushable, reason = self.pushable(patch)
486 pushable, reason = self.pushable(patch)
487 if not pushable:
487 if not pushable:
488 self.explain_pushable(patch, all_patches=True)
488 self.explain_pushable(patch, all_patches=True)
489 continue
489 continue
490 info = mergeq.isapplied(patch)
490 info = mergeq.isapplied(patch)
491 if not info:
491 if not info:
492 self.ui.warn(_("patch %s is not applied\n") % patch)
492 self.ui.warn(_("patch %s is not applied\n") % patch)
493 return (1, None)
493 return (1, None)
494 rev = bin(info[1])
494 rev = bin(info[1])
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
496 if head:
496 if head:
497 self.applied.append(statusentry(hex(head), patch))
497 self.applied.append(statusentry(hex(head), patch))
498 self.applied_dirty = 1
498 self.applied_dirty = 1
499 if err:
499 if err:
500 return (err, head)
500 return (err, head)
501 self.save_dirty()
501 self.save_dirty()
502 return (0, head)
502 return (0, head)
503
503
504 def patch(self, repo, patchfile):
504 def patch(self, repo, patchfile):
505 '''Apply patchfile to the working directory.
505 '''Apply patchfile to the working directory.
506 patchfile: name of patch file'''
506 patchfile: name of patch file'''
507 files = {}
507 files = {}
508 try:
508 try:
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
510 files=files, eolmode=None)
510 files=files, eolmode=None)
511 except Exception, inst:
511 except Exception, inst:
512 self.ui.note(str(inst) + '\n')
512 self.ui.note(str(inst) + '\n')
513 if not self.ui.verbose:
513 if not self.ui.verbose:
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
515 return (False, files, False)
515 return (False, files, False)
516
516
517 return (True, files, fuzz)
517 return (True, files, fuzz)
518
518
519 def apply(self, repo, series, list=False, update_status=True,
519 def apply(self, repo, series, list=False, update_status=True,
520 strict=False, patchdir=None, merge=None, all_files={}):
520 strict=False, patchdir=None, merge=None, all_files={}):
521 wlock = lock = tr = None
521 wlock = lock = tr = None
522 try:
522 try:
523 wlock = repo.wlock()
523 wlock = repo.wlock()
524 lock = repo.lock()
524 lock = repo.lock()
525 tr = repo.transaction()
525 tr = repo.transaction()
526 try:
526 try:
527 ret = self._apply(repo, series, list, update_status,
527 ret = self._apply(repo, series, list, update_status,
528 strict, patchdir, merge, all_files=all_files)
528 strict, patchdir, merge, all_files=all_files)
529 tr.close()
529 tr.close()
530 self.save_dirty()
530 self.save_dirty()
531 return ret
531 return ret
532 except:
532 except:
533 try:
533 try:
534 tr.abort()
534 tr.abort()
535 finally:
535 finally:
536 repo.invalidate()
536 repo.invalidate()
537 repo.dirstate.invalidate()
537 repo.dirstate.invalidate()
538 raise
538 raise
539 finally:
539 finally:
540 del tr
540 del tr
541 release(lock, wlock)
541 release(lock, wlock)
542 self.removeundo(repo)
542 self.removeundo(repo)
543
543
544 def _apply(self, repo, series, list=False, update_status=True,
544 def _apply(self, repo, series, list=False, update_status=True,
545 strict=False, patchdir=None, merge=None, all_files={}):
545 strict=False, patchdir=None, merge=None, all_files={}):
546 '''returns (error, hash)
546 '''returns (error, hash)
547 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
547 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
548 # TODO unify with commands.py
548 # TODO unify with commands.py
549 if not patchdir:
549 if not patchdir:
550 patchdir = self.path
550 patchdir = self.path
551 err = 0
551 err = 0
552 n = None
552 n = None
553 for patchname in series:
553 for patchname in series:
554 pushable, reason = self.pushable(patchname)
554 pushable, reason = self.pushable(patchname)
555 if not pushable:
555 if not pushable:
556 self.explain_pushable(patchname, all_patches=True)
556 self.explain_pushable(patchname, all_patches=True)
557 continue
557 continue
558 self.ui.warn(_("applying %s\n") % patchname)
558 self.ui.warn(_("applying %s\n") % patchname)
559 pf = os.path.join(patchdir, patchname)
559 pf = os.path.join(patchdir, patchname)
560
560
561 try:
561 try:
562 ph = patchheader(self.join(patchname))
562 ph = patchheader(self.join(patchname))
563 except:
563 except:
564 self.ui.warn(_("unable to read %s\n") % patchname)
564 self.ui.warn(_("unable to read %s\n") % patchname)
565 err = 1
565 err = 1
566 break
566 break
567
567
568 message = ph.message
568 message = ph.message
569 if not message:
569 if not message:
570 message = _("imported patch %s\n") % patchname
570 message = _("imported patch %s\n") % patchname
571 else:
571 else:
572 if list:
572 if list:
573 message.append(_("\nimported patch %s") % patchname)
573 message.append(_("\nimported patch %s") % patchname)
574 message = '\n'.join(message)
574 message = '\n'.join(message)
575
575
576 if ph.haspatch:
576 if ph.haspatch:
577 (patcherr, files, fuzz) = self.patch(repo, pf)
577 (patcherr, files, fuzz) = self.patch(repo, pf)
578 all_files.update(files)
578 all_files.update(files)
579 patcherr = not patcherr
579 patcherr = not patcherr
580 else:
580 else:
581 self.ui.warn(_("patch %s is empty\n") % patchname)
581 self.ui.warn(_("patch %s is empty\n") % patchname)
582 patcherr, files, fuzz = 0, [], 0
582 patcherr, files, fuzz = 0, [], 0
583
583
584 if merge and files:
584 if merge and files:
585 # Mark as removed/merged and update dirstate parent info
585 # Mark as removed/merged and update dirstate parent info
586 removed = []
586 removed = []
587 merged = []
587 merged = []
588 for f in files:
588 for f in files:
589 if os.path.exists(repo.wjoin(f)):
589 if os.path.exists(repo.wjoin(f)):
590 merged.append(f)
590 merged.append(f)
591 else:
591 else:
592 removed.append(f)
592 removed.append(f)
593 for f in removed:
593 for f in removed:
594 repo.dirstate.remove(f)
594 repo.dirstate.remove(f)
595 for f in merged:
595 for f in merged:
596 repo.dirstate.merge(f)
596 repo.dirstate.merge(f)
597 p1, p2 = repo.dirstate.parents()
597 p1, p2 = repo.dirstate.parents()
598 repo.dirstate.setparents(p1, merge)
598 repo.dirstate.setparents(p1, merge)
599
599
600 files = patch.updatedir(self.ui, repo, files)
600 files = patch.updatedir(self.ui, repo, files)
601 match = cmdutil.matchfiles(repo, files or [])
601 match = cmdutil.matchfiles(repo, files or [])
602 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
602 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
603
603
604 if n is None:
604 if n is None:
605 raise util.Abort(_("repo commit failed"))
605 raise util.Abort(_("repo commit failed"))
606
606
607 if update_status:
607 if update_status:
608 self.applied.append(statusentry(hex(n), patchname))
608 self.applied.append(statusentry(hex(n), patchname))
609
609
610 if patcherr:
610 if patcherr:
611 self.ui.warn(_("patch failed, rejects left in working dir\n"))
611 self.ui.warn(_("patch failed, rejects left in working dir\n"))
612 err = 2
612 err = 2
613 break
613 break
614
614
615 if fuzz and strict:
615 if fuzz and strict:
616 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
616 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
617 err = 3
617 err = 3
618 break
618 break
619 return (err, n)
619 return (err, n)
620
620
621 def _cleanup(self, patches, numrevs, keep=False):
621 def _cleanup(self, patches, numrevs, keep=False):
622 if not keep:
622 if not keep:
623 r = self.qrepo()
623 r = self.qrepo()
624 if r:
624 if r:
625 r.remove(patches, True)
625 r.remove(patches, True)
626 else:
626 else:
627 for p in patches:
627 for p in patches:
628 os.unlink(self.join(p))
628 os.unlink(self.join(p))
629
629
630 if numrevs:
630 if numrevs:
631 del self.applied[:numrevs]
631 del self.applied[:numrevs]
632 self.applied_dirty = 1
632 self.applied_dirty = 1
633
633
634 for i in sorted([self.find_series(p) for p in patches], reverse=True):
634 for i in sorted([self.find_series(p) for p in patches], reverse=True):
635 del self.full_series[i]
635 del self.full_series[i]
636 self.parse_series()
636 self.parse_series()
637 self.series_dirty = 1
637 self.series_dirty = 1
638
638
639 def _revpatches(self, repo, revs):
639 def _revpatches(self, repo, revs):
640 firstrev = repo[self.applied[0].rev].rev()
640 firstrev = repo[self.applied[0].rev].rev()
641 patches = []
641 patches = []
642 for i, rev in enumerate(revs):
642 for i, rev in enumerate(revs):
643
643
644 if rev < firstrev:
644 if rev < firstrev:
645 raise util.Abort(_('revision %d is not managed') % rev)
645 raise util.Abort(_('revision %d is not managed') % rev)
646
646
647 ctx = repo[rev]
647 ctx = repo[rev]
648 base = bin(self.applied[i].rev)
648 base = bin(self.applied[i].rev)
649 if ctx.node() != base:
649 if ctx.node() != base:
650 msg = _('cannot delete revision %d above applied patches')
650 msg = _('cannot delete revision %d above applied patches')
651 raise util.Abort(msg % rev)
651 raise util.Abort(msg % rev)
652
652
653 patch = self.applied[i].name
653 patch = self.applied[i].name
654 for fmt in ('[mq]: %s', 'imported patch %s'):
654 for fmt in ('[mq]: %s', 'imported patch %s'):
655 if ctx.description() == fmt % patch:
655 if ctx.description() == fmt % patch:
656 msg = _('patch %s finalized without changeset message\n')
656 msg = _('patch %s finalized without changeset message\n')
657 repo.ui.status(msg % patch)
657 repo.ui.status(msg % patch)
658 break
658 break
659
659
660 patches.append(patch)
660 patches.append(patch)
661 return patches
661 return patches
662
662
663 def finish(self, repo, revs):
663 def finish(self, repo, revs):
664 patches = self._revpatches(repo, sorted(revs))
664 patches = self._revpatches(repo, sorted(revs))
665 self._cleanup(patches, len(patches))
665 self._cleanup(patches, len(patches))
666
666
    def delete(self, repo, patches, opts):
        # Delete named (unapplied) patches and/or, via opts['rev'],
        # previously finalized revisions from the queue.  Aborts rather
        # than delete an applied patch or an unknown name.
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            # strict lookup: only offsets/exact names, no fuzzy matching
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                # normalize a descending range to ascending order, since
                # _revpatches() walks applied patches bottom-up
                revs.reverse()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
694
694
695 def check_toppatch(self, repo):
695 def check_toppatch(self, repo):
696 if len(self.applied) > 0:
696 if len(self.applied) > 0:
697 top = bin(self.applied[-1].rev)
697 top = bin(self.applied[-1].rev)
698 pp = repo.dirstate.parents()
698 pp = repo.dirstate.parents()
699 if top not in pp:
699 if top not in pp:
700 raise util.Abort(_("working directory revision is not qtip"))
700 raise util.Abort(_("working directory revision is not qtip"))
701 return top
701 return top
702 return None
702 return None
703 def check_localchanges(self, repo, force=False, refresh=True):
703 def check_localchanges(self, repo, force=False, refresh=True):
704 m, a, r, d = repo.status()[:4]
704 m, a, r, d = repo.status()[:4]
705 if m or a or r or d:
705 if m or a or r or d:
706 if not force:
706 if not force:
707 if refresh:
707 if refresh:
708 raise util.Abort(_("local changes found, refresh first"))
708 raise util.Abort(_("local changes found, refresh first"))
709 else:
709 else:
710 raise util.Abort(_("local changes found"))
710 raise util.Abort(_("local changes found"))
711 return m, a, r, d
711 return m, a, r, d
712
712
    # patch names that would shadow mq's own control files in the patch dir
    _reserved = ('series', 'status', 'guards')
    def check_reserved_name(self, name):
        # Abort when 'name' may not be used as a patch name: either it is
        # one of mq's reserved control-file names, or it begins with the
        # internal '.hg'/'.mq' prefixes.
        if (name in self._reserved or name.startswith('.hg')
            or name.startswith('.mq')):
            raise util.Abort(_('"%s" cannot be used as the name of a patch')
                             % name)
719
719
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch named patchfn on top of the queue.

        Commits the current local changes (or the files matched by
        pats/include/exclude) as the new patch's changeset and writes the
        patch file.  options:
        msg: a string or a no-argument function returning a string
        """
        msg = opts.get('msg')
        force = opts.get('force')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        self.check_reserved_name(patchfn)
        if os.path.exists(self.join(patchfn)):
            raise util.Abort(_('patch "%s" already exists') % patchfn)
        if opts.get('include') or opts.get('exclude') or pats:
            match = cmdutil.match(repo, pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            m, a, r, d = repo.status(match=match)[:4]
        else:
            m, a, r, d = self.check_localchanges(repo, force)
            match = cmdutil.matchfiles(repo, m + a + r)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        insert = self.full_series_end()
        wlock = repo.wlock()
        try:
            # if patch file write fails, abort early
            p = self.opener(patchfn, "w")
            try:
                # write the header before committing so a failed write
                # aborts before any repo state changes
                if date:
                    p.write("# HG changeset patch\n")
                    if user:
                        p.write("# User " + user + "\n")
                    p.write("# Date %d %d\n\n" % date)
                elif user:
                    p.write("From: " + user + "\n\n")

                if hasattr(msg, '__call__'):
                    msg = msg()
                commitmsg = msg and msg or ("[mq]: %s" % patchfn)
                n = repo.commit(commitmsg, user, date, match=match, force=True)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    # record the new patch in series and applied state
                    self.full_series[insert:insert] = [patchfn]
                    self.applied.append(statusentry(hex(n), patchfn))
                    self.parse_series()
                    self.series_dirty = 1
                    self.applied_dirty = 1
                    if msg:
                        msg = msg + "\n\n"
                        p.write(msg)
                    if commitfiles:
                        diffopts = self.diffopts()
                        if opts.get('git'): diffopts.git = True
                        parent = self.qparents(repo, n)
                        chunks = patch.diff(repo, node1=parent, node2=n,
                                            match=match, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    # release the lock before touching the queue repo
                    wlock.release()
                    wlock = None
                    r = self.qrepo()
                    if r: r.add([patchfn])
                except:
                    # undo the commit made above, then fall through to the
                    # outer handler which removes the patch file
                    repo.rollback()
                    raise
            except Exception:
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)
800
800
    def strip(self, repo, rev, update=True, backup="all", force=None):
        # Strip rev (and its descendants) from the repository via
        # repair.strip, holding both wlock and lock for the duration.
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                # first move the working dir to the stripped revision's
                # queue parent; refuses on local changes unless force
                self.check_localchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, rev)
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            release(lock, wlock)
820
820
821 def isapplied(self, patch):
821 def isapplied(self, patch):
822 """returns (index, rev, patch)"""
822 """returns (index, rev, patch)"""
823 for i, a in enumerate(self.applied):
823 for i, a in enumerate(self.applied):
824 if a.name == patch:
824 if a.name == patch:
825 return (i, a.rev, a.name)
825 return (i, a.rev, a.name)
826 return None
826 return None
827
827
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        patch = patch and str(patch)

        def partial_name(s):
            # Resolve s as an exact series entry, a unique substring of
            # one, or the symbolic names 'qtip'/'qbase'.  Returns None on
            # failure (after warning about ambiguous substrings).
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch is None:
            return None
        if patch in self.series:
            return patch

        # variation 1: a plain integer indexes the series; only taken
        # when no patch file literally has that name
        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                # negative indexes count from the end of the series
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

        if not strict:
            res = partial_name(patch)
            if res:
                return res
            # variation 3a: 'name-N' means N patches before 'name'
            minus = patch.rfind('-')
            if minus >= 0:
                res = partial_name(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus+1:] or 1)
                    except(ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # variation 3b: 'name+N' means N patches after 'name'
            plus = patch.rfind('+')
            if plus >= 0:
                res = partial_name(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus+1:] or 1)
                    except(ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
898
898
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None, all=False):
        # Apply unapplied patches from the series up to (and including)
        # 'patch', or the next one when no patch is given, or the whole
        # series with all=True.  On failure the working directory is
        # reverted and files created while patching are removed.
        wlock = repo.wlock()
        try:
            if repo.dirstate.parents()[0] not in repo.heads():
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            patch = self.lookup(patch)
            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                info = self.isapplied(patch)
                if info:
                    if info[0] < len(self.applied) - 1:
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return
                pushable, reason = self.pushable(patch)
                if not pushable:
                    if reason:
                        reason = _('guarded by %r') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.series_end()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force:
                self.check_localchanges(repo)

            self.applied_dirty = 1
            if start > 0:
                self.check_toppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            s = self.series[start:end]
            # all_files collects every file touched while patching, so the
            # failure path below knows which unknown files to delete
            all_files = {}
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files)
            except:
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.parents()[0]
                hg.revert(repo, node, None)
                unknown = repo.status(unknown=True)[4]
                # only remove unknown files that we know we touched or
                # created while patching
                for f in unknown:
                    if f in all_files:
                        util.unlink(repo.wjoin(f))
                self.ui.warn(_('done\n'))
                raise

            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and refresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]

        finally:
            wlock.release()
989
989
    def pop(self, repo, patch=None, force=False, update=True, all=False):
        # Pop applied patches off the queue until 'patch' is the new top
        # (or everything with all=True, or just the topmost by default),
        # restoring the working directory by hand and stripping the
        # popped changesets.
        def getfile(f, rev, flags):
            # restore f from its filelog into the working directory
            t = repo.file(f).read(rev)
            repo.wwrite(f, t, flags)

        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                if not info:
                    raise util.Abort(_("patch %s is not applied") % patch)

            if len(self.applied) == 0:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                # even without -u we must update if a dirstate parent is
                # about to be stripped
                parents = repo.dirstate.parents()
                rr = [ bin(x.rev) for x in self.applied ]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                # skip the working-dir update when no popped patch is a
                # parent of the working directory
                parents = [p.hex() for p in repo[None].parents()]
                needupdate = False
                for entry in self.applied[start:]:
                    if entry.rev in parents:
                        needupdate = True
                        break
                update = needupdate

            if not force and update:
                self.check_localchanges(repo)

            self.applied_dirty = 1
            end = len(self.applied)
            rev = bin(self.applied[start].rev)
            if update:
                top = self.check_toppatch(repo)

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise util.Abort(_('trying to pop unknown node %s') % node)

            if heads != [bin(self.applied[-1].rev)]:
                raise util.Abort(_("popping would remove a revision not "
                                   "managed by this patch queue"))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                changes = repo.changelog.read(qp)
                mmap = repo.manifest.read(changes[0])
                m, a, r, d = repo.status(qp, top)[:4]
                if d:
                    raise util.Abort(_("deletions found between repo revs"))
                for f in m:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in r:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in m + r:
                    repo.dirstate.normal(f)
                for f in a:
                    # files added by the popped patches disappear from the
                    # working dir; empty parent dirs are pruned best-effort
                    try:
                        os.unlink(repo.wjoin(f))
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                    except: pass
                    repo.dirstate.forget(f)
                repo.dirstate.setparents(qp, nullid)
            del self.applied[start:end]
            self.strip(repo, rev, update=False, backup='strip')
            if len(self.applied):
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))
        finally:
            wlock.release()
1091
1091
1092 def diff(self, repo, pats, opts):
1092 def diff(self, repo, pats, opts):
1093 top = self.check_toppatch(repo)
1093 top = self.check_toppatch(repo)
1094 if not top:
1094 if not top:
1095 self.ui.write(_("no patches applied\n"))
1095 self.ui.write(_("no patches applied\n"))
1096 return
1096 return
1097 qp = self.qparents(repo, top)
1097 qp = self.qparents(repo, top)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1100
1100
def refresh(self, repo, pats=None, **opts):
    """Rewrite the topmost applied patch from the working directory.

    Regenerates the patch file (message, user, date, diff) and re-commits
    it.  Returns 1 (without changes) when no patches are applied.  Aborts
    if the top patch has children, since rewriting it would orphan them.
    """
    if len(self.applied) == 0:
        self.ui.write(_("no patches applied\n"))
        return 1
    msg = opts.get('msg', '').rstrip()
    newuser = opts.get('user')
    newdate = opts.get('date')
    if newdate:
        newdate = '%d %d' % util.parsedate(newdate)
    wlock = repo.wlock()
    try:
        self.check_toppatch(repo)
        (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
        top = bin(top)
        if repo.changelog.heads(top) != [top]:
            raise util.Abort(_("cannot refresh a revision with children"))
        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        ph = patchheader(self.join(patchfn))

        patchf = self.opener(patchfn, 'r')

        # if the patch was a git patch, refresh it as a git patch
        for line in patchf:
            if line.startswith('diff --git'):
                self.diffopts().git = True
                break

        # apply any requested header edits before rewriting the file
        if msg:
            ph.setmessage(msg)
        if newuser:
            ph.setuser(newuser)
        if newdate:
            ph.setdate(newdate)

        # only commit new patch when write is complete
        patchf = self.opener(patchfn, 'w', atomictemp=True)

        patchf.seek(0)
        patchf.truncate()

        comments = str(ph)
        if comments:
            patchf.write(comments)

        if opts.get('git'):
            self.diffopts().git = True
        tip = repo.changelog.tip()
        if top == tip:
            # if the top of our patch queue is also the tip, there is an
            # optimization here. We update the dirstate in place and strip
            # off the tip commit. Then just commit the current directory
            # tree. We can also send repo.commit the list of files
            # changed to speed up the diff
            #
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            #
            # this should really read:
            #   mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.status call
            #
            mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
            changes = repo.changelog.read(tip)
            man = repo.manifest.read(changes[0])
            aaa = aa[:]
            matchfn = cmdutil.match(repo, pats, opts)
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
                # filter with inc/exl options
                matchfn = cmdutil.match(repo, opts=opts)
            else:
                match = cmdutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]

            # we might end up with files that were added between
            # tip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.append(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    del dd[dd.index(x)]
                    mm.append(x)
                else:
                    aa.append(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    del aa[aa.index(x)]
                    forget.append(x)
                    continue
                elif x in mm:
                    del mm[mm.index(x)]
                dd.append(x)

            # de-duplicate the merged status lists
            m = list(set(mm))
            r = list(set(dd))
            a = list(set(aa))
            c = [filter(matchfn, l) for l in (m, a, r)]
            match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
            chunks = patch.diff(repo, patchparent, match=match,
                                changes=c, opts=self.diffopts())
            for chunk in chunks:
                patchf.write(chunk)

            try:
                if self.diffopts().git:
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and tip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m)-1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.forget(f)

                if not msg:
                    if not ph.message:
                        message = "[mq]: %s\n" % patchfn
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg

                user = ph.user or changes[1]

                # assumes strip can roll itself back if interrupted
                repo.dirstate.setparents(*cparents)
                self.applied.pop()
                self.applied_dirty = 1
                self.strip(repo, top, update=False,
                           backup='strip')
            except:
                # any failure above leaves the dirstate half-edited:
                # throw the in-memory state away and re-raise
                repo.dirstate.invalidate()
                raise

            try:
                # might be nice to attempt to roll back strip after this
                patchf.rename()
                n = repo.commit(message, user, ph.date, match=match,
                                force=True)
                self.applied.append(statusentry(hex(n), patchfn))
            except:
                # the old top commit is already stripped; tell the user
                # how to recover before propagating the error
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.save_dirty()
                self.ui.warn(_('refresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        else:
            # slow path: top patch is not the repository tip, so refresh
            # by regenerating the diff and doing a full pop/push cycle
            self.printdiff(repo, patchparent, fp=patchf)
            patchf.rename()
            added = repo.status()[1]
            for a in added:
                f = repo.wjoin(a)
                try:
                    os.unlink(f)
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                try: os.removedirs(os.path.dirname(f))
                except: pass
                # forget the file copies in the dirstate
                # push should readd the files later on
                repo.dirstate.forget(a)
            self.pop(repo, force=True)
            self.push(repo, force=True)
    finally:
        wlock.release()
        self.removeundo(repo)
1317
1317
def init(self, repo, create=False):
    """Create the patch queue directory.

    With create=True, also initialize a versioned queue repository and
    return it.  Aborts when the directory already exists and create is
    False.
    """
    if not create and os.path.isdir(self.path):
        raise util.Abort(_("patch queue directory already exists"))
    try:
        os.mkdir(self.path)
    except OSError, inst:
        # tolerate an existing directory only when explicitly creating
        if inst.errno != errno.EEXIST or not create:
            raise
    if create:
        return self.qrepo(create=True)
1328
1328
def unapplied(self, repo, patch=None):
    """Return [(index, name), ...] for pushable patches not yet applied.

    When *patch* is given, listing starts just after it in the series;
    otherwise it starts at the first unapplied position.  Non-pushable
    (guarded) patches are skipped, with an explanation emitted for each
    position examined.
    """
    if patch and patch not in self.series:
        raise util.Abort(_("patch %s is not in series file") % patch)
    if patch:
        start = self.series.index(patch) + 1
    else:
        start = self.series_end()
    result = []
    for idx in range(start, len(self.series)):
        ok, reason = self.pushable(idx)
        if ok:
            result.append((idx, self.series[idx]))
        self.explain_pushable(idx)
    return result
1343
1343
def qseries(self, repo, missing=None, start=0, length=None, status=None,
            summary=False):
    """Print the patch series (or, with *missing*, stray patch files).

    start/length select a slice of the series; status ('A'/'U'/'G')
    filters by state in non-verbose mode; summary appends the first
    line of each patch's message.
    """
    def displayname(pfx, patchname):
        # print one line: prefix, patch name, optional message summary,
        # truncated to terminal width in interactive mode
        if summary:
            ph = patchheader(self.join(patchname))
            msg = ph.message
            msg = msg and ': ' + msg[0] or ': '
        else:
            msg = ''
        msg = "%s%s%s" % (pfx, patchname, msg)
        if self.ui.interactive():
            msg = util.ellipsis(msg, util.termwidth())
        self.ui.write(msg + '\n')

    applied = set([p.name for p in self.applied])
    if length is None:
        length = len(self.series) - start
    if not missing:
        if self.ui.verbose:
            # width of the largest index, for column alignment
            idxwidth = len(str(start+length - 1))
        for i in xrange(start, start+length):
            patch = self.series[i]
            # A = applied, U = unapplied (pushable), G = guarded
            if patch in applied:
                stat = 'A'
            elif self.pushable(i)[0]:
                stat = 'U'
            else:
                stat = 'G'
            pfx = ''
            if self.ui.verbose:
                pfx = '%*d %s ' % (idxwidth, i, stat)
            elif status and status != stat:
                continue
            displayname(pfx, patch)
    else:
        # list files in the patch directory that are not in the series
        # and are not mq's own bookkeeping files
        msng_list = []
        for root, dirs, files in os.walk(self.path):
            d = root[len(self.path) + 1:]
            for f in files:
                fl = os.path.join(d, f)
                if (fl not in self.series and
                    fl not in (self.status_path, self.series_path,
                               self.guards_path)
                    and not fl.startswith('.')):
                    msng_list.append(fl)
        for x in sorted(msng_list):
            pfx = self.ui.verbose and ('D ') or ''
            displayname(pfx, x)
1392
1392
def issaveline(self, l):
    """Return True if applied-list entry *l* is a queue-state marker.

    save() records queue state as a synthetic commit whose applied-list
    entry is named '.hg.patches.save.line'; such entries are not real
    patches.
    """
    # Return an explicit bool instead of True-or-implicit-None: callers
    # only test this in boolean context, so False is backward compatible
    # and the function now has a consistent return type.
    return l.name == '.hg.patches.save.line'
1396
1396
def qrepo(self, create=False):
    """Return the versioned patch-queue repository, or None.

    With create=True a new queue repository is initialized; otherwise
    one is returned only if .hg already exists under the patch path.
    """
    if not create and not os.path.isdir(self.join(".hg")):
        return None
    return hg.repository(self.ui, path=self.path, create=create)
1400
1400
def restore(self, repo, rev, delete=None, qupdate=None):
    """Restore queue state from a save commit created by save().

    Parses the commit description of *rev*: an optional 'Dirstate:' line
    with the queue repository parents, then everything after a
    'Patch Data:' line as statusentry lines (applied patches have a rev,
    bare names are unapplied series entries).  With *delete*, strips the
    save commit; with *qupdate*, updates the queue repository to the
    recorded parent.  Returns 1 on failure.
    """
    c = repo.changelog.read(rev)
    desc = c[4].strip()
    lines = desc.splitlines()
    i = 0
    datastart = None
    series = []
    applied = []
    qpp = None
    for i, line in enumerate(lines):
        if line == 'Patch Data:':
            datastart = i + 1
        elif line.startswith('Dirstate:'):
            # 'Dirstate: <hex> <hex>' -> queue repo parent nodes
            l = line.rstrip()
            l = l[10:].split(' ')
            qpp = [ bin(x) for x in l ]
        elif datastart != None:
            l = line.rstrip()
            se = statusentry(l)
            file_ = se.name
            if se.rev:
                applied.append(se)
            else:
                series.append(file_)
    if datastart is None:
        self.ui.warn(_("No saved patch data found\n"))
        return 1
    self.ui.warn(_("restoring status: %s\n") % lines[0])
    self.full_series = series
    self.applied = applied
    self.parse_series()
    self.series_dirty = 1
    self.applied_dirty = 1
    heads = repo.changelog.heads()
    if delete:
        if rev not in heads:
            # stripping a non-head would remove its descendants too
            self.ui.warn(_("save entry has children, leaving it alone\n"))
        else:
            self.ui.warn(_("removing save entry %s\n") % short(rev))
            pp = repo.dirstate.parents()
            # update the working dir only if it sits on the save entry
            if rev in pp:
                update = True
            else:
                update = False
            self.strip(repo, rev, update=update, backup='strip')
    if qpp:
        self.ui.warn(_("saved queue repository parents: %s %s\n") %
                     (short(qpp[0]), short(qpp[1])))
        if qupdate:
            self.ui.status(_("queue directory updating\n"))
            r = self.qrepo()
            if not r:
                self.ui.warn(_("Unable to load queue repository\n"))
                return 1
            hg.clean(r, qpp[0])
1456
1456
def save(self, repo, msg=None):
    """Checkpoint the current queue state as a special commit.

    Records applied patches, the full series, and (if a queue repository
    exists) its dirstate parents in the commit description, then appends
    a '.hg.patches.save.line' marker entry.  Returns 1 on failure.
    """
    if not self.applied:
        self.ui.warn(_("save: no patches applied, exiting\n"))
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn(_("status is already saved\n"))
        return 1

    # series entries are recorded with a ':' prefix
    ar = [':' + x for x in self.full_series]
    if msg:
        msg = "hg patches: " + msg.rstrip('\r\n')
    else:
        msg = _("hg patches saved state")
    qrepo = self.qrepo()
    if qrepo:
        pp = qrepo.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
    msg += "\n\nPatch Data:\n"
    text = msg + "\n".join([str(x) for x in self.applied]) + '\n'
    if ar:
        text += "\n".join(ar) + '\n'
    n = repo.commit(text, force=True)
    if not n:
        self.ui.warn(_("repo commit failed\n"))
        return 1
    self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
    self.applied_dirty = 1
    self.removeundo(repo)
1484
1484
def full_series_end(self):
    """Return the full_series index just past the last applied patch.

    Returns 0 when nothing is applied; if the last applied patch cannot
    be located in the series file, returns the series length.
    """
    if not self.applied:
        return 0
    pos = self.find_series(self.applied[-1].name)
    if pos is None:
        return len(self.full_series)
    return pos + 1
1493
1493
def series_end(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    def advance(start):
        # skip forward over non-pushable (guarded) patches unless the
        # caller asked for the raw position
        if all_patches:
            return start
        idx = start
        while idx < len(self.series):
            ok, reason = self.pushable(idx)
            if ok:
                break
            self.explain_pushable(idx)
            idx += 1
        return idx

    if not self.applied:
        return advance(0)
    last = self.applied[-1].name
    try:
        pos = self.series.index(last)
    except ValueError:
        # last applied patch is no longer in the series file
        return 0
    return advance(pos + 1)
1519
1519
def appliedname(self, index):
    """Return the display name of the applied patch at *index*.

    In verbose mode the name is prefixed with its series index.
    """
    pname = self.applied[index].name
    if self.ui.verbose:
        return "%d %s" % (self.series.index(pname), pname)
    return pname
1527
1527
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patches into the queue.

    Either imports existing repository revisions (*rev*, which must form
    a linear path to qbase or to a head) as new patches on top of the
    queue, or imports patch *files* (from disk, a URL, or '-' for stdin)
    at the unapplied position of the series.  *patchname* overrides the
    generated name and is only valid for a single import.
    """
    def checkseries(patchname):
        if patchname in self.series:
            raise util.Abort(_('patch %s is already in the series file')
                             % patchname)
    def checkfile(patchname):
        if not force and os.path.exists(self.join(patchname)):
            raise util.Abort(_('patch "%s" already exists')
                             % patchname)

    if rev:
        if files:
            raise util.Abort(_('option "-r" not valid when importing '
                               'files'))
        rev = cmdutil.revrange(repo, rev)
        # process newest revision first (descending order)
        rev.sort(reverse=True)
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise util.Abort(_('option "-n" not valid when importing multiple '
                           'patches'))
    i = 0
    added = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
        if len(heads) > 1:
            raise util.Abort(_('revision %d is the root of more than one '
                               'branch') % rev[-1])
        if self.applied:
            base = hex(repo.changelog.node(rev[0]))
            if base in [n.rev for n in self.applied]:
                raise util.Abort(_('revision %d is already managed')
                                 % rev[0])
            if heads != [bin(self.applied[-1].rev)]:
                raise util.Abort(_('revision %d is not the parent of '
                                   'the queue') % rev[0])
            base = repo.changelog.rev(bin(self.applied[0].rev))
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev[0])]:
                raise util.Abort(_('revision %d has unmanaged children')
                                 % rev[0])
            lastparent = None

        if git:
            self.diffopts().git = True

        for r in rev:
            p1, p2 = repo.changelog.parentrevs(r)
            n = repo.changelog.node(r)
            if p2 != nullrev:
                raise util.Abort(_('cannot import merge revision %d') % r)
            if lastparent and lastparent != r:
                raise util.Abort(_('revision %d is not the parent of %d')
                                 % (r, lastparent))
            lastparent = p1

            if not patchname:
                patchname = normname('%d.diff' % r)
            self.check_reserved_name(patchname)
            checkseries(patchname)
            checkfile(patchname)
            # revisions are walked newest-first, so insert at the front
            # of the series and applied list
            self.full_series.insert(0, patchname)

            patchf = self.opener(patchname, "w")
            patch.export(repo, [n], fp=patchf, opts=self.diffopts())
            patchf.close()

            se = statusentry(hex(n), patchname)
            self.applied.insert(0, se)

            added.append(patchname)
            patchname = None
        self.parse_series()
        self.applied_dirty = 1

    for filename in files:
        if existing:
            if filename == '-':
                raise util.Abort(_('-e is incompatible with import from -'))
            if not patchname:
                patchname = normname(filename)
            self.check_reserved_name(patchname)
            if not os.path.isfile(self.join(patchname)):
                raise util.Abort(_("patch %s does not exist") % patchname)
        else:
            try:
                if filename == '-':
                    if not patchname:
                        raise util.Abort(_('need --name to import a patch from -'))
                    text = sys.stdin.read()
                else:
                    text = url.open(self.ui, filename).read()
            except (OSError, IOError):
                raise util.Abort(_("unable to read %s") % filename)
            if not patchname:
                patchname = normname(os.path.basename(filename))
            self.check_reserved_name(patchname)
            checkfile(patchname)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
        if not force:
            checkseries(patchname)
        if patchname not in self.series:
            # splice the new patch in at the first unapplied slot
            index = self.full_series_end() + i
            self.full_series[index:index] = [patchname]
            self.parse_series()
            self.ui.warn(_("adding %s to series file\n") % patchname)
        i += 1
        added.append(patchname)
        patchname = None
    self.series_dirty = 1
    qrepo = self.qrepo()
    if qrepo:
        qrepo.add(added)
1645
1645
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. With
    -k/--keep, the patch files are preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the qfinish command."""
    # thin wrapper: the queue object does the real work and is flushed
    # to disk before we report success
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1658
1658
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    mq = repo.mq
    if patch:
        # explicit patch: list everything up to and including it
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        # otherwise list up to the last applied patch
        end = mq.series_end(True)
    return mq.qseries(repo, length=end, status='A',
                      summary=opts.get('summary'))
1669
1669
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if patch:
        # explicit patch: list everything after it
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        # otherwise list everything after the last applied patch
        start = q.series_end(True)
    # propagate qseries' result as the command's exit code, matching the
    # sibling 'applied'/'next'/'prev' commands (previously the value was
    # silently dropped)
    return q.qseries(repo, start=start, status='U',
                     summary=opts.get('summary'))
1680
1680
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With -g/--git, patches imported with --rev will use the git diff
    format. See the diffs help topic for information on why this is
    important for preserving rename/copy information and permission
    changes.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.
    """
    mq = repo.mq
    mq.qimport(repo, filename, patchname=opts['name'],
               existing=opts['existing'], force=opts['force'],
               rev=opts['rev'], git=opts['git'])
    mq.save_dirty()

    # -P/--push (without --rev) immediately pushes the imported patches
    if opts.get('push') and not opts.get('rev'):
        return mq.push(repo, None)
    return 0
1717
1717
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository."""
    q = repo.mq
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if not r:
        return 0
    # a nested patch repository was created (-c): seed it with an
    # .hgignore and an empty series file, then register both
    if not os.path.exists(r.wjoin('.hgignore')):
        fp = r.wopener('.hgignore', 'w')
        fp.write('^\\.hg\n'
                 '^\\.mq\n'
                 'syntax: glob\n'
                 'status\n'
                 'guards\n')
        fp.close()
    if not os.path.exists(r.wjoin('series')):
        r.wopener('series', 'w').close()
    r.add(['.hgignore', 'series'])
    commands.add(ui, r)
    return 0
1743
1743
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by qinit -c.
    '''
    def patchdir(repo):
        # URL of the patch repository nested inside repo
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
    if opts['patches']:
        patchespath = ui.expandpath(opts['patches'])
    else:
        patchespath = patchdir(sr)
    # refuse to continue unless a versioned patch repository exists
    try:
        hg.repository(ui, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            # clone only up to the parent of the first applied patch
            qbase = bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                heads = set(sr.heads())
                destrev = list(heads.difference(sr.heads(qbase)))
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: best effort — ask it for the qbase marker
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass
    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
             pull=opts['pull'], update=not opts['noupdate'],
             stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # csets belonging to applied patches may still have been
            # pulled (remote source); strip them from the destination
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repository\n'))
            hg.update(dr, dr.changelog.tip())
1807
1807
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        # wrap in _() for i18n, matching every other abort message here
        raise util.Abort(_('no queue repository'))
    commands.commit(r.ui, r, *pats, **opts)
1814
1814
def series(ui, repo, **opts):
    """print the entire series file"""
    mq = repo.mq
    mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1819
1819
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    # index just past the last applied patch, or 0 when nothing applied
    if q.applied:
        t = q.series_end(True)
    else:
        t = 0
    if not t:
        ui.write(_("no patches applied\n"))
        return 1
    return q.qseries(repo, start=t - 1, length=1, status='A',
                     summary=opts.get('summary'))
1830
1830
def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    pos = q.series_end()
    if pos == len(q.series):
        # every patch in the series is already applied
        ui.write(_("all patches applied\n"))
        return 1
    return q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
1839
1839
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    napplied = len(q.applied)
    # the two error cases are disjoint, so the order of the checks is
    # immaterial; guard the empty stack first
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    return q.qseries(repo, start=napplied - 2, length=1, status='A',
                     summary=opts.get('summary'))
1852
1852
def setupheaderopts(ui, opts):
    """Fill in user/date header options from the environment.

    When -U/--currentuser or -D/--currentdate was given without an
    explicit -u/--user or -d/--date, substitute the current user name
    or current date.  The values are computed lazily: the old helper
    evaluated ui.username() unconditionally, which can abort when no
    username is configured even though the value was never needed.
    """
    if not opts['user'] and opts['currentuser']:
        opts['user'] = ui.username()
    if not opts['date'] and opts['currentdate']:
        opts['date'] = "%d %d" % util.makedate()
1859
1859
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). It will refuse to run if there are any outstanding changes
    unless -f/--force is specified, in which case the patch will be
    initialized with them. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.
    """
    msg = cmdutil.logmessage(opts)
    def getmsg():
        # deferred so the editor is only spawned when the new patch is
        # actually created
        return ui.edit(msg, ui.username())
    q = repo.mq
    # (the redundant unconditional opts['msg'] = msg assignment that
    # preceded this branch has been dropped — it was always overwritten)
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0
1896
1896
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        # -e: edit the top patch's message; incompatible with -m/-l
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        top = q.applied[-1].name
        ph = patchheader(q.join(top))
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret
1927
1927
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use 'hg diff' if you only want to see the changes made since the
    last qrefresh, or 'hg export qtip' if you want to see changes made
    by the current patch without including changes made since the
    qrefresh.
    """
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
1943
1943
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))
    q.check_localchanges(repo)

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # fix: really skip duplicates — the old code warned
            # "Skipping..." (without its newline) but still appended the
            # patch, so it would be applied and deleted twice below
            ui.warn(_('Skipping already folded patch %s\n') % p)
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's message for concatenation
            ph = patchheader(q.join(p))
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # build the cumulative message: current header + '* * *'-separated
        # folded messages
        ph = patchheader(q.join(parent))
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
2006
2006
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    patch = q.lookup(patch)
    force = opts['force']
    # pop down to the patch if it is applied, otherwise push up to it
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=force)
    else:
        ret = q.push(repo, patch, force=force)
    q.save_dirty()
    return ret
2017
2017
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.
    NOTE: Specifying negative guards now requires '--'.

    To set guards on another patch:
      hg qguard -- other.patch +2.6.17 -stable
    '''
    def showguards(idx):
        # print "<patch>: <guards>" for the series entry at idx
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            showguards(i)
        return
    # no patch argument (or first arg is a guard): default to top patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # set (or clear, with -n/--none) the guards on the patch
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        showguards(q.series.index(q.lookup(patch)))
2062
2062
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            # wrapped in _() for i18n, like the identical message in
            # top()/prev()
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    # use the local q binding instead of reaching through repo.mq again
    ph = patchheader(q.join(patch))

    ui.write('\n'.join(ph.message) + '\n')
2077
2077
def lastsavename(path):
    """Return (filename, index) of the highest-numbered save of path.

    Scans path's directory for files named "<base>.N" (as produced by
    savename) and returns the full path and integer N of the largest
    one, or (None, None) when no save files exist.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # escape 'base' so regex metacharacters in the name cannot match
    # unrelated files, match a literal '.' (the old pattern's bare '.'
    # matched any character), and anchor the numeric suffix at the end
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2094
2094
def savename(path):
    # next unused save name: "<path>.<N+1>" where N is the highest
    # existing save index (0 when there are no saves yet)
    last, index = lastsavename(path)
    if last is None:
        index = 0
    return path + ".%d" % (index + 1)
2101
2101
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When -f/--force is applied, all local changes in patched files
    will be lost.
    """
    q = repo.mq
    mergeq = None

    if opts['merge']:
        # pick the saved queue to merge with: either a named one or the
        # most recently saved one
        name = opts['name']
        if name:
            newpath = repo.join(name)
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)

    return q.push(repo, patch, force=opts['force'], list=opts['list'],
                  mergeq=mergeq, all=opts.get('all'))
2124
2124
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch
    name, keeps popping off patches until the named patch is at the
    top of the stack.
    """
    name = opts['name']
    if name:
        # operate on a named queue; in that case the working directory
        # is left untouched
        q = queue(ui, repo.join(""), repo.join(name))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True

    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
2143
2143
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # with a single argument it names the destination; the source
    # defaults to the topmost applied patch
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # renaming into a directory keeps the original basename
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('renaming %s to %s\n' % (patch, name))
    # rewrite the series file entry, preserving any guards attached to
    # the old name
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is currently applied, update its status entry too
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # mirror the rename inside the versioned patch queue repo, if any
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # old name was only added, never committed: just swap
                # the add over to the new name
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                # record the rename as copy + remove so history follows
                if r.dirstate[name] == 'r':
                    r.undelete([name])
                r.copy(patch, name)
                r.remove([patch], False)
        finally:
            wlock.release()

    q.save_dirty()
2203
2203
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision"""
    q = repo.mq
    q.restore(repo, repo.lookup(rev), delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0
2212
2212
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        # copy the queue directory aside, either to an explicitly named
        # location or to the next '<path>.<N>' save name
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            # best-effort removal; was a bare 'except:' which also
            # swallowed KeyboardInterrupt/SystemExit
            pass
    return 0
2242
2242
def strip(ui, repo, rev, **opts):
    """strip a revision and all its descendants from the repository

    If one of the working directory's parent revisions is stripped, the
    working directory will be updated to the parent of the stripped
    revision.
    """
    # decide what to back up: everything, only the stripped changesets,
    # or nothing
    backup = 'all'
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'

    rev = repo.lookup(rev)
    p = repo.dirstate.parents()
    cl = repo.changelog
    # only update the working directory when it is based on the stripped
    # revision, i.e. 'rev' is an ancestor of a working dir parent
    update = True
    if p[0] == nullid:
        # working directory is not based on any revision
        update = False
    elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
        # single parent, and it does not descend from rev
        update = False
    elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
        # merge in progress and neither parent descends from rev
        update = False

    repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
    return 0
2269
2269
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example:

    qguard foo.patch -stable (negative guard)
    qguard bar.patch +stable (positive guard)
    qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # snapshot which patches are unapplied/guarded before changing
        # the active guard set, so we can report what changed
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # tally how many series entries carry each guard
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, skipping the leading '+'/'-' sign
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top so --reapply can push back to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just below the first guarded applied patch
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.save_dirty()
2372
2372
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.
    """
    if opts['applied']:
        # --applied means: everything from qbase to qtip, plus any
        # explicitly given ranges
        revrange = ('qbase:qtip',) + revrange
    elif not revrange:
        raise util.Abort(_('no revisions specified'))

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = cmdutil.revrange(repo, revrange)
    q.finish(repo, revs)
    q.save_dirty()
    return 0
2403
2403
def reposetup(ui, repo):
    """Wrap the repository class with mq awareness: expose applied
    patches as tags and guard commit/push against applied patches."""
    class mqrepo(repo.__class__):
        @util.propertycache
        def mq(self):
            # lazily instantiate the patch queue for this repository
            return queue(self.ui, self.join(""))

        def abort_if_wdir_patched(self, errmsg, force=False):
            # refuse the operation when the working directory sits on
            # top of an applied mq patch (unless forced)
            if self.mq.applied and not force:
                parent = hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra={}):
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def push(self, remote, force=False, revs=None):
            # pushing everything would publish the mq patches; pushing
            # explicit revs is allowed since those can exclude them
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # expose each applied patch as a local tag, plus the
            # synthetic qtip/qbase/qparent tags
            mqtags = [(bin(patch.rev), patch.name) for patch in q.applied]

            if mqtags[-1][0] not in self.changelog.nodemap:
                # stale status file: warn rather than crash
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return tagscache

            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                # real tags win over same-named mq patches
                if patch[1] in tagscache:
                    self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
                                 % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self, partial, lrev):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = bin(q.applied[0].rev)
            if qbasenode not in cl.nodemap:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, len(cl))

            return partial

    if repo.local():
        repo.__class__ = mqrepo
2489
2489
def mqimport(orig, ui, repo, *args, **kwargs):
    """Wrapper for 'hg import': refuse to import over an applied patch."""
    abort = getattr(repo, 'abort_if_wdir_patched', None)
    if abort is not None:
        abort(_('cannot import over an applied patch'),
              kwargs.get('force'))
    return orig(ui, repo, *args, **kwargs)
2495
2495
def uisetup(ui):
    """Wrap the core 'import' command so it checks for applied patches."""
    extensions.wrapcommand(commands.table, 'import', mqimport)
2498
2498
# option shared by the commands that list entries of the patch series
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2500
2500
2501 cmdtable = {
2501 cmdtable = {
2502 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2502 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2503 "qclone":
2503 "qclone":
2504 (clone,
2504 (clone,
2505 [('', 'pull', None, _('use pull protocol to copy metadata')),
2505 [('', 'pull', None, _('use pull protocol to copy metadata')),
2506 ('U', 'noupdate', None, _('do not update the new working directories')),
2506 ('U', 'noupdate', None, _('do not update the new working directories')),
2507 ('', 'uncompressed', None,
2507 ('', 'uncompressed', None,
2508 _('use uncompressed transfer (fast over LAN)')),
2508 _('use uncompressed transfer (fast over LAN)')),
2509 ('p', 'patches', '', _('location of source patch repository')),
2509 ('p', 'patches', '', _('location of source patch repository')),
2510 ] + commands.remoteopts,
2510 ] + commands.remoteopts,
2511 _('hg qclone [OPTION]... SOURCE [DEST]')),
2511 _('hg qclone [OPTION]... SOURCE [DEST]')),
2512 "qcommit|qci":
2512 "qcommit|qci":
2513 (commit,
2513 (commit,
2514 commands.table["^commit|ci"][1],
2514 commands.table["^commit|ci"][1],
2515 _('hg qcommit [OPTION]... [FILE]...')),
2515 _('hg qcommit [OPTION]... [FILE]...')),
2516 "^qdiff":
2516 "^qdiff":
2517 (diff,
2517 (diff,
2518 commands.diffopts + commands.diffopts2 + commands.walkopts,
2518 commands.diffopts + commands.diffopts2 + commands.walkopts,
2519 _('hg qdiff [OPTION]... [FILE]...')),
2519 _('hg qdiff [OPTION]... [FILE]...')),
2520 "qdelete|qremove|qrm":
2520 "qdelete|qremove|qrm":
2521 (delete,
2521 (delete,
2522 [('k', 'keep', None, _('keep patch file')),
2522 [('k', 'keep', None, _('keep patch file')),
2523 ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'))],
2523 ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'))],
2524 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2524 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2525 'qfold':
2525 'qfold':
2526 (fold,
2526 (fold,
2527 [('e', 'edit', None, _('edit patch header')),
2527 [('e', 'edit', None, _('edit patch header')),
2528 ('k', 'keep', None, _('keep folded patch files')),
2528 ('k', 'keep', None, _('keep folded patch files')),
2529 ] + commands.commitopts,
2529 ] + commands.commitopts,
2530 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2530 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2531 'qgoto':
2531 'qgoto':
2532 (goto,
2532 (goto,
2533 [('f', 'force', None, _('overwrite any local changes'))],
2533 [('f', 'force', None, _('overwrite any local changes'))],
2534 _('hg qgoto [OPTION]... PATCH')),
2534 _('hg qgoto [OPTION]... PATCH')),
2535 'qguard':
2535 'qguard':
2536 (guard,
2536 (guard,
2537 [('l', 'list', None, _('list all patches and guards')),
2537 [('l', 'list', None, _('list all patches and guards')),
2538 ('n', 'none', None, _('drop all guards'))],
2538 ('n', 'none', None, _('drop all guards'))],
2539 _('hg qguard [-l] [-n] -- [PATCH] [+GUARD]... [-GUARD]...')),
2539 _('hg qguard [-l] [-n] -- [PATCH] [+GUARD]... [-GUARD]...')),
2540 'qheader': (header, [], _('hg qheader [PATCH]')),
2540 'qheader': (header, [], _('hg qheader [PATCH]')),
2541 "^qimport":
2541 "^qimport":
2542 (qimport,
2542 (qimport,
2543 [('e', 'existing', None, _('import file in patch directory')),
2543 [('e', 'existing', None, _('import file in patch directory')),
2544 ('n', 'name', '', _('name of patch file')),
2544 ('n', 'name', '', _('name of patch file')),
2545 ('f', 'force', None, _('overwrite existing files')),
2545 ('f', 'force', None, _('overwrite existing files')),
2546 ('r', 'rev', [], _('place existing revisions under mq control')),
2546 ('r', 'rev', [], _('place existing revisions under mq control')),
2547 ('g', 'git', None, _('use git extended diff format')),
2547 ('g', 'git', None, _('use git extended diff format')),
2548 ('P', 'push', None, _('qpush after importing'))],
2548 ('P', 'push', None, _('qpush after importing'))],
2549 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2549 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2550 "^qinit":
2550 "^qinit":
2551 (init,
2551 (init,
2552 [('c', 'create-repo', None, _('create queue repository'))],
2552 [('c', 'create-repo', None, _('create queue repository'))],
2553 _('hg qinit [-c]')),
2553 _('hg qinit [-c]')),
2554 "qnew":
2554 "qnew":
2555 (new,
2555 (new,
2556 [('e', 'edit', None, _('edit commit message')),
2556 [('e', 'edit', None, _('edit commit message')),
2557 ('f', 'force', None, _('import uncommitted changes into patch')),
2557 ('f', 'force', None, _('import uncommitted changes into patch')),
2558 ('g', 'git', None, _('use git extended diff format')),
2558 ('g', 'git', None, _('use git extended diff format')),
2559 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2559 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2560 ('u', 'user', '', _('add "From: <given user>" to patch')),
2560 ('u', 'user', '', _('add "From: <given user>" to patch')),
2561 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2561 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2562 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2562 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2563 ] + commands.walkopts + commands.commitopts,
2563 ] + commands.walkopts + commands.commitopts,
2564 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2564 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2565 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2565 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2566 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2566 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2567 "^qpop":
2567 "^qpop":
2568 (pop,
2568 (pop,
2569 [('a', 'all', None, _('pop all patches')),
2569 [('a', 'all', None, _('pop all patches')),
2570 ('n', 'name', '', _('queue name to pop')),
2570 ('n', 'name', '', _('queue name to pop')),
2571 ('f', 'force', None, _('forget any local changes'))],
2571 ('f', 'force', None, _('forget any local changes'))],
2572 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2572 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2573 "^qpush":
2573 "^qpush":
2574 (push,
2574 (push,
2575 [('f', 'force', None, _('apply if the patch has rejects')),
2575 [('f', 'force', None, _('apply if the patch has rejects')),
2576 ('l', 'list', None, _('list patch name in commit text')),
2576 ('l', 'list', None, _('list patch name in commit text')),
2577 ('a', 'all', None, _('apply all patches')),
2577 ('a', 'all', None, _('apply all patches')),
2578 ('m', 'merge', None, _('merge from another queue')),
2578 ('m', 'merge', None, _('merge from another queue')),
2579 ('n', 'name', '', _('merge queue name'))],
2579 ('n', 'name', '', _('merge queue name'))],
2580 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2580 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2581 "^qrefresh":
2581 "^qrefresh":
2582 (refresh,
2582 (refresh,
2583 [('e', 'edit', None, _('edit commit message')),
2583 [('e', 'edit', None, _('edit commit message')),
2584 ('g', 'git', None, _('use git extended diff format')),
2584 ('g', 'git', None, _('use git extended diff format')),
2585 ('s', 'short', None, _('refresh only files already in the patch and specified files')),
2585 ('s', 'short', None, _('refresh only files already in the patch and specified files')),
2586 ('U', 'currentuser', None, _('add/update "From: <current user>" in patch')),
2586 ('U', 'currentuser', None, _('add/update "From: <current user>" in patch')),
2587 ('u', 'user', '', _('add/update "From: <given user>" in patch')),
2587 ('u', 'user', '', _('add/update "From: <given user>" in patch')),
2588 ('D', 'currentdate', None, _('update "Date: <current date>" in patch (if present)')),
2588 ('D', 'currentdate', None, _('update "Date: <current date>" in patch (if present)')),
2589 ('d', 'date', '', _('update "Date: <given date>" in patch (if present)'))
2589 ('d', 'date', '', _('update "Date: <given date>" in patch (if present)'))
2590 ] + commands.walkopts + commands.commitopts,
2590 ] + commands.walkopts + commands.commitopts,
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2592 'qrename|qmv':
2592 'qrename|qmv':
2593 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2593 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2594 "qrestore":
2594 "qrestore":
2595 (restore,
2595 (restore,
2596 [('d', 'delete', None, _('delete save entry')),
2596 [('d', 'delete', None, _('delete save entry')),
2597 ('u', 'update', None, _('update queue working directory'))],
2597 ('u', 'update', None, _('update queue working directory'))],
2598 _('hg qrestore [-d] [-u] REV')),
2598 _('hg qrestore [-d] [-u] REV')),
2599 "qsave":
2599 "qsave":
2600 (save,
2600 (save,
2601 [('c', 'copy', None, _('copy patch directory')),
2601 [('c', 'copy', None, _('copy patch directory')),
2602 ('n', 'name', '', _('copy directory name')),
2602 ('n', 'name', '', _('copy directory name')),
2603 ('e', 'empty', None, _('clear queue status file')),
2603 ('e', 'empty', None, _('clear queue status file')),
2604 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2604 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2605 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2605 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2606 "qselect":
2606 "qselect":
2607 (select,
2607 (select,
2608 [('n', 'none', None, _('disable all guards')),
2608 [('n', 'none', None, _('disable all guards')),
2609 ('s', 'series', None, _('list all guards in series file')),
2609 ('s', 'series', None, _('list all guards in series file')),
2610 ('', 'pop', None, _('pop to before first guarded applied patch')),
2610 ('', 'pop', None, _('pop to before first guarded applied patch')),
2611 ('', 'reapply', None, _('pop, then reapply patches'))],
2611 ('', 'reapply', None, _('pop, then reapply patches'))],
2612 _('hg qselect [OPTION]... [GUARD]...')),
2612 _('hg qselect [OPTION]... [GUARD]...')),
2613 "qseries":
2613 "qseries":
2614 (series,
2614 (series,
2615 [('m', 'missing', None, _('print patches not in series')),
2615 [('m', 'missing', None, _('print patches not in series')),
2616 ] + seriesopts,
2616 ] + seriesopts,
2617 _('hg qseries [-ms]')),
2617 _('hg qseries [-ms]')),
2618 "^strip":
2618 "^strip":
2619 (strip,
2619 (strip,
2620 [('f', 'force', None, _('force removal with local changes')),
2620 [('f', 'force', None, _('force removal with local changes')),
2621 ('b', 'backup', None, _('bundle unrelated changesets')),
2621 ('b', 'backup', None, _('bundle unrelated changesets')),
2622 ('n', 'nobackup', None, _('no backups'))],
2622 ('n', 'nobackup', None, _('no backups'))],
2623 _('hg strip [-f] [-b] [-n] REV')),
2623 _('hg strip [-f] [-b] [-n] REV')),
2624 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2624 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2625 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2625 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2626 "qfinish":
2626 "qfinish":
2627 (finish,
2627 (finish,
2628 [('a', 'applied', None, _('finish all applied changesets'))],
2628 [('a', 'applied', None, _('finish all applied changesets'))],
2629 _('hg qfinish [-a] [REV]...')),
2629 _('hg qfinish [-a] [REV]...')),
2630 }
2630 }
@@ -1,2190 +1,2190 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo
10 import repo, changegroup, subrepo
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16 from lock import release
16 from lock import release
17 import weakref, stat, errno, os, time, inspect
17 import weakref, stat, errno, os, time, inspect
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 class localrepository(repo.repository):
20 class localrepository(repo.repository):
21 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
21 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
22 supported = set('revlogv1 store fncache shared'.split())
22 supported = set('revlogv1 store fncache shared'.split())
23
23
24 def __init__(self, baseui, path=None, create=0):
24 def __init__(self, baseui, path=None, create=0):
25 repo.repository.__init__(self)
25 repo.repository.__init__(self)
26 self.root = os.path.realpath(path)
26 self.root = os.path.realpath(path)
27 self.path = os.path.join(self.root, ".hg")
27 self.path = os.path.join(self.root, ".hg")
28 self.origroot = path
28 self.origroot = path
29 self.opener = util.opener(self.path)
29 self.opener = util.opener(self.path)
30 self.wopener = util.opener(self.root)
30 self.wopener = util.opener(self.root)
31 self.baseui = baseui
31 self.baseui = baseui
32 self.ui = baseui.copy()
32 self.ui = baseui.copy()
33
33
34 try:
34 try:
35 self.ui.readconfig(self.join("hgrc"), self.root)
35 self.ui.readconfig(self.join("hgrc"), self.root)
36 extensions.loadall(self.ui)
36 extensions.loadall(self.ui)
37 except IOError:
37 except IOError:
38 pass
38 pass
39
39
40 if not os.path.isdir(self.path):
40 if not os.path.isdir(self.path):
41 if create:
41 if create:
42 if not os.path.exists(path):
42 if not os.path.exists(path):
43 os.mkdir(path)
43 os.mkdir(path)
44 os.mkdir(self.path)
44 os.mkdir(self.path)
45 requirements = ["revlogv1"]
45 requirements = ["revlogv1"]
46 if self.ui.configbool('format', 'usestore', True):
46 if self.ui.configbool('format', 'usestore', True):
47 os.mkdir(os.path.join(self.path, "store"))
47 os.mkdir(os.path.join(self.path, "store"))
48 requirements.append("store")
48 requirements.append("store")
49 if self.ui.configbool('format', 'usefncache', True):
49 if self.ui.configbool('format', 'usefncache', True):
50 requirements.append("fncache")
50 requirements.append("fncache")
51 # create an invalid changelog
51 # create an invalid changelog
52 self.opener("00changelog.i", "a").write(
52 self.opener("00changelog.i", "a").write(
53 '\0\0\0\2' # represents revlogv2
53 '\0\0\0\2' # represents revlogv2
54 ' dummy changelog to prevent using the old repo layout'
54 ' dummy changelog to prevent using the old repo layout'
55 )
55 )
56 reqfile = self.opener("requires", "w")
56 reqfile = self.opener("requires", "w")
57 for r in requirements:
57 for r in requirements:
58 reqfile.write("%s\n" % r)
58 reqfile.write("%s\n" % r)
59 reqfile.close()
59 reqfile.close()
60 else:
60 else:
61 raise error.RepoError(_("repository %s not found") % path)
61 raise error.RepoError(_("repository %s not found") % path)
62 elif create:
62 elif create:
63 raise error.RepoError(_("repository %s already exists") % path)
63 raise error.RepoError(_("repository %s already exists") % path)
64 else:
64 else:
65 # find requirements
65 # find requirements
66 requirements = set()
66 requirements = set()
67 try:
67 try:
68 requirements = set(self.opener("requires").read().splitlines())
68 requirements = set(self.opener("requires").read().splitlines())
69 except IOError, inst:
69 except IOError, inst:
70 if inst.errno != errno.ENOENT:
70 if inst.errno != errno.ENOENT:
71 raise
71 raise
72 for r in requirements - self.supported:
72 for r in requirements - self.supported:
73 raise error.RepoError(_("requirement '%s' not supported") % r)
73 raise error.RepoError(_("requirement '%s' not supported") % r)
74
74
75 self.sharedpath = self.path
75 self.sharedpath = self.path
76 try:
76 try:
77 s = os.path.realpath(self.opener("sharedpath").read())
77 s = os.path.realpath(self.opener("sharedpath").read())
78 if not os.path.exists(s):
78 if not os.path.exists(s):
79 raise error.RepoError(
79 raise error.RepoError(
80 _('.hg/sharedpath points to nonexistent directory %s') % s)
80 _('.hg/sharedpath points to nonexistent directory %s') % s)
81 self.sharedpath = s
81 self.sharedpath = s
82 except IOError, inst:
82 except IOError, inst:
83 if inst.errno != errno.ENOENT:
83 if inst.errno != errno.ENOENT:
84 raise
84 raise
85
85
86 self.store = store.store(requirements, self.sharedpath, util.opener)
86 self.store = store.store(requirements, self.sharedpath, util.opener)
87 self.spath = self.store.path
87 self.spath = self.store.path
88 self.sopener = self.store.opener
88 self.sopener = self.store.opener
89 self.sjoin = self.store.join
89 self.sjoin = self.store.join
90 self.opener.createmode = self.store.createmode
90 self.opener.createmode = self.store.createmode
91
91
92 self.tagscache = None
92 self.tagscache = None
93 self._tagstypecache = None
93 self._tagstypecache = None
94 self.branchcache = None
94 self.branchcache = None
95 self._ubranchcache = None # UTF-8 version of branchcache
95 self._ubranchcache = None # UTF-8 version of branchcache
96 self._branchcachetip = None
96 self._branchcachetip = None
97 self.nodetagscache = None
97 self.nodetagscache = None
98 self.filterpats = {}
98 self.filterpats = {}
99 self._datafilters = {}
99 self._datafilters = {}
100 self._transref = self._lockref = self._wlockref = None
100 self._transref = self._lockref = self._wlockref = None
101
101
102 @propertycache
102 @propertycache
103 def changelog(self):
103 def changelog(self):
104 c = changelog.changelog(self.sopener)
104 c = changelog.changelog(self.sopener)
105 if 'HG_PENDING' in os.environ:
105 if 'HG_PENDING' in os.environ:
106 p = os.environ['HG_PENDING']
106 p = os.environ['HG_PENDING']
107 if p.startswith(self.root):
107 if p.startswith(self.root):
108 c.readpending('00changelog.i.a')
108 c.readpending('00changelog.i.a')
109 self.sopener.defversion = c.version
109 self.sopener.defversion = c.version
110 return c
110 return c
111
111
112 @propertycache
112 @propertycache
113 def manifest(self):
113 def manifest(self):
114 return manifest.manifest(self.sopener)
114 return manifest.manifest(self.sopener)
115
115
116 @propertycache
116 @propertycache
117 def dirstate(self):
117 def dirstate(self):
118 return dirstate.dirstate(self.opener, self.ui, self.root)
118 return dirstate.dirstate(self.opener, self.ui, self.root)
119
119
120 def __getitem__(self, changeid):
120 def __getitem__(self, changeid):
121 if changeid is None:
121 if changeid is None:
122 return context.workingctx(self)
122 return context.workingctx(self)
123 return context.changectx(self, changeid)
123 return context.changectx(self, changeid)
124
124
125 def __nonzero__(self):
125 def __nonzero__(self):
126 return True
126 return True
127
127
128 def __len__(self):
128 def __len__(self):
129 return len(self.changelog)
129 return len(self.changelog)
130
130
131 def __iter__(self):
131 def __iter__(self):
132 for i in xrange(len(self)):
132 for i in xrange(len(self)):
133 yield i
133 yield i
134
134
135 def url(self):
135 def url(self):
136 return 'file:' + self.root
136 return 'file:' + self.root
137
137
138 def hook(self, name, throw=False, **args):
138 def hook(self, name, throw=False, **args):
139 return hook.hook(self.ui, self, name, throw, **args)
139 return hook.hook(self.ui, self, name, throw, **args)
140
140
141 tag_disallowed = ':\r\n'
141 tag_disallowed = ':\r\n'
142
142
143 def _tag(self, names, node, message, local, user, date, extra={}):
143 def _tag(self, names, node, message, local, user, date, extra={}):
144 if isinstance(names, str):
144 if isinstance(names, str):
145 allchars = names
145 allchars = names
146 names = (names,)
146 names = (names,)
147 else:
147 else:
148 allchars = ''.join(names)
148 allchars = ''.join(names)
149 for c in self.tag_disallowed:
149 for c in self.tag_disallowed:
150 if c in allchars:
150 if c in allchars:
151 raise util.Abort(_('%r cannot be used in a tag name') % c)
151 raise util.Abort(_('%r cannot be used in a tag name') % c)
152
152
153 for name in names:
153 for name in names:
154 self.hook('pretag', throw=True, node=hex(node), tag=name,
154 self.hook('pretag', throw=True, node=hex(node), tag=name,
155 local=local)
155 local=local)
156
156
157 def writetags(fp, names, munge, prevtags):
157 def writetags(fp, names, munge, prevtags):
158 fp.seek(0, 2)
158 fp.seek(0, 2)
159 if prevtags and prevtags[-1] != '\n':
159 if prevtags and prevtags[-1] != '\n':
160 fp.write('\n')
160 fp.write('\n')
161 for name in names:
161 for name in names:
162 m = munge and munge(name) or name
162 m = munge and munge(name) or name
163 if self._tagstypecache and name in self._tagstypecache:
163 if self._tagstypecache and name in self._tagstypecache:
164 old = self.tagscache.get(name, nullid)
164 old = self.tagscache.get(name, nullid)
165 fp.write('%s %s\n' % (hex(old), m))
165 fp.write('%s %s\n' % (hex(old), m))
166 fp.write('%s %s\n' % (hex(node), m))
166 fp.write('%s %s\n' % (hex(node), m))
167 fp.close()
167 fp.close()
168
168
169 prevtags = ''
169 prevtags = ''
170 if local:
170 if local:
171 try:
171 try:
172 fp = self.opener('localtags', 'r+')
172 fp = self.opener('localtags', 'r+')
173 except IOError:
173 except IOError:
174 fp = self.opener('localtags', 'a')
174 fp = self.opener('localtags', 'a')
175 else:
175 else:
176 prevtags = fp.read()
176 prevtags = fp.read()
177
177
178 # local tags are stored in the current charset
178 # local tags are stored in the current charset
179 writetags(fp, names, None, prevtags)
179 writetags(fp, names, None, prevtags)
180 for name in names:
180 for name in names:
181 self.hook('tag', node=hex(node), tag=name, local=local)
181 self.hook('tag', node=hex(node), tag=name, local=local)
182 return
182 return
183
183
184 try:
184 try:
185 fp = self.wfile('.hgtags', 'rb+')
185 fp = self.wfile('.hgtags', 'rb+')
186 except IOError:
186 except IOError:
187 fp = self.wfile('.hgtags', 'ab')
187 fp = self.wfile('.hgtags', 'ab')
188 else:
188 else:
189 prevtags = fp.read()
189 prevtags = fp.read()
190
190
191 # committed tags are stored in UTF-8
191 # committed tags are stored in UTF-8
192 writetags(fp, names, encoding.fromlocal, prevtags)
192 writetags(fp, names, encoding.fromlocal, prevtags)
193
193
194 if '.hgtags' not in self.dirstate:
194 if '.hgtags' not in self.dirstate:
195 self.add(['.hgtags'])
195 self.add(['.hgtags'])
196
196
197 m = match_.exact(self.root, '', ['.hgtags'])
197 m = match_.exact(self.root, '', ['.hgtags'])
198 tagnode = self.commit(message, user, date, extra=extra, match=m)
198 tagnode = self.commit(message, user, date, extra=extra, match=m)
199
199
200 for name in names:
200 for name in names:
201 self.hook('tag', node=hex(node), tag=name, local=local)
201 self.hook('tag', node=hex(node), tag=name, local=local)
202
202
203 return tagnode
203 return tagnode
204
204
205 def tag(self, names, node, message, local, user, date):
205 def tag(self, names, node, message, local, user, date):
206 '''tag a revision with one or more symbolic names.
206 '''tag a revision with one or more symbolic names.
207
207
208 names is a list of strings or, when adding a single tag, names may be a
208 names is a list of strings or, when adding a single tag, names may be a
209 string.
209 string.
210
210
211 if local is True, the tags are stored in a per-repository file.
211 if local is True, the tags are stored in a per-repository file.
212 otherwise, they are stored in the .hgtags file, and a new
212 otherwise, they are stored in the .hgtags file, and a new
213 changeset is committed with the change.
213 changeset is committed with the change.
214
214
215 keyword arguments:
215 keyword arguments:
216
216
217 local: whether to store tags in non-version-controlled file
217 local: whether to store tags in non-version-controlled file
218 (default False)
218 (default False)
219
219
220 message: commit message to use if committing
220 message: commit message to use if committing
221
221
222 user: name of user to use if committing
222 user: name of user to use if committing
223
223
224 date: date tuple to use if committing'''
224 date: date tuple to use if committing'''
225
225
226 for x in self.status()[:5]:
226 for x in self.status()[:5]:
227 if '.hgtags' in x:
227 if '.hgtags' in x:
228 raise util.Abort(_('working copy of .hgtags is changed '
228 raise util.Abort(_('working copy of .hgtags is changed '
229 '(please commit .hgtags manually)'))
229 '(please commit .hgtags manually)'))
230
230
231 self.tags() # instantiate the cache
231 self.tags() # instantiate the cache
232 self._tag(names, node, message, local, user, date)
232 self._tag(names, node, message, local, user, date)
233
233
234 def tags(self):
234 def tags(self):
235 '''return a mapping of tag to node'''
235 '''return a mapping of tag to node'''
236 if self.tagscache:
236 if self.tagscache:
237 return self.tagscache
237 return self.tagscache
238
238
239 globaltags = {}
239 globaltags = {}
240 tagtypes = {}
240 tagtypes = {}
241
241
242 def readtags(lines, fn, tagtype):
242 def readtags(lines, fn, tagtype):
243 filetags = {}
243 filetags = {}
244 count = 0
244 count = 0
245
245
246 def warn(msg):
246 def warn(msg):
247 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
247 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
248
248
249 for l in lines:
249 for l in lines:
250 count += 1
250 count += 1
251 if not l:
251 if not l:
252 continue
252 continue
253 s = l.split(" ", 1)
253 s = l.split(" ", 1)
254 if len(s) != 2:
254 if len(s) != 2:
255 warn(_("cannot parse entry"))
255 warn(_("cannot parse entry"))
256 continue
256 continue
257 node, key = s
257 node, key = s
258 key = encoding.tolocal(key.strip()) # stored in UTF-8
258 key = encoding.tolocal(key.strip()) # stored in UTF-8
259 try:
259 try:
260 bin_n = bin(node)
260 bin_n = bin(node)
261 except TypeError:
261 except TypeError:
262 warn(_("node '%s' is not well formed") % node)
262 warn(_("node '%s' is not well formed") % node)
263 continue
263 continue
264 if bin_n not in self.changelog.nodemap:
264 if bin_n not in self.changelog.nodemap:
265 # silently ignore as pull -r might cause this
265 # silently ignore as pull -r might cause this
266 continue
266 continue
267
267
268 h = []
268 h = []
269 if key in filetags:
269 if key in filetags:
270 n, h = filetags[key]
270 n, h = filetags[key]
271 h.append(n)
271 h.append(n)
272 filetags[key] = (bin_n, h)
272 filetags[key] = (bin_n, h)
273
273
274 for k, nh in filetags.iteritems():
274 for k, nh in filetags.iteritems():
275 if k not in globaltags:
275 if k not in globaltags:
276 globaltags[k] = nh
276 globaltags[k] = nh
277 tagtypes[k] = tagtype
277 tagtypes[k] = tagtype
278 continue
278 continue
279
279
280 # we prefer the global tag if:
280 # we prefer the global tag if:
281 # it supercedes us OR
281 # it supercedes us OR
282 # mutual supercedes and it has a higher rank
282 # mutual supercedes and it has a higher rank
283 # otherwise we win because we're tip-most
283 # otherwise we win because we're tip-most
284 an, ah = nh
284 an, ah = nh
285 bn, bh = globaltags[k]
285 bn, bh = globaltags[k]
286 if (bn != an and an in bh and
286 if (bn != an and an in bh and
287 (bn not in ah or len(bh) > len(ah))):
287 (bn not in ah or len(bh) > len(ah))):
288 an = bn
288 an = bn
289 ah.extend([n for n in bh if n not in ah])
289 ah.extend([n for n in bh if n not in ah])
290 globaltags[k] = an, ah
290 globaltags[k] = an, ah
291 tagtypes[k] = tagtype
291 tagtypes[k] = tagtype
292
292
293 seen = set()
293 seen = set()
294 f = None
294 f = None
295 ctxs = []
295 ctxs = []
296 for node in self.heads():
296 for node in self.heads():
297 try:
297 try:
298 fnode = self[node].filenode('.hgtags')
298 fnode = self[node].filenode('.hgtags')
299 except error.LookupError:
299 except error.LookupError:
300 continue
300 continue
301 if fnode not in seen:
301 if fnode not in seen:
302 seen.add(fnode)
302 seen.add(fnode)
303 if not f:
303 if not f:
304 f = self.filectx('.hgtags', fileid=fnode)
304 f = self.filectx('.hgtags', fileid=fnode)
305 else:
305 else:
306 f = f.filectx(fnode)
306 f = f.filectx(fnode)
307 ctxs.append(f)
307 ctxs.append(f)
308
308
309 # read the tags file from each head, ending with the tip
309 # read the tags file from each head, ending with the tip
310 for f in reversed(ctxs):
310 for f in reversed(ctxs):
311 readtags(f.data().splitlines(), f, "global")
311 readtags(f.data().splitlines(), f, "global")
312
312
313 try:
313 try:
314 data = encoding.fromlocal(self.opener("localtags").read())
314 data = encoding.fromlocal(self.opener("localtags").read())
315 # localtags are stored in the local character set
315 # localtags are stored in the local character set
316 # while the internal tag table is stored in UTF-8
316 # while the internal tag table is stored in UTF-8
317 readtags(data.splitlines(), "localtags", "local")
317 readtags(data.splitlines(), "localtags", "local")
318 except IOError:
318 except IOError:
319 pass
319 pass
320
320
321 self.tagscache = {}
321 self.tagscache = {}
322 self._tagstypecache = {}
322 self._tagstypecache = {}
323 for k, nh in globaltags.iteritems():
323 for k, nh in globaltags.iteritems():
324 n = nh[0]
324 n = nh[0]
325 if n != nullid:
325 if n != nullid:
326 self.tagscache[k] = n
326 self.tagscache[k] = n
327 self._tagstypecache[k] = tagtypes[k]
327 self._tagstypecache[k] = tagtypes[k]
328 self.tagscache['tip'] = self.changelog.tip()
328 self.tagscache['tip'] = self.changelog.tip()
329 return self.tagscache
329 return self.tagscache
330
330
331 def tagtype(self, tagname):
331 def tagtype(self, tagname):
332 '''
332 '''
333 return the type of the given tag. result can be:
333 return the type of the given tag. result can be:
334
334
335 'local' : a local tag
335 'local' : a local tag
336 'global' : a global tag
336 'global' : a global tag
337 None : tag does not exist
337 None : tag does not exist
338 '''
338 '''
339
339
340 self.tags()
340 self.tags()
341
341
342 return self._tagstypecache.get(tagname)
342 return self._tagstypecache.get(tagname)
343
343
344 def tagslist(self):
344 def tagslist(self):
345 '''return a list of tags ordered by revision'''
345 '''return a list of tags ordered by revision'''
346 l = []
346 l = []
347 for t, n in self.tags().iteritems():
347 for t, n in self.tags().iteritems():
348 try:
348 try:
349 r = self.changelog.rev(n)
349 r = self.changelog.rev(n)
350 except:
350 except:
351 r = -2 # sort to the beginning of the list if unknown
351 r = -2 # sort to the beginning of the list if unknown
352 l.append((r, t, n))
352 l.append((r, t, n))
353 return [(t, n) for r, t, n in sorted(l)]
353 return [(t, n) for r, t, n in sorted(l)]
354
354
355 def nodetags(self, node):
355 def nodetags(self, node):
356 '''return the tags associated with a node'''
356 '''return the tags associated with a node'''
357 if not self.nodetagscache:
357 if not self.nodetagscache:
358 self.nodetagscache = {}
358 self.nodetagscache = {}
359 for t, n in self.tags().iteritems():
359 for t, n in self.tags().iteritems():
360 self.nodetagscache.setdefault(n, []).append(t)
360 self.nodetagscache.setdefault(n, []).append(t)
361 return self.nodetagscache.get(node, [])
361 return self.nodetagscache.get(node, [])
362
362
363 def _branchtags(self, partial, lrev):
363 def _branchtags(self, partial, lrev):
364 # TODO: rename this function?
364 # TODO: rename this function?
365 tiprev = len(self) - 1
365 tiprev = len(self) - 1
366 if lrev != tiprev:
366 if lrev != tiprev:
367 self._updatebranchcache(partial, lrev+1, tiprev+1)
367 self._updatebranchcache(partial, lrev+1, tiprev+1)
368 self._writebranchcache(partial, self.changelog.tip(), tiprev)
368 self._writebranchcache(partial, self.changelog.tip(), tiprev)
369
369
370 return partial
370 return partial
371
371
372 def branchmap(self):
372 def branchmap(self):
373 tip = self.changelog.tip()
373 tip = self.changelog.tip()
374 if self.branchcache is not None and self._branchcachetip == tip:
374 if self.branchcache is not None and self._branchcachetip == tip:
375 return self.branchcache
375 return self.branchcache
376
376
377 oldtip = self._branchcachetip
377 oldtip = self._branchcachetip
378 self._branchcachetip = tip
378 self._branchcachetip = tip
379 if self.branchcache is None:
379 if self.branchcache is None:
380 self.branchcache = {} # avoid recursion in changectx
380 self.branchcache = {} # avoid recursion in changectx
381 else:
381 else:
382 self.branchcache.clear() # keep using the same dict
382 self.branchcache.clear() # keep using the same dict
383 if oldtip is None or oldtip not in self.changelog.nodemap:
383 if oldtip is None or oldtip not in self.changelog.nodemap:
384 partial, last, lrev = self._readbranchcache()
384 partial, last, lrev = self._readbranchcache()
385 else:
385 else:
386 lrev = self.changelog.rev(oldtip)
386 lrev = self.changelog.rev(oldtip)
387 partial = self._ubranchcache
387 partial = self._ubranchcache
388
388
389 self._branchtags(partial, lrev)
389 self._branchtags(partial, lrev)
390 # this private cache holds all heads (not just tips)
390 # this private cache holds all heads (not just tips)
391 self._ubranchcache = partial
391 self._ubranchcache = partial
392
392
393 # the branch cache is stored on disk as UTF-8, but in the local
393 # the branch cache is stored on disk as UTF-8, but in the local
394 # charset internally
394 # charset internally
395 for k, v in partial.iteritems():
395 for k, v in partial.iteritems():
396 self.branchcache[encoding.tolocal(k)] = v
396 self.branchcache[encoding.tolocal(k)] = v
397 return self.branchcache
397 return self.branchcache
398
398
399
399
400 def branchtags(self):
400 def branchtags(self):
401 '''return a dict where branch names map to the tipmost head of
401 '''return a dict where branch names map to the tipmost head of
402 the branch, open heads come before closed'''
402 the branch, open heads come before closed'''
403 bt = {}
403 bt = {}
404 for bn, heads in self.branchmap().iteritems():
404 for bn, heads in self.branchmap().iteritems():
405 head = None
405 head = None
406 for i in range(len(heads)-1, -1, -1):
406 for i in range(len(heads)-1, -1, -1):
407 h = heads[i]
407 h = heads[i]
408 if 'close' not in self.changelog.read(h)[5]:
408 if 'close' not in self.changelog.read(h)[5]:
409 head = h
409 head = h
410 break
410 break
411 # no open heads were found
411 # no open heads were found
412 if head is None:
412 if head is None:
413 head = heads[-1]
413 head = heads[-1]
414 bt[bn] = head
414 bt[bn] = head
415 return bt
415 return bt
416
416
417
417
418 def _readbranchcache(self):
418 def _readbranchcache(self):
419 partial = {}
419 partial = {}
420 try:
420 try:
421 f = self.opener("branchheads.cache")
421 f = self.opener("branchheads.cache")
422 lines = f.read().split('\n')
422 lines = f.read().split('\n')
423 f.close()
423 f.close()
424 except (IOError, OSError):
424 except (IOError, OSError):
425 return {}, nullid, nullrev
425 return {}, nullid, nullrev
426
426
427 try:
427 try:
428 last, lrev = lines.pop(0).split(" ", 1)
428 last, lrev = lines.pop(0).split(" ", 1)
429 last, lrev = bin(last), int(lrev)
429 last, lrev = bin(last), int(lrev)
430 if lrev >= len(self) or self[lrev].node() != last:
430 if lrev >= len(self) or self[lrev].node() != last:
431 # invalidate the cache
431 # invalidate the cache
432 raise ValueError('invalidating branch cache (tip differs)')
432 raise ValueError('invalidating branch cache (tip differs)')
433 for l in lines:
433 for l in lines:
434 if not l: continue
434 if not l: continue
435 node, label = l.split(" ", 1)
435 node, label = l.split(" ", 1)
436 partial.setdefault(label.strip(), []).append(bin(node))
436 partial.setdefault(label.strip(), []).append(bin(node))
437 except KeyboardInterrupt:
437 except KeyboardInterrupt:
438 raise
438 raise
439 except Exception, inst:
439 except Exception, inst:
440 if self.ui.debugflag:
440 if self.ui.debugflag:
441 self.ui.warn(str(inst), '\n')
441 self.ui.warn(str(inst), '\n')
442 partial, last, lrev = {}, nullid, nullrev
442 partial, last, lrev = {}, nullid, nullrev
443 return partial, last, lrev
443 return partial, last, lrev
444
444
445 def _writebranchcache(self, branches, tip, tiprev):
445 def _writebranchcache(self, branches, tip, tiprev):
446 try:
446 try:
447 f = self.opener("branchheads.cache", "w", atomictemp=True)
447 f = self.opener("branchheads.cache", "w", atomictemp=True)
448 f.write("%s %s\n" % (hex(tip), tiprev))
448 f.write("%s %s\n" % (hex(tip), tiprev))
449 for label, nodes in branches.iteritems():
449 for label, nodes in branches.iteritems():
450 for node in nodes:
450 for node in nodes:
451 f.write("%s %s\n" % (hex(node), label))
451 f.write("%s %s\n" % (hex(node), label))
452 f.rename()
452 f.rename()
453 except (IOError, OSError):
453 except (IOError, OSError):
454 pass
454 pass
455
455
    def _updatebranchcache(self, partial, start, end):
        """Fold revisions [start, end) into the branch-heads map *partial*.

        *partial* maps branch name -> list of head nodes and is updated in
        place. New nodes are appended, then heads made reachable from a newer
        node on the same branch are pruned, since they are no longer heads.
        """
        # collect new branch entries
        newbranches = {}
        for r in xrange(start, end):
            c = self[r]
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            bheads.extend(newnodes)
            if len(bheads) < 2:
                # a single head cannot shadow anything
                continue
            newbheads = []
            # starting from tip means fewer passes over reachable
            while newnodes:
                latest = newnodes.pop()
                if latest not in bheads:
                    # already pruned by a more recent node
                    continue
                # drop every old head reachable from 'latest'
                reachable = self.changelog.reachable(latest, bheads[0])
                bheads = [b for b in bheads if b not in reachable]
                newbheads.insert(0, latest)
            bheads.extend(newbheads)
            partial[branch] = bheads
481
481
482 def lookup(self, key):
482 def lookup(self, key):
483 if isinstance(key, int):
483 if isinstance(key, int):
484 return self.changelog.node(key)
484 return self.changelog.node(key)
485 elif key == '.':
485 elif key == '.':
486 return self.dirstate.parents()[0]
486 return self.dirstate.parents()[0]
487 elif key == 'null':
487 elif key == 'null':
488 return nullid
488 return nullid
489 elif key == 'tip':
489 elif key == 'tip':
490 return self.changelog.tip()
490 return self.changelog.tip()
491 n = self.changelog._match(key)
491 n = self.changelog._match(key)
492 if n:
492 if n:
493 return n
493 return n
494 if key in self.tags():
494 if key in self.tags():
495 return self.tags()[key]
495 return self.tags()[key]
496 if key in self.branchtags():
496 if key in self.branchtags():
497 return self.branchtags()[key]
497 return self.branchtags()[key]
498 n = self.changelog._partialmatch(key)
498 n = self.changelog._partialmatch(key)
499 if n:
499 if n:
500 return n
500 return n
501
501
502 # can't find key, check if it might have come from damaged dirstate
502 # can't find key, check if it might have come from damaged dirstate
503 if key in self.dirstate.parents():
503 if key in self.dirstate.parents():
504 raise error.Abort(_("working directory has unknown parent '%s'!")
504 raise error.Abort(_("working directory has unknown parent '%s'!")
505 % short(key))
505 % short(key))
506 try:
506 try:
507 if len(key) == 20:
507 if len(key) == 20:
508 key = hex(key)
508 key = hex(key)
509 except:
509 except:
510 pass
510 pass
511 raise error.RepoError(_("unknown revision '%s'") % key)
511 raise error.RepoError(_("unknown revision '%s'") % key)
512
512
    def local(self):
        """Return True: this repository is on local disk (vs. a remote peer)."""
        return True
515
515
516 def join(self, f):
516 def join(self, f):
517 return os.path.join(self.path, f)
517 return os.path.join(self.path, f)
518
518
519 def wjoin(self, f):
519 def wjoin(self, f):
520 return os.path.join(self.root, f)
520 return os.path.join(self.root, f)
521
521
522 def rjoin(self, f):
522 def rjoin(self, f):
523 return os.path.join(self.root, util.pconvert(f))
523 return os.path.join(self.root, util.pconvert(f))
524
524
525 def file(self, f):
525 def file(self, f):
526 if f[0] == '/':
526 if f[0] == '/':
527 f = f[1:]
527 f = f[1:]
528 return filelog.filelog(self.sopener, f)
528 return filelog.filelog(self.sopener, f)
529
529
    def changectx(self, changeid):
        """Return the changectx for *changeid* (delegates to self[changeid])."""
        return self[changeid]
532
532
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        # changeid=None means the working directory context
        return self[changeid].parents()
536
536
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
541
541
    def getcwd(self):
        """Return the current working directory as the dirstate sees it."""
        return self.dirstate.getcwd()
544
544
    def pathto(self, f, cwd=None):
        """Return repo-relative path *f* expressed relative to *cwd*."""
        return self.dirstate.pathto(f, cwd)
547
547
    def wfile(self, f, mode='r'):
        """Open working-directory file *f* via the working-dir opener."""
        return self.wopener(f, mode)
550
550
551 def _link(self, f):
551 def _link(self, f):
552 return os.path.islink(self.wjoin(f))
552 return os.path.islink(self.wjoin(f))
553
553
    def _filter(self, filter, filename, data):
        """Run *data* for *filename* through the configured *filter* chain.

        *filter* is a config section name (e.g. "encode" or "decode"). The
        compiled (matcher, fn, params) triples are cached in self.filterpats;
        only the first matching pattern is applied. Returns the (possibly
        transformed) data.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables filtering for this pattern
                    continue
                mf = match_.match(self.root, '', [pat])
                fn = None
                params = cmd
                # a command starting with a registered data-filter name uses
                # that in-process filter; the remainder becomes its params
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # otherwise run the command as an external shell filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                # only the first matching filter is applied
                break

        return data
584
584
    def adddatafilter(self, name, filter):
        """Register in-process data filter *filter* under *name* for _filter."""
        self._datafilters[name] = filter
587
587
    def wread(self, filename):
        """Read *filename* from the working directory and apply "encode"
        filters. For symlinks the link target is used as the content."""
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
594
594
    def wwrite(self, filename, data, flags):
        """Write *data* to working-directory file *filename*.

        "decode" filters are applied first. *flags* may contain 'l' (write a
        symlink whose target is the data) and 'x' (set the executable bit).
        """
        data = self._filter("decode", filename, data)
        try:
            # remove any existing file/symlink so the type can change
            os.unlink(self.wjoin(filename))
        except OSError:
            # best-effort: the file may simply not exist yet
            pass
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener(filename, 'w').write(data)
            if 'x' in flags:
                util.set_flags(self.wjoin(filename), False, True)
607
607
    def wwritedata(self, filename, data):
        """Return *data* run through the "decode" filters, without writing."""
        return self._filter("decode", filename, data)
610
610
    def transaction(self):
        """Start (or nest into) a store transaction and return it.

        If a transaction is already running, a nested handle is returned.
        Otherwise the dirstate and branch are journaled so rollback() can
        restore them, and a new transaction object is created.
        """
        # reuse a live transaction via the weakref, if any
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repo): journal an empty one
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on close, journal files become the corresponding undo files
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        # weakref so an abandoned transaction can be garbage-collected
        self._transref = weakref.ref(tr)
        return tr
637
637
    def recover(self):
        """Roll back an interrupted transaction from the journal.

        Returns True if a journal was found and rolled back, False otherwise.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
                # caches may describe rolled-back state; drop them
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
651
651
    def rollback(self):
        """Undo the last transaction using the saved undo files.

        Restores the store, dirstate and branch as journaled by
        transaction(); warns when no undo information exists.
        """
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % encoding.tolocal(self.dirstate.branch()))
                # both repo and dirstate caches are now stale
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            release(lock, wlock)
674
674
675 def invalidate(self):
675 def invalidate(self):
676 for a in "changelog manifest".split():
676 for a in "changelog manifest".split():
677 if a in self.__dict__:
677 if a in self.__dict__:
678 delattr(self, a)
678 delattr(self, a)
679 self.tagscache = None
679 self.tagscache = None
680 self._tagstypecache = None
680 self._tagstypecache = None
681 self.nodetagscache = None
681 self.nodetagscache = None
682 self.branchcache = None
682 self.branchcache = None
683 self._ubranchcache = None
683 self._ubranchcache = None
684 self._branchcachetip = None
684 self._branchcachetip = None
685
685
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file *lockname* and return the lock object.

        First tries a non-blocking acquire; if held and *wait* is true,
        retries with the configured ui.timeout. *releasefn*/*acquirefn* are
        optional callbacks run on release/after acquisition; *desc* is used
        in user-facing messages.
        """
        try:
            # timeout 0 = try once without blocking
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
700
700
    def lock(self, wait=True):
        """Acquire and return the store lock.

        An already-held lock is re-entered instead of re-acquired. On fresh
        acquisition, caches are invalidated (another process may have
        written). Only a weak reference is kept so the lock is released when
        the caller drops it.
        """
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
711
711
    def wlock(self, wait=True):
        """Acquire and return the working-directory lock.

        Mirrors lock(): re-enters a held lock, writes the dirstate on
        release and invalidates it on acquisition, and keeps only a weak
        reference to the lock object.
        """
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
723
723
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx is the file context to commit; manifest1/manifest2 are the
        parents' manifests; linkrev is the changelog revision being created;
        tr is the active transaction. The file name is appended to
        changelist when its content or flags changed. Returns the new (or
        reused) filelog node for the file.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                              (fname, cfname))
                for ancestor in self['.'].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
            # copy source is recorded in filelog metadata, not as a parent
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        # content unchanged: reuse the existing filelog node
        return fparent1
798
798
799 def commit(self, text="", user=None, date=None, match=None, force=False,
799 def commit(self, text="", user=None, date=None, match=None, force=False,
800 editor=False, extra={}):
800 editor=False, extra={}):
801 """Add a new revision to current repository.
801 """Add a new revision to current repository.
802
802
803 Revision information is gathered from the working directory,
803 Revision information is gathered from the working directory,
804 match can be used to filter the committed files. If editor is
804 match can be used to filter the committed files. If editor is
805 supplied, it is called to get a commit message.
805 supplied, it is called to get a commit message.
806 """
806 """
807
807
808 def fail(f, msg):
808 def fail(f, msg):
809 raise util.Abort('%s: %s' % (f, msg))
809 raise util.Abort('%s: %s' % (f, msg))
810
810
811 if not match:
811 if not match:
812 match = match_.always(self.root, '')
812 match = match_.always(self.root, '')
813
813
814 if not force:
814 if not force:
815 vdirs = []
815 vdirs = []
816 match.dir = vdirs.append
816 match.dir = vdirs.append
817 match.bad = fail
817 match.bad = fail
818
818
819 wlock = self.wlock()
819 wlock = self.wlock()
820 try:
820 try:
821 p1, p2 = self.dirstate.parents()
821 p1, p2 = self.dirstate.parents()
822 wctx = self[None]
822 wctx = self[None]
823
823
824 if (not force and p2 != nullid and match and
824 if (not force and p2 != nullid and match and
825 (match.files() or match.anypats())):
825 (match.files() or match.anypats())):
826 raise util.Abort(_('cannot partially commit a merge '
826 raise util.Abort(_('cannot partially commit a merge '
827 '(do not specify files or patterns)'))
827 '(do not specify files or patterns)'))
828
828
829 changes = self.status(match=match, clean=force)
829 changes = self.status(match=match, clean=force)
830 if force:
830 if force:
831 changes[0].extend(changes[6]) # mq may commit unchanged files
831 changes[0].extend(changes[6]) # mq may commit unchanged files
832
832
833 # check subrepos
833 # check subrepos
834 subs = []
834 subs = []
835 for s in wctx.substate:
835 for s in wctx.substate:
836 if match(s) and wctx.sub(s).dirty():
836 if match(s) and wctx.sub(s).dirty():
837 subs.append(s)
837 subs.append(s)
838 if subs and '.hgsubstate' not in changes[0]:
838 if subs and '.hgsubstate' not in changes[0]:
839 changes[0].insert(0, '.hgsubstate')
839 changes[0].insert(0, '.hgsubstate')
840
840
841 # make sure all explicit patterns are matched
841 # make sure all explicit patterns are matched
842 if not force and match.files():
842 if not force and match.files():
843 matched = set(changes[0] + changes[1] + changes[2])
843 matched = set(changes[0] + changes[1] + changes[2])
844
844
845 for f in match.files():
845 for f in match.files():
846 if f == '.' or f in matched or f in wctx.substate:
846 if f == '.' or f in matched or f in wctx.substate:
847 continue
847 continue
848 if f in changes[3]: # missing
848 if f in changes[3]: # missing
849 fail(f, _('file not found!'))
849 fail(f, _('file not found!'))
850 if f in vdirs: # visited directory
850 if f in vdirs: # visited directory
851 d = f + '/'
851 d = f + '/'
852 for mf in matched:
852 for mf in matched:
853 if mf.startswith(d):
853 if mf.startswith(d):
854 break
854 break
855 else:
855 else:
856 fail(f, _("no match under directory!"))
856 fail(f, _("no match under directory!"))
857 elif f not in self.dirstate:
857 elif f not in self.dirstate:
858 fail(f, _("file not tracked!"))
858 fail(f, _("file not tracked!"))
859
859
860 if (not force and not extra.get("close") and p2 == nullid
860 if (not force and not extra.get("close") and p2 == nullid
861 and not (changes[0] or changes[1] or changes[2])
861 and not (changes[0] or changes[1] or changes[2])
862 and self[None].branch() == self['.'].branch()):
862 and self[None].branch() == self['.'].branch()):
863 return None
863 return None
864
864
865 ms = merge_.mergestate(self)
865 ms = merge_.mergestate(self)
866 for f in changes[0]:
866 for f in changes[0]:
867 if f in ms and ms[f] == 'u':
867 if f in ms and ms[f] == 'u':
868 raise util.Abort(_("unresolved merge conflicts "
868 raise util.Abort(_("unresolved merge conflicts "
869 "(see hg resolve)"))
869 "(see hg resolve)"))
870
870
871 cctx = context.workingctx(self, (p1, p2), text, user, date,
871 cctx = context.workingctx(self, (p1, p2), text, user, date,
872 extra, changes)
872 extra, changes)
873 if editor:
873 if editor:
874 cctx._text = editor(self, cctx, subs)
874 cctx._text = editor(self, cctx, subs)
875
875
876 # commit subs
876 # commit subs
877 if subs:
877 if subs:
878 state = wctx.substate.copy()
878 state = wctx.substate.copy()
879 for s in subs:
879 for s in subs:
880 self.ui.status(_('committing subrepository %s\n') % s)
880 self.ui.status(_('committing subrepository %s\n') % s)
881 sr = wctx.sub(s).commit(cctx._text, user, date)
881 sr = wctx.sub(s).commit(cctx._text, user, date)
882 state[s] = (state[s][0], sr)
882 state[s] = (state[s][0], sr)
883 subrepo.writestate(self, state)
883 subrepo.writestate(self, state)
884
884
885 ret = self.commitctx(cctx, True)
885 ret = self.commitctx(cctx, True)
886
886
887 # update dirstate and mergestate
887 # update dirstate and mergestate
888 for f in changes[0] + changes[1]:
888 for f in changes[0] + changes[1]:
889 self.dirstate.normal(f)
889 self.dirstate.normal(f)
890 for f in changes[2]:
890 for f in changes[2]:
891 self.dirstate.forget(f)
891 self.dirstate.forget(f)
892 self.dirstate.setparents(ret)
892 self.dirstate.setparents(ret)
893 ms.reset()
893 ms.reset()
894
894
895 return ret
895 return ret
896
896
897 finally:
897 finally:
898 wlock.release()
898 wlock.release()
899
899
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.

        Revision information is passed via the context argument. When
        *error* is true, trouble reading a file aborts the commit;
        otherwise the file is treated as removed. Returns the new
        changelog node.
        """

        tr = lock = None
        removed = ctx.removed()
        p1, p2 = ctx.p1(), ctx.p2()
        m1 = p1.manifest().copy()
        m2 = p2.manifest()
        user = ctx.user()

        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        lock = self.lock()
        try:
            tr = self.transaction()
            # a proxy keeps revlogs from pinning the transaction alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except (OSError, IOError):
                    if error:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        removed.append(f)

            # update manifest
            m1.update(new)
            # only report files that actually existed in a parent
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # lets pretxncommit hooks see the pending changelog writes
            p = lambda: self.changelog.writepending() and self.root or ""
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            if self.branchcache:
                # refresh the branch cache to include the new head
                self.branchtags()

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            # drop our reference so an unclosed transaction aborts promptly
            del tr
            lock.release()
967
967
    def walk(self, match, node=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function; node=None walks the working directory
        '''
        return self[node].walk(match)
975
975
976 def status(self, node1='.', node2=None, match=None,
976 def status(self, node1='.', node2=None, match=None,
977 ignored=False, clean=False, unknown=False):
977 ignored=False, clean=False, unknown=False):
978 """return status of files between two nodes or node and working directory
978 """return status of files between two nodes or node and working directory
979
979
980 If node1 is None, use the first dirstate parent instead.
980 If node1 is None, use the first dirstate parent instead.
981 If node2 is None, compare node1 with working directory.
981 If node2 is None, compare node1 with working directory.
982 """
982 """
983
983
984 def mfmatches(ctx):
984 def mfmatches(ctx):
985 mf = ctx.manifest().copy()
985 mf = ctx.manifest().copy()
986 for fn in mf.keys():
986 for fn in mf.keys():
987 if not match(fn):
987 if not match(fn):
988 del mf[fn]
988 del mf[fn]
989 return mf
989 return mf
990
990
991 if isinstance(node1, context.changectx):
991 if isinstance(node1, context.changectx):
992 ctx1 = node1
992 ctx1 = node1
993 else:
993 else:
994 ctx1 = self[node1]
994 ctx1 = self[node1]
995 if isinstance(node2, context.changectx):
995 if isinstance(node2, context.changectx):
996 ctx2 = node2
996 ctx2 = node2
997 else:
997 else:
998 ctx2 = self[node2]
998 ctx2 = self[node2]
999
999
1000 working = ctx2.rev() is None
1000 working = ctx2.rev() is None
1001 parentworking = working and ctx1 == self['.']
1001 parentworking = working and ctx1 == self['.']
1002 match = match or match_.always(self.root, self.getcwd())
1002 match = match or match_.always(self.root, self.getcwd())
1003 listignored, listclean, listunknown = ignored, clean, unknown
1003 listignored, listclean, listunknown = ignored, clean, unknown
1004
1004
1005 # load earliest manifest first for caching reasons
1005 # load earliest manifest first for caching reasons
1006 if not working and ctx2.rev() < ctx1.rev():
1006 if not working and ctx2.rev() < ctx1.rev():
1007 ctx2.manifest()
1007 ctx2.manifest()
1008
1008
1009 if not parentworking:
1009 if not parentworking:
1010 def bad(f, msg):
1010 def bad(f, msg):
1011 if f not in ctx1:
1011 if f not in ctx1:
1012 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1012 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1013 match.bad = bad
1013 match.bad = bad
1014
1014
1015 if working: # we need to scan the working dir
1015 if working: # we need to scan the working dir
1016 s = self.dirstate.status(match, listignored, listclean, listunknown)
1016 s = self.dirstate.status(match, listignored, listclean, listunknown)
1017 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1017 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1018
1018
1019 # check for any possibly clean files
1019 # check for any possibly clean files
1020 if parentworking and cmp:
1020 if parentworking and cmp:
1021 fixup = []
1021 fixup = []
1022 # do a full compare of any files that might have changed
1022 # do a full compare of any files that might have changed
1023 for f in sorted(cmp):
1023 for f in sorted(cmp):
1024 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1024 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1025 or ctx1[f].cmp(ctx2[f].data())):
1025 or ctx1[f].cmp(ctx2[f].data())):
1026 modified.append(f)
1026 modified.append(f)
1027 else:
1027 else:
1028 fixup.append(f)
1028 fixup.append(f)
1029
1029
1030 if listclean:
1030 if listclean:
1031 clean += fixup
1031 clean += fixup
1032
1032
1033 # update dirstate for files that are actually clean
1033 # update dirstate for files that are actually clean
1034 if fixup:
1034 if fixup:
1035 try:
1035 try:
1036 # updating the dirstate is optional
1036 # updating the dirstate is optional
1037 # so we don't wait on the lock
1037 # so we don't wait on the lock
1038 wlock = self.wlock(False)
1038 wlock = self.wlock(False)
1039 try:
1039 try:
1040 for f in fixup:
1040 for f in fixup:
1041 self.dirstate.normal(f)
1041 self.dirstate.normal(f)
1042 finally:
1042 finally:
1043 wlock.release()
1043 wlock.release()
1044 except error.LockError:
1044 except error.LockError:
1045 pass
1045 pass
1046
1046
1047 if not parentworking:
1047 if not parentworking:
1048 mf1 = mfmatches(ctx1)
1048 mf1 = mfmatches(ctx1)
1049 if working:
1049 if working:
1050 # we are comparing working dir against non-parent
1050 # we are comparing working dir against non-parent
1051 # generate a pseudo-manifest for the working dir
1051 # generate a pseudo-manifest for the working dir
1052 mf2 = mfmatches(self['.'])
1052 mf2 = mfmatches(self['.'])
1053 for f in cmp + modified + added:
1053 for f in cmp + modified + added:
1054 mf2[f] = None
1054 mf2[f] = None
1055 mf2.set(f, ctx2.flags(f))
1055 mf2.set(f, ctx2.flags(f))
1056 for f in removed:
1056 for f in removed:
1057 if f in mf2:
1057 if f in mf2:
1058 del mf2[f]
1058 del mf2[f]
1059 else:
1059 else:
1060 # we are comparing two revisions
1060 # we are comparing two revisions
1061 deleted, unknown, ignored = [], [], []
1061 deleted, unknown, ignored = [], [], []
1062 mf2 = mfmatches(ctx2)
1062 mf2 = mfmatches(ctx2)
1063
1063
1064 modified, added, clean = [], [], []
1064 modified, added, clean = [], [], []
1065 for fn in mf2:
1065 for fn in mf2:
1066 if fn in mf1:
1066 if fn in mf1:
1067 if (mf1.flags(fn) != mf2.flags(fn) or
1067 if (mf1.flags(fn) != mf2.flags(fn) or
1068 (mf1[fn] != mf2[fn] and
1068 (mf1[fn] != mf2[fn] and
1069 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1069 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1070 modified.append(fn)
1070 modified.append(fn)
1071 elif listclean:
1071 elif listclean:
1072 clean.append(fn)
1072 clean.append(fn)
1073 del mf1[fn]
1073 del mf1[fn]
1074 else:
1074 else:
1075 added.append(fn)
1075 added.append(fn)
1076 removed = mf1.keys()
1076 removed = mf1.keys()
1077
1077
1078 r = modified, added, removed, deleted, unknown, ignored, clean
1078 r = modified, added, removed, deleted, unknown, ignored, clean
1079 [l.sort() for l in r]
1079 [l.sort() for l in r]
1080 return r
1080 return r
1081
1081
1082 def add(self, list):
1082 def add(self, list):
1083 wlock = self.wlock()
1083 wlock = self.wlock()
1084 try:
1084 try:
1085 rejected = []
1085 rejected = []
1086 for f in list:
1086 for f in list:
1087 p = self.wjoin(f)
1087 p = self.wjoin(f)
1088 try:
1088 try:
1089 st = os.lstat(p)
1089 st = os.lstat(p)
1090 except:
1090 except:
1091 self.ui.warn(_("%s does not exist!\n") % f)
1091 self.ui.warn(_("%s does not exist!\n") % f)
1092 rejected.append(f)
1092 rejected.append(f)
1093 continue
1093 continue
1094 if st.st_size > 10000000:
1094 if st.st_size > 10000000:
1095 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 self.ui.warn(_("%s: files over 10MB may cause memory and"
1096 " performance problems\n"
1096 " performance problems\n"
1097 "(use 'hg revert %s' to unadd the file)\n")
1097 "(use 'hg revert %s' to unadd the file)\n")
1098 % (f, f))
1098 % (f, f))
1099 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1100 self.ui.warn(_("%s not added: only files and symlinks "
1100 self.ui.warn(_("%s not added: only files and symlinks "
1101 "supported currently\n") % f)
1101 "supported currently\n") % f)
1102 rejected.append(p)
1102 rejected.append(p)
1103 elif self.dirstate[f] in 'amn':
1103 elif self.dirstate[f] in 'amn':
1104 self.ui.warn(_("%s already tracked!\n") % f)
1104 self.ui.warn(_("%s already tracked!\n") % f)
1105 elif self.dirstate[f] == 'r':
1105 elif self.dirstate[f] == 'r':
1106 self.dirstate.normallookup(f)
1106 self.dirstate.normallookup(f)
1107 else:
1107 else:
1108 self.dirstate.add(f)
1108 self.dirstate.add(f)
1109 return rejected
1109 return rejected
1110 finally:
1110 finally:
1111 wlock.release()
1111 wlock.release()
1112
1112
1113 def forget(self, list):
1113 def forget(self, list):
1114 wlock = self.wlock()
1114 wlock = self.wlock()
1115 try:
1115 try:
1116 for f in list:
1116 for f in list:
1117 if self.dirstate[f] != 'a':
1117 if self.dirstate[f] != 'a':
1118 self.ui.warn(_("%s not added!\n") % f)
1118 self.ui.warn(_("%s not added!\n") % f)
1119 else:
1119 else:
1120 self.dirstate.forget(f)
1120 self.dirstate.forget(f)
1121 finally:
1121 finally:
1122 wlock.release()
1122 wlock.release()
1123
1123
1124 def remove(self, list, unlink=False):
1124 def remove(self, list, unlink=False):
1125 if unlink:
1125 if unlink:
1126 for f in list:
1126 for f in list:
1127 try:
1127 try:
1128 util.unlink(self.wjoin(f))
1128 util.unlink(self.wjoin(f))
1129 except OSError, inst:
1129 except OSError, inst:
1130 if inst.errno != errno.ENOENT:
1130 if inst.errno != errno.ENOENT:
1131 raise
1131 raise
1132 wlock = self.wlock()
1132 wlock = self.wlock()
1133 try:
1133 try:
1134 for f in list:
1134 for f in list:
1135 if unlink and os.path.exists(self.wjoin(f)):
1135 if unlink and os.path.exists(self.wjoin(f)):
1136 self.ui.warn(_("%s still exists!\n") % f)
1136 self.ui.warn(_("%s still exists!\n") % f)
1137 elif self.dirstate[f] == 'a':
1137 elif self.dirstate[f] == 'a':
1138 self.dirstate.forget(f)
1138 self.dirstate.forget(f)
1139 elif f not in self.dirstate:
1139 elif f not in self.dirstate:
1140 self.ui.warn(_("%s not tracked!\n") % f)
1140 self.ui.warn(_("%s not tracked!\n") % f)
1141 else:
1141 else:
1142 self.dirstate.remove(f)
1142 self.dirstate.remove(f)
1143 finally:
1143 finally:
1144 wlock.release()
1144 wlock.release()
1145
1145
1146 def undelete(self, list):
1146 def undelete(self, list):
1147 manifests = [self.manifest.read(self.changelog.read(p)[0])
1147 manifests = [self.manifest.read(self.changelog.read(p)[0])
1148 for p in self.dirstate.parents() if p != nullid]
1148 for p in self.dirstate.parents() if p != nullid]
1149 wlock = self.wlock()
1149 wlock = self.wlock()
1150 try:
1150 try:
1151 for f in list:
1151 for f in list:
1152 if self.dirstate[f] != 'r':
1152 if self.dirstate[f] != 'r':
1153 self.ui.warn(_("%s not removed!\n") % f)
1153 self.ui.warn(_("%s not removed!\n") % f)
1154 else:
1154 else:
1155 m = f in manifests[0] and manifests[0] or manifests[1]
1155 m = f in manifests[0] and manifests[0] or manifests[1]
1156 t = self.file(f).read(m[f])
1156 t = self.file(f).read(m[f])
1157 self.wwrite(f, t, m.flags(f))
1157 self.wwrite(f, t, m.flags(f))
1158 self.dirstate.normal(f)
1158 self.dirstate.normal(f)
1159 finally:
1159 finally:
1160 wlock.release()
1160 wlock.release()
1161
1161
1162 def copy(self, source, dest):
1162 def copy(self, source, dest):
1163 p = self.wjoin(dest)
1163 p = self.wjoin(dest)
1164 if not (os.path.exists(p) or os.path.islink(p)):
1164 if not (os.path.exists(p) or os.path.islink(p)):
1165 self.ui.warn(_("%s does not exist!\n") % dest)
1165 self.ui.warn(_("%s does not exist!\n") % dest)
1166 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 elif not (os.path.isfile(p) or os.path.islink(p)):
1167 self.ui.warn(_("copy failed: %s is not a file or a "
1167 self.ui.warn(_("copy failed: %s is not a file or a "
1168 "symbolic link\n") % dest)
1168 "symbolic link\n") % dest)
1169 else:
1169 else:
1170 wlock = self.wlock()
1170 wlock = self.wlock()
1171 try:
1171 try:
1172 if self.dirstate[dest] in '?r':
1172 if self.dirstate[dest] in '?r':
1173 self.dirstate.add(dest)
1173 self.dirstate.add(dest)
1174 self.dirstate.copy(source, dest)
1174 self.dirstate.copy(source, dest)
1175 finally:
1175 finally:
1176 wlock.release()
1176 wlock.release()
1177
1177
1178 def heads(self, start=None):
1178 def heads(self, start=None):
1179 heads = self.changelog.heads(start)
1179 heads = self.changelog.heads(start)
1180 # sort the output in rev descending order
1180 # sort the output in rev descending order
1181 heads = [(-self.changelog.rev(h), h) for h in heads]
1181 heads = [(-self.changelog.rev(h), h) for h in heads]
1182 return [n for (r, n) in sorted(heads)]
1182 return [n for (r, n) in sorted(heads)]
1183
1183
1184 def branchheads(self, branch=None, start=None, closed=False):
1184 def branchheads(self, branch=None, start=None, closed=False):
1185 if branch is None:
1185 if branch is None:
1186 branch = self[None].branch()
1186 branch = self[None].branch()
1187 branches = self.branchmap()
1187 branches = self.branchmap()
1188 if branch not in branches:
1188 if branch not in branches:
1189 return []
1189 return []
1190 bheads = branches[branch]
1190 bheads = branches[branch]
1191 # the cache returns heads ordered lowest to highest
1191 # the cache returns heads ordered lowest to highest
1192 bheads.reverse()
1192 bheads.reverse()
1193 if start is not None:
1193 if start is not None:
1194 # filter out the heads that cannot be reached from startrev
1194 # filter out the heads that cannot be reached from startrev
1195 bheads = self.changelog.nodesbetween([start], bheads)[2]
1195 bheads = self.changelog.nodesbetween([start], bheads)[2]
1196 if not closed:
1196 if not closed:
1197 bheads = [h for h in bheads if
1197 bheads = [h for h in bheads if
1198 ('close' not in self.changelog.read(h)[5])]
1198 ('close' not in self.changelog.read(h)[5])]
1199 return bheads
1199 return bheads
1200
1200
1201 def branches(self, nodes):
1201 def branches(self, nodes):
1202 if not nodes:
1202 if not nodes:
1203 nodes = [self.changelog.tip()]
1203 nodes = [self.changelog.tip()]
1204 b = []
1204 b = []
1205 for n in nodes:
1205 for n in nodes:
1206 t = n
1206 t = n
1207 while 1:
1207 while 1:
1208 p = self.changelog.parents(n)
1208 p = self.changelog.parents(n)
1209 if p[1] != nullid or p[0] == nullid:
1209 if p[1] != nullid or p[0] == nullid:
1210 b.append((t, n, p[0], p[1]))
1210 b.append((t, n, p[0], p[1]))
1211 break
1211 break
1212 n = p[0]
1212 n = p[0]
1213 return b
1213 return b
1214
1214
1215 def between(self, pairs):
1215 def between(self, pairs):
1216 r = []
1216 r = []
1217
1217
1218 for top, bottom in pairs:
1218 for top, bottom in pairs:
1219 n, l, i = top, [], 0
1219 n, l, i = top, [], 0
1220 f = 1
1220 f = 1
1221
1221
1222 while n != bottom and n != nullid:
1222 while n != bottom and n != nullid:
1223 p = self.changelog.parents(n)[0]
1223 p = self.changelog.parents(n)[0]
1224 if i == f:
1224 if i == f:
1225 l.append(n)
1225 l.append(n)
1226 f = f * 2
1226 f = f * 2
1227 n = p
1227 n = p
1228 i += 1
1228 i += 1
1229
1229
1230 r.append(l)
1230 r.append(l)
1231
1231
1232 return r
1232 return r
1233
1233
1234 def findincoming(self, remote, base=None, heads=None, force=False):
1234 def findincoming(self, remote, base=None, heads=None, force=False):
1235 """Return list of roots of the subsets of missing nodes from remote
1235 """Return list of roots of the subsets of missing nodes from remote
1236
1236
1237 If base dict is specified, assume that these nodes and their parents
1237 If base dict is specified, assume that these nodes and their parents
1238 exist on the remote side and that no child of a node of base exists
1238 exist on the remote side and that no child of a node of base exists
1239 in both remote and self.
1239 in both remote and self.
1240 Furthermore base will be updated to include the nodes that exists
1240 Furthermore base will be updated to include the nodes that exists
1241 in self and remote but no children exists in self and remote.
1241 in self and remote but no children exists in self and remote.
1242 If a list of heads is specified, return only nodes which are heads
1242 If a list of heads is specified, return only nodes which are heads
1243 or ancestors of these heads.
1243 or ancestors of these heads.
1244
1244
1245 All the ancestors of base are in self and in remote.
1245 All the ancestors of base are in self and in remote.
1246 All the descendants of the list returned are missing in self.
1246 All the descendants of the list returned are missing in self.
1247 (and so we know that the rest of the nodes are missing in remote, see
1247 (and so we know that the rest of the nodes are missing in remote, see
1248 outgoing)
1248 outgoing)
1249 """
1249 """
1250 return self.findcommonincoming(remote, base, heads, force)[1]
1250 return self.findcommonincoming(remote, base, heads, force)[1]
1251
1251
1252 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1252 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1253 """Return a tuple (common, missing roots, heads) used to identify
1253 """Return a tuple (common, missing roots, heads) used to identify
1254 missing nodes from remote.
1254 missing nodes from remote.
1255
1255
1256 If base dict is specified, assume that these nodes and their parents
1256 If base dict is specified, assume that these nodes and their parents
1257 exist on the remote side and that no child of a node of base exists
1257 exist on the remote side and that no child of a node of base exists
1258 in both remote and self.
1258 in both remote and self.
1259 Furthermore base will be updated to include the nodes that exists
1259 Furthermore base will be updated to include the nodes that exists
1260 in self and remote but no children exists in self and remote.
1260 in self and remote but no children exists in self and remote.
1261 If a list of heads is specified, return only nodes which are heads
1261 If a list of heads is specified, return only nodes which are heads
1262 or ancestors of these heads.
1262 or ancestors of these heads.
1263
1263
1264 All the ancestors of base are in self and in remote.
1264 All the ancestors of base are in self and in remote.
1265 """
1265 """
1266 m = self.changelog.nodemap
1266 m = self.changelog.nodemap
1267 search = []
1267 search = []
1268 fetch = set()
1268 fetch = set()
1269 seen = set()
1269 seen = set()
1270 seenbranch = set()
1270 seenbranch = set()
1271 if base is None:
1271 if base is None:
1272 base = {}
1272 base = {}
1273
1273
1274 if not heads:
1274 if not heads:
1275 heads = remote.heads()
1275 heads = remote.heads()
1276
1276
1277 if self.changelog.tip() == nullid:
1277 if self.changelog.tip() == nullid:
1278 base[nullid] = 1
1278 base[nullid] = 1
1279 if heads != [nullid]:
1279 if heads != [nullid]:
1280 return [nullid], [nullid], list(heads)
1280 return [nullid], [nullid], list(heads)
1281 return [nullid], [], []
1281 return [nullid], [], []
1282
1282
1283 # assume we're closer to the tip than the root
1283 # assume we're closer to the tip than the root
1284 # and start by examining the heads
1284 # and start by examining the heads
1285 self.ui.status(_("searching for changes\n"))
1285 self.ui.status(_("searching for changes\n"))
1286
1286
1287 unknown = []
1287 unknown = []
1288 for h in heads:
1288 for h in heads:
1289 if h not in m:
1289 if h not in m:
1290 unknown.append(h)
1290 unknown.append(h)
1291 else:
1291 else:
1292 base[h] = 1
1292 base[h] = 1
1293
1293
1294 heads = unknown
1294 heads = unknown
1295 if not unknown:
1295 if not unknown:
1296 return base.keys(), [], []
1296 return base.keys(), [], []
1297
1297
1298 req = set(unknown)
1298 req = set(unknown)
1299 reqcnt = 0
1299 reqcnt = 0
1300
1300
1301 # search through remote branches
1301 # search through remote branches
1302 # a 'branch' here is a linear segment of history, with four parts:
1302 # a 'branch' here is a linear segment of history, with four parts:
1303 # head, root, first parent, second parent
1303 # head, root, first parent, second parent
1304 # (a branch always has two parents (or none) by definition)
1304 # (a branch always has two parents (or none) by definition)
1305 unknown = remote.branches(unknown)
1305 unknown = remote.branches(unknown)
1306 while unknown:
1306 while unknown:
1307 r = []
1307 r = []
1308 while unknown:
1308 while unknown:
1309 n = unknown.pop(0)
1309 n = unknown.pop(0)
1310 if n[0] in seen:
1310 if n[0] in seen:
1311 continue
1311 continue
1312
1312
1313 self.ui.debug(_("examining %s:%s\n")
1313 self.ui.debug(_("examining %s:%s\n")
1314 % (short(n[0]), short(n[1])))
1314 % (short(n[0]), short(n[1])))
1315 if n[0] == nullid: # found the end of the branch
1315 if n[0] == nullid: # found the end of the branch
1316 pass
1316 pass
1317 elif n in seenbranch:
1317 elif n in seenbranch:
1318 self.ui.debug(_("branch already found\n"))
1318 self.ui.debug(_("branch already found\n"))
1319 continue
1319 continue
1320 elif n[1] and n[1] in m: # do we know the base?
1320 elif n[1] and n[1] in m: # do we know the base?
1321 self.ui.debug(_("found incomplete branch %s:%s\n")
1321 self.ui.debug(_("found incomplete branch %s:%s\n")
1322 % (short(n[0]), short(n[1])))
1322 % (short(n[0]), short(n[1])))
1323 search.append(n[0:2]) # schedule branch range for scanning
1323 search.append(n[0:2]) # schedule branch range for scanning
1324 seenbranch.add(n)
1324 seenbranch.add(n)
1325 else:
1325 else:
1326 if n[1] not in seen and n[1] not in fetch:
1326 if n[1] not in seen and n[1] not in fetch:
1327 if n[2] in m and n[3] in m:
1327 if n[2] in m and n[3] in m:
1328 self.ui.debug(_("found new changeset %s\n") %
1328 self.ui.debug(_("found new changeset %s\n") %
1329 short(n[1]))
1329 short(n[1]))
1330 fetch.add(n[1]) # earliest unknown
1330 fetch.add(n[1]) # earliest unknown
1331 for p in n[2:4]:
1331 for p in n[2:4]:
1332 if p in m:
1332 if p in m:
1333 base[p] = 1 # latest known
1333 base[p] = 1 # latest known
1334
1334
1335 for p in n[2:4]:
1335 for p in n[2:4]:
1336 if p not in req and p not in m:
1336 if p not in req and p not in m:
1337 r.append(p)
1337 r.append(p)
1338 req.add(p)
1338 req.add(p)
1339 seen.add(n[0])
1339 seen.add(n[0])
1340
1340
1341 if r:
1341 if r:
1342 reqcnt += 1
1342 reqcnt += 1
1343 self.ui.debug(_("request %d: %s\n") %
1343 self.ui.debug(_("request %d: %s\n") %
1344 (reqcnt, " ".join(map(short, r))))
1344 (reqcnt, " ".join(map(short, r))))
1345 for p in xrange(0, len(r), 10):
1345 for p in xrange(0, len(r), 10):
1346 for b in remote.branches(r[p:p+10]):
1346 for b in remote.branches(r[p:p+10]):
1347 self.ui.debug(_("received %s:%s\n") %
1347 self.ui.debug(_("received %s:%s\n") %
1348 (short(b[0]), short(b[1])))
1348 (short(b[0]), short(b[1])))
1349 unknown.append(b)
1349 unknown.append(b)
1350
1350
1351 # do binary search on the branches we found
1351 # do binary search on the branches we found
1352 while search:
1352 while search:
1353 newsearch = []
1353 newsearch = []
1354 reqcnt += 1
1354 reqcnt += 1
1355 for n, l in zip(search, remote.between(search)):
1355 for n, l in zip(search, remote.between(search)):
1356 l.append(n[1])
1356 l.append(n[1])
1357 p = n[0]
1357 p = n[0]
1358 f = 1
1358 f = 1
1359 for i in l:
1359 for i in l:
1360 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1360 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1361 if i in m:
1361 if i in m:
1362 if f <= 2:
1362 if f <= 2:
1363 self.ui.debug(_("found new branch changeset %s\n") %
1363 self.ui.debug(_("found new branch changeset %s\n") %
1364 short(p))
1364 short(p))
1365 fetch.add(p)
1365 fetch.add(p)
1366 base[i] = 1
1366 base[i] = 1
1367 else:
1367 else:
1368 self.ui.debug(_("narrowed branch search to %s:%s\n")
1368 self.ui.debug(_("narrowed branch search to %s:%s\n")
1369 % (short(p), short(i)))
1369 % (short(p), short(i)))
1370 newsearch.append((p, i))
1370 newsearch.append((p, i))
1371 break
1371 break
1372 p, f = i, f * 2
1372 p, f = i, f * 2
1373 search = newsearch
1373 search = newsearch
1374
1374
1375 # sanity check our fetch list
1375 # sanity check our fetch list
1376 for f in fetch:
1376 for f in fetch:
1377 if f in m:
1377 if f in m:
1378 raise error.RepoError(_("already have changeset ")
1378 raise error.RepoError(_("already have changeset ")
1379 + short(f[:4]))
1379 + short(f[:4]))
1380
1380
1381 if base.keys() == [nullid]:
1381 if base.keys() == [nullid]:
1382 if force:
1382 if force:
1383 self.ui.warn(_("warning: repository is unrelated\n"))
1383 self.ui.warn(_("warning: repository is unrelated\n"))
1384 else:
1384 else:
1385 raise util.Abort(_("repository is unrelated"))
1385 raise util.Abort(_("repository is unrelated"))
1386
1386
1387 self.ui.debug(_("found new changesets starting at ") +
1387 self.ui.debug(_("found new changesets starting at ") +
1388 " ".join([short(f) for f in fetch]) + "\n")
1388 " ".join([short(f) for f in fetch]) + "\n")
1389
1389
1390 self.ui.debug(_("%d total queries\n") % reqcnt)
1390 self.ui.debug(_("%d total queries\n") % reqcnt)
1391
1391
1392 return base.keys(), list(fetch), heads
1392 return base.keys(), list(fetch), heads
1393
1393
1394 def findoutgoing(self, remote, base=None, heads=None, force=False):
1394 def findoutgoing(self, remote, base=None, heads=None, force=False):
1395 """Return list of nodes that are roots of subsets not in remote
1395 """Return list of nodes that are roots of subsets not in remote
1396
1396
1397 If base dict is specified, assume that these nodes and their parents
1397 If base dict is specified, assume that these nodes and their parents
1398 exist on the remote side.
1398 exist on the remote side.
1399 If a list of heads is specified, return only nodes which are heads
1399 If a list of heads is specified, return only nodes which are heads
1400 or ancestors of these heads, and return a second element which
1400 or ancestors of these heads, and return a second element which
1401 contains all remote heads which get new children.
1401 contains all remote heads which get new children.
1402 """
1402 """
1403 if base is None:
1403 if base is None:
1404 base = {}
1404 base = {}
1405 self.findincoming(remote, base, heads, force=force)
1405 self.findincoming(remote, base, heads, force=force)
1406
1406
1407 self.ui.debug(_("common changesets up to ")
1407 self.ui.debug(_("common changesets up to ")
1408 + " ".join(map(short, base.keys())) + "\n")
1408 + " ".join(map(short, base.keys())) + "\n")
1409
1409
1410 remain = set(self.changelog.nodemap)
1410 remain = set(self.changelog.nodemap)
1411
1411
1412 # prune everything remote has from the tree
1412 # prune everything remote has from the tree
1413 remain.remove(nullid)
1413 remain.remove(nullid)
1414 remove = base.keys()
1414 remove = base.keys()
1415 while remove:
1415 while remove:
1416 n = remove.pop(0)
1416 n = remove.pop(0)
1417 if n in remain:
1417 if n in remain:
1418 remain.remove(n)
1418 remain.remove(n)
1419 for p in self.changelog.parents(n):
1419 for p in self.changelog.parents(n):
1420 remove.append(p)
1420 remove.append(p)
1421
1421
1422 # find every node whose parents have been pruned
1422 # find every node whose parents have been pruned
1423 subset = []
1423 subset = []
1424 # find every remote head that will get new children
1424 # find every remote head that will get new children
1425 updated_heads = set()
1425 updated_heads = set()
1426 for n in remain:
1426 for n in remain:
1427 p1, p2 = self.changelog.parents(n)
1427 p1, p2 = self.changelog.parents(n)
1428 if p1 not in remain and p2 not in remain:
1428 if p1 not in remain and p2 not in remain:
1429 subset.append(n)
1429 subset.append(n)
1430 if heads:
1430 if heads:
1431 if p1 in heads:
1431 if p1 in heads:
1432 updated_heads.add(p1)
1432 updated_heads.add(p1)
1433 if p2 in heads:
1433 if p2 in heads:
1434 updated_heads.add(p2)
1434 updated_heads.add(p2)
1435
1435
1436 # this is the set of all roots we have to push
1436 # this is the set of all roots we have to push
1437 if heads:
1437 if heads:
1438 return subset, list(updated_heads)
1438 return subset, list(updated_heads)
1439 else:
1439 else:
1440 return subset
1440 return subset
1441
1441
1442 def pull(self, remote, heads=None, force=False):
1442 def pull(self, remote, heads=None, force=False):
1443 lock = self.lock()
1443 lock = self.lock()
1444 try:
1444 try:
1445 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1445 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1446 force=force)
1446 force=force)
1447 if fetch == [nullid]:
1447 if fetch == [nullid]:
1448 self.ui.status(_("requesting all changes\n"))
1448 self.ui.status(_("requesting all changes\n"))
1449
1449
1450 if not fetch:
1450 if not fetch:
1451 self.ui.status(_("no changes found\n"))
1451 self.ui.status(_("no changes found\n"))
1452 return 0
1452 return 0
1453
1453
1454 if heads is None and remote.capable('changegroupsubset'):
1454 if heads is None and remote.capable('changegroupsubset'):
1455 heads = rheads
1455 heads = rheads
1456
1456
1457 if heads is None:
1457 if heads is None:
1458 cg = remote.changegroup(fetch, 'pull')
1458 cg = remote.changegroup(fetch, 'pull')
1459 else:
1459 else:
1460 if not remote.capable('changegroupsubset'):
1460 if not remote.capable('changegroupsubset'):
1461 raise util.Abort(_("Partial pull cannot be done because "
1461 raise util.Abort(_("Partial pull cannot be done because "
1462 "other repository doesn't support "
1462 "other repository doesn't support "
1463 "changegroupsubset."))
1463 "changegroupsubset."))
1464 cg = remote.changegroupsubset(fetch, heads, 'pull')
1464 cg = remote.changegroupsubset(fetch, heads, 'pull')
1465 return self.addchangegroup(cg, 'pull', remote.url())
1465 return self.addchangegroup(cg, 'pull', remote.url())
1466 finally:
1466 finally:
1467 lock.release()
1467 lock.release()
1468
1468
1469 def push(self, remote, force=False, revs=None):
1469 def push(self, remote, force=False, revs=None):
1470 # there are two ways to push to remote repo:
1470 # there are two ways to push to remote repo:
1471 #
1471 #
1472 # addchangegroup assumes local user can lock remote
1472 # addchangegroup assumes local user can lock remote
1473 # repo (local filesystem, old ssh servers).
1473 # repo (local filesystem, old ssh servers).
1474 #
1474 #
1475 # unbundle assumes local user cannot lock remote repo (new ssh
1475 # unbundle assumes local user cannot lock remote repo (new ssh
1476 # servers, http servers).
1476 # servers, http servers).
1477
1477
1478 if remote.capable('unbundle'):
1478 if remote.capable('unbundle'):
1479 return self.push_unbundle(remote, force, revs)
1479 return self.push_unbundle(remote, force, revs)
1480 return self.push_addchangegroup(remote, force, revs)
1480 return self.push_addchangegroup(remote, force, revs)
1481
1481
1482 def prepush(self, remote, force, revs):
1482 def prepush(self, remote, force, revs):
1483 common = {}
1483 common = {}
1484 remote_heads = remote.heads()
1484 remote_heads = remote.heads()
1485 inc = self.findincoming(remote, common, remote_heads, force=force)
1485 inc = self.findincoming(remote, common, remote_heads, force=force)
1486
1486
1487 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1487 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1488 if revs is not None:
1488 if revs is not None:
1489 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1489 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1490 else:
1490 else:
1491 bases, heads = update, self.changelog.heads()
1491 bases, heads = update, self.changelog.heads()
1492
1492
1493 def checkbranch(lheads, rheads, updatelh):
1493 def checkbranch(lheads, rheads, updatelh):
1494 '''
1494 '''
1495 check whether there are more local heads than remote heads on
1495 check whether there are more local heads than remote heads on
1496 a specific branch.
1496 a specific branch.
1497
1497
1498 lheads: local branch heads
1498 lheads: local branch heads
1499 rheads: remote branch heads
1499 rheads: remote branch heads
1500 updatelh: outgoing local branch heads
1500 updatelh: outgoing local branch heads
1501 '''
1501 '''
1502
1502
1503 warn = 0
1503 warn = 0
1504
1504
1505 if not revs and len(lheads) > len(rheads):
1505 if not revs and len(lheads) > len(rheads):
1506 warn = 1
1506 warn = 1
1507 else:
1507 else:
1508 updatelheads = [self.changelog.heads(x, lheads)
1508 updatelheads = [self.changelog.heads(x, lheads)
1509 for x in updatelh]
1509 for x in updatelh]
1510 newheads = set(sum(updatelheads, [])) & set(lheads)
1510 newheads = set(sum(updatelheads, [])) & set(lheads)
1511
1511
1512 if not newheads:
1512 if not newheads:
1513 return True
1513 return True
1514
1514
1515 for r in rheads:
1515 for r in rheads:
1516 if r in self.changelog.nodemap:
1516 if r in self.changelog.nodemap:
1517 desc = self.changelog.heads(r, heads)
1517 desc = self.changelog.heads(r, heads)
1518 l = [h for h in heads if h in desc]
1518 l = [h for h in heads if h in desc]
1519 if not l:
1519 if not l:
1520 newheads.add(r)
1520 newheads.add(r)
1521 else:
1521 else:
1522 newheads.add(r)
1522 newheads.add(r)
1523 if len(newheads) > len(rheads):
1523 if len(newheads) > len(rheads):
1524 warn = 1
1524 warn = 1
1525
1525
1526 if warn:
1526 if warn:
1527 if not rheads: # new branch requires --force
1527 if not rheads: # new branch requires --force
1528 self.ui.warn(_("abort: push creates new"
1528 self.ui.warn(_("abort: push creates new"
1529 " remote branch '%s'!\n") %
1529 " remote branch '%s'!\n") %
1530 self[updatelh[0]].branch())
1530 self[updatelh[0]].branch())
1531 else:
1531 else:
1532 self.ui.warn(_("abort: push creates new remote heads!\n"))
1532 self.ui.warn(_("abort: push creates new remote heads!\n"))
1533
1533
1534 self.ui.status(_("(did you forget to merge?"
1534 self.ui.status(_("(did you forget to merge?"
1535 " use push -f to force)\n"))
1535 " use push -f to force)\n"))
1536 return False
1536 return False
1537 return True
1537 return True
1538
1538
1539 if not bases:
1539 if not bases:
1540 self.ui.status(_("no changes found\n"))
1540 self.ui.status(_("no changes found\n"))
1541 return None, 1
1541 return None, 1
1542 elif not force:
1542 elif not force:
1543 # Check for each named branch if we're creating new remote heads.
1543 # Check for each named branch if we're creating new remote heads.
1544 # To be a remote head after push, node must be either:
1544 # To be a remote head after push, node must be either:
1545 # - unknown locally
1545 # - unknown locally
1546 # - a local outgoing head descended from update
1546 # - a local outgoing head descended from update
1547 # - a remote head that's known locally and not
1547 # - a remote head that's known locally and not
1548 # ancestral to an outgoing head
1548 # ancestral to an outgoing head
1549 #
1549 #
1550 # New named branches cannot be created without --force.
1550 # New named branches cannot be created without --force.
1551
1551
1552 if remote_heads != [nullid]:
1552 if remote_heads != [nullid]:
1553 if remote.capable('branchmap'):
1553 if remote.capable('branchmap'):
1554 localhds = {}
1554 localhds = {}
1555 if not revs:
1555 if not revs:
1556 localhds = self.branchmap()
1556 localhds = self.branchmap()
1557 else:
1557 else:
1558 for n in heads:
1558 for n in heads:
1559 branch = self[n].branch()
1559 branch = self[n].branch()
1560 if branch in localhds:
1560 if branch in localhds:
1561 localhds[branch].append(n)
1561 localhds[branch].append(n)
1562 else:
1562 else:
1563 localhds[branch] = [n]
1563 localhds[branch] = [n]
1564
1564
1565 remotehds = remote.branchmap()
1565 remotehds = remote.branchmap()
1566
1566
1567 for lh in localhds:
1567 for lh in localhds:
1568 if lh in remotehds:
1568 if lh in remotehds:
1569 rheads = remotehds[lh]
1569 rheads = remotehds[lh]
1570 else:
1570 else:
1571 rheads = []
1571 rheads = []
1572 lheads = localhds[lh]
1572 lheads = localhds[lh]
1573 updatelh = [upd for upd in update
1573 updatelh = [upd for upd in update
1574 if self[upd].branch() == lh]
1574 if self[upd].branch() == lh]
1575 if not updatelh:
1575 if not updatelh:
1576 continue
1576 continue
1577 if not checkbranch(lheads, rheads, updatelh):
1577 if not checkbranch(lheads, rheads, updatelh):
1578 return None, 0
1578 return None, 0
1579 else:
1579 else:
1580 if not checkbranch(heads, remote_heads, update):
1580 if not checkbranch(heads, remote_heads, update):
1581 return None, 0
1581 return None, 0
1582
1582
1583 if inc:
1583 if inc:
1584 self.ui.warn(_("note: unsynced remote changes!\n"))
1584 self.ui.warn(_("note: unsynced remote changes!\n"))
1585
1585
1586
1586
1587 if revs is None:
1587 if revs is None:
1588 # use the fast path, no race possible on push
1588 # use the fast path, no race possible on push
1589 cg = self._changegroup(common.keys(), 'push')
1589 cg = self._changegroup(common.keys(), 'push')
1590 else:
1590 else:
1591 cg = self.changegroupsubset(update, revs, 'push')
1591 cg = self.changegroupsubset(update, revs, 'push')
1592 return cg, remote_heads
1592 return cg, remote_heads
1593
1593
1594 def push_addchangegroup(self, remote, force, revs):
1594 def push_addchangegroup(self, remote, force, revs):
1595 lock = remote.lock()
1595 lock = remote.lock()
1596 try:
1596 try:
1597 ret = self.prepush(remote, force, revs)
1597 ret = self.prepush(remote, force, revs)
1598 if ret[0] is not None:
1598 if ret[0] is not None:
1599 cg, remote_heads = ret
1599 cg, remote_heads = ret
1600 return remote.addchangegroup(cg, 'push', self.url())
1600 return remote.addchangegroup(cg, 'push', self.url())
1601 return ret[1]
1601 return ret[1]
1602 finally:
1602 finally:
1603 lock.release()
1603 lock.release()
1604
1604
1605 def push_unbundle(self, remote, force, revs):
1605 def push_unbundle(self, remote, force, revs):
1606 # local repo finds heads on server, finds out what revs it
1606 # local repo finds heads on server, finds out what revs it
1607 # must push. once revs transferred, if server finds it has
1607 # must push. once revs transferred, if server finds it has
1608 # different heads (someone else won commit/push race), server
1608 # different heads (someone else won commit/push race), server
1609 # aborts.
1609 # aborts.
1610
1610
1611 ret = self.prepush(remote, force, revs)
1611 ret = self.prepush(remote, force, revs)
1612 if ret[0] is not None:
1612 if ret[0] is not None:
1613 cg, remote_heads = ret
1613 cg, remote_heads = ret
1614 if force: remote_heads = ['force']
1614 if force: remote_heads = ['force']
1615 return remote.unbundle(cg, remote_heads, 'push')
1615 return remote.unbundle(cg, remote_heads, 'push')
1616 return ret[1]
1616 return ret[1]
1617
1617
1618 def changegroupinfo(self, nodes, source):
1618 def changegroupinfo(self, nodes, source):
1619 if self.ui.verbose or source == 'bundle':
1619 if self.ui.verbose or source == 'bundle':
1620 self.ui.status(_("%d changesets found\n") % len(nodes))
1620 self.ui.status(_("%d changesets found\n") % len(nodes))
1621 if self.ui.debugflag:
1621 if self.ui.debugflag:
1622 self.ui.debug(_("list of changesets:\n"))
1622 self.ui.debug(_("list of changesets:\n"))
1623 for node in nodes:
1623 for node in nodes:
1624 self.ui.debug("%s\n" % hex(node))
1624 self.ui.debug("%s\n" % hex(node))
1625
1625
    def changegroupsubset(self, bases, heads, source, extranodes=None):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        The caller can specify some nodes that must be included in the
        changegroup using the extranodes argument. It should be a dict
        where the keys are the filenames (or 1 for the manifest), and the
        values are lists of (node, linknode) tuples, where node is a wanted
        node and linknode is the changelog node that should be transmitted as
        the linkrev.

        Returns a util.chunkbuffer wrapping the lazily generated group.
        """

        if extranodes is None:
            # can we go through the fast path ?
            # NOTE(review): this sorts the caller's 'heads' list in place.
            heads.sort()
            allheads = self.heads()
            allheads.sort()
            if heads == allheads:
                # Requested heads == all local heads: the simpler
                # common-nodes-based generator suffices.
                common = []
                # parents of bases are known from both sides
                for n in bases:
                    for p in self.changelog.parents(n):
                        if p != nullid:
                            common.append(p)
                return self._changegroup(common, source)

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst, source)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = set()
        # We assume that all parents of bases are known heads.
        for n in bases:
            knownheads.update(cl.parents(n))
        knownheads.discard(nullid)
        knownheads = list(knownheads)
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into a set.
            has_cl_set = set(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = set()

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def revkey(revlog):
            # Key to sort a node by its revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def revlog_sort_key(x):
                return revlog.rev(x)
            return revlog_sort_key

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = list(hasset)
            haslst.sort(key=revkey(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset.add(n)
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = set()
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
                if linknode in has_cl_set:
                    has_mnfst_set.add(n)
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            # Mutable cell so the closure can track the next expected rev
            # across calls (no 'nonlocal' in Python 2).
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.iteritems():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                    else:
                        # Otherwise we need a full manifest.
                        m = mnfst.read(mnfstnode)
                        # For every file in we care about.
                        for f in changedfiles:
                            fnode = m.get(f, None)
                            # If it's in the manifest
                            if fnode is not None:
                                # See comments above.
                                clnode = msng_mnfst_set[mnfstnode]
                                ndset = msng_filenode_set.setdefault(f, {})
                                ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = set()
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
                if clnode in has_cl_set:
                    hasset.add(n)
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(key=revkey(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    if isinstance(fname, int):
                        continue
                    msng_filenode_set.setdefault(fname, {})
                    changedfiles[fname] = 1
            # Go through all our files in order sorted by name.
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    add_extra_nodes(fname, msng_filenode_set[fname])
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(key=revkey(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1933
1933
1934 def changegroup(self, basenodes, source):
1934 def changegroup(self, basenodes, source):
1935 # to avoid a race we use changegroupsubset() (issue1320)
1935 # to avoid a race we use changegroupsubset() (issue1320)
1936 return self.changegroupsubset(basenodes, self.heads(), source)
1936 return self.changegroupsubset(basenodes, self.heads(), source)
1937
1937
    def _changegroup(self, common, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        common is the set of common nodes between remote and self

        Returns a util.chunkbuffer wrapping the lazily generated group.
        """

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # Every changelog node not reachable from 'common' is outgoing.
        nodes = cl.findmissing(common)
        # Revision numbers of the outgoing changesets, for fast membership
        # tests in gennodelst below.
        revset = set([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        # A changenode's group-lookup function is identity: each changeset
        # 'owns' itself.
        def identity(x):
            return x

        # Yield the nodes of 'log' (a manifest or file revlog) whose linked
        # changeset is part of the outgoing set.
        def gennodelst(log):
            for r in log:
                if log.linkrev(r) in revset:
                    yield log.node(r)

        # Closure factory: returns a callback that accumulates, into
        # 'changedfileset', the files touched by each outgoing changeset.
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                # c[3] is the list of files changed by this changeset.
                changedfileset.update(c[3])
            return collect_changed_files

        # Closure factory: returns a function mapping a node of 'revlog'
        # to the changelog node it was introduced by (its linkrev).
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(revlog.rev(n)))
            return lookuprevlink

        # Emit the changegroup chunks: changelog, then manifest, then one
        # sub-group per changed file, terminated by a close chunk.
        def gengroup():
            # construct a list of all changed files
            changedfiles = set()

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                # Materialize so we can skip empty file groups entirely.
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
2005
2005
def addchangegroup(self, source, srctype, url, emptyok=False):
    """add changegroup to repo.

    return values:
    - nothing changed or no source: 0
    - more heads than before: 1+added heads (2..n)
    - less heads than before: -1-removed heads (-2..-n)
    - number of heads stays the same: 1
    """
    def csmap(x):
        # debug-log each arriving changeset; the link revision of a new
        # changeset is simply the next changelog index
        self.ui.debug(_("add changeset %s\n") % short(x))
        return len(cl)

    def revmap(x):
        # translate a changelog node into its revision number
        return cl.rev(x)

    if not source:
        return 0

    self.hook('prechangegroup', throw=True, source=srctype, url=url)

    changesets = files = revisions = 0

    # write changelog data to temp files so concurrent readers will not
    # see an inconsistent view
    cl = self.changelog
    cl.delayupdate()
    oldheads = len(cl.heads())

    tr = self.transaction()
    try:
        trp = weakref.proxy(tr)

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        clstart = len(cl)
        if (cl.addgroup(changegroup.chunkiter(source), csmap, trp) is None
            and not emptyok):
            raise util.Abort(_("received changelog group is empty"))
        clend = len(cl)
        changesets = clend - clstart

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifest.addgroup(changegroup.chunkiter(source), revmap, trp)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while True:
            fname = changegroup.getchunk(source)
            if not fname:
                break
            self.ui.debug(_("adding %s revisions\n") % fname)
            fl = self.file(fname)
            before = len(fl)
            if fl.addgroup(changegroup.chunkiter(source), revmap, trp) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += len(fl) - before
            files += 1

        newheads = len(cl.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            # defer writepending until a hook actually asks for it
            p = lambda: cl.writepending() and self.root or ""
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(cl.node(clstart)), source=srctype,
                      url=url, pending=p)

        # make changelog see real files again
        cl.finalize(trp)

        tr.close()
    finally:
        del tr

    if changesets > 0:
        # forcefully update the on-disk branch cache
        self.ui.debug(_("updating the branch cache\n"))
        self.branchtags()
        self.hook("changegroup", node=hex(cl.node(clstart)),
                  source=srctype, url=url)

        for i in xrange(clstart, clend):
            self.hook("incoming", node=hex(cl.node(i)),
                      source=srctype, url=url)

    # never return 0 here:
    if newheads < oldheads:
        return newheads - oldheads - 1
    else:
        return newheads - oldheads + 1
2109
2109
2110
2110
def stream_in(self, remote):
    """Clone by copying raw store files streamed from *remote*.

    Wire format (as consumed below): a status line (integer; 0 = ok),
    then a "<file count> <byte count>" line, then for each file a
    "<name>\\0<size>" header followed by exactly <size> bytes of data.
    """
    fp = remote.stream_out()
    line = fp.readline()
    try:
        resp = int(line)
    except ValueError:
        raise error.ResponseError(
            _('Unexpected response from remote server:'), line)
    if resp == 1:
        raise util.Abort(_('operation forbidden by server'))
    elif resp == 2:
        raise util.Abort(_('locking the remote repository failed'))
    elif resp != 0:
        raise util.Abort(_('the server sent an unknown error code'))
    self.ui.status(_('streaming all changes\n'))

    line = fp.readline()
    try:
        total_files, total_bytes = map(int, line.split(' ', 1))
    except (ValueError, TypeError):
        raise error.ResponseError(
            _('Unexpected response from remote server:'), line)
    self.ui.status(_('%d files to transfer, %s of data\n') %
                   (total_files, util.bytecount(total_bytes)))

    start = time.time()
    for i in xrange(total_files):
        # XXX doesn't support '\n' or '\r' in filenames
        line = fp.readline()
        try:
            name, size = line.split('\0', 1)
            size = int(size)
        except (ValueError, TypeError):
            raise error.ResponseError(
                _('Unexpected response from remote server:'), line)
        self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
        # for backwards compat, name was partially encoded
        ofp = self.sopener(store.decodedir(name), 'w')
        for chunk in util.filechunkiter(fp, limit=size):
            ofp.write(chunk)
        ofp.close()

    elapsed = time.time() - start
    if elapsed <= 0:
        elapsed = 0.001
    self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                   (util.bytecount(total_bytes), elapsed,
                    util.bytecount(total_bytes / elapsed)))
    self.invalidate()
    return len(self.heads()) + 1
2158
2158
def clone(self, remote, heads=None, stream=False):
    '''clone remote repository.

    keyword arguments:
    heads: list of revs to clone (forces use of pull)
    stream: use streaming clone if possible'''

    # Fix the mutable-default-argument anti-pattern (heads=[]): use a
    # None sentinel and normalize to the empty list previously used, so
    # behavior for all existing callers is unchanged.
    if heads is None:
        heads = []

    # now, all clients that can request uncompressed clones can
    # read repo formats supported by all servers that can serve
    # them.

    # if revlog format changes, client will have to check version
    # and format flags on "stream" capability, and use
    # uncompressed only if compatible.

    if stream and not heads and remote.capable('stream'):
        return self.stream_in(remote)
    return self.pull(remote, heads)
2177
2177
2178 # used to avoid circular references so destructors work
2178 # used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that performs the queued (src, dest) renames.

    A plain closure is returned (rather than a bound method) to avoid
    circular references so destructors work.
    """
    pending = [tuple(pair) for pair in files]
    def a():
        for src, dest in pending:
            util.rename(src, dest)
    return a
2185
2185
def instance(ui, path, create):
    """Instantiate a localrepository for *path* (file:// scheme stripped)."""
    localpath = util.drop_scheme('file', path)
    return localrepository(ui, localpath, create)
2188
2188
def islocal(path):
    """This repository class is always local, whatever *path* is."""
    return True
@@ -1,115 +1,113 b''
1 #! /usr/bin/env python
1 #! /usr/bin/env python
2
2
3 import sys
3 import sys
4 from _lsprof import Profiler, profiler_entry
4 from _lsprof import Profiler, profiler_entry
5
5
6 __all__ = ['profile', 'Stats']
6 __all__ = ['profile', 'Stats']
7
7
def profile(f, *args, **kwds):
    """Profile a single call of f(*args, **kwds) and return its Stats."""
    prof = Profiler()
    prof.enable(subcalls=True, builtins=True)
    try:
        f(*args, **kwds)
    finally:
        # always stop the profiler, even if f raised
        prof.disable()
    return Stats(prof.getstats())
17
17
18
18
class Stats(object):
    """Wrapper around raw _lsprof profile entries: sorting and printing."""

    def __init__(self, data):
        # data: sequence of profiler_entry records
        self.data = data

    def sort(self, crit="inlinetime"):
        """Sort entries (and their sub-call lists) by *crit*, largest first."""
        if crit not in profiler_entry.__dict__:
            raise ValueError("Can't sort by %s" % crit)
        keyfunc = lambda entry: getattr(entry, crit)
        self.data.sort(key=keyfunc, reverse=True)
        for entry in self.data:
            if entry.calls:
                entry.calls.sort(key=keyfunc, reverse=True)

    def pprint(self, top=None, file=None, limit=None, climit=None):
        """Write a formatted table of entries (and sub-calls) to *file*.

        top limits how many entries are considered; limit caps the total
        number of printed rows; climit caps sub-call rows per entry.
        """
        if file is None:
            file = sys.stdout
        entries = self.data
        if top is not None:
            entries = entries[:top]
        cols = "% 12s %12s %11.4f %11.4f %s\n"
        hcols = "% 12s %12s %12s %12s %s\n"
        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
                            "Inline(ms)", "module:lineno(function)"))
        count = 0
        for entry in entries:
            file.write(cols % (entry.callcount, entry.reccallcount,
                               entry.totaltime, entry.inlinetime,
                               label(entry.code)))
            count += 1
            if limit is not None and count == limit:
                return
            ccount = 0
            if entry.calls:
                for sub in entry.calls:
                    file.write(cols % ("+%s" % sub.callcount,
                                       sub.reccallcount, sub.totaltime,
                                       sub.inlinetime,
                                       "+%s" % label(sub.code)))
                    count += 1
                    ccount += 1
                    if limit is not None and count == limit:
                        return
                    if climit is not None and ccount == climit:
                        break

    def freeze(self):
        """Replace all references to code objects with string
        descriptions; this makes it possible to pickle the instance."""

        # this code is probably rather ickier than it needs to be!
        for idx, entry in enumerate(self.data):
            if not isinstance(entry.code, str):
                self.data[idx] = type(entry)((label(entry.code),) + entry[1:])
            if entry.calls:
                for j, sub in enumerate(entry.calls):
                    if not isinstance(sub.code, str):
                        entry.calls[j] = type(sub)((label(sub.code),) + sub[1:])
81
79
# cache mapping a code object's filename to the module name printed for it
_fn2mod = {}

def label(code):
    """Return a 'module:lineno(function)' label for a code object.

    Strings pass through unchanged.  The filename -> module-name lookup
    scans sys.modules once and is cached in _fn2mod.
    """
    if isinstance(code, str):
        return code
    try:
        mname = _fn2mod[code.co_filename]
    except KeyError:
        # find the loaded module whose __file__ matches this code object
        for modname, mod in sys.modules.items():
            if mod is None:
                continue
            if not hasattr(mod, '__file__'):
                continue
            if not isinstance(mod.__file__, str):
                continue
            if mod.__file__.startswith(code.co_filename):
                mname = _fn2mod[code.co_filename] = modname
                break
        else:
            # no module claims the file: fall back to the bare filename
            mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename

    return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
104
102
105
103
if __name__ == '__main__':
    # run the given script under the profiler and print the results
    import os
    sys.argv = sys.argv[1:]
    if not sys.argv:
        sys.stderr.write("usage: lsprof.py <script> <arguments...>\n")
        sys.exit(2)
    sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
    stats = profile(execfile, sys.argv[0], globals(), locals())
    stats.sort()
    stats.pprint()
@@ -1,1434 +1,1430 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
12 import cStringIO, email.Parser, os, re, math
12 import cStringIO, email.Parser, os, re, math
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
# matches the two path operands of a git-style diff header line
gitre = re.compile('diff --git a/(.*) b/(.*)')
16
16
class PatchError(Exception):
    """Base class for errors raised while parsing or applying patches."""
    pass

class NoHunks(PatchError):
    """A patch that turned out to contain no hunks at all."""
    pass
22
22
23 # helper functions
23 # helper functions
24
24
def copyfile(src, dst, basedir):
    """Copy src to dst inside basedir, creating parent directories.

    Raises util.Abort if the destination already exists or its parent
    directory cannot be created.
    """
    abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    dstdir = os.path.dirname(absdst)
    if dstdir and not os.path.isdir(dstdir):
        try:
            os.makedirs(dstdir)
        # BUG FIX: os.makedirs raises OSError, not IOError, so the old
        # "except IOError" never caught a failure here; catch both so a
        # permission or race error is reported as an Abort.
        except (IOError, OSError):
            raise util.Abort(
                _("cannot create %s: unable to create destination directory")
                % dst)

    util.copyfile(abssrc, absdst)
41
41
42 # public functions
42 # public functions
43
43
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    startre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                         r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                         r'(---|\*\*\*)[ \t])', re.MULTILINE)

    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        mail = email.Parser.Parser().parse(fileobj)

        subject = mail['Subject']
        user = mail['From']
        gitsendmail = 'git-send-email' in mail.get('X-Mailer', '')
        # should try to parse mail['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            # strip a leading "[PATCH ...]" tag and unfold the subject
            if subject.startswith('[PATCH'):
                endidx = subject.find(']')
                if endidx >= 0:
                    subject = subject[endidx+1:].lstrip()
            subject = subject.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)

        diffs_seen = 0
        texttypes = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in mail.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in texttypes:
                continue
            payload = part.get_payload(decode=True)
            hit = startre.search(payload)
            if hit:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % hit.start(0))
                diffs_seen += 1
                buf = cStringIO.StringIO()
                # scan the text above the patch for hg export headers,
                # collecting the rest as commit message
                for line in payload[:hit.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        buf.seek(0)
                        buf.truncate()
                        subject = None
                    elif hgpatch:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and gitsendmail:
                        # git-send-email puts patch notes after "---"
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        buf.write(line)
                        buf.write('\n')
                message = buf.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                # plain-text part before any diff: extend the message
                message += '\n' + payload
    except:
        # best effort cleanup, then propagate the original error
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
145
145
146 GP_PATCH = 1 << 0 # we have to run patch
146 GP_PATCH = 1 << 0 # we have to run patch
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 GP_BINARY = 1 << 2 # there's a binary patch
148 GP_BINARY = 1 << 2 # there's a binary patch
149
149
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY. 'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """

    def __init__(self, path):
        self.path = path      # patched file path
        self.oldpath = None   # origin path for COPY/RENAME
        self.mode = None      # (islink, isexec) or None when unchanged
        self.op = 'MODIFY'
        self.lineno = 0       # line of the diff header in the patch
        self.binary = False   # True for GIT binary patches

    def setmode(self, mode):
        # decode a git mode field: symlink bit and owner-exec bit
        islink = mode & 0o20000
        isexec = mode & 0o100
        self.mode = (islink, isexec)
172
172
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    meta = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in lr:
        lineno += 1
        if line.startswith('diff --git'):
            hit = gitre.match(line)
            if hit:
                # a new file header: flush any pending metadata first
                if meta:
                    gitpatches.append(meta)
                dst = hit.group(2)
                meta = patchmeta(dst)
                meta.lineno = lineno
        elif meta:
            if line.startswith('--- '):
                # end of git headers for this file; hand off to patch
                if meta.op in ('COPY', 'RENAME'):
                    dopatch |= GP_FILTER
                gitpatches.append(meta)
                meta = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                meta.op = 'RENAME'
                meta.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                meta.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                meta.op = 'COPY'
                meta.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                meta.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                meta.op = 'DELETE'
                # is the deleted file a symlink?
                meta.setmode(int(line.rstrip()[-6:], 8))
            elif line.startswith('new file mode '):
                meta.op = 'ADD'
                meta.setmode(int(line.rstrip()[-6:], 8))
            elif line.startswith('new mode '):
                meta.setmode(int(line.rstrip()[-6:], 8))
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                meta.binary = True

    # flush metadata for a trailing file with no hunk
    if meta:
        gitpatches.append(meta)

    if not gitpatches:
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
230
230
class linereader(object):
    """Wrap a file object and allow lines to be pushed back for re-reading."""

    def __init__(self, fp, textmode=False):
        self.fp = fp
        self.buf = []             # pushed-back lines, served oldest first
        self.textmode = textmode  # when True, normalize CRLF endings to LF

    def push(self, line):
        # queue a line so the next readline() returns it again
        if line is not None:
            self.buf.append(line)

    def readline(self):
        # serve pushed-back lines before touching the underlying file
        if self.buf:
            return self.buf.pop(0)
        line = self.fp.readline()
        if self.textmode and line.endswith('\r\n'):
            line = line[:-2] + '\n'
        return line

    def __iter__(self):
        line = self.readline()
        while line:
            yield line
            line = self.readline()
258
258
# Hunk headers of the two diff flavors:
# unified:  @@ -start,len +start,len @@  (or @@ -start +start @@ if len is 1)
# context:  *** start,end **** / --- start,end ---
# Raw strings keep the regex escapes (\d, \*) from being interpreted as
# (invalid) string escapes.
unidesc = re.compile(r'@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
contextdesc = re.compile(r'(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
262
262
class patchfile(object):
    """A target file being patched.

    Holds the file's lines in memory, applies hunks to them (exactly,
    with offsets, or with fuzz), and writes the result -- or a .rej
    file for hunks that could not be applied.
    """

    def __init__(self, ui, fname, opener, missing=False, eol=None):
        self.fname = fname
        self.eol = eol          # target line ending; None means "leave alone"
        self.opener = opener
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = missing  # True when the target file could not be located
        if not missing:
            try:
                self.lines = self.readlines(fname)
                self.exists = True
            except IOError:
                # file not readable: treat as a to-be-created target
                pass
        else:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        self.hash = {}          # line content -> list of line numbers, see hashlines()
        self.dirty = 0          # nonzero once lines were modified
        self.offset = 0         # cumulative line-count drift from applied hunks
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0          # total hunks attempted, for the reject summary

    def readlines(self, fname):
        """Return the file's lines, normalizing CRLF when eol handling is on."""
        fp = self.opener(fname, 'r')
        try:
            return list(linereader(fp, self.eol is not None))
        finally:
            fp.close()

    def writelines(self, fname, lines):
        """Write lines to fname, converting trailing LF to self.eol if set."""
        fp = self.opener(fname, 'w')
        try:
            if self.eol and self.eol != '\n':
                for l in lines:
                    if l and l[-1] == '\n':
                        l = l[:-1] + self.eol
                    fp.write(l)
            else:
                fp.writelines(lines)
        finally:
            fp.close()

    def unlink(self, fname):
        os.unlink(fname)

    def printfile(self, warn):
        """Emit 'patching file X' once -- as a warning, or as a note in verbose mode."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        """Return candidate line numbers whose content equals l.

        Looks through the hash and finds candidate lines.  The result is
        a list of line numbers sorted based on distance from linenum.
        """
        try:
            cand = self.hash[l]
        except KeyError:
            # was a bare 'except:', which would also swallow e.g.
            # KeyboardInterrupt; only a missing key can occur here
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def hashlines(self):
        """Index current lines by content for fuzzy hunk placement."""
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1).  This always
        # creates rejects in the same form as the original patch.  A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, fname))

        def rejlines():
            base = os.path.basename(self.fname)
            yield "--- %s\n+++ %s\n" % (base, base)
            for x in self.rej:
                for l in x.hunk:
                    yield l
                    if l[-1] != '\n':
                        yield "\n\ No newline at end of file\n"

        self.writelines(fname, rejlines())

    def write(self, dest=None):
        """Write the (possibly modified) lines back, defaulting to the original name."""
        if not self.dirty:
            return
        if not dest:
            dest = self.fname
        self.writelines(dest, self.lines)

    def close(self):
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        """Apply hunk h to the in-memory lines.

        Returns 0 on exact application, the fuzz amount (>0) when fuzz
        was needed, and -1 when the hunk was rejected.
        """
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, githunk):
            # git hunks replace (or delete) the whole file content
            if h.rmfile():
                self.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                self.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            msg = _("Hunk #%d succeeded at %d %s"
                                    "(offset %d line).\n")
                        else:
                            msg = _("Hunk #%d succeeded at %d %s"
                                    "(offset %d lines).\n")
                        f(msg % (h.number, l+1, fuzzstr, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
469
465
class hunk(object):
    """One hunk of a plain-text patch, in unified or context format.

    The hunk is normalized into unified form: self.hunk holds the raw
    lines (starting with the @@ description), self.a the old-side lines
    and self.b the new-side lines.
    """

    def __init__(self, desc, num, lr, context, create=False, remove=False):
        # desc: the header line; num: 1-based hunk number for messages;
        # lr: a linereader positioned after the header; context: True for
        # context-format patches; create/remove: whether this hunk
        # creates or deletes the target file.
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)
        self.create = create
        self.remove = remove and not create

    def read_unified_hunk(self, lr):
        """Parse a '@@ -a,b +c,d @@' header and read the hunk body."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        # lena/lenb default to 1 when the ',len' part is omitted
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        """Parse a context-format hunk and rewrite it into unified form."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # old side: '- ' removals and '! ' changes become '-' lines,
        # '  ' context becomes ' ' lines
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # start of the new-side section; put it back for below
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        # hunki walks self.hunk while merging the new-side lines in place
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # align u with the corresponding position in self.hunk,
            # skipping '-' lines and inserting added lines as needed
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        """Invert the hunk in place so it applies in the opposite direction."""
        self.create, self.remove = self.remove, self.create
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): 'n' is computed above but never used -- this
            # assignment stores the ORIGINAL line back, so self.hunk is
            # not actually flipped; looks like it should be 'n'. Confirm
            # against callers before changing.
            self.hunk[x] = o

    def fix_newline(self):
        # delegate '\ No newline at end of file' cleanup to the C helper
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        # true when both sides have exactly as many lines as the header claims
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        # a hunk starting at -0,0 on a 'create' patch makes a new file
        return self.starta == 0 and self.lena == 0 and self.create

    def rmfile(self):
        # a hunk ending at +0,0 on a 'remove' patch deletes the file
        return self.startb == 0 and self.lenb == 0 and self.remove

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        # old-side lines, optionally with fuzzed-away context
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        # raw hunk lines that survive into the new file (context + additions)
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        # new-side lines, optionally with fuzzed-away context
        return self.fuzzit(self.b, fuzz, toponly)
693
689
class githunk(object):
    """Base class for hunks of a git extended diff.

    Wraps the gitpatch record produced by the patch scanner and exposes
    the same createfile/rmfile/complete/new interface as a text hunk.
    """

    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None   # payload; filled in by subclasses
        self.hunk = []     # raw patch lines, kept for reject output

    def complete(self):
        # usable once a subclass has extracted the payload
        return self.text is not None

    def createfile(self):
        # ADD, RENAME and COPY operations all materialize a new file
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def new(self):
        # whole-file replacement: the payload is the entire new content
        return [self.text]
712
708
class binhunk(githunk):
    'A binary patch file. Only understands literals so far.'

    def __init__(self, gitpatch):
        super(binhunk, self).__init__(gitpatch)
        self.hunk = ['GIT binary patch\n']

    def extract(self, lr):
        """Decode the base85 'literal' body read from linereader lr.

        Stores the decompressed payload in self.text and appends all
        consumed lines to self.hunk. Raises PatchError when no literal
        section is found or the decoded size disagrees with the header.
        """
        line = lr.readline()
        self.hunk.append(line)
        # skip ahead to the 'literal <size>' header
        while line and not line.startswith('literal '):
            line = lr.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = lr.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # the first character encodes the decoded length of the line:
            # 'A'-'Z' => 1..26, 'a'-'z' => 27..52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = lr.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # fix: the original wrote '% len(text), size)', applying the
            # format to len(text) only and passing size as a second
            # argument to PatchError -- a TypeError at raise time instead
            # of the intended message
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
745
741
class symlinkhunk(githunk):
    """Git hunk carrying a symlink change.

    The hunk lines are supplied by the caller; there are no a/b line
    counts to reconcile and no trailing-newline fixing to do.
    """

    def __init__(self, gitpatch, hunk):
        super(symlinkhunk, self).__init__(gitpatch)
        # keep the caller-supplied hunk lines instead of an empty list
        self.hunk = hunk

    def complete(self):
        # nothing to validate for a symlink hunk
        return True

    def fix_newline(self):
        # symlink targets have no newline semantics to repair
        pass
757
753
def parsefilename(str):
    """Extract the filename from a '--- name \\t|space stuff' patch line.

    Strips the 4-character prefix and trailing newline, then cuts at the
    first tab if present, otherwise at the first space, otherwise keeps
    the whole remainder.
    """
    s = str[4:].rstrip('\r\n')
    # a tab separator wins over a space, mirroring patch(1) behavior
    for sep in ('\t', ' '):
        idx = s.find(sep)
        if idx >= 0:
            return s[:idx]
    return s
767
763
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose which on-disk file a hunk should be applied to.

    afile_orig/bfile_orig are the '---'/'+++' names from the patch,
    strip is the number of leading path components to drop, and reverse
    swaps the hunk's create/remove tests. Returns (fname, missing)
    where missing is True when neither side exists and the hunk does
    not create the file.
    """
    def pathstrip(path, count=1):
        # drop 'count' leading directories; returns (stripped-prefix, rest)
        pathlen = len(path)
        i = 0
        if count == 0:
            return '', path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[:i].lstrip(), path[i:].rstrip()

    # /dev/null on either side marks a pure create or pure delete
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    abase, afile = pathstrip(afile_orig, strip)
    # lexists: a dangling symlink still counts as present
    gooda = not nulla and util.lexists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    missing = not goodb and not gooda and not createfunc()
    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        # fall back on the patch's own names when nothing exists on disk
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    return fname, missing
819
815
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.

    Returns the (dopatch, gitpatches) pair from readgitpatch, leaving
    the input positioned where it started.
    """
    pos = 0
    try:
        # remember where we are so the caller can re-read the patch body
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # unseekable input (e.g. a pipe): buffer it all in memory
        fp = cStringIO.StringIO(lr.fp.read())
    gitlr = linereader(fp, lr.textmode)
    gitlr.push(firstline)
    (dopatch, gitpatches) = readgitpatch(gitlr)
    # rewind so the main parsing loop sees the patch from the start
    fp.seek(pos)
    return dopatch, gitpatches
845
841
846 def iterhunks(ui, fp, sourcefile=None, textmode=False):
842 def iterhunks(ui, fp, sourcefile=None, textmode=False):
847 """Read a patch and yield the following events:
843 """Read a patch and yield the following events:
848 - ("file", afile, bfile, firsthunk): select a new target file.
844 - ("file", afile, bfile, firsthunk): select a new target file.
849 - ("hunk", hunk): a new hunk is ready to be applied, follows a
845 - ("hunk", hunk): a new hunk is ready to be applied, follows a
850 "file" event.
846 "file" event.
851 - ("git", gitchanges): current diff is in git format, gitchanges
847 - ("git", gitchanges): current diff is in git format, gitchanges
852 maps filenames to gitpatch records. Unique event.
848 maps filenames to gitpatch records. Unique event.
853
849
854 If textmode is True, input line-endings are normalized to LF.
850 If textmode is True, input line-endings are normalized to LF.
855 """
851 """
856 changed = {}
852 changed = {}
857 current_hunk = None
853 current_hunk = None
858 afile = ""
854 afile = ""
859 bfile = ""
855 bfile = ""
860 state = None
856 state = None
861 hunknum = 0
857 hunknum = 0
862 emitfile = False
858 emitfile = False
863 git = False
859 git = False
864
860
865 # our states
861 # our states
866 BFILE = 1
862 BFILE = 1
867 context = None
863 context = None
868 lr = linereader(fp, textmode)
864 lr = linereader(fp, textmode)
869 dopatch = True
865 dopatch = True
870 # gitworkdone is True if a git operation (copy, rename, ...) was
866 # gitworkdone is True if a git operation (copy, rename, ...) was
871 # performed already for the current file. Useful when the file
867 # performed already for the current file. Useful when the file
872 # section may have no hunk.
868 # section may have no hunk.
873 gitworkdone = False
869 gitworkdone = False
874
870
875 while True:
871 while True:
876 newfile = False
872 newfile = False
877 x = lr.readline()
873 x = lr.readline()
878 if not x:
874 if not x:
879 break
875 break
880 if current_hunk:
876 if current_hunk:
881 if x.startswith('\ '):
877 if x.startswith('\ '):
882 current_hunk.fix_newline()
878 current_hunk.fix_newline()
883 yield 'hunk', current_hunk
879 yield 'hunk', current_hunk
884 current_hunk = None
880 current_hunk = None
885 gitworkdone = False
881 gitworkdone = False
886 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
882 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
887 ((context is not False) and x.startswith('***************')))):
883 ((context is not False) and x.startswith('***************')))):
888 try:
884 try:
889 if context is None and x.startswith('***************'):
885 if context is None and x.startswith('***************'):
890 context = True
886 context = True
891 gpatch = changed.get(bfile)
887 gpatch = changed.get(bfile)
892 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
888 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
893 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
889 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
894 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
890 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
895 if remove:
891 if remove:
896 gpatch = changed.get(afile[2:])
892 gpatch = changed.get(afile[2:])
897 if gpatch and gpatch.mode[0]:
893 if gpatch and gpatch.mode[0]:
898 current_hunk = symlinkhunk(gpatch, current_hunk)
894 current_hunk = symlinkhunk(gpatch, current_hunk)
899 except PatchError, err:
895 except PatchError, err:
900 ui.debug(err)
896 ui.debug(err)
901 current_hunk = None
897 current_hunk = None
902 continue
898 continue
903 hunknum += 1
899 hunknum += 1
904 if emitfile:
900 if emitfile:
905 emitfile = False
901 emitfile = False
906 yield 'file', (afile, bfile, current_hunk)
902 yield 'file', (afile, bfile, current_hunk)
907 elif state == BFILE and x.startswith('GIT binary patch'):
903 elif state == BFILE and x.startswith('GIT binary patch'):
908 current_hunk = binhunk(changed[bfile])
904 current_hunk = binhunk(changed[bfile])
909 hunknum += 1
905 hunknum += 1
910 if emitfile:
906 if emitfile:
911 emitfile = False
907 emitfile = False
912 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
908 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
913 current_hunk.extract(lr)
909 current_hunk.extract(lr)
914 elif x.startswith('diff --git'):
910 elif x.startswith('diff --git'):
915 # check for git diff, scanning the whole patch file if needed
911 # check for git diff, scanning the whole patch file if needed
916 m = gitre.match(x)
912 m = gitre.match(x)
917 if m:
913 if m:
918 afile, bfile = m.group(1, 2)
914 afile, bfile = m.group(1, 2)
919 if not git:
915 if not git:
920 git = True
916 git = True
921 dopatch, gitpatches = scangitpatch(lr, x)
917 dopatch, gitpatches = scangitpatch(lr, x)
922 yield 'git', gitpatches
918 yield 'git', gitpatches
923 for gp in gitpatches:
919 for gp in gitpatches:
924 changed[gp.path] = gp
920 changed[gp.path] = gp
925 # else error?
921 # else error?
926 # copy/rename + modify should modify target, not source
922 # copy/rename + modify should modify target, not source
927 gp = changed.get(bfile)
923 gp = changed.get(bfile)
928 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
924 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
929 afile = bfile
925 afile = bfile
930 gitworkdone = True
926 gitworkdone = True
931 newfile = True
927 newfile = True
932 elif x.startswith('---'):
928 elif x.startswith('---'):
933 # check for a unified diff
929 # check for a unified diff
934 l2 = lr.readline()
930 l2 = lr.readline()
935 if not l2.startswith('+++'):
931 if not l2.startswith('+++'):
936 lr.push(l2)
932 lr.push(l2)
937 continue
933 continue
938 newfile = True
934 newfile = True
939 context = False
935 context = False
940 afile = parsefilename(x)
936 afile = parsefilename(x)
941 bfile = parsefilename(l2)
937 bfile = parsefilename(l2)
942 elif x.startswith('***'):
938 elif x.startswith('***'):
943 # check for a context diff
939 # check for a context diff
944 l2 = lr.readline()
940 l2 = lr.readline()
945 if not l2.startswith('---'):
941 if not l2.startswith('---'):
946 lr.push(l2)
942 lr.push(l2)
947 continue
943 continue
948 l3 = lr.readline()
944 l3 = lr.readline()
949 lr.push(l3)
945 lr.push(l3)
950 if not l3.startswith("***************"):
946 if not l3.startswith("***************"):
951 lr.push(l2)
947 lr.push(l2)
952 continue
948 continue
953 newfile = True
949 newfile = True
954 context = True
950 context = True
955 afile = parsefilename(x)
951 afile = parsefilename(x)
956 bfile = parsefilename(l2)
952 bfile = parsefilename(l2)
957
953
958 if newfile:
954 if newfile:
959 emitfile = True
955 emitfile = True
960 state = BFILE
956 state = BFILE
961 hunknum = 0
957 hunknum = 0
962 if current_hunk:
958 if current_hunk:
963 if current_hunk.complete():
959 if current_hunk.complete():
964 yield 'hunk', current_hunk
960 yield 'hunk', current_hunk
965 else:
961 else:
966 raise PatchError(_("malformed patch %s %s") % (afile,
962 raise PatchError(_("malformed patch %s %s") % (afile,
967 current_hunk.desc))
963 current_hunk.desc))
968
964
969 if hunknum == 0 and dopatch and not gitworkdone:
965 if hunknum == 0 and dopatch and not gitworkdone:
970 raise NoHunks
966 raise NoHunks
971
967
972 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
968 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
973 eol=None):
969 eol=None):
974 """
970 """
975 Reads a patch from fp and tries to apply it.
971 Reads a patch from fp and tries to apply it.
976
972
977 The dict 'changed' is filled in with all of the filenames changed
973 The dict 'changed' is filled in with all of the filenames changed
978 by the patch. Returns 0 for a clean patch, -1 if any rejects were
974 by the patch. Returns 0 for a clean patch, -1 if any rejects were
979 found and 1 if there was any fuzz.
975 found and 1 if there was any fuzz.
980
976
981 If 'eol' is None, the patch content and patched file are read in
977 If 'eol' is None, the patch content and patched file are read in
982 binary mode. Otherwise, line endings are ignored when patching then
978 binary mode. Otherwise, line endings are ignored when patching then
983 normalized to 'eol' (usually '\n' or \r\n').
979 normalized to 'eol' (usually '\n' or \r\n').
984 """
980 """
985 rejects = 0
981 rejects = 0
986 err = 0
982 err = 0
987 current_file = None
983 current_file = None
988 gitpatches = None
984 gitpatches = None
989 opener = util.opener(os.getcwd())
985 opener = util.opener(os.getcwd())
990 textmode = eol is not None
986 textmode = eol is not None
991
987
992 def closefile():
988 def closefile():
993 if not current_file:
989 if not current_file:
994 return 0
990 return 0
995 current_file.close()
991 current_file.close()
996 return len(current_file.rej)
992 return len(current_file.rej)
997
993
998 for state, values in iterhunks(ui, fp, sourcefile, textmode):
994 for state, values in iterhunks(ui, fp, sourcefile, textmode):
999 if state == 'hunk':
995 if state == 'hunk':
1000 if not current_file:
996 if not current_file:
1001 continue
997 continue
1002 current_hunk = values
998 current_hunk = values
1003 ret = current_file.apply(current_hunk, reverse)
999 ret = current_file.apply(current_hunk, reverse)
1004 if ret >= 0:
1000 if ret >= 0:
1005 changed.setdefault(current_file.fname, None)
1001 changed.setdefault(current_file.fname, None)
1006 if ret > 0:
1002 if ret > 0:
1007 err = 1
1003 err = 1
1008 elif state == 'file':
1004 elif state == 'file':
1009 rejects += closefile()
1005 rejects += closefile()
1010 afile, bfile, first_hunk = values
1006 afile, bfile, first_hunk = values
1011 try:
1007 try:
1012 if sourcefile:
1008 if sourcefile:
1013 current_file = patchfile(ui, sourcefile, opener, eol=eol)
1009 current_file = patchfile(ui, sourcefile, opener, eol=eol)
1014 else:
1010 else:
1015 current_file, missing = selectfile(afile, bfile, first_hunk,
1011 current_file, missing = selectfile(afile, bfile, first_hunk,
1016 strip, reverse)
1012 strip, reverse)
1017 current_file = patchfile(ui, current_file, opener, missing, eol)
1013 current_file = patchfile(ui, current_file, opener, missing, eol)
1018 except PatchError, err:
1014 except PatchError, err:
1019 ui.warn(str(err) + '\n')
1015 ui.warn(str(err) + '\n')
1020 current_file, current_hunk = None, None
1016 current_file, current_hunk = None, None
1021 rejects += 1
1017 rejects += 1
1022 continue
1018 continue
1023 elif state == 'git':
1019 elif state == 'git':
1024 gitpatches = values
1020 gitpatches = values
1025 cwd = os.getcwd()
1021 cwd = os.getcwd()
1026 for gp in gitpatches:
1022 for gp in gitpatches:
1027 if gp.op in ('COPY', 'RENAME'):
1023 if gp.op in ('COPY', 'RENAME'):
1028 copyfile(gp.oldpath, gp.path, cwd)
1024 copyfile(gp.oldpath, gp.path, cwd)
1029 changed[gp.path] = gp
1025 changed[gp.path] = gp
1030 else:
1026 else:
1031 raise util.Abort(_('unsupported parser state: %s') % state)
1027 raise util.Abort(_('unsupported parser state: %s') % state)
1032
1028
1033 rejects += closefile()
1029 rejects += closefile()
1034
1030
1035 if rejects:
1031 if rejects:
1036 return -1
1032 return -1
1037 return err
1033 return err
1038
1034
def diffopts(ui, opts=None, untrusted=False):
    """Build an mdiff.diffopts from command-line options and [diff] config.

    Each knob is taken from 'opts' when set there, otherwise from the ui
    configuration.  'opts' may be omitted or None (treated as empty);
    using a None sentinel instead of the old mutable {} default avoids
    the shared-mutable-default-argument pitfall.
    """
    if opts is None:
        opts = {}
    def get(key, name=None, getter=ui.configbool):
        # command-line option wins over the configuration value
        return (opts.get(key) or
                getter('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
        context=get('unified', getter=ui.config))
1052
1048
def updatedir(ui, repo, patches, similarity=0):
    '''Update dirstate after patch application according to metadata'''
    if not patches:
        return
    copies = []
    removes = set()
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    # first pass: collect the copy/rename/delete operations recorded in
    # the git metadata
    for gp in patches.values():
        if not gp:
            continue
        if gp.op == 'RENAME':
            copies.append((gp.oldpath, gp.path))
            removes.add(gp.oldpath)
        elif gp.op == 'COPY':
            copies.append((gp.oldpath, gp.path))
        elif gp.op == 'DELETE':
            removes.add(gp.path)
    for src, dst in copies:
        repo.copy(src, dst)
    if (not similarity) and removes:
        repo.remove(sorted(removes), True)
    # second pass: propagate mode bits, materializing empty added files
    for gp in patches.values():
        if gp and gp.mode:
            islink, isexec = gp.mode
            dst = repo.wjoin(gp.path)
            # patch won't create empty files
            if gp.op == 'ADD' and not os.path.exists(dst):
                flags = (isexec and 'x' or '') + (islink and 'l' or '')
                repo.wwrite(gp.path, '', flags)
            elif gp.op != 'DELETE':
                util.set_flags(dst, islink, isexec)
    cmdutil.addremove(repo, cfiles, similarity=similarity)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    return sorted(files)
1093
1089
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Patched filenames reported by the external tool are recorded as keys
    of 'files'.  Raises PatchError if the tool exits non-zero."""

    fuzz = False
    # BUG FIX: initialize before the loop.  A 'with fuzz' or 'FAILED'
    # line can appear before any 'patching file' line, which used to
    # raise NameError on printed_file/pf.
    pf = patchname
    printed_file = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, None)
        elif line.find('with fuzz') >= 0:
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
1129
1125
def internalpatch(patchobj, ui, strip, cwd, files=None, eolmode='strict'):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor.

    'files' collects the names of patched files; a fresh dict is created
    when it is None.  (BUG FIX: the old mutable {} default was filled in
    by applydiff and therefore shared between calls.)

    Raises util.Abort on an unknown 'eolmode' and PatchError when any
    hunk is rejected."""

    if files is None:
        files = {}
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    try:
        eol = {'strict': None, 'crlf': '\r\n', 'lf': '\n'}[eolmode.lower()]
    except KeyError:
        raise util.Abort(_('Unsupported line endings type: %s') % eolmode)

    try:
        # patchobj may be a path or an already-open file object
        fp = open(patchobj, 'rb')
    except TypeError:
        fp = patchobj
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip, eol=eol)
    finally:
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    return ret > 0
1156
1152
1157 def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
1153 def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
1158 """Apply <patchname> to the working directory.
1154 """Apply <patchname> to the working directory.
1159
1155
1160 'eolmode' specifies how end of lines should be handled. It can be:
1156 'eolmode' specifies how end of lines should be handled. It can be:
1161 - 'strict': inputs are read in binary mode, EOLs are preserved
1157 - 'strict': inputs are read in binary mode, EOLs are preserved
1162 - 'crlf': EOLs are ignored when patching and reset to CRLF
1158 - 'crlf': EOLs are ignored when patching and reset to CRLF
1163 - 'lf': EOLs are ignored when patching and reset to LF
1159 - 'lf': EOLs are ignored when patching and reset to LF
1164 - None: get it from user settings, default to 'strict'
1160 - None: get it from user settings, default to 'strict'
1165 'eolmode' is ignored when using an external patcher program.
1161 'eolmode' is ignored when using an external patcher program.
1166
1162
1167 Returns whether patch was applied with fuzz factor.
1163 Returns whether patch was applied with fuzz factor.
1168 """
1164 """
1169 patcher = ui.config('ui', 'patch')
1165 patcher = ui.config('ui', 'patch')
1170 args = []
1166 args = []
1171 try:
1167 try:
1172 if patcher:
1168 if patcher:
1173 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1169 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1174 files)
1170 files)
1175 else:
1171 else:
1176 try:
1172 try:
1177 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1173 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1178 except NoHunks:
1174 except NoHunks:
1179 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1175 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1180 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1176 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1181 patcher)
1177 patcher)
1182 if util.needbinarypatch():
1178 if util.needbinarypatch():
1183 args.append('--binary')
1179 args.append('--binary')
1184 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1180 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1185 files)
1181 files)
1186 except PatchError, err:
1182 except PatchError, err:
1187 s = str(err)
1183 s = str(err)
1188 if s:
1184 if s:
1189 raise util.Abort(s)
1185 raise util.Abort(s)
1190 else:
1186 else:
1191 raise util.Abort(_('patch failed to apply'))
1187 raise util.Abort(_('patch failed to apply'))
1192
1188
def b85diff(to, tn):
    '''print base85-encoded binary diff'''

    def gitindex(text):
        # git blob id: sha1 over "blob <len>\0" followed by the content
        if not text:
            return '0' * 40
        s = util.sha1('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git's base85 length byte: 'A'-'Z' encode 1-26, 'a'-'z' 27-52
        n = len(line)
        if n <= 26:
            prefix = chr(ord('A') + n - 1)
        else:
            prefix = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (prefix, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # slice text into csize-byte pieces
        for i in range(0, len(text), csize):
            yield text[i:i + csize]

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        # identical contents: nothing to emit
        return ""

    # TODO: deltas
    ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
           (tohash, tnhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        ret.append(fmtline(piece))
    ret.append('\n')
    return ''.join(ret)
1230
1226
1231 def _addmodehdr(header, omode, nmode):
1227 def _addmodehdr(header, omode, nmode):
1232 if omode != nmode:
1228 if omode != nmode:
1233 header.append('old mode %s\n' % omode)
1229 header.append('old mode %s\n' % omode)
1234 header.append('new mode %s\n' % nmode)
1230 header.append('new mode %s\n' % nmode)
1235
1231
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # cache each filelog the first time it is looked up
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return

    date1 = util.datestr(ctx1.date())
    man1 = ctx1.manifest()

    if repo.ui.quiet:
        r = None
    else:
        if repo.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # make the copy map bidirectional: targets->sources and back
        copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
        copy = copy.copy()
        for k, v in copy.items():
            copy[v] = k

    gone = set()
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    for f in sorted(modified + added + removed):
        to = None         # old contents
        tn = None         # new contents
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy:
                    a = copy[f]
                    omode = gitmode[man1.flags(a)]
                    _addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone.add(a)
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                # have we already reported a copy above?
                if f in copy and copy[f] in added and copy[copy[f]] == f:
                    dodiff = False
                else:
                    header.append('deleted file mode %s\n' %
                                  gitmode[man1.flags(f)])
            else:
                omode = gitmode[man1.flags(f)]
                nmode = gitmode[ctx2.flags(f)]
                _addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git-style diff lines carry no revision info
            r = None
            header.insert(0, mdiff.diffline(r, a, b, opts))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            if header and (text or len(header) > 1):
                yield ''.join(header)
            if text:
                yield text
1340
1336
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # emit one changeset as a patch; when no fp was supplied, open a
        # per-revision output file expanded from 'template'
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth,
                                   mode='ab')
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        # standard hg patch header
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            # second parent of a merge changeset
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in diff(repo, prev, node, opts=opts):
            fp.write(chunk)

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1381
1377
def diffstatdata(lines):
    '''parse diff text and generate (filename, adds, removes) tuples,
    one per file touched by the diff'''
    fname = None
    added = removed = 0
    for ln in lines:
        if ln.startswith('diff'):
            # a new file header: flush counts for the previous file
            if fname:
                yield (fname, added, removed)
            added = removed = 0
            if ln.startswith('diff --git'):
                fname = gitre.search(ln).group(1)
            else:
                # plain header: "diff -r ... -r ... filename"
                fname = ln.split(None, 5)[-1]
        elif ln.startswith('+') and not ln.startswith('+++'):
            added += 1
        elif ln.startswith('-') and not ln.startswith('---'):
            removed += 1
    if fname:
        yield (fname, added, removed)
1401
1397
def diffstat(lines, width=80):
    '''render a classic diffstat summary of the given diff text,
    fitting the +/- histogram into at most 'width' columns'''
    entries = list(diffstatdata(lines))

    name_width = 0
    biggest = 0
    ins_total, del_total = 0, 0
    for fname, plus, minus in entries:
        ins_total += plus
        del_total += minus
        name_width = max(name_width, len(fname))
        biggest = max(biggest, plus + minus)

    count_width = len(str(biggest))
    # leave at least 10 columns for the histogram bar
    graph_width = max(width - count_width - name_width, 10)

    # scale factor so the widest bar fits in graph_width columns
    scale = max(int(math.ceil(float(biggest) / graph_width)), 1)

    rows = []
    for fname, plus, minus in entries:
        # If diffstat runs out of room it doesn't print anything, which
        # isn't very useful, so always print at least one + or - if there
        # were at least some changes
        bar_plus = '+' * max(plus // scale, int(bool(plus)))
        bar_minus = '-' * max(minus // scale, int(bool(minus)))
        rows.append(' %-*s | %*.d %s%s\n' % (name_width, fname, count_width,
                                             plus + minus, bar_plus, bar_minus))

    if entries:
        rows.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
                    % (len(entries), ins_total, del_total))

    return ''.join(rows)
General Comments 0
You need to be logged in to leave comments. Login now