merge with crew-stable
Alexis S. L. Carvalho
r6001:30d2feca merge default
@@ -0,0 +1,44 @@
1 #!/bin/sh
2
3 echo '[extensions]' >> $HGRCPATH
4 echo 'hgext.mq =' >> $HGRCPATH
5
6 hg init repo
7 cd repo
8
9 echo foo > foo
10 hg ci -qAm 'add a file'
11
12 hg qinit
13
14 hg qnew foo
15 echo foo >> foo
16 hg qrefresh -m 'append foo'
17
18 hg qnew bar
19 echo bar >> foo
20 hg qrefresh -m 'append bar'
21
22 echo '% try to commit on top of a patch'
23 echo quux >> foo
24 hg ci -m 'append quux'
25
26 # cheat a bit...
27 mv .hg/patches .hg/patches2
28 hg ci -m 'append quux'
29 mv .hg/patches2 .hg/patches
30
31 echo '% qpop/qrefresh on the wrong revision'
32 hg qpop
33 hg qpop -n patches 2>&1 | sed -e 's/\(using patch queue:\).*/\1/'
34 hg qrefresh
35
36 hg up -C qtip
37 echo '% qpop'
38 hg qpop
39
40 echo '% qrefresh'
41 hg qrefresh
42
43 echo '% tip:'
44 hg tip --template '#rev# #desc#\n'
@@ -0,0 +1,14 @@
1 % try to commit on top of a patch
2 abort: cannot commit over an applied mq patch
3 % qpop/qrefresh on the wrong revision
4 abort: working directory revision is not qtip
5 using patch queue:
6 abort: popping would remove a revision not managed by this patch queue
7 abort: working directory revision is not qtip
8 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 % qpop
10 abort: popping would remove a revision not managed by this patch queue
11 % qrefresh
12 abort: cannot refresh a revision with children
13 % tip:
14 3 append quux
@@ -1,343 +1,343 @@
1 # common code for the convert extension
1 # common code for the convert extension
2 import base64, errno
2 import base64, errno
3 import os
3 import os
4 import cPickle as pickle
4 import cPickle as pickle
5 from mercurial import util
5 from mercurial import util
6 from mercurial.i18n import _
6 from mercurial.i18n import _
7
7
8 def encodeargs(args):
8 def encodeargs(args):
9 def encodearg(s):
9 def encodearg(s):
10 lines = base64.encodestring(s)
10 lines = base64.encodestring(s)
11 lines = [l.splitlines()[0] for l in lines]
11 lines = [l.splitlines()[0] for l in lines]
12 return ''.join(lines)
12 return ''.join(lines)
13
13
14 s = pickle.dumps(args)
14 s = pickle.dumps(args)
15 return encodearg(s)
15 return encodearg(s)
16
16
17 def decodeargs(s):
17 def decodeargs(s):
18 s = base64.decodestring(s)
18 s = base64.decodestring(s)
19 return pickle.loads(s)
19 return pickle.loads(s)
20
20
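encodeargs and decodeargs round-trip an argument list through pickle and base64, stripping the newlines that base64.encodestring inserts so the result is a single newline-free token. A hedged sketch of the same round trip in current standard-library Python (b64encode already emits no newlines, so the stripping step disappears); the function names here are illustrative, not part of the extension.

    # Round-trip sketch: pickle the arguments, base64 them into one token.
    import base64
    import pickle

    def encode_args(args):
        return base64.b64encode(pickle.dumps(args)).decode('ascii')

    def decode_args(s):
        return pickle.loads(base64.b64decode(s))

    payload = ['update', {'rev': 'tip'}]
    wire = encode_args(payload)          # single-line, shell-safe string
    assert decode_args(wire) == payload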
21 def checktool(exe, name=None):
21 def checktool(exe, name=None):
22 name = name or exe
22 name = name or exe
23 if not util.find_exe(exe):
23 if not util.find_exe(exe):
24 raise util.Abort('cannot find required "%s" tool' % name)
24 raise util.Abort('cannot find required "%s" tool' % name)
25
25
26 class NoRepo(Exception): pass
26 class NoRepo(Exception): pass
27
27
28 SKIPREV = 'SKIP'
28 SKIPREV = 'SKIP'
29
29
30 class commit(object):
30 class commit(object):
31 def __init__(self, author, date, desc, parents, branch=None, rev=None,
31 def __init__(self, author, date, desc, parents, branch=None, rev=None,
32 extra={}):
32 extra={}):
33 self.author = author
33 self.author = author or 'unknown'
34 self.date = date
34 self.date = date or '0 0'
35 self.desc = desc
35 self.desc = desc
36 self.parents = parents
36 self.parents = parents
37 self.branch = branch
37 self.branch = branch
38 self.rev = rev
38 self.rev = rev
39 self.extra = extra
39 self.extra = extra
40
40
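The right-hand column of this hunk carries the change merged here: commit.__init__ now falls back to 'unknown' and the epoch date '0 0' when a conversion source supplies no author or date (the equivalent per-backend fallback is dropped from the git source further down). A small hedged illustration of that default behaviour; the stub class exists only for this example.

    # Illustration of the new defaults (a stub, not the extension's class).
    class commit_stub(object):
        def __init__(self, author, date, desc, parents):
            self.author = author or 'unknown'   # missing author -> 'unknown'
            self.date = date or '0 0'           # missing date -> epoch, UTC
            self.desc = desc
            self.parents = parents

    c = commit_stub(author='', date=None, desc='imported patch', parents=[])
    assert (c.author, c.date) == ('unknown', '0 0')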
41 class converter_source(object):
41 class converter_source(object):
42 """Conversion source interface"""
42 """Conversion source interface"""
43
43
44 def __init__(self, ui, path=None, rev=None):
44 def __init__(self, ui, path=None, rev=None):
45 """Initialize conversion source (or raise NoRepo("message")
45 """Initialize conversion source (or raise NoRepo("message")
46 exception if path is not a valid repository)"""
46 exception if path is not a valid repository)"""
47 self.ui = ui
47 self.ui = ui
48 self.path = path
48 self.path = path
49 self.rev = rev
49 self.rev = rev
50
50
51 self.encoding = 'utf-8'
51 self.encoding = 'utf-8'
52
52
53 def before(self):
53 def before(self):
54 pass
54 pass
55
55
56 def after(self):
56 def after(self):
57 pass
57 pass
58
58
59 def setrevmap(self, revmap):
59 def setrevmap(self, revmap):
60 """set the map of already-converted revisions"""
60 """set the map of already-converted revisions"""
61 pass
61 pass
62
62
63 def getheads(self):
63 def getheads(self):
64 """Return a list of this repository's heads"""
64 """Return a list of this repository's heads"""
65 raise NotImplementedError()
65 raise NotImplementedError()
66
66
67 def getfile(self, name, rev):
67 def getfile(self, name, rev):
68 """Return file contents as a string"""
68 """Return file contents as a string"""
69 raise NotImplementedError()
69 raise NotImplementedError()
70
70
71 def getmode(self, name, rev):
71 def getmode(self, name, rev):
72 """Return file mode, eg. '', 'x', or 'l'"""
72 """Return file mode, eg. '', 'x', or 'l'"""
73 raise NotImplementedError()
73 raise NotImplementedError()
74
74
75 def getchanges(self, version):
75 def getchanges(self, version):
76 """Returns a tuple of (files, copies)
76 """Returns a tuple of (files, copies)
77 Files is a sorted list of (filename, id) tuples for all files changed
77 Files is a sorted list of (filename, id) tuples for all files changed
78 in version, where id is the source revision id of the file.
78 in version, where id is the source revision id of the file.
79
79
80 copies is a dictionary of dest: source
80 copies is a dictionary of dest: source
81 """
81 """
82 raise NotImplementedError()
82 raise NotImplementedError()
83
83
84 def getcommit(self, version):
84 def getcommit(self, version):
85 """Return the commit object for version"""
85 """Return the commit object for version"""
86 raise NotImplementedError()
86 raise NotImplementedError()
87
87
88 def gettags(self):
88 def gettags(self):
89 """Return the tags as a dictionary of name: revision"""
89 """Return the tags as a dictionary of name: revision"""
90 raise NotImplementedError()
90 raise NotImplementedError()
91
91
92 def recode(self, s, encoding=None):
92 def recode(self, s, encoding=None):
93 if not encoding:
93 if not encoding:
94 encoding = self.encoding or 'utf-8'
94 encoding = self.encoding or 'utf-8'
95
95
96 if isinstance(s, unicode):
96 if isinstance(s, unicode):
97 return s.encode("utf-8")
97 return s.encode("utf-8")
98 try:
98 try:
99 return s.decode(encoding).encode("utf-8")
99 return s.decode(encoding).encode("utf-8")
100 except:
100 except:
101 try:
101 try:
102 return s.decode("latin-1").encode("utf-8")
102 return s.decode("latin-1").encode("utf-8")
103 except:
103 except:
104 return s.decode(encoding, "replace").encode("utf-8")
104 return s.decode(encoding, "replace").encode("utf-8")
105
105
106 def getchangedfiles(self, rev, i):
106 def getchangedfiles(self, rev, i):
107 """Return the files changed by rev compared to parent[i].
107 """Return the files changed by rev compared to parent[i].
108
108
109 i is an index selecting one of the parents of rev. The return
109 i is an index selecting one of the parents of rev. The return
110 value should be the list of files that are different in rev and
110 value should be the list of files that are different in rev and
111 this parent.
111 this parent.
112
112
113 If rev has no parents, i is None.
113 If rev has no parents, i is None.
114
114
115 This function is only needed to support --filemap
115 This function is only needed to support --filemap
116 """
116 """
117 raise NotImplementedError()
117 raise NotImplementedError()
118
118
119 def converted(self, rev, sinkrev):
119 def converted(self, rev, sinkrev):
120 '''Notify the source that a revision has been converted.'''
120 '''Notify the source that a revision has been converted.'''
121 pass
121 pass
122
122
123
123
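Taken together, converter_source defines the contract a frontend must meet: list the heads, describe each revision via getcommit and getchanges, and produce file contents and modes on demand. Below is a hedged, self-contained sketch of a toy source holding one revision in memory; it only mirrors the method names and return shapes, and a real backend (such as the git one later in this changeset) would subclass converter_source and return commit objects.

    # Toy source with a single revision 'r0' containing one file.
    class memory_source(object):
        def __init__(self):
            self.filedata = {('foo', 'r0'): 'hello\n'}

        def getheads(self):
            return ['r0']                    # revisions to start walking from

        def getcommit(self, version):
            # a real backend returns a commit() instance; a dict stands in here
            return {'author': 'unknown', 'date': '0 0',
                    'desc': 'initial import', 'parents': [], 'rev': version}

        def getchanges(self, version):
            # (sorted [(filename, file_rev)] list, {dest: source} copy map)
            return ([('foo', version)], {})

        def getfile(self, name, rev):
            return self.filedata[(name, rev)]

        def getmode(self, name, rev):
            return ''                        # '', 'x' or 'l'

    src = memory_source()
    assert src.getchanges('r0') == ([('foo', 'r0')], {})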
124 class converter_sink(object):
124 class converter_sink(object):
125 """Conversion sink (target) interface"""
125 """Conversion sink (target) interface"""
126
126
127 def __init__(self, ui, path):
127 def __init__(self, ui, path):
128 """Initialize conversion sink (or raise NoRepo("message")
128 """Initialize conversion sink (or raise NoRepo("message")
129 exception if path is not a valid repository)
129 exception if path is not a valid repository)
130
130
131 created is a list of paths to remove if a fatal error occurs
131 created is a list of paths to remove if a fatal error occurs
132 later"""
132 later"""
133 self.ui = ui
133 self.ui = ui
134 self.path = path
134 self.path = path
135 self.created = []
135 self.created = []
136
136
137 def getheads(self):
137 def getheads(self):
138 """Return a list of this repository's heads"""
138 """Return a list of this repository's heads"""
139 raise NotImplementedError()
139 raise NotImplementedError()
140
140
141 def revmapfile(self):
141 def revmapfile(self):
142 """Path to a file that will contain lines
142 """Path to a file that will contain lines
143 source_rev_id sink_rev_id
143 source_rev_id sink_rev_id
144 mapping equivalent revision identifiers for each system."""
144 mapping equivalent revision identifiers for each system."""
145 raise NotImplementedError()
145 raise NotImplementedError()
146
146
147 def authorfile(self):
147 def authorfile(self):
148 """Path to a file that will contain lines
148 """Path to a file that will contain lines
149 srcauthor=dstauthor
149 srcauthor=dstauthor
150 mapping equivalent authors identifiers for each system."""
150 mapping equivalent authors identifiers for each system."""
151 return None
151 return None
152
152
153 def putfile(self, f, e, data):
153 def putfile(self, f, e, data):
154 """Put file for next putcommit().
154 """Put file for next putcommit().
155 f: path to file
155 f: path to file
156 e: '', 'x', or 'l' (regular file, executable, or symlink)
156 e: '', 'x', or 'l' (regular file, executable, or symlink)
157 data: file contents"""
157 data: file contents"""
158 raise NotImplementedError()
158 raise NotImplementedError()
159
159
160 def delfile(self, f):
160 def delfile(self, f):
161 """Delete file for next putcommit().
161 """Delete file for next putcommit().
162 f: path to file"""
162 f: path to file"""
163 raise NotImplementedError()
163 raise NotImplementedError()
164
164
165 def putcommit(self, files, parents, commit):
165 def putcommit(self, files, parents, commit):
166 """Create a revision with all changed files listed in 'files'
166 """Create a revision with all changed files listed in 'files'
167 and having listed parents. 'commit' is a commit object containing
167 and having listed parents. 'commit' is a commit object containing
168 at a minimum the author, date, and message for this changeset.
168 at a minimum the author, date, and message for this changeset.
169 Called after putfile() and delfile() calls. Note that the sink
169 Called after putfile() and delfile() calls. Note that the sink
170 repository is not told to update itself to a particular revision
170 repository is not told to update itself to a particular revision
171 (or even what that revision would be) before it receives the
171 (or even what that revision would be) before it receives the
172 file data."""
172 file data."""
173 raise NotImplementedError()
173 raise NotImplementedError()
174
174
175 def puttags(self, tags):
175 def puttags(self, tags):
176 """Put tags into sink.
176 """Put tags into sink.
177 tags: {tagname: sink_rev_id, ...}"""
177 tags: {tagname: sink_rev_id, ...}"""
178 raise NotImplementedError()
178 raise NotImplementedError()
179
179
180 def setbranch(self, branch, pbranches):
180 def setbranch(self, branch, pbranches):
181 """Set the current branch name. Called before the first putfile
181 """Set the current branch name. Called before the first putfile
182 on the branch.
182 on the branch.
183 branch: branch name for subsequent commits
183 branch: branch name for subsequent commits
184 pbranches: (converted parent revision, parent branch) tuples"""
184 pbranches: (converted parent revision, parent branch) tuples"""
185 pass
185 pass
186
186
187 def setfilemapmode(self, active):
187 def setfilemapmode(self, active):
188 """Tell the destination that we're using a filemap
188 """Tell the destination that we're using a filemap
189
189
190 Some converter_sources (svn in particular) can claim that a file
190 Some converter_sources (svn in particular) can claim that a file
191 was changed in a revision, even if there was no change. This method
191 was changed in a revision, even if there was no change. This method
192 tells the destination that we're using a filemap and that it should
192 tells the destination that we're using a filemap and that it should
193 filter empty revisions.
193 filter empty revisions.
194 """
194 """
195 pass
195 pass
196
196
197 def before(self):
197 def before(self):
198 pass
198 pass
199
199
200 def after(self):
200 def after(self):
201 pass
201 pass
202
202
203
203
204 class commandline(object):
204 class commandline(object):
205 def __init__(self, ui, command):
205 def __init__(self, ui, command):
206 self.ui = ui
206 self.ui = ui
207 self.command = command
207 self.command = command
208
208
209 def prerun(self):
209 def prerun(self):
210 pass
210 pass
211
211
212 def postrun(self):
212 def postrun(self):
213 pass
213 pass
214
214
215 def _cmdline(self, cmd, *args, **kwargs):
215 def _cmdline(self, cmd, *args, **kwargs):
216 cmdline = [self.command, cmd] + list(args)
216 cmdline = [self.command, cmd] + list(args)
217 for k, v in kwargs.iteritems():
217 for k, v in kwargs.iteritems():
218 if len(k) == 1:
218 if len(k) == 1:
219 cmdline.append('-' + k)
219 cmdline.append('-' + k)
220 else:
220 else:
221 cmdline.append('--' + k.replace('_', '-'))
221 cmdline.append('--' + k.replace('_', '-'))
222 try:
222 try:
223 if len(k) == 1:
223 if len(k) == 1:
224 cmdline.append('' + v)
224 cmdline.append('' + v)
225 else:
225 else:
226 cmdline[-1] += '=' + v
226 cmdline[-1] += '=' + v
227 except TypeError:
227 except TypeError:
228 pass
228 pass
229 cmdline = [util.shellquote(arg) for arg in cmdline]
229 cmdline = [util.shellquote(arg) for arg in cmdline]
230 cmdline += ['<', util.nulldev]
230 cmdline += ['<', util.nulldev]
231 cmdline = ' '.join(cmdline)
231 cmdline = ' '.join(cmdline)
232 self.ui.debug(cmdline, '\n')
232 self.ui.debug(cmdline, '\n')
233 return cmdline
233 return cmdline
234
234
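_cmdline maps keyword arguments onto options: a one-letter key becomes '-k value', a longer key becomes '--long-key=value' (underscores turned into dashes), and a value that cannot be concatenated to a string, such as True, leaves a bare flag because the TypeError is swallowed; the pieces are then shell-quoted, stdin is redirected from the null device, and the joined line is echoed to the debug output. A hedged standalone restatement of just the option mapping (sorted here only to make the result deterministic):

    def build_options(**kwargs):
        opts = []
        for k, v in sorted(kwargs.items()):
            if len(k) == 1:
                opts.append('-' + k)
            else:
                opts.append('--' + k.replace('_', '-'))
            try:
                if len(k) == 1:
                    opts.append('' + v)      # TypeError unless v is a string
                else:
                    opts[-1] += '=' + v
            except TypeError:
                pass                         # boolean flag stays bare
        return opts

    assert build_options(r='1.2', use_commit_times=True, message='fix') == \
        ['--message=fix', '-r', '1.2', '--use-commit-times']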
235 def _run(self, cmd, *args, **kwargs):
235 def _run(self, cmd, *args, **kwargs):
236 cmdline = self._cmdline(cmd, *args, **kwargs)
236 cmdline = self._cmdline(cmd, *args, **kwargs)
237 self.prerun()
237 self.prerun()
238 try:
238 try:
239 return util.popen(cmdline)
239 return util.popen(cmdline)
240 finally:
240 finally:
241 self.postrun()
241 self.postrun()
242
242
243 def run(self, cmd, *args, **kwargs):
243 def run(self, cmd, *args, **kwargs):
244 fp = self._run(cmd, *args, **kwargs)
244 fp = self._run(cmd, *args, **kwargs)
245 output = fp.read()
245 output = fp.read()
246 self.ui.debug(output)
246 self.ui.debug(output)
247 return output, fp.close()
247 return output, fp.close()
248
248
249 def checkexit(self, status, output=''):
249 def checkexit(self, status, output=''):
250 if status:
250 if status:
251 if output:
251 if output:
252 self.ui.warn(_('%s error:\n') % self.command)
252 self.ui.warn(_('%s error:\n') % self.command)
253 self.ui.warn(output)
253 self.ui.warn(output)
254 msg = util.explain_exit(status)[0]
254 msg = util.explain_exit(status)[0]
255 raise util.Abort(_('%s %s') % (self.command, msg))
255 raise util.Abort(_('%s %s') % (self.command, msg))
256
256
257 def run0(self, cmd, *args, **kwargs):
257 def run0(self, cmd, *args, **kwargs):
258 output, status = self.run(cmd, *args, **kwargs)
258 output, status = self.run(cmd, *args, **kwargs)
259 self.checkexit(status, output)
259 self.checkexit(status, output)
260 return output
260 return output
261
261
262 def getargmax(self):
262 def getargmax(self):
263 if '_argmax' in self.__dict__:
263 if '_argmax' in self.__dict__:
264 return self._argmax
264 return self._argmax
265
265
266 # POSIX requires at least 4096 bytes for ARG_MAX
266 # POSIX requires at least 4096 bytes for ARG_MAX
267 self._argmax = 4096
267 self._argmax = 4096
268 try:
268 try:
269 self._argmax = os.sysconf("SC_ARG_MAX")
269 self._argmax = os.sysconf("SC_ARG_MAX")
270 except:
270 except:
271 pass
271 pass
272
272
273 # Windows shells impose their own limits on command line length,
273 # Windows shells impose their own limits on command line length,
274 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
274 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
275 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
275 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
276 # details about cmd.exe limitations.
276 # details about cmd.exe limitations.
277
277
278 # Since ARG_MAX is for command line _and_ environment, lower our limit
278 # Since ARG_MAX is for command line _and_ environment, lower our limit
279 # (and make happy Windows shells while doing this).
279 # (and make happy Windows shells while doing this).
280
280
281 self._argmax = self._argmax/2 - 1
281 self._argmax = self._argmax/2 - 1
282 return self._argmax
282 return self._argmax
283
283
284 def limit_arglist(self, arglist, cmd, *args, **kwargs):
284 def limit_arglist(self, arglist, cmd, *args, **kwargs):
285 limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
285 limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
286 bytes = 0
286 bytes = 0
287 fl = []
287 fl = []
288 for fn in arglist:
288 for fn in arglist:
289 b = len(fn) + 3
289 b = len(fn) + 3
290 if bytes + b < limit or len(fl) == 0:
290 if bytes + b < limit or len(fl) == 0:
291 fl.append(fn)
291 fl.append(fn)
292 bytes += b
292 bytes += b
293 else:
293 else:
294 yield fl
294 yield fl
295 fl = [fn]
295 fl = [fn]
296 bytes = b
296 bytes = b
297 if fl:
297 if fl:
298 yield fl
298 yield fl
299
299
300 def xargs(self, arglist, cmd, *args, **kwargs):
300 def xargs(self, arglist, cmd, *args, **kwargs):
301 for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
301 for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
302 self.run0(cmd, *(list(args) + l), **kwargs)
302 self.run0(cmd, *(list(args) + l), **kwargs)
303
303
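getargmax caches ARG_MAX (falling back to the POSIX minimum of 4096), then halves it to leave room for the environment and for the much smaller Windows shell limits; limit_arglist packs file names into groups whose rough byte cost stays under what remains after the fixed part of the command line, and xargs runs the command once per group. A hedged standalone version of the packing step, with the byte limit passed in directly:

    # Group names under `limit` bytes (3 bytes per-argument overhead, as in
    # limit_arglist); never yield an empty group, even for an oversized name.
    def chunk_args(names, limit):
        group, size = [], 0
        for name in names:
            cost = len(name) + 3
            if size + cost < limit or not group:
                group.append(name)
                size += cost
            else:
                yield group
                group, size = [name], cost
        if group:
            yield group

    files = ['a.txt', 'b.txt', 'c.txt', 'd.txt']
    # with a tiny limit every chunk holds exactly one file
    assert list(chunk_args(files, 10)) == [[f] for f in files]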
304 class mapfile(dict):
304 class mapfile(dict):
305 def __init__(self, ui, path):
305 def __init__(self, ui, path):
306 super(mapfile, self).__init__()
306 super(mapfile, self).__init__()
307 self.ui = ui
307 self.ui = ui
308 self.path = path
308 self.path = path
309 self.fp = None
309 self.fp = None
310 self.order = []
310 self.order = []
311 self._read()
311 self._read()
312
312
313 def _read(self):
313 def _read(self):
314 if self.path is None:
314 if self.path is None:
315 return
315 return
316 try:
316 try:
317 fp = open(self.path, 'r')
317 fp = open(self.path, 'r')
318 except IOError, err:
318 except IOError, err:
319 if err.errno != errno.ENOENT:
319 if err.errno != errno.ENOENT:
320 raise
320 raise
321 return
321 return
322 for line in fp:
322 for line in fp:
323 key, value = line[:-1].split(' ', 1)
323 key, value = line[:-1].split(' ', 1)
324 if key not in self:
324 if key not in self:
325 self.order.append(key)
325 self.order.append(key)
326 super(mapfile, self).__setitem__(key, value)
326 super(mapfile, self).__setitem__(key, value)
327 fp.close()
327 fp.close()
328
328
329 def __setitem__(self, key, value):
329 def __setitem__(self, key, value):
330 if self.fp is None:
330 if self.fp is None:
331 try:
331 try:
332 self.fp = open(self.path, 'a')
332 self.fp = open(self.path, 'a')
333 except IOError, err:
333 except IOError, err:
334 raise util.Abort(_('could not open map file %r: %s') %
334 raise util.Abort(_('could not open map file %r: %s') %
335 (self.path, err.strerror))
335 (self.path, err.strerror))
336 self.fp.write('%s %s\n' % (key, value))
336 self.fp.write('%s %s\n' % (key, value))
337 self.fp.flush()
337 self.fp.flush()
338 super(mapfile, self).__setitem__(key, value)
338 super(mapfile, self).__setitem__(key, value)
339
339
340 def close(self):
340 def close(self):
341 if self.fp:
341 if self.fp:
342 self.fp.close()
342 self.fp.close()
343 self.fp = None
343 self.fp = None
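mapfile is the persistent dictionary behind revmapfile() and the author map: each line is 'key value' (for the revision map, 'source_rev_id sink_rev_id'), later lines win, insertion order is remembered, and every new entry is appended and flushed immediately so an interrupted conversion can resume where it stopped. A hedged sketch of that load-then-append behaviour on a plain file; the original does the append inside __setitem__, the sketch uses an explicit helper, and the file name is only an example.

    import os

    class simple_mapfile(dict):
        def __init__(self, path):
            super(simple_mapfile, self).__init__()
            self.path = path
            if os.path.exists(path):
                with open(path) as fp:
                    for line in fp:
                        key, value = line.rstrip('\n').split(' ', 1)
                        self[key] = value    # later lines override earlier ones

        def record(self, key, value):
            with open(self.path, 'a') as fp:
                fp.write('%s %s\n' % (key, value))   # flushed on close
            dict.__setitem__(self, key, value)

    revmap = simple_mapfile('revmap.example')        # example path only
    revmap.record('d3adbeef', '0123abcd')            # source_rev sink_rev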
@@ -1,143 +1,142 @@
1 # git support for the convert extension
1 # git support for the convert extension
2
2
3 import os
3 import os
4 from mercurial import util
4 from mercurial import util
5
5
6 from common import NoRepo, commit, converter_source, checktool
6 from common import NoRepo, commit, converter_source, checktool
7
7
8 class convert_git(converter_source):
8 class convert_git(converter_source):
9 # Windows does not support GIT_DIR= construct while other systems
9 # Windows does not support GIT_DIR= construct while other systems
10 # cannot remove environment variable. Just assume none have
10 # cannot remove environment variable. Just assume none have
11 # both issues.
11 # both issues.
12 if hasattr(os, 'unsetenv'):
12 if hasattr(os, 'unsetenv'):
13 def gitcmd(self, s):
13 def gitcmd(self, s):
14 prevgitdir = os.environ.get('GIT_DIR')
14 prevgitdir = os.environ.get('GIT_DIR')
15 os.environ['GIT_DIR'] = self.path
15 os.environ['GIT_DIR'] = self.path
16 try:
16 try:
17 return util.popen(s)
17 return util.popen(s)
18 finally:
18 finally:
19 if prevgitdir is None:
19 if prevgitdir is None:
20 del os.environ['GIT_DIR']
20 del os.environ['GIT_DIR']
21 else:
21 else:
22 os.environ['GIT_DIR'] = prevgitdir
22 os.environ['GIT_DIR'] = prevgitdir
23 else:
23 else:
24 def gitcmd(self, s):
24 def gitcmd(self, s):
25 return util.popen('GIT_DIR=%s %s' % (self.path, s))
25 return util.popen('GIT_DIR=%s %s' % (self.path, s))
26
26
27 def __init__(self, ui, path, rev=None):
27 def __init__(self, ui, path, rev=None):
28 super(convert_git, self).__init__(ui, path, rev=rev)
28 super(convert_git, self).__init__(ui, path, rev=rev)
29
29
30 if os.path.isdir(path + "/.git"):
30 if os.path.isdir(path + "/.git"):
31 path += "/.git"
31 path += "/.git"
32 if not os.path.exists(path + "/objects"):
32 if not os.path.exists(path + "/objects"):
33 raise NoRepo("%s does not look like a Git repo" % path)
33 raise NoRepo("%s does not look like a Git repo" % path)
34
34
35 checktool('git-rev-parse', 'git')
35 checktool('git-rev-parse', 'git')
36
36
37 self.path = path
37 self.path = path
38
38
39 def getheads(self):
39 def getheads(self):
40 if not self.rev:
40 if not self.rev:
41 return self.gitcmd('git-rev-parse --branches').read().splitlines()
41 return self.gitcmd('git-rev-parse --branches').read().splitlines()
42 else:
42 else:
43 fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
43 fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
44 return [fh.read()[:-1]]
44 return [fh.read()[:-1]]
45
45
46 def catfile(self, rev, type):
46 def catfile(self, rev, type):
47 if rev == "0" * 40: raise IOError()
47 if rev == "0" * 40: raise IOError()
48 fh = self.gitcmd("git-cat-file %s %s" % (type, rev))
48 fh = self.gitcmd("git-cat-file %s %s" % (type, rev))
49 return fh.read()
49 return fh.read()
50
50
51 def getfile(self, name, rev):
51 def getfile(self, name, rev):
52 return self.catfile(rev, "blob")
52 return self.catfile(rev, "blob")
53
53
54 def getmode(self, name, rev):
54 def getmode(self, name, rev):
55 return self.modecache[(name, rev)]
55 return self.modecache[(name, rev)]
56
56
57 def getchanges(self, version):
57 def getchanges(self, version):
58 self.modecache = {}
58 self.modecache = {}
59 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
59 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
60 changes = []
60 changes = []
61 seen = {}
61 seen = {}
62 for l in fh:
62 for l in fh:
63 if "\t" not in l:
63 if "\t" not in l:
64 continue
64 continue
65 m, f = l[:-1].split("\t")
65 m, f = l[:-1].split("\t")
66 if f in seen:
66 if f in seen:
67 continue
67 continue
68 seen[f] = 1
68 seen[f] = 1
69 m = m.split()
69 m = m.split()
70 h = m[3]
70 h = m[3]
71 p = (m[1] == "100755")
71 p = (m[1] == "100755")
72 s = (m[1] == "120000")
72 s = (m[1] == "120000")
73 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
73 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
74 changes.append((f, h))
74 changes.append((f, h))
75 return (changes, {})
75 return (changes, {})
76
76
77 def getcommit(self, version):
77 def getcommit(self, version):
78 c = self.catfile(version, "commit") # read the commit hash
78 c = self.catfile(version, "commit") # read the commit hash
79 end = c.find("\n\n")
79 end = c.find("\n\n")
80 message = c[end+2:]
80 message = c[end+2:]
81 message = self.recode(message)
81 message = self.recode(message)
82 l = c[:end].splitlines()
82 l = c[:end].splitlines()
83 manifest = l[0].split()[1]
83 manifest = l[0].split()[1]
84 parents = []
84 parents = []
85 for e in l[1:]:
85 for e in l[1:]:
86 n, v = e.split(" ", 1)
86 n, v = e.split(" ", 1)
87 if n == "author":
87 if n == "author":
88 p = v.split()
88 p = v.split()
89 tm, tz = p[-2:]
89 tm, tz = p[-2:]
90 author = " ".join(p[:-2])
90 author = " ".join(p[:-2])
91 if author[0] == "<": author = author[1:-1]
91 if author[0] == "<": author = author[1:-1]
92 author = self.recode(author)
92 author = self.recode(author)
93 if n == "committer":
93 if n == "committer":
94 p = v.split()
94 p = v.split()
95 tm, tz = p[-2:]
95 tm, tz = p[-2:]
96 committer = " ".join(p[:-2])
96 committer = " ".join(p[:-2])
97 if committer[0] == "<": committer = committer[1:-1]
97 if committer[0] == "<": committer = committer[1:-1]
98 committer = self.recode(committer)
98 committer = self.recode(committer)
99 message += "\ncommitter: %s\n" % committer
99 message += "\ncommitter: %s\n" % committer
100 if n == "parent": parents.append(v)
100 if n == "parent": parents.append(v)
101
101
102 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
102 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
103 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
103 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
104 date = tm + " " + str(tz)
104 date = tm + " " + str(tz)
105 author = author or "unknown"
106
105
107 c = commit(parents=parents, date=date, author=author, desc=message,
106 c = commit(parents=parents, date=date, author=author, desc=message,
108 rev=version)
107 rev=version)
109 return c
108 return c
110
109
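getcommit walks the header of the raw commit object (tree, parent, author and committer lines up to the first blank line), appends a 'committer:' note to the message when one is present, and turns the trailing '<seconds> ±HHMM' of the author line into Mercurial's '<unixtime> <offset>' date, where the offset counts seconds west of UTC, hence the flipped sign. A hedged restatement of just that date step; note that, as in the line above, the minutes are added unscaled, which only matters for non-whole-hour zones:

    def git_date_to_hg(tm, tz):
        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
        offset = -int(tzs) * (int(tzh) * 3600 + int(tzm))
        return "%s %d" % (tm, offset)

    # a commit stamped UTC+02:00 ends up 7200 seconds east of UTC
    assert git_date_to_hg("1190000000", "+0200") == "1190000000 -7200"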
111 def gettags(self):
110 def gettags(self):
112 tags = {}
111 tags = {}
113 fh = self.gitcmd('git-ls-remote --tags "%s"' % self.path)
112 fh = self.gitcmd('git-ls-remote --tags "%s"' % self.path)
114 prefix = 'refs/tags/'
113 prefix = 'refs/tags/'
115 for line in fh:
114 for line in fh:
116 line = line.strip()
115 line = line.strip()
117 if not line.endswith("^{}"):
116 if not line.endswith("^{}"):
118 continue
117 continue
119 node, tag = line.split(None, 1)
118 node, tag = line.split(None, 1)
120 if not tag.startswith(prefix):
119 if not tag.startswith(prefix):
121 continue
120 continue
122 tag = tag[len(prefix):-3]
121 tag = tag[len(prefix):-3]
123 tags[tag] = node
122 tags[tag] = node
124
123
125 return tags
124 return tags
126
125
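gettags reads the output of git-ls-remote --tags and keeps only the peeled entries (refs/tags/NAME^{}), which point at the tagged commit rather than at an annotated tag object, mapping each tag name to that node. A hedged parse of a couple of such output lines; the sample hashes are made up:

    def parse_ls_remote(lines):
        tags, prefix = {}, 'refs/tags/'
        for line in lines:
            line = line.strip()
            if not line.endswith('^{}'):
                continue                     # skip the unpeeled tag ref
            node, ref = line.split(None, 1)
            if ref.startswith(prefix):
                tags[ref[len(prefix):-3]] = node
        return tags

    sample = [
        "1111111111111111111111111111111111111111\trefs/tags/v1.0",
        "2222222222222222222222222222222222222222\trefs/tags/v1.0^{}",
    ]
    assert parse_ls_remote(sample) == {'v1.0': '2' * 40}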
127 def getchangedfiles(self, version, i):
126 def getchangedfiles(self, version, i):
128 changes = []
127 changes = []
129 if i is None:
128 if i is None:
130 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
129 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
131 for l in fh:
130 for l in fh:
132 if "\t" not in l:
131 if "\t" not in l:
133 continue
132 continue
134 m, f = l[:-1].split("\t")
133 m, f = l[:-1].split("\t")
135 changes.append(f)
134 changes.append(f)
136 fh.close()
135 fh.close()
137 else:
136 else:
138 fh = self.gitcmd('git-diff-tree --name-only --root -r %s "%s^%s" --'
137 fh = self.gitcmd('git-diff-tree --name-only --root -r %s "%s^%s" --'
139 % (version, version, i+1))
138 % (version, version, i+1))
140 changes = [f.rstrip('\n') for f in fh]
139 changes = [f.rstrip('\n') for f in fh]
141 fh.close()
140 fh.close()
142
141
143 return changes
142 return changes
@@ -1,2315 +1,2347 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone"
37 commands.norepo += " qclone"
38
38
39 # Patch names looks like unix-file names.
39 # Patch names looks like unix-file names.
40 # They must be joinable with queue directory and result in the patch path.
40 # They must be joinable with queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
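parse_series reads the series file line by line: a line whose first character is '#' is a comment, otherwise everything before the first '#' is the patch name and the remainder is a comment from which guard_re extracts guards of the form #+name or #-name; a patch listed twice aborts. A hedged parse of a single series line using the same regular expression; the patch and guard names are examples:

    import re

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def parse_series_line(line):
        h = line.find('#')
        if h == 0:
            return None, []                  # whole line is a comment
        patch, comment = (line, '') if h == -1 else (line[:h], line[h:])
        return patch.strip(), guard_re.findall(comment)

    assert parse_series_line('fix-io.patch #+stable #-experimental') == \
        ('fix-io.patch', ['+stable', '-experimental'])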
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 fp.write("%s\n" % i)
227 fp.write("%s\n" % i)
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
309
309
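readheaders accepts two layouts: the output of hg export (a '# HG changeset patch' block whose '# User' and '# Date' lines fill in user and date, followed by the message) and plain mail-style patches (Subject:/From: headers read until the first blank line), and it records whether an actual diff body was found. A hedged example of the hg-export form it understands; the header text below is illustrative, not taken from this changeset, and only the '# User '/'# Date ' slicing from the code above is exercised:

    # Illustrative header in the hg-export layout readheaders understands.
    sample = [
        "# HG changeset patch",
        "# User Jane Doe <jane@example.com>",
        "# Date 1190000000 -7200",
        "fix breakage in the frobnicator",
    ]
    user = [l[7:] for l in sample if l.startswith("# User ")][0]
    date = [l[7:] for l in sample if l.startswith("# Date ")][0]
    assert user.startswith("Jane Doe") and date == "1190000000 -7200"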
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev):
326 def mergeone(self, repo, mergeq, head, patch, rev):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev)
329 strict=True, merge=rev)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head)
340 hg.clean(repo, head)
341 self.strip(repo, n, update=False, backup='strip')
341 self.strip(repo, n, update=False, backup='strip')
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev)
344 ret = hg.merge(repo, rev)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 if n == None:
348 if n == None:
349 raise util.Abort(_("repo commit failed"))
349 raise util.Abort(_("repo commit failed"))
350 try:
350 try:
351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 except:
352 except:
353 raise util.Abort(_("unable to read %s") % patch)
353 raise util.Abort(_("unable to read %s") % patch)
354
354
355 patchf = self.opener(patch, "w")
355 patchf = self.opener(patch, "w")
356 if comments:
356 if comments:
357 comments = "\n".join(comments) + '\n\n'
357 comments = "\n".join(comments) + '\n\n'
358 patchf.write(comments)
358 patchf.write(comments)
359 self.printdiff(repo, head, n, fp=patchf)
359 self.printdiff(repo, head, n, fp=patchf)
360 patchf.close()
360 patchf.close()
361 self.removeundo(repo)
361 self.removeundo(repo)
362 return (0, n)
362 return (0, n)
363
363
364 def qparents(self, repo, rev=None):
364 def qparents(self, repo, rev=None):
365 if rev is None:
365 if rev is None:
366 (p1, p2) = repo.dirstate.parents()
366 (p1, p2) = repo.dirstate.parents()
367 if p2 == revlog.nullid:
367 if p2 == revlog.nullid:
368 return p1
368 return p1
369 if len(self.applied) == 0:
369 if len(self.applied) == 0:
370 return None
370 return None
371 return revlog.bin(self.applied[-1].rev)
371 return revlog.bin(self.applied[-1].rev)
372 pp = repo.changelog.parents(rev)
372 pp = repo.changelog.parents(rev)
373 if pp[1] != revlog.nullid:
373 if pp[1] != revlog.nullid:
374 arevs = [ x.rev for x in self.applied ]
374 arevs = [ x.rev for x in self.applied ]
375 p0 = revlog.hex(pp[0])
375 p0 = revlog.hex(pp[0])
376 p1 = revlog.hex(pp[1])
376 p1 = revlog.hex(pp[1])
377 if p0 in arevs:
377 if p0 in arevs:
378 return pp[0]
378 return pp[0]
379 if p1 in arevs:
379 if p1 in arevs:
380 return pp[1]
380 return pp[1]
381 return pp[0]
381 return pp[0]
382
382
383 def mergepatch(self, repo, mergeq, series):
383 def mergepatch(self, repo, mergeq, series):
384 if len(self.applied) == 0:
384 if len(self.applied) == 0:
385 # each of the patches merged in will have two parents. This
385 # each of the patches merged in will have two parents. This
386 # can confuse the qrefresh, qdiff, and strip code because it
386 # can confuse the qrefresh, qdiff, and strip code because it
387 # needs to know which parent is actually in the patch queue.
387 # needs to know which parent is actually in the patch queue.
388 # so, we insert a merge marker with only one parent. This way
388 # so, we insert a merge marker with only one parent. This way
389 # the first patch in the queue is never a merge patch
389 # the first patch in the queue is never a merge patch
390 #
390 #
391 pname = ".hg.patches.merge.marker"
391 pname = ".hg.patches.merge.marker"
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 self.removeundo(repo)
393 self.removeundo(repo)
394 self.applied.append(statusentry(revlog.hex(n), pname))
394 self.applied.append(statusentry(revlog.hex(n), pname))
395 self.applied_dirty = 1
395 self.applied_dirty = 1
396
396
397 head = self.qparents(repo)
397 head = self.qparents(repo)
398
398
399 for patch in series:
399 for patch in series:
400 patch = mergeq.lookup(patch, strict=True)
400 patch = mergeq.lookup(patch, strict=True)
401 if not patch:
401 if not patch:
402 self.ui.warn("patch %s does not exist\n" % patch)
402 self.ui.warn("patch %s does not exist\n" % patch)
403 return (1, None)
403 return (1, None)
404 pushable, reason = self.pushable(patch)
404 pushable, reason = self.pushable(patch)
405 if not pushable:
405 if not pushable:
406 self.explain_pushable(patch, all_patches=True)
406 self.explain_pushable(patch, all_patches=True)
407 continue
407 continue
408 info = mergeq.isapplied(patch)
408 info = mergeq.isapplied(patch)
409 if not info:
409 if not info:
410 self.ui.warn("patch %s is not applied\n" % patch)
410 self.ui.warn("patch %s is not applied\n" % patch)
411 return (1, None)
411 return (1, None)
412 rev = revlog.bin(info[1])
412 rev = revlog.bin(info[1])
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 if head:
414 if head:
415 self.applied.append(statusentry(revlog.hex(head), patch))
415 self.applied.append(statusentry(revlog.hex(head), patch))
416 self.applied_dirty = 1
416 self.applied_dirty = 1
417 if err:
417 if err:
418 return (err, head)
418 return (err, head)
419 self.save_dirty()
419 self.save_dirty()
420 return (0, head)
420 return (0, head)
421
421
422 def patch(self, repo, patchfile):
422 def patch(self, repo, patchfile):
423 '''Apply patchfile to the working directory.
423 '''Apply patchfile to the working directory.
424 patchfile: file name of patch'''
424 patchfile: file name of patch'''
425 files = {}
425 files = {}
426 try:
426 try:
427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 files=files)
428 files=files)
429 except Exception, inst:
429 except Exception, inst:
430 self.ui.note(str(inst) + '\n')
430 self.ui.note(str(inst) + '\n')
431 if not self.ui.verbose:
431 if not self.ui.verbose:
432 self.ui.warn("patch failed, unable to continue (try -v)\n")
432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 return (False, files, False)
433 return (False, files, False)
434
434
435 return (True, files, fuzz)
435 return (True, files, fuzz)
436
436
437 def apply(self, repo, series, list=False, update_status=True,
437 def apply(self, repo, series, list=False, update_status=True,
438 strict=False, patchdir=None, merge=None, all_files={}):
438 strict=False, patchdir=None, merge=None, all_files={}):
439 wlock = lock = tr = None
439 wlock = lock = tr = None
440 try:
440 try:
441 wlock = repo.wlock()
441 wlock = repo.wlock()
442 lock = repo.lock()
442 lock = repo.lock()
443 tr = repo.transaction()
443 tr = repo.transaction()
444 try:
444 try:
445 ret = self._apply(repo, series, list, update_status,
445 ret = self._apply(repo, series, list, update_status,
446 strict, patchdir, merge, all_files=all_files)
446 strict, patchdir, merge, all_files=all_files)
447 tr.close()
447 tr.close()
448 self.save_dirty()
448 self.save_dirty()
449 return ret
449 return ret
450 except:
450 except:
451 try:
451 try:
452 tr.abort()
452 tr.abort()
453 finally:
453 finally:
454 repo.invalidate()
454 repo.invalidate()
455 repo.dirstate.invalidate()
455 repo.dirstate.invalidate()
456 raise
456 raise
457 finally:
457 finally:
458 del tr, lock, wlock
458 del tr, lock, wlock
459 self.removeundo(repo)
459 self.removeundo(repo)
460
460
461 def _apply(self, repo, series, list=False, update_status=True,
461 def _apply(self, repo, series, list=False, update_status=True,
462 strict=False, patchdir=None, merge=None, all_files={}):
462 strict=False, patchdir=None, merge=None, all_files={}):
463 # TODO unify with commands.py
463 # TODO unify with commands.py
464 if not patchdir:
464 if not patchdir:
465 patchdir = self.path
465 patchdir = self.path
466 err = 0
466 err = 0
467 n = None
467 n = None
468 for patchname in series:
468 for patchname in series:
469 pushable, reason = self.pushable(patchname)
469 pushable, reason = self.pushable(patchname)
470 if not pushable:
470 if not pushable:
471 self.explain_pushable(patchname, all_patches=True)
471 self.explain_pushable(patchname, all_patches=True)
472 continue
472 continue
473 self.ui.warn("applying %s\n" % patchname)
473 self.ui.warn("applying %s\n" % patchname)
474 pf = os.path.join(patchdir, patchname)
474 pf = os.path.join(patchdir, patchname)
475
475
476 try:
476 try:
477 message, comments, user, date, patchfound = self.readheaders(patchname)
477 message, comments, user, date, patchfound = self.readheaders(patchname)
478 except:
478 except:
479 self.ui.warn("Unable to read %s\n" % patchname)
479 self.ui.warn("Unable to read %s\n" % patchname)
480 err = 1
480 err = 1
481 break
481 break
482
482
483 if not message:
483 if not message:
484 message = "imported patch %s\n" % patchname
484 message = "imported patch %s\n" % patchname
485 else:
485 else:
486 if list:
486 if list:
487 message.append("\nimported patch %s" % patchname)
487 message.append("\nimported patch %s" % patchname)
488 message = '\n'.join(message)
488 message = '\n'.join(message)
489
489
490 (patcherr, files, fuzz) = self.patch(repo, pf)
490 (patcherr, files, fuzz) = self.patch(repo, pf)
491 all_files.update(files)
491 all_files.update(files)
492 patcherr = not patcherr
492 patcherr = not patcherr
493
493
494 if merge and files:
494 if merge and files:
495 # Mark as removed/merged and update dirstate parent info
495 # Mark as removed/merged and update dirstate parent info
496 removed = []
496 removed = []
497 merged = []
497 merged = []
498 for f in files:
498 for f in files:
499 if os.path.exists(repo.wjoin(f)):
499 if os.path.exists(repo.wjoin(f)):
500 merged.append(f)
500 merged.append(f)
501 else:
501 else:
502 removed.append(f)
502 removed.append(f)
503 for f in removed:
503 for f in removed:
504 repo.dirstate.remove(f)
504 repo.dirstate.remove(f)
505 for f in merged:
505 for f in merged:
506 repo.dirstate.merge(f)
506 repo.dirstate.merge(f)
507 p1, p2 = repo.dirstate.parents()
507 p1, p2 = repo.dirstate.parents()
508 repo.dirstate.setparents(p1, merge)
508 repo.dirstate.setparents(p1, merge)
509 files = patch.updatedir(self.ui, repo, files)
509 files = patch.updatedir(self.ui, repo, files)
510 n = repo.commit(files, message, user, date, force=1)
510 n = repo.commit(files, message, user, date, force=1)
511
511
512 if n == None:
512 if n == None:
513 raise util.Abort(_("repo commit failed"))
513 raise util.Abort(_("repo commit failed"))
514
514
515 if update_status:
515 if update_status:
516 self.applied.append(statusentry(revlog.hex(n), patchname))
516 self.applied.append(statusentry(revlog.hex(n), patchname))
517
517
518 if patcherr:
518 if patcherr:
519 if not patchfound:
519 if not patchfound:
520 self.ui.warn("patch %s is empty\n" % patchname)
520 self.ui.warn("patch %s is empty\n" % patchname)
521 err = 0
521 err = 0
522 else:
522 else:
523 self.ui.warn("patch failed, rejects left in working dir\n")
523 self.ui.warn("patch failed, rejects left in working dir\n")
524 err = 1
524 err = 1
525 break
525 break
526
526
527 if fuzz and strict:
527 if fuzz and strict:
528 self.ui.warn("fuzz found when applying patch, stopping\n")
528 self.ui.warn("fuzz found when applying patch, stopping\n")
529 err = 1
529 err = 1
530 break
530 break
531 return (err, n)
531 return (err, n)
532
532
533 def delete(self, repo, patches, opts):
533 def delete(self, repo, patches, opts):
534 if not patches and not opts.get('rev'):
534 if not patches and not opts.get('rev'):
535 raise util.Abort(_('qdelete requires at least one revision or '
535 raise util.Abort(_('qdelete requires at least one revision or '
536 'patch name'))
536 'patch name'))
537
537
538 realpatches = []
538 realpatches = []
539 for patch in patches:
539 for patch in patches:
540 patch = self.lookup(patch, strict=True)
540 patch = self.lookup(patch, strict=True)
541 info = self.isapplied(patch)
541 info = self.isapplied(patch)
542 if info:
542 if info:
543 raise util.Abort(_("cannot delete applied patch %s") % patch)
543 raise util.Abort(_("cannot delete applied patch %s") % patch)
544 if patch not in self.series:
544 if patch not in self.series:
545 raise util.Abort(_("patch %s not in series file") % patch)
545 raise util.Abort(_("patch %s not in series file") % patch)
546 realpatches.append(patch)
546 realpatches.append(patch)
547
547
548 appliedbase = 0
548 appliedbase = 0
549 if opts.get('rev'):
549 if opts.get('rev'):
550 if not self.applied:
550 if not self.applied:
551 raise util.Abort(_('no patches applied'))
551 raise util.Abort(_('no patches applied'))
552 revs = cmdutil.revrange(repo, opts['rev'])
552 revs = cmdutil.revrange(repo, opts['rev'])
553 if len(revs) > 1 and revs[0] > revs[1]:
553 if len(revs) > 1 and revs[0] > revs[1]:
554 revs.reverse()
554 revs.reverse()
555 for rev in revs:
555 for rev in revs:
556 if appliedbase >= len(self.applied):
556 if appliedbase >= len(self.applied):
557 raise util.Abort(_("revision %d is not managed") % rev)
557 raise util.Abort(_("revision %d is not managed") % rev)
558
558
559 base = revlog.bin(self.applied[appliedbase].rev)
559 base = revlog.bin(self.applied[appliedbase].rev)
560 node = repo.changelog.node(rev)
560 node = repo.changelog.node(rev)
561 if node != base:
561 if node != base:
562 raise util.Abort(_("cannot delete revision %d above "
562 raise util.Abort(_("cannot delete revision %d above "
563 "applied patches") % rev)
563 "applied patches") % rev)
564 realpatches.append(self.applied[appliedbase].name)
564 realpatches.append(self.applied[appliedbase].name)
565 appliedbase += 1
565 appliedbase += 1
566
566
567 if not opts.get('keep'):
567 if not opts.get('keep'):
568 r = self.qrepo()
568 r = self.qrepo()
569 if r:
569 if r:
570 r.remove(realpatches, True)
570 r.remove(realpatches, True)
571 else:
571 else:
572 for p in realpatches:
572 for p in realpatches:
573 os.unlink(self.join(p))
573 os.unlink(self.join(p))
574
574
575 if appliedbase:
575 if appliedbase:
576 del self.applied[:appliedbase]
576 del self.applied[:appliedbase]
577 self.applied_dirty = 1
577 self.applied_dirty = 1
578 indices = [self.find_series(p) for p in realpatches]
578 indices = [self.find_series(p) for p in realpatches]
579 indices.sort()
579 indices.sort()
580 for i in indices[-1::-1]:
580 for i in indices[-1::-1]:
581 del self.full_series[i]
581 del self.full_series[i]
582 self.parse_series()
582 self.parse_series()
583 self.series_dirty = 1
583 self.series_dirty = 1
584
584
585 def check_toppatch(self, repo):
585 def check_toppatch(self, repo):
586 if len(self.applied) > 0:
586 if len(self.applied) > 0:
587 top = revlog.bin(self.applied[-1].rev)
587 top = revlog.bin(self.applied[-1].rev)
588 pp = repo.dirstate.parents()
588 pp = repo.dirstate.parents()
589 if top not in pp:
589 if top not in pp:
590 raise util.Abort(_("working directory revision is not qtip"))
590 raise util.Abort(_("working directory revision is not qtip"))
591 return top
591 return top
592 return None
592 return None
593 def check_localchanges(self, repo, force=False, refresh=True):
593 def check_localchanges(self, repo, force=False, refresh=True):
594 m, a, r, d = repo.status()[:4]
594 m, a, r, d = repo.status()[:4]
595 if m or a or r or d:
595 if m or a or r or d:
596 if not force:
596 if not force:
597 if refresh:
597 if refresh:
598 raise util.Abort(_("local changes found, refresh first"))
598 raise util.Abort(_("local changes found, refresh first"))
599 else:
599 else:
600 raise util.Abort(_("local changes found"))
600 raise util.Abort(_("local changes found"))
601 return m, a, r, d
601 return m, a, r, d
602
602
603 _reserved = ('series', 'status', 'guards')
604 def check_reserved_name(self, name):
605 if (name in self._reserved or name.startswith('.hg')
606 or name.startswith('.mq')):
607 raise util.Abort(_('"%s" cannot be used as the name of a patch')
608 % name)
609
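# The check above refuses the bookkeeping names 'series', 'status' and
# 'guards', as well as anything starting with '.hg' or '.mq'; for example,
# 'hg qnew series' or 'hg qnew .hg-fix' aborts with the message above
# ('.hg-fix' is only an illustrative name).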
603 def new(self, repo, patch, *pats, **opts):
610 def new(self, repo, patch, *pats, **opts):
604 msg = opts.get('msg')
611 msg = opts.get('msg')
605 force = opts.get('force')
612 force = opts.get('force')
606 user = opts.get('user')
613 user = opts.get('user')
607 date = opts.get('date')
614 date = opts.get('date')
615 self.check_reserved_name(patch)
608 if os.path.exists(self.join(patch)):
616 if os.path.exists(self.join(patch)):
609 raise util.Abort(_('patch "%s" already exists') % patch)
617 raise util.Abort(_('patch "%s" already exists') % patch)
610 if opts.get('include') or opts.get('exclude') or pats:
618 if opts.get('include') or opts.get('exclude') or pats:
611 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
619 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
612 m, a, r, d = repo.status(files=fns, match=match)[:4]
620 m, a, r, d = repo.status(files=fns, match=match)[:4]
613 else:
621 else:
614 m, a, r, d = self.check_localchanges(repo, force)
622 m, a, r, d = self.check_localchanges(repo, force)
615 fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
623 fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
616 commitfiles = m + a + r
624 commitfiles = m + a + r
617 self.check_toppatch(repo)
625 self.check_toppatch(repo)
618 wlock = repo.wlock()
626 wlock = repo.wlock()
619 try:
627 try:
620 insert = self.full_series_end()
628 insert = self.full_series_end()
621 commitmsg = msg and msg or ("[mq]: %s" % patch)
629 commitmsg = msg and msg or ("[mq]: %s" % patch)
622 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
630 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
623 if n == None:
631 if n == None:
624 raise util.Abort(_("repo commit failed"))
632 raise util.Abort(_("repo commit failed"))
625 self.full_series[insert:insert] = [patch]
633 self.full_series[insert:insert] = [patch]
626 self.applied.append(statusentry(revlog.hex(n), patch))
634 self.applied.append(statusentry(revlog.hex(n), patch))
627 self.parse_series()
635 self.parse_series()
628 self.series_dirty = 1
636 self.series_dirty = 1
629 self.applied_dirty = 1
637 self.applied_dirty = 1
630 p = self.opener(patch, "w")
638 p = self.opener(patch, "w")
631 if date:
639 if date:
632 p.write("# HG changeset patch\n")
640 p.write("# HG changeset patch\n")
633 if user:
641 if user:
634 p.write("# User " + user + "\n")
642 p.write("# User " + user + "\n")
635 p.write("# Date " + date + "\n")
643 p.write("# Date " + date + "\n")
636 p.write("\n")
644 p.write("\n")
637 elif user:
645 elif user:
638 p.write("From: " + user + "\n")
646 p.write("From: " + user + "\n")
639 p.write("\n")
647 p.write("\n")
640 if msg:
648 if msg:
641 msg = msg + "\n"
649 msg = msg + "\n"
642 p.write(msg)
650 p.write(msg)
643 p.close()
651 p.close()
644 wlock = None
652 wlock = None
645 r = self.qrepo()
653 r = self.qrepo()
646 if r: r.add([patch])
654 if r: r.add([patch])
647 if commitfiles:
655 if commitfiles:
648 self.refresh(repo, short=True, git=opts.get('git'))
656 self.refresh(repo, short=True, git=opts.get('git'))
649 self.removeundo(repo)
657 self.removeundo(repo)
650 finally:
658 finally:
651 del wlock
659 del wlock
652
660
653 def strip(self, repo, rev, update=True, backup="all"):
661 def strip(self, repo, rev, update=True, backup="all"):
654 wlock = lock = None
662 wlock = lock = None
655 try:
663 try:
656 wlock = repo.wlock()
664 wlock = repo.wlock()
657 lock = repo.lock()
665 lock = repo.lock()
658
666
659 if update:
667 if update:
660 self.check_localchanges(repo, refresh=False)
668 self.check_localchanges(repo, refresh=False)
661 urev = self.qparents(repo, rev)
669 urev = self.qparents(repo, rev)
662 hg.clean(repo, urev)
670 hg.clean(repo, urev)
663 repo.dirstate.write()
671 repo.dirstate.write()
664
672
665 self.removeundo(repo)
673 self.removeundo(repo)
666 repair.strip(self.ui, repo, rev, backup)
674 repair.strip(self.ui, repo, rev, backup)
667 # strip may have unbundled a set of backed up revisions after
675 # strip may have unbundled a set of backed up revisions after
668 # the actual strip
676 # the actual strip
669 self.removeundo(repo)
677 self.removeundo(repo)
670 finally:
678 finally:
671 del lock, wlock
679 del lock, wlock
672
680
673 def isapplied(self, patch):
681 def isapplied(self, patch):
674 """returns (index, rev, patch)"""
682 """returns (index, rev, patch)"""
675 for i in xrange(len(self.applied)):
683 for i in xrange(len(self.applied)):
676 a = self.applied[i]
684 a = self.applied[i]
677 if a.name == patch:
685 if a.name == patch:
678 return (i, a.rev, a.name)
686 return (i, a.rev, a.name)
679 return None
687 return None
680
688
681 # if the exact patch name does not exist, we try a few
689 # if the exact patch name does not exist, we try a few
682 # variations. If strict is passed, we try only #1
690 # variations. If strict is passed, we try only #1
683 #
691 #
684 # 1) a number giving a position (0-based) in the series file
692 # 1) a number giving a position (0-based) in the series file
685 # 2) a unique substring of a patch name
693 # 2) a unique substring of a patch name
686 # 3) patchname[-+]num to indicate an offset from that patch
694 # 3) patchname[-+]num to indicate an offset from that patch
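# A small illustration of these rules (hypothetical patch names, and
# strict=False so that rules 2 and 3 apply):
#   series = ['alpha', 'beta', 'gamma']
#   lookup('1')       -> 'beta'   (rule 1: position in the series)
#   lookup('bet')     -> 'beta'   (rule 2: unique substring)
#   lookup('gamma-1') -> 'beta'   (rule 3: one patch before 'gamma')
#   lookup('beta+1')  -> 'gamma'  (rule 3: one patch after 'beta')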
687 def lookup(self, patch, strict=False):
695 def lookup(self, patch, strict=False):
688 patch = patch and str(patch)
696 patch = patch and str(patch)
689
697
690 def partial_name(s):
698 def partial_name(s):
691 if s in self.series:
699 if s in self.series:
692 return s
700 return s
693 matches = [x for x in self.series if s in x]
701 matches = [x for x in self.series if s in x]
694 if len(matches) > 1:
702 if len(matches) > 1:
695 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
703 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
696 for m in matches:
704 for m in matches:
697 self.ui.warn(' %s\n' % m)
705 self.ui.warn(' %s\n' % m)
698 return None
706 return None
699 if matches:
707 if matches:
700 return matches[0]
708 return matches[0]
701 if len(self.series) > 0 and len(self.applied) > 0:
709 if len(self.series) > 0 and len(self.applied) > 0:
702 if s == 'qtip':
710 if s == 'qtip':
703 return self.series[self.series_end(True)-1]
711 return self.series[self.series_end(True)-1]
704 if s == 'qbase':
712 if s == 'qbase':
705 return self.series[0]
713 return self.series[0]
706 return None
714 return None
707 if patch == None:
715 if patch == None:
708 return None
716 return None
709
717
710 # we don't want to return a partial match until we make
718 # we don't want to return a partial match until we make
711 # sure the file name passed in does not exist (checked below)
719 # sure the file name passed in does not exist (checked below)
712 res = partial_name(patch)
720 res = partial_name(patch)
713 if res and res == patch:
721 if res and res == patch:
714 return res
722 return res
715
723
716 if not os.path.isfile(self.join(patch)):
724 if not os.path.isfile(self.join(patch)):
717 try:
725 try:
718 sno = int(patch)
726 sno = int(patch)
719 except(ValueError, OverflowError):
727 except(ValueError, OverflowError):
720 pass
728 pass
721 else:
729 else:
722 if sno < len(self.series):
730 if sno < len(self.series):
723 return self.series[sno]
731 return self.series[sno]
724 if not strict:
732 if not strict:
725 # return any partial match made above
733 # return any partial match made above
726 if res:
734 if res:
727 return res
735 return res
728 minus = patch.rfind('-')
736 minus = patch.rfind('-')
729 if minus >= 0:
737 if minus >= 0:
730 res = partial_name(patch[:minus])
738 res = partial_name(patch[:minus])
731 if res:
739 if res:
732 i = self.series.index(res)
740 i = self.series.index(res)
733 try:
741 try:
734 off = int(patch[minus+1:] or 1)
742 off = int(patch[minus+1:] or 1)
735 except(ValueError, OverflowError):
743 except(ValueError, OverflowError):
736 pass
744 pass
737 else:
745 else:
738 if i - off >= 0:
746 if i - off >= 0:
739 return self.series[i - off]
747 return self.series[i - off]
740 plus = patch.rfind('+')
748 plus = patch.rfind('+')
741 if plus >= 0:
749 if plus >= 0:
742 res = partial_name(patch[:plus])
750 res = partial_name(patch[:plus])
743 if res:
751 if res:
744 i = self.series.index(res)
752 i = self.series.index(res)
745 try:
753 try:
746 off = int(patch[plus+1:] or 1)
754 off = int(patch[plus+1:] or 1)
747 except(ValueError, OverflowError):
755 except(ValueError, OverflowError):
748 pass
756 pass
749 else:
757 else:
750 if i + off < len(self.series):
758 if i + off < len(self.series):
751 return self.series[i + off]
759 return self.series[i + off]
752 raise util.Abort(_("patch %s not in series") % patch)
760 raise util.Abort(_("patch %s not in series") % patch)
753
761
754 def push(self, repo, patch=None, force=False, list=False,
762 def push(self, repo, patch=None, force=False, list=False,
755 mergeq=None):
763 mergeq=None):
756 wlock = repo.wlock()
764 wlock = repo.wlock()
757 try:
765 try:
758 patch = self.lookup(patch)
766 patch = self.lookup(patch)
759 # Suppose our series file is: A B C and the current 'top'
767 # Suppose our series file is: A B C and the current 'top'
760 # patch is B. qpush C should be performed (moving forward)
768 # patch is B. qpush C should be performed (moving forward)
761 # qpush B is a NOP (no change); qpush A is an error (can't
769 # qpush B is a NOP (no change); qpush A is an error (can't
762 # go backwards with qpush)
770 # go backwards with qpush)
763 if patch:
771 if patch:
764 info = self.isapplied(patch)
772 info = self.isapplied(patch)
765 if info:
773 if info:
766 if info[0] < len(self.applied) - 1:
774 if info[0] < len(self.applied) - 1:
767 raise util.Abort(
775 raise util.Abort(
768 _("cannot push to a previous patch: %s") % patch)
776 _("cannot push to a previous patch: %s") % patch)
769 if info[0] < len(self.series) - 1:
777 if info[0] < len(self.series) - 1:
770 self.ui.warn(
778 self.ui.warn(
771 _('qpush: %s is already at the top\n') % patch)
779 _('qpush: %s is already at the top\n') % patch)
772 else:
780 else:
773 self.ui.warn(_('all patches are currently applied\n'))
781 self.ui.warn(_('all patches are currently applied\n'))
774 return
782 return
775
783
776 # Following the above example, starting at 'top' of B:
784 # Following the above example, starting at 'top' of B:
777 # qpush should be performed (pushes C), but a subsequent
785 # qpush should be performed (pushes C), but a subsequent
778 # qpush without an argument is an error (nothing to
786 # qpush without an argument is an error (nothing to
779 # apply). This allows a loop of "...while hg qpush..." to
787 # apply). This allows a loop of "...while hg qpush..." to
780 # work as it detects an error when done
788 # work as it detects an error when done
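# For example, a shell loop along these lines (the per-patch command is a
# placeholder) keeps pushing until the series is exhausted or a patch
# fails to apply:
#   while hg qpush; do
#       : # per-patch work (build, test, ...) goes here
#   done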
781 if self.series_end() == len(self.series):
789 if self.series_end() == len(self.series):
782 self.ui.warn(_('patch series already fully applied\n'))
790 self.ui.warn(_('patch series already fully applied\n'))
783 return 1
791 return 1
784 if not force:
792 if not force:
785 self.check_localchanges(repo)
793 self.check_localchanges(repo)
786
794
787 self.applied_dirty = 1
795 self.applied_dirty = 1
788 start = self.series_end()
796 start = self.series_end()
789 if start > 0:
797 if start > 0:
790 self.check_toppatch(repo)
798 self.check_toppatch(repo)
791 if not patch:
799 if not patch:
792 patch = self.series[start]
800 patch = self.series[start]
793 end = start + 1
801 end = start + 1
794 else:
802 else:
795 end = self.series.index(patch, start) + 1
803 end = self.series.index(patch, start) + 1
796 s = self.series[start:end]
804 s = self.series[start:end]
797 all_files = {}
805 all_files = {}
798 try:
806 try:
799 if mergeq:
807 if mergeq:
800 ret = self.mergepatch(repo, mergeq, s)
808 ret = self.mergepatch(repo, mergeq, s)
801 else:
809 else:
802 ret = self.apply(repo, s, list, all_files=all_files)
810 ret = self.apply(repo, s, list, all_files=all_files)
803 except:
811 except:
804 self.ui.warn(_('cleaning up working directory...'))
812 self.ui.warn(_('cleaning up working directory...'))
805 node = repo.dirstate.parents()[0]
813 node = repo.dirstate.parents()[0]
806 hg.revert(repo, node, None)
814 hg.revert(repo, node, None)
807 unknown = repo.status()[4]
815 unknown = repo.status()[4]
808 # only remove unknown files that we know we touched or
816 # only remove unknown files that we know we touched or
809 # created while patching
817 # created while patching
810 for f in unknown:
818 for f in unknown:
811 if f in all_files:
819 if f in all_files:
812 util.unlink(repo.wjoin(f))
820 util.unlink(repo.wjoin(f))
813 self.ui.warn(_('done\n'))
821 self.ui.warn(_('done\n'))
814 raise
822 raise
815 top = self.applied[-1].name
823 top = self.applied[-1].name
816 if ret[0]:
824 if ret[0]:
817 self.ui.write(
825 self.ui.write(
818 "Errors during apply, please fix and refresh %s\n" % top)
826 "Errors during apply, please fix and refresh %s\n" % top)
819 else:
827 else:
820 self.ui.write("Now at: %s\n" % top)
828 self.ui.write("Now at: %s\n" % top)
821 return ret[0]
829 return ret[0]
822 finally:
830 finally:
823 del wlock
831 del wlock
824
832
825 def pop(self, repo, patch=None, force=False, update=True, all=False):
833 def pop(self, repo, patch=None, force=False, update=True, all=False):
826 def getfile(f, rev, flags):
834 def getfile(f, rev, flags):
827 t = repo.file(f).read(rev)
835 t = repo.file(f).read(rev)
828 repo.wwrite(f, t, flags)
836 repo.wwrite(f, t, flags)
829
837
830 wlock = repo.wlock()
838 wlock = repo.wlock()
831 try:
839 try:
832 if patch:
840 if patch:
833 # index, rev, patch
841 # index, rev, patch
834 info = self.isapplied(patch)
842 info = self.isapplied(patch)
835 if not info:
843 if not info:
836 patch = self.lookup(patch)
844 patch = self.lookup(patch)
837 info = self.isapplied(patch)
845 info = self.isapplied(patch)
838 if not info:
846 if not info:
839 raise util.Abort(_("patch %s is not applied") % patch)
847 raise util.Abort(_("patch %s is not applied") % patch)
840
848
841 if len(self.applied) == 0:
849 if len(self.applied) == 0:
842 # Allow qpop -a to work repeatedly,
850 # Allow qpop -a to work repeatedly,
843 # but not qpop without an argument
851 # but not qpop without an argument
844 self.ui.warn(_("no patches applied\n"))
852 self.ui.warn(_("no patches applied\n"))
845 return not all
853 return not all
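# i.e. 'hg qpop -a' on an empty queue exits successfully, while a plain
# 'hg qpop' exits with a nonzero status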
846
854
847 if not update:
855 if not update:
848 parents = repo.dirstate.parents()
856 parents = repo.dirstate.parents()
849 rr = [ revlog.bin(x.rev) for x in self.applied ]
857 rr = [ revlog.bin(x.rev) for x in self.applied ]
850 for p in parents:
858 for p in parents:
851 if p in rr:
859 if p in rr:
852 self.ui.warn("qpop: forcing dirstate update\n")
860 self.ui.warn("qpop: forcing dirstate update\n")
853 update = True
861 update = True
854
862
855 if not force and update:
863 if not force and update:
856 self.check_localchanges(repo)
864 self.check_localchanges(repo)
857
865
858 self.applied_dirty = 1
866 self.applied_dirty = 1
859 end = len(self.applied)
867 end = len(self.applied)
860 if not patch:
868 if not patch:
861 if all:
869 if all:
862 popi = 0
870 popi = 0
863 else:
871 else:
864 popi = len(self.applied) - 1
872 popi = len(self.applied) - 1
865 else:
873 else:
866 popi = info[0] + 1
874 popi = info[0] + 1
867 if popi >= end:
875 if popi >= end:
868 self.ui.warn("qpop: %s is already at the top\n" % patch)
876 self.ui.warn("qpop: %s is already at the top\n" % patch)
869 return
877 return
870 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
878 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
871
879
872 start = info[0]
880 start = info[0]
873 rev = revlog.bin(info[1])
881 rev = revlog.bin(info[1])
874
882
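# The guard below (added in this change) aborts when the revisions about
# to be popped have descendants outside the applied patch stack, i.e. when
# the only head descending from them is not the current qtip; the strip
# further down would otherwise delete changesets this queue does not manage.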
883 if update:
884 top = self.check_toppatch(repo)
885
886 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
887 raise util.Abort("popping would remove a revision not "
888 "managed by this patch queue")
889
875 # we know there are no local changes, so we can make a simplified
890 # we know there are no local changes, so we can make a simplified
876 # form of hg.update.
891 # form of hg.update.
877 if update:
892 if update:
878 top = self.check_toppatch(repo)
879 qp = self.qparents(repo, rev)
893 qp = self.qparents(repo, rev)
880 changes = repo.changelog.read(qp)
894 changes = repo.changelog.read(qp)
881 mmap = repo.manifest.read(changes[0])
895 mmap = repo.manifest.read(changes[0])
882 m, a, r, d, u = repo.status(qp, top)[:5]
896 m, a, r, d, u = repo.status(qp, top)[:5]
883 if d:
897 if d:
884 raise util.Abort("deletions found between repo revs")
898 raise util.Abort("deletions found between repo revs")
885 for f in m:
899 for f in m:
886 getfile(f, mmap[f], mmap.flags(f))
900 getfile(f, mmap[f], mmap.flags(f))
887 for f in r:
901 for f in r:
888 getfile(f, mmap[f], mmap.flags(f))
902 getfile(f, mmap[f], mmap.flags(f))
889 for f in m + r:
903 for f in m + r:
890 repo.dirstate.normal(f)
904 repo.dirstate.normal(f)
891 for f in a:
905 for f in a:
892 try:
906 try:
893 os.unlink(repo.wjoin(f))
907 os.unlink(repo.wjoin(f))
894 except OSError, e:
908 except OSError, e:
895 if e.errno != errno.ENOENT:
909 if e.errno != errno.ENOENT:
896 raise
910 raise
897 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
911 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
898 except: pass
912 except: pass
899 repo.dirstate.forget(f)
913 repo.dirstate.forget(f)
900 repo.dirstate.setparents(qp, revlog.nullid)
914 repo.dirstate.setparents(qp, revlog.nullid)
915 del self.applied[start:end]
901 self.strip(repo, rev, update=False, backup='strip')
916 self.strip(repo, rev, update=False, backup='strip')
902 del self.applied[start:end]
903 if len(self.applied):
917 if len(self.applied):
904 self.ui.write("Now at: %s\n" % self.applied[-1].name)
918 self.ui.write("Now at: %s\n" % self.applied[-1].name)
905 else:
919 else:
906 self.ui.write("Patch queue now empty\n")
920 self.ui.write("Patch queue now empty\n")
907 finally:
921 finally:
908 del wlock
922 del wlock
909
923
910 def diff(self, repo, pats, opts):
924 def diff(self, repo, pats, opts):
911 top = self.check_toppatch(repo)
925 top = self.check_toppatch(repo)
912 if not top:
926 if not top:
913 self.ui.write("No patches applied\n")
927 self.ui.write("No patches applied\n")
914 return
928 return
915 qp = self.qparents(repo, top)
929 qp = self.qparents(repo, top)
916 if opts.get('git'):
930 if opts.get('git'):
917 self.diffopts().git = True
931 self.diffopts().git = True
918 self.printdiff(repo, qp, files=pats, opts=opts)
932 self.printdiff(repo, qp, files=pats, opts=opts)
919
933
920 def refresh(self, repo, pats=None, **opts):
934 def refresh(self, repo, pats=None, **opts):
921 if len(self.applied) == 0:
935 if len(self.applied) == 0:
922 self.ui.write("No patches applied\n")
936 self.ui.write("No patches applied\n")
923 return 1
937 return 1
924 wlock = repo.wlock()
938 wlock = repo.wlock()
925 try:
939 try:
926 self.check_toppatch(repo)
940 self.check_toppatch(repo)
927 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
941 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
928 top = revlog.bin(top)
942 top = revlog.bin(top)
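# The check below (added in this change) refuses to refresh when the
# current qtip has children: qrefresh strips qtip and commits a new
# revision in its place, which would also strip any changesets built on
# top of it.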
943 if repo.changelog.heads(top) != [top]:
944 raise util.Abort("cannot refresh a revision with children")
929 cparents = repo.changelog.parents(top)
945 cparents = repo.changelog.parents(top)
930 patchparent = self.qparents(repo, top)
946 patchparent = self.qparents(repo, top)
931 message, comments, user, date, patchfound = self.readheaders(patchfn)
947 message, comments, user, date, patchfound = self.readheaders(patchfn)
932
948
933 patchf = self.opener(patchfn, 'r+')
949 patchf = self.opener(patchfn, 'r+')
934
950
935 # if the patch was a git patch, refresh it as a git patch
951 # if the patch was a git patch, refresh it as a git patch
936 for line in patchf:
952 for line in patchf:
937 if line.startswith('diff --git'):
953 if line.startswith('diff --git'):
938 self.diffopts().git = True
954 self.diffopts().git = True
939 break
955 break
940
956
941 msg = opts.get('msg', '').rstrip()
957 msg = opts.get('msg', '').rstrip()
942 if msg and comments:
958 if msg and comments:
943 # Remove existing message, keeping the rest of the comments
959 # Remove existing message, keeping the rest of the comments
944 # fields.
960 # fields.
945 # If comments contains 'subject: ', message will prepend
961 # If comments contains 'subject: ', message will prepend
946 # the field and a blank line.
962 # the field and a blank line.
947 if message:
963 if message:
948 subj = 'subject: ' + message[0].lower()
964 subj = 'subject: ' + message[0].lower()
949 for i in xrange(len(comments)):
965 for i in xrange(len(comments)):
950 if subj == comments[i].lower():
966 if subj == comments[i].lower():
951 del comments[i]
967 del comments[i]
952 message = message[2:]
968 message = message[2:]
953 break
969 break
954 ci = 0
970 ci = 0
955 for mi in xrange(len(message)):
971 for mi in xrange(len(message)):
956 while message[mi] != comments[ci]:
972 while message[mi] != comments[ci]:
957 ci += 1
973 ci += 1
958 del comments[ci]
974 del comments[ci]
959
975
960 def setheaderfield(comments, prefixes, new):
976 def setheaderfield(comments, prefixes, new):
961 # Update all references to a field in the patch header.
977 # Update all references to a field in the patch header.
962 # If none found, add it email style.
978 # If none found, add it email style.
963 res = False
979 res = False
964 for prefix in prefixes:
980 for prefix in prefixes:
965 for i in xrange(len(comments)):
981 for i in xrange(len(comments)):
966 if comments[i].startswith(prefix):
982 if comments[i].startswith(prefix):
967 comments[i] = prefix + new
983 comments[i] = prefix + new
968 res = True
984 res = True
969 break
985 break
970 return res
986 return res
971
987
972 newuser = opts.get('user')
988 newuser = opts.get('user')
973 if newuser:
989 if newuser:
974 if not setheaderfield(comments, ['From: ', '# User '], newuser):
990 if not setheaderfield(comments, ['From: ', '# User '], newuser):
975 try:
991 try:
976 patchheaderat = comments.index('# HG changeset patch')
992 patchheaderat = comments.index('# HG changeset patch')
977 comments.insert(patchheaderat + 1,'# User ' + newuser)
993 comments.insert(patchheaderat + 1,'# User ' + newuser)
978 except ValueError:
994 except ValueError:
979 comments = ['From: ' + newuser, ''] + comments
995 comments = ['From: ' + newuser, ''] + comments
980 user = newuser
996 user = newuser
981
997
982 newdate = opts.get('date')
998 newdate = opts.get('date')
983 if newdate:
999 if newdate:
984 if setheaderfield(comments, ['# Date '], newdate):
1000 if setheaderfield(comments, ['# Date '], newdate):
985 date = newdate
1001 date = newdate
986
1002
987 if msg:
1003 if msg:
988 comments.append(msg)
1004 comments.append(msg)
989
1005
990 patchf.seek(0)
1006 patchf.seek(0)
991 patchf.truncate()
1007 patchf.truncate()
992
1008
993 if comments:
1009 if comments:
994 comments = "\n".join(comments) + '\n\n'
1010 comments = "\n".join(comments) + '\n\n'
995 patchf.write(comments)
1011 patchf.write(comments)
996
1012
997 if opts.get('git'):
1013 if opts.get('git'):
998 self.diffopts().git = True
1014 self.diffopts().git = True
999 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1015 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1000 tip = repo.changelog.tip()
1016 tip = repo.changelog.tip()
1001 if top == tip:
1017 if top == tip:
1002 # if the top of our patch queue is also the tip, there is an
1018 # if the top of our patch queue is also the tip, there is an
1003 # optimization here. We update the dirstate in place and strip
1019 # optimization here. We update the dirstate in place and strip
1004 # off the tip commit. Then just commit the current directory
1020 # off the tip commit. Then just commit the current directory
1005 # tree. We can also send repo.commit the list of files
1021 # tree. We can also send repo.commit the list of files
1006 # changed to speed up the diff
1022 # changed to speed up the diff
1007 #
1023 #
1008 # in short mode, we only diff the files included in the
1024 # in short mode, we only diff the files included in the
1009 # patch already
1025 # patch already
1010 #
1026 #
1011 # this should really read:
1027 # this should really read:
1012 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1028 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1013 # but we do it backwards to take advantage of manifest/chlog
1029 # but we do it backwards to take advantage of manifest/chlog
1014 # caching against the next repo.status call
1030 # caching against the next repo.status call
1015 #
1031 #
1016 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1032 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1017 changes = repo.changelog.read(tip)
1033 changes = repo.changelog.read(tip)
1018 man = repo.manifest.read(changes[0])
1034 man = repo.manifest.read(changes[0])
1019 aaa = aa[:]
1035 aaa = aa[:]
1020 if opts.get('short'):
1036 if opts.get('short'):
1021 filelist = mm + aa + dd
1037 filelist = mm + aa + dd
1022 match = dict.fromkeys(filelist).__contains__
1038 match = dict.fromkeys(filelist).__contains__
1023 else:
1039 else:
1024 filelist = None
1040 filelist = None
1025 match = util.always
1041 match = util.always
1026 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
1042 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
1027
1043
1028 # we might end up with files that were added between
1044 # we might end up with files that were added between
1029 # tip and the dirstate parent, but then changed in the
1045 # tip and the dirstate parent, but then changed in the
1030 # local dirstate. In this case, we want them to only
1046 # local dirstate. In this case, we want them to only
1031 # show up in the added section
1047 # show up in the added section
1032 for x in m:
1048 for x in m:
1033 if x not in aa:
1049 if x not in aa:
1034 mm.append(x)
1050 mm.append(x)
1035 # we might end up with files added by the local dirstate that
1051 # we might end up with files added by the local dirstate that
1036 # were deleted by the patch. In this case, they should only
1052 # were deleted by the patch. In this case, they should only
1037 # show up in the changed section.
1053 # show up in the changed section.
1038 for x in a:
1054 for x in a:
1039 if x in dd:
1055 if x in dd:
1040 del dd[dd.index(x)]
1056 del dd[dd.index(x)]
1041 mm.append(x)
1057 mm.append(x)
1042 else:
1058 else:
1043 aa.append(x)
1059 aa.append(x)
1044 # make sure any files deleted in the local dirstate
1060 # make sure any files deleted in the local dirstate
1045 # are not in the add or change column of the patch
1061 # are not in the add or change column of the patch
1046 forget = []
1062 forget = []
1047 for x in d + r:
1063 for x in d + r:
1048 if x in aa:
1064 if x in aa:
1049 del aa[aa.index(x)]
1065 del aa[aa.index(x)]
1050 forget.append(x)
1066 forget.append(x)
1051 continue
1067 continue
1052 elif x in mm:
1068 elif x in mm:
1053 del mm[mm.index(x)]
1069 del mm[mm.index(x)]
1054 dd.append(x)
1070 dd.append(x)
1055
1071
1056 m = util.unique(mm)
1072 m = util.unique(mm)
1057 r = util.unique(dd)
1073 r = util.unique(dd)
1058 a = util.unique(aa)
1074 a = util.unique(aa)
1059 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1075 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1060 filelist = util.unique(c[0] + c[1] + c[2])
1076 filelist = util.unique(c[0] + c[1] + c[2])
1061 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1077 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1062 fp=patchf, changes=c, opts=self.diffopts())
1078 fp=patchf, changes=c, opts=self.diffopts())
1063 patchf.close()
1079 patchf.close()
1064
1080
1065 repo.dirstate.setparents(*cparents)
1081 repo.dirstate.setparents(*cparents)
1066 copies = {}
1082 copies = {}
1067 for dst in a:
1083 for dst in a:
1068 src = repo.dirstate.copied(dst)
1084 src = repo.dirstate.copied(dst)
1069 if src is not None:
1085 if src is not None:
1070 copies.setdefault(src, []).append(dst)
1086 copies.setdefault(src, []).append(dst)
1071 repo.dirstate.add(dst)
1087 repo.dirstate.add(dst)
1072 # remember the copies between patchparent and tip
1088 # remember the copies between patchparent and tip
1073 # this may be slow, so don't do it if we're not tracking copies
1089 # this may be slow, so don't do it if we're not tracking copies
1074 if self.diffopts().git:
1090 if self.diffopts().git:
1075 for dst in aaa:
1091 for dst in aaa:
1076 f = repo.file(dst)
1092 f = repo.file(dst)
1077 src = f.renamed(man[dst])
1093 src = f.renamed(man[dst])
1078 if src:
1094 if src:
1079 copies[src[0]] = copies.get(dst, [])
1095 copies[src[0]] = copies.get(dst, [])
1080 if dst in a:
1096 if dst in a:
1081 copies[src[0]].append(dst)
1097 copies[src[0]].append(dst)
1082 # we can't copy a file created by the patch itself
1098 # we can't copy a file created by the patch itself
1083 if dst in copies:
1099 if dst in copies:
1084 del copies[dst]
1100 del copies[dst]
1085 for src, dsts in copies.iteritems():
1101 for src, dsts in copies.iteritems():
1086 for dst in dsts:
1102 for dst in dsts:
1087 repo.dirstate.copy(src, dst)
1103 repo.dirstate.copy(src, dst)
1088 for f in r:
1104 for f in r:
1089 repo.dirstate.remove(f)
1105 repo.dirstate.remove(f)
1090 # if the patch excludes a modified file, mark that
1106 # if the patch excludes a modified file, mark that
1091 # file with mtime=0 so status can see it.
1107 # file with mtime=0 so status can see it.
1092 mm = []
1108 mm = []
1093 for i in xrange(len(m)-1, -1, -1):
1109 for i in xrange(len(m)-1, -1, -1):
1094 if not matchfn(m[i]):
1110 if not matchfn(m[i]):
1095 mm.append(m[i])
1111 mm.append(m[i])
1096 del m[i]
1112 del m[i]
1097 for f in m:
1113 for f in m:
1098 repo.dirstate.normal(f)
1114 repo.dirstate.normal(f)
1099 for f in mm:
1115 for f in mm:
1100 repo.dirstate.normallookup(f)
1116 repo.dirstate.normallookup(f)
1101 for f in forget:
1117 for f in forget:
1102 repo.dirstate.forget(f)
1118 repo.dirstate.forget(f)
1103
1119
1104 if not msg:
1120 if not msg:
1105 if not message:
1121 if not message:
1106 message = "[mq]: %s\n" % patchfn
1122 message = "[mq]: %s\n" % patchfn
1107 else:
1123 else:
1108 message = "\n".join(message)
1124 message = "\n".join(message)
1109 else:
1125 else:
1110 message = msg
1126 message = msg
1111
1127
1112 if not user:
1128 if not user:
1113 user = changes[1]
1129 user = changes[1]
1114
1130
1131 self.applied.pop()
1132 self.applied_dirty = 1
1115 self.strip(repo, top, update=False,
1133 self.strip(repo, top, update=False,
1116 backup='strip')
1134 backup='strip')
1117 n = repo.commit(filelist, message, user, date, match=matchfn,
1135 n = repo.commit(filelist, message, user, date, match=matchfn,
1118 force=1)
1136 force=1)
1119 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1137 self.applied.append(statusentry(revlog.hex(n), patchfn))
1120 self.applied_dirty = 1
1121 self.removeundo(repo)
1138 self.removeundo(repo)
1122 else:
1139 else:
1123 self.printdiff(repo, patchparent, fp=patchf)
1140 self.printdiff(repo, patchparent, fp=patchf)
1124 patchf.close()
1141 patchf.close()
1125 added = repo.status()[1]
1142 added = repo.status()[1]
1126 for a in added:
1143 for a in added:
1127 f = repo.wjoin(a)
1144 f = repo.wjoin(a)
1128 try:
1145 try:
1129 os.unlink(f)
1146 os.unlink(f)
1130 except OSError, e:
1147 except OSError, e:
1131 if e.errno != errno.ENOENT:
1148 if e.errno != errno.ENOENT:
1132 raise
1149 raise
1133 try: os.removedirs(os.path.dirname(f))
1150 try: os.removedirs(os.path.dirname(f))
1134 except: pass
1151 except: pass
1135 # forget the file copies in the dirstate
1152 # forget the file copies in the dirstate
1136 # push should re-add the files later on
1153 # push should re-add the files later on
1137 repo.dirstate.forget(a)
1154 repo.dirstate.forget(a)
1138 self.pop(repo, force=True)
1155 self.pop(repo, force=True)
1139 self.push(repo, force=True)
1156 self.push(repo, force=True)
1140 finally:
1157 finally:
1141 del wlock
1158 del wlock
1142
1159
1143 def init(self, repo, create=False):
1160 def init(self, repo, create=False):
1144 if not create and os.path.isdir(self.path):
1161 if not create and os.path.isdir(self.path):
1145 raise util.Abort(_("patch queue directory already exists"))
1162 raise util.Abort(_("patch queue directory already exists"))
1146 try:
1163 try:
1147 os.mkdir(self.path)
1164 os.mkdir(self.path)
1148 except OSError, inst:
1165 except OSError, inst:
1149 if inst.errno != errno.EEXIST or not create:
1166 if inst.errno != errno.EEXIST or not create:
1150 raise
1167 raise
1151 if create:
1168 if create:
1152 return self.qrepo(create=True)
1169 return self.qrepo(create=True)
1153
1170
1154 def unapplied(self, repo, patch=None):
1171 def unapplied(self, repo, patch=None):
1155 if patch and patch not in self.series:
1172 if patch and patch not in self.series:
1156 raise util.Abort(_("patch %s is not in series file") % patch)
1173 raise util.Abort(_("patch %s is not in series file") % patch)
1157 if not patch:
1174 if not patch:
1158 start = self.series_end()
1175 start = self.series_end()
1159 else:
1176 else:
1160 start = self.series.index(patch) + 1
1177 start = self.series.index(patch) + 1
1161 unapplied = []
1178 unapplied = []
1162 for i in xrange(start, len(self.series)):
1179 for i in xrange(start, len(self.series)):
1163 pushable, reason = self.pushable(i)
1180 pushable, reason = self.pushable(i)
1164 if pushable:
1181 if pushable:
1165 unapplied.append((i, self.series[i]))
1182 unapplied.append((i, self.series[i]))
1166 self.explain_pushable(i)
1183 self.explain_pushable(i)
1167 return unapplied
1184 return unapplied
1168
1185
1169 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1186 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1170 summary=False):
1187 summary=False):
1171 def displayname(patchname):
1188 def displayname(patchname):
1172 if summary:
1189 if summary:
1173 msg = self.readheaders(patchname)[0]
1190 msg = self.readheaders(patchname)[0]
1174 msg = msg and ': ' + msg[0] or ': '
1191 msg = msg and ': ' + msg[0] or ': '
1175 else:
1192 else:
1176 msg = ''
1193 msg = ''
1177 return '%s%s' % (patchname, msg)
1194 return '%s%s' % (patchname, msg)
1178
1195
1179 applied = dict.fromkeys([p.name for p in self.applied])
1196 applied = dict.fromkeys([p.name for p in self.applied])
1180 if length is None:
1197 if length is None:
1181 length = len(self.series) - start
1198 length = len(self.series) - start
1182 if not missing:
1199 if not missing:
1183 for i in xrange(start, start+length):
1200 for i in xrange(start, start+length):
1184 patch = self.series[i]
1201 patch = self.series[i]
1185 if patch in applied:
1202 if patch in applied:
1186 stat = 'A'
1203 stat = 'A'
1187 elif self.pushable(i)[0]:
1204 elif self.pushable(i)[0]:
1188 stat = 'U'
1205 stat = 'U'
1189 else:
1206 else:
1190 stat = 'G'
1207 stat = 'G'
1191 pfx = ''
1208 pfx = ''
1192 if self.ui.verbose:
1209 if self.ui.verbose:
1193 pfx = '%d %s ' % (i, stat)
1210 pfx = '%d %s ' % (i, stat)
1194 elif status and status != stat:
1211 elif status and status != stat:
1195 continue
1212 continue
1196 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1213 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1197 else:
1214 else:
1198 msng_list = []
1215 msng_list = []
1199 for root, dirs, files in os.walk(self.path):
1216 for root, dirs, files in os.walk(self.path):
1200 d = root[len(self.path) + 1:]
1217 d = root[len(self.path) + 1:]
1201 for f in files:
1218 for f in files:
1202 fl = os.path.join(d, f)
1219 fl = os.path.join(d, f)
1203 if (fl not in self.series and
1220 if (fl not in self.series and
1204 fl not in (self.status_path, self.series_path,
1221 fl not in (self.status_path, self.series_path,
1205 self.guards_path)
1222 self.guards_path)
1206 and not fl.startswith('.')):
1223 and not fl.startswith('.')):
1207 msng_list.append(fl)
1224 msng_list.append(fl)
1208 msng_list.sort()
1225 msng_list.sort()
1209 for x in msng_list:
1226 for x in msng_list:
1210 pfx = self.ui.verbose and ('D ') or ''
1227 pfx = self.ui.verbose and ('D ') or ''
1211 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1228 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1212
1229
1213 def issaveline(self, l):
1230 def issaveline(self, l):
1214 if l.name == '.hg.patches.save.line':
1231 if l.name == '.hg.patches.save.line':
1215 return True
1232 return True
1216
1233
1217 def qrepo(self, create=False):
1234 def qrepo(self, create=False):
1218 if create or os.path.isdir(self.join(".hg")):
1235 if create or os.path.isdir(self.join(".hg")):
1219 return hg.repository(self.ui, path=self.path, create=create)
1236 return hg.repository(self.ui, path=self.path, create=create)
1220
1237
1221 def restore(self, repo, rev, delete=None, qupdate=None):
1238 def restore(self, repo, rev, delete=None, qupdate=None):
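# restore() parses a save entry produced by save() below; the commit
# message it expects looks roughly like this (hex values and names are
# illustrative, and the Dirstate line may be absent):
#   hg patches saved state
#   Dirstate: <parent1-hex> <parent2-hex>
#
#   Patch Data:
#   <node-hex>:applied-patch-name
#   :unapplied-series-entry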
1222 c = repo.changelog.read(rev)
1239 c = repo.changelog.read(rev)
1223 desc = c[4].strip()
1240 desc = c[4].strip()
1224 lines = desc.splitlines()
1241 lines = desc.splitlines()
1225 i = 0
1242 i = 0
1226 datastart = None
1243 datastart = None
1227 series = []
1244 series = []
1228 applied = []
1245 applied = []
1229 qpp = None
1246 qpp = None
1230 for i in xrange(0, len(lines)):
1247 for i in xrange(0, len(lines)):
1231 if lines[i] == 'Patch Data:':
1248 if lines[i] == 'Patch Data:':
1232 datastart = i + 1
1249 datastart = i + 1
1233 elif lines[i].startswith('Dirstate:'):
1250 elif lines[i].startswith('Dirstate:'):
1234 l = lines[i].rstrip()
1251 l = lines[i].rstrip()
1235 l = l[10:].split(' ')
1252 l = l[10:].split(' ')
1236 qpp = [ hg.bin(x) for x in l ]
1253 qpp = [ hg.bin(x) for x in l ]
1237 elif datastart != None:
1254 elif datastart != None:
1238 l = lines[i].rstrip()
1255 l = lines[i].rstrip()
1239 se = statusentry(l)
1256 se = statusentry(l)
1240 file_ = se.name
1257 file_ = se.name
1241 if se.rev:
1258 if se.rev:
1242 applied.append(se)
1259 applied.append(se)
1243 else:
1260 else:
1244 series.append(file_)
1261 series.append(file_)
1245 if datastart == None:
1262 if datastart == None:
1246 self.ui.warn("No saved patch data found\n")
1263 self.ui.warn("No saved patch data found\n")
1247 return 1
1264 return 1
1248 self.ui.warn("restoring status: %s\n" % lines[0])
1265 self.ui.warn("restoring status: %s\n" % lines[0])
1249 self.full_series = series
1266 self.full_series = series
1250 self.applied = applied
1267 self.applied = applied
1251 self.parse_series()
1268 self.parse_series()
1252 self.series_dirty = 1
1269 self.series_dirty = 1
1253 self.applied_dirty = 1
1270 self.applied_dirty = 1
1254 heads = repo.changelog.heads()
1271 heads = repo.changelog.heads()
1255 if delete:
1272 if delete:
1256 if rev not in heads:
1273 if rev not in heads:
1257 self.ui.warn("save entry has children, leaving it alone\n")
1274 self.ui.warn("save entry has children, leaving it alone\n")
1258 else:
1275 else:
1259 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1276 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1260 pp = repo.dirstate.parents()
1277 pp = repo.dirstate.parents()
1261 if rev in pp:
1278 if rev in pp:
1262 update = True
1279 update = True
1263 else:
1280 else:
1264 update = False
1281 update = False
1265 self.strip(repo, rev, update=update, backup='strip')
1282 self.strip(repo, rev, update=update, backup='strip')
1266 if qpp:
1283 if qpp:
1267 self.ui.warn("saved queue repository parents: %s %s\n" %
1284 self.ui.warn("saved queue repository parents: %s %s\n" %
1268 (hg.short(qpp[0]), hg.short(qpp[1])))
1285 (hg.short(qpp[0]), hg.short(qpp[1])))
1269 if qupdate:
1286 if qupdate:
1270 self.ui.status(_("queue directory updating\n"))
1287 self.ui.status(_("queue directory updating\n"))
1271 r = self.qrepo()
1288 r = self.qrepo()
1272 if not r:
1289 if not r:
1273 self.ui.warn("Unable to load queue repository\n")
1290 self.ui.warn("Unable to load queue repository\n")
1274 return 1
1291 return 1
1275 hg.clean(r, qpp[0])
1292 hg.clean(r, qpp[0])
1276
1293
1277 def save(self, repo, msg=None):
1294 def save(self, repo, msg=None):
1278 if len(self.applied) == 0:
1295 if len(self.applied) == 0:
1279 self.ui.warn("save: no patches applied, exiting\n")
1296 self.ui.warn("save: no patches applied, exiting\n")
1280 return 1
1297 return 1
1281 if self.issaveline(self.applied[-1]):
1298 if self.issaveline(self.applied[-1]):
1282 self.ui.warn("status is already saved\n")
1299 self.ui.warn("status is already saved\n")
1283 return 1
1300 return 1
1284
1301
1285 ar = [ ':' + x for x in self.full_series ]
1302 ar = [ ':' + x for x in self.full_series ]
1286 if not msg:
1303 if not msg:
1287 msg = "hg patches saved state"
1304 msg = "hg patches saved state"
1288 else:
1305 else:
1289 msg = "hg patches: " + msg.rstrip('\r\n')
1306 msg = "hg patches: " + msg.rstrip('\r\n')
1290 r = self.qrepo()
1307 r = self.qrepo()
1291 if r:
1308 if r:
1292 pp = r.dirstate.parents()
1309 pp = r.dirstate.parents()
1293 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1310 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1294 msg += "\n\nPatch Data:\n"
1311 msg += "\n\nPatch Data:\n"
1295 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1312 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1296 "\n".join(ar) + '\n' or "")
1313 "\n".join(ar) + '\n' or "")
1297 n = repo.commit(None, text, user=None, force=1)
1314 n = repo.commit(None, text, user=None, force=1)
1298 if not n:
1315 if not n:
1299 self.ui.warn("repo commit failed\n")
1316 self.ui.warn("repo commit failed\n")
1300 return 1
1317 return 1
1301 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1318 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1302 self.applied_dirty = 1
1319 self.applied_dirty = 1
1303 self.removeundo(repo)
1320 self.removeundo(repo)
1304
1321
1305 def full_series_end(self):
1322 def full_series_end(self):
1306 if len(self.applied) > 0:
1323 if len(self.applied) > 0:
1307 p = self.applied[-1].name
1324 p = self.applied[-1].name
1308 end = self.find_series(p)
1325 end = self.find_series(p)
1309 if end == None:
1326 if end == None:
1310 return len(self.full_series)
1327 return len(self.full_series)
1311 return end + 1
1328 return end + 1
1312 return 0
1329 return 0
1313
1330
1314 def series_end(self, all_patches=False):
1331 def series_end(self, all_patches=False):
1315 """If all_patches is False, return the index of the next pushable patch
1332 """If all_patches is False, return the index of the next pushable patch
1316 in the series, or the series length. If all_patches is True, return the
1333 in the series, or the series length. If all_patches is True, return the
1317 index of the first patch past the last applied one.
1334 index of the first patch past the last applied one.
1318 """
1335 """
1319 end = 0
1336 end = 0
1320 def next(start):
1337 def next(start):
1321 if all_patches:
1338 if all_patches:
1322 return start
1339 return start
1323 i = start
1340 i = start
1324 while i < len(self.series):
1341 while i < len(self.series):
1325 p, reason = self.pushable(i)
1342 p, reason = self.pushable(i)
1326 if p:
1343 if p:
1327 break
1344 break
1328 self.explain_pushable(i)
1345 self.explain_pushable(i)
1329 i += 1
1346 i += 1
1330 return i
1347 return i
1331 if len(self.applied) > 0:
1348 if len(self.applied) > 0:
1332 p = self.applied[-1].name
1349 p = self.applied[-1].name
1333 try:
1350 try:
1334 end = self.series.index(p)
1351 end = self.series.index(p)
1335 except ValueError:
1352 except ValueError:
1336 return 0
1353 return 0
1337 return next(end + 1)
1354 return next(end + 1)
1338 return next(end)
1355 return next(end)
1339
1356
1340 def appliedname(self, index):
1357 def appliedname(self, index):
1341 pname = self.applied[index].name
1358 pname = self.applied[index].name
1342 if not self.ui.verbose:
1359 if not self.ui.verbose:
1343 p = pname
1360 p = pname
1344 else:
1361 else:
1345 p = str(self.series.index(pname)) + " " + pname
1362 p = str(self.series.index(pname)) + " " + pname
1346 return p
1363 return p
1347
1364
1348 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1365 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1349 force=None, git=False):
1366 force=None, git=False):
1350 def checkseries(patchname):
1367 def checkseries(patchname):
1351 if patchname in self.series:
1368 if patchname in self.series:
1352 raise util.Abort(_('patch %s is already in the series file')
1369 raise util.Abort(_('patch %s is already in the series file')
1353 % patchname)
1370 % patchname)
1354 def checkfile(patchname):
1371 def checkfile(patchname):
1355 if not force and os.path.exists(self.join(patchname)):
1372 if not force and os.path.exists(self.join(patchname)):
1356 raise util.Abort(_('patch "%s" already exists')
1373 raise util.Abort(_('patch "%s" already exists')
1357 % patchname)
1374 % patchname)
1358
1375
1359 if rev:
1376 if rev:
1360 if files:
1377 if files:
1361 raise util.Abort(_('option "-r" not valid when importing '
1378 raise util.Abort(_('option "-r" not valid when importing '
1362 'files'))
1379 'files'))
1363 rev = cmdutil.revrange(repo, rev)
1380 rev = cmdutil.revrange(repo, rev)
1364 rev.sort(lambda x, y: cmp(y, x))
1381 rev.sort(lambda x, y: cmp(y, x))
1365 if (len(files) > 1 or len(rev) > 1) and patchname:
1382 if (len(files) > 1 or len(rev) > 1) and patchname:
1366 raise util.Abort(_('option "-n" not valid when importing multiple '
1383 raise util.Abort(_('option "-n" not valid when importing multiple '
1367 'patches'))
1384 'patches'))
1368 i = 0
1385 i = 0
1369 added = []
1386 added = []
1370 if rev:
1387 if rev:
1371 # If mq patches are applied, we can only import revisions
1388 # If mq patches are applied, we can only import revisions
1372 # that form a linear path to qbase.
1389 # that form a linear path to qbase.
1373 # Otherwise, they should form a linear path to a head.
1390 # Otherwise, they should form a linear path to a head.
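# Illustration (hypothetical history): with a linear history 0, 1, 2 where
# 2 is the tip and no patches are applied, 'hg qimport -r 1:2' turns
# revisions 1 and 2 into managed patches; it aborts if revision 1 has
# another child besides 2, or if 2 is not a head.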
1374 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1391 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1375 if len(heads) > 1:
1392 if len(heads) > 1:
1376 raise util.Abort(_('revision %d is the root of more than one '
1393 raise util.Abort(_('revision %d is the root of more than one '
1377 'branch') % rev[-1])
1394 'branch') % rev[-1])
1378 if self.applied:
1395 if self.applied:
1379 base = revlog.hex(repo.changelog.node(rev[0]))
1396 base = revlog.hex(repo.changelog.node(rev[0]))
1380 if base in [n.rev for n in self.applied]:
1397 if base in [n.rev for n in self.applied]:
1381 raise util.Abort(_('revision %d is already managed')
1398 raise util.Abort(_('revision %d is already managed')
1382 % rev[0])
1399 % rev[0])
1383 if heads != [revlog.bin(self.applied[-1].rev)]:
1400 if heads != [revlog.bin(self.applied[-1].rev)]:
1384 raise util.Abort(_('revision %d is not the parent of '
1401 raise util.Abort(_('revision %d is not the parent of '
1385 'the queue') % rev[0])
1402 'the queue') % rev[0])
1386 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1403 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1387 lastparent = repo.changelog.parentrevs(base)[0]
1404 lastparent = repo.changelog.parentrevs(base)[0]
1388 else:
1405 else:
1389 if heads != [repo.changelog.node(rev[0])]:
1406 if heads != [repo.changelog.node(rev[0])]:
1390 raise util.Abort(_('revision %d has unmanaged children')
1407 raise util.Abort(_('revision %d has unmanaged children')
1391 % rev[0])
1408 % rev[0])
1392 lastparent = None
1409 lastparent = None
1393
1410
1394 if git:
1411 if git:
1395 self.diffopts().git = True
1412 self.diffopts().git = True
1396
1413
1397 for r in rev:
1414 for r in rev:
1398 p1, p2 = repo.changelog.parentrevs(r)
1415 p1, p2 = repo.changelog.parentrevs(r)
1399 n = repo.changelog.node(r)
1416 n = repo.changelog.node(r)
1400 if p2 != revlog.nullrev:
1417 if p2 != revlog.nullrev:
1401 raise util.Abort(_('cannot import merge revision %d') % r)
1418 raise util.Abort(_('cannot import merge revision %d') % r)
1402 if lastparent and lastparent != r:
1419 if lastparent and lastparent != r:
1403 raise util.Abort(_('revision %d is not the parent of %d')
1420 raise util.Abort(_('revision %d is not the parent of %d')
1404 % (r, lastparent))
1421 % (r, lastparent))
1405 lastparent = p1
1422 lastparent = p1
1406
1423
1407 if not patchname:
1424 if not patchname:
1408 patchname = normname('%d.diff' % r)
1425 patchname = normname('%d.diff' % r)
1426 self.check_reserved_name(patchname)
1409 checkseries(patchname)
1427 checkseries(patchname)
1410 checkfile(patchname)
1428 checkfile(patchname)
1411 self.full_series.insert(0, patchname)
1429 self.full_series.insert(0, patchname)
1412
1430
1413 patchf = self.opener(patchname, "w")
1431 patchf = self.opener(patchname, "w")
1414 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1432 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1415 patchf.close()
1433 patchf.close()
1416
1434
1417 se = statusentry(revlog.hex(n), patchname)
1435 se = statusentry(revlog.hex(n), patchname)
1418 self.applied.insert(0, se)
1436 self.applied.insert(0, se)
1419
1437
1420 added.append(patchname)
1438 added.append(patchname)
1421 patchname = None
1439 patchname = None
1422 self.parse_series()
1440 self.parse_series()
1423 self.applied_dirty = 1
1441 self.applied_dirty = 1
1424
1442
1425 for filename in files:
1443 for filename in files:
1426 if existing:
1444 if existing:
1427 if filename == '-':
1445 if filename == '-':
1428 raise util.Abort(_('-e is incompatible with import from -'))
1446 raise util.Abort(_('-e is incompatible with import from -'))
1429 if not patchname:
1447 if not patchname:
1430 patchname = normname(filename)
1448 patchname = normname(filename)
1449 self.check_reserved_name(patchname)
1431 if not os.path.isfile(self.join(patchname)):
1450 if not os.path.isfile(self.join(patchname)):
1432 raise util.Abort(_("patch %s does not exist") % patchname)
1451 raise util.Abort(_("patch %s does not exist") % patchname)
1433 else:
1452 else:
1434 try:
1453 try:
1435 if filename == '-':
1454 if filename == '-':
1436 if not patchname:
1455 if not patchname:
1437 raise util.Abort(_('need --name to import a patch from -'))
1456 raise util.Abort(_('need --name to import a patch from -'))
1438 text = sys.stdin.read()
1457 text = sys.stdin.read()
1439 else:
1458 else:
1440 text = file(filename).read()
1459 text = file(filename).read()
1441 except IOError:
1460 except IOError:
1442 raise util.Abort(_("unable to read %s") % patchname)
1461 raise util.Abort(_("unable to read %s") % patchname)
1443 if not patchname:
1462 if not patchname:
1444 patchname = normname(os.path.basename(filename))
1463 patchname = normname(os.path.basename(filename))
1464 self.check_reserved_name(patchname)
1445 checkfile(patchname)
1465 checkfile(patchname)
1446 patchf = self.opener(patchname, "w")
1466 patchf = self.opener(patchname, "w")
1447 patchf.write(text)
1467 patchf.write(text)
1448 checkseries(patchname)
1468 checkseries(patchname)
1449 index = self.full_series_end() + i
1469 index = self.full_series_end() + i
1450 self.full_series[index:index] = [patchname]
1470 self.full_series[index:index] = [patchname]
1451 self.parse_series()
1471 self.parse_series()
1452 self.ui.warn("adding %s to series file\n" % patchname)
1472 self.ui.warn("adding %s to series file\n" % patchname)
1453 i += 1
1473 i += 1
1454 added.append(patchname)
1474 added.append(patchname)
1455 patchname = None
1475 patchname = None
1456 self.series_dirty = 1
1476 self.series_dirty = 1
1457 qrepo = self.qrepo()
1477 qrepo = self.qrepo()
1458 if qrepo:
1478 if qrepo:
1459 qrepo.add(added)
1479 qrepo.add(added)
1460
1480
1461 def delete(ui, repo, *patches, **opts):
1481 def delete(ui, repo, *patches, **opts):
1462 """remove patches from queue
1482 """remove patches from queue
1463
1483
1464 The patches must not be applied, unless they are arguments to
1484 The patches must not be applied, unless they are arguments to
1465 the --rev parameter. At least one patch or revision is required.
1485 the --rev parameter. At least one patch or revision is required.
1466
1486
1467 With --rev, mq will stop managing the named revisions (converting
1487 With --rev, mq will stop managing the named revisions (converting
1468 them to regular mercurial changesets). The patches must be applied
1488 them to regular mercurial changesets). The patches must be applied
1469 and at the base of the stack. This option is useful when the patches
1489 and at the base of the stack. This option is useful when the patches
1470 have been applied upstream.
1490 have been applied upstream.
1471
1491
1472 With --keep, the patch files are preserved in the patch directory."""
1492 With --keep, the patch files are preserved in the patch directory."""
1473 q = repo.mq
1493 q = repo.mq
1474 q.delete(repo, patches, opts)
1494 q.delete(repo, patches, opts)
1475 q.save_dirty()
1495 q.save_dirty()
1476 return 0
1496 return 0
1477
1497
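
For illustration, a minimal qdelete session might look like the sketch below; the patch name old-idea.patch is hypothetical, and qbase is the tag mq attaches to the first applied patch (see the tags() override near the end of this file).

hg qdelete -k old-idea.patch   # drop an unapplied patch but keep its file
hg qdelete -r qbase            # stop managing the bottom applied patch; it becomes a regular changeset
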
1478 def applied(ui, repo, patch=None, **opts):
1498 def applied(ui, repo, patch=None, **opts):
1479 """print the patches already applied"""
1499 """print the patches already applied"""
1480 q = repo.mq
1500 q = repo.mq
1481 if patch:
1501 if patch:
1482 if patch not in q.series:
1502 if patch not in q.series:
1483 raise util.Abort(_("patch %s is not in series file") % patch)
1503 raise util.Abort(_("patch %s is not in series file") % patch)
1484 end = q.series.index(patch) + 1
1504 end = q.series.index(patch) + 1
1485 else:
1505 else:
1486 end = q.series_end(True)
1506 end = q.series_end(True)
1487 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1507 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1488
1508
1489 def unapplied(ui, repo, patch=None, **opts):
1509 def unapplied(ui, repo, patch=None, **opts):
1490 """print the patches not yet applied"""
1510 """print the patches not yet applied"""
1491 q = repo.mq
1511 q = repo.mq
1492 if patch:
1512 if patch:
1493 if patch not in q.series:
1513 if patch not in q.series:
1494 raise util.Abort(_("patch %s is not in series file") % patch)
1514 raise util.Abort(_("patch %s is not in series file") % patch)
1495 start = q.series.index(patch) + 1
1515 start = q.series.index(patch) + 1
1496 else:
1516 else:
1497 start = q.series_end(True)
1517 start = q.series_end(True)
1498 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1518 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1499
1519
1500 def qimport(ui, repo, *filename, **opts):
1520 def qimport(ui, repo, *filename, **opts):
1501 """import a patch
1521 """import a patch
1502
1522
1503 The patch will have the same name as its source file unless you
1523 The patch will have the same name as its source file unless you
1504 give it a new one with --name.
1524 give it a new one with --name.
1505
1525
1506 You can register an existing patch inside the patch directory
1526 You can register an existing patch inside the patch directory
1507 with the --existing flag.
1527 with the --existing flag.
1508
1528
1509 With --force, an existing patch of the same name will be overwritten.
1529 With --force, an existing patch of the same name will be overwritten.
1510
1530
1511 An existing changeset may be placed under mq control with --rev
1531 An existing changeset may be placed under mq control with --rev
1512 (e.g. qimport --rev tip -n patch will place tip under mq control).
1532 (e.g. qimport --rev tip -n patch will place tip under mq control).
1513 With --git, patches imported with --rev will use the git diff
1533 With --git, patches imported with --rev will use the git diff
1514 format.
1534 format.
1515 """
1535 """
1516 q = repo.mq
1536 q = repo.mq
1517 q.qimport(repo, filename, patchname=opts['name'],
1537 q.qimport(repo, filename, patchname=opts['name'],
1518 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1538 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1519 git=opts['git'])
1539 git=opts['git'])
1520 q.save_dirty()
1540 q.save_dirty()
1521 return 0
1541 return 0
1522
1542
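
A sketch of common qimport invocations, using hypothetical file and patch names:

hg qimport ../fix-foo.diff          # copy the file into the patch dir and add it to the series
hg qimport -e cleanup.patch         # register a patch that already lives in the patch dir
hg qimport -g -r tip -n tip.patch   # place tip under mq control, writing a git-format patch
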
1523 def init(ui, repo, **opts):
1543 def init(ui, repo, **opts):
1524 """init a new queue repository
1544 """init a new queue repository
1525
1545
1526 The queue repository is unversioned by default. If -c is
1546 The queue repository is unversioned by default. If -c is
1527 specified, qinit will create a separate nested repository
1547 specified, qinit will create a separate nested repository
1528 for patches (qinit -c may also be run later to convert
1548 for patches (qinit -c may also be run later to convert
1529 an unversioned patch repository into a versioned one).
1549 an unversioned patch repository into a versioned one).
1530 You can use qcommit to commit changes to this queue repository."""
1550 You can use qcommit to commit changes to this queue repository."""
1531 q = repo.mq
1551 q = repo.mq
1532 r = q.init(repo, create=opts['create_repo'])
1552 r = q.init(repo, create=opts['create_repo'])
1533 q.save_dirty()
1553 q.save_dirty()
1534 if r:
1554 if r:
1535 if not os.path.exists(r.wjoin('.hgignore')):
1555 if not os.path.exists(r.wjoin('.hgignore')):
1536 fp = r.wopener('.hgignore', 'w')
1556 fp = r.wopener('.hgignore', 'w')
1537 fp.write('syntax: glob\n')
1557 fp.write('syntax: glob\n')
1538 fp.write('status\n')
1558 fp.write('status\n')
1539 fp.write('guards\n')
1559 fp.write('guards\n')
1540 fp.close()
1560 fp.close()
1541 if not os.path.exists(r.wjoin('series')):
1561 if not os.path.exists(r.wjoin('series')):
1542 r.wopener('series', 'w').close()
1562 r.wopener('series', 'w').close()
1543 r.add(['.hgignore', 'series'])
1563 r.add(['.hgignore', 'series'])
1544 commands.add(ui, r)
1564 commands.add(ui, r)
1545 return 0
1565 return 0
1546
1566
1547 def clone(ui, source, dest=None, **opts):
1567 def clone(ui, source, dest=None, **opts):
1548 '''clone main and patch repository at the same time
1568 '''clone main and patch repository at the same time
1549
1569
1550 If source is local, destination will have no patches applied. If
1570 If source is local, destination will have no patches applied. If
1551 source is remote, this command cannot check whether patches are
1571 source is remote, this command cannot check whether patches are
1552 applied in the source, so it cannot guarantee that no patches are
1572 applied in the source, so it cannot guarantee that no patches are
1553 applied in the destination. If you clone a remote repository,
1573 applied in the destination. If you clone a remote repository,
1554 make sure it has no patches applied before you clone it.
1574 make sure it has no patches applied before you clone it.
1555
1575
1556 Source patch repository is looked for in <src>/.hg/patches by
1576 Source patch repository is looked for in <src>/.hg/patches by
1557 default. Use -p <url> to change.
1577 default. Use -p <url> to change.
1558
1578
1559 The patch directory must be a nested mercurial repository, as
1579 The patch directory must be a nested mercurial repository, as
1560 would be created by qinit -c.
1580 would be created by qinit -c.
1561 '''
1581 '''
1562 def patchdir(repo):
1582 def patchdir(repo):
1563 url = repo.url()
1583 url = repo.url()
1564 if url.endswith('/'):
1584 if url.endswith('/'):
1565 url = url[:-1]
1585 url = url[:-1]
1566 return url + '/.hg/patches'
1586 return url + '/.hg/patches'
1567 cmdutil.setremoteconfig(ui, opts)
1587 cmdutil.setremoteconfig(ui, opts)
1568 if dest is None:
1588 if dest is None:
1569 dest = hg.defaultdest(source)
1589 dest = hg.defaultdest(source)
1570 sr = hg.repository(ui, ui.expandpath(source))
1590 sr = hg.repository(ui, ui.expandpath(source))
1571 patchespath = opts['patches'] or patchdir(sr)
1591 patchespath = opts['patches'] or patchdir(sr)
1572 try:
1592 try:
1573 pr = hg.repository(ui, patchespath)
1593 pr = hg.repository(ui, patchespath)
1574 except hg.RepoError:
1594 except hg.RepoError:
1575 raise util.Abort(_('versioned patch repository not found'
1595 raise util.Abort(_('versioned patch repository not found'
1576 ' (see qinit -c)'))
1596 ' (see qinit -c)'))
1577 qbase, destrev = None, None
1597 qbase, destrev = None, None
1578 if sr.local():
1598 if sr.local():
1579 if sr.mq.applied:
1599 if sr.mq.applied:
1580 qbase = revlog.bin(sr.mq.applied[0].rev)
1600 qbase = revlog.bin(sr.mq.applied[0].rev)
1581 if not hg.islocal(dest):
1601 if not hg.islocal(dest):
1582 heads = dict.fromkeys(sr.heads())
1602 heads = dict.fromkeys(sr.heads())
1583 for h in sr.heads(qbase):
1603 for h in sr.heads(qbase):
1584 del heads[h]
1604 del heads[h]
1585 destrev = heads.keys()
1605 destrev = heads.keys()
1586 destrev.append(sr.changelog.parents(qbase)[0])
1606 destrev.append(sr.changelog.parents(qbase)[0])
1587 ui.note(_('cloning main repo\n'))
1607 ui.note(_('cloning main repo\n'))
1588 sr, dr = hg.clone(ui, sr.url(), dest,
1608 sr, dr = hg.clone(ui, sr.url(), dest,
1589 pull=opts['pull'],
1609 pull=opts['pull'],
1590 rev=destrev,
1610 rev=destrev,
1591 update=False,
1611 update=False,
1592 stream=opts['uncompressed'])
1612 stream=opts['uncompressed'])
1593 ui.note(_('cloning patch repo\n'))
1613 ui.note(_('cloning patch repo\n'))
1594 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1614 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1595 pull=opts['pull'], update=not opts['noupdate'],
1615 pull=opts['pull'], update=not opts['noupdate'],
1596 stream=opts['uncompressed'])
1616 stream=opts['uncompressed'])
1597 if dr.local():
1617 if dr.local():
1598 if qbase:
1618 if qbase:
1599 ui.note(_('stripping applied patches from destination repo\n'))
1619 ui.note(_('stripping applied patches from destination repo\n'))
1600 dr.mq.strip(dr, qbase, update=False, backup=None)
1620 dr.mq.strip(dr, qbase, update=False, backup=None)
1601 if not opts['noupdate']:
1621 if not opts['noupdate']:
1602 ui.note(_('updating destination repo\n'))
1622 ui.note(_('updating destination repo\n'))
1603 hg.update(dr, dr.changelog.tip())
1623 hg.update(dr, dr.changelog.tip())
1604
1624
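
A hedged sketch of qclone usage; the paths below are hypothetical:

hg qclone ../src ../dst                        # clone ../src plus ../src/.hg/patches; ../dst ends up with no patches applied
hg qclone -p ../shared-patches ../src ../dst   # take the source patch repo from a non-default location
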
1605 def commit(ui, repo, *pats, **opts):
1625 def commit(ui, repo, *pats, **opts):
1606 """commit changes in the queue repository"""
1626 """commit changes in the queue repository"""
1607 q = repo.mq
1627 q = repo.mq
1608 r = q.qrepo()
1628 r = q.qrepo()
1609 if not r: raise util.Abort('no queue repository')
1629 if not r: raise util.Abort('no queue repository')
1610 commands.commit(r.ui, r, *pats, **opts)
1630 commands.commit(r.ui, r, *pats, **opts)
1611
1631
1612 def series(ui, repo, **opts):
1632 def series(ui, repo, **opts):
1613 """print the entire series file"""
1633 """print the entire series file"""
1614 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1634 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1615 return 0
1635 return 0
1616
1636
1617 def top(ui, repo, **opts):
1637 def top(ui, repo, **opts):
1618 """print the name of the current patch"""
1638 """print the name of the current patch"""
1619 q = repo.mq
1639 q = repo.mq
1620 t = q.applied and q.series_end(True) or 0
1640 t = q.applied and q.series_end(True) or 0
1621 if t:
1641 if t:
1622 return q.qseries(repo, start=t-1, length=1, status='A',
1642 return q.qseries(repo, start=t-1, length=1, status='A',
1623 summary=opts.get('summary'))
1643 summary=opts.get('summary'))
1624 else:
1644 else:
1625 ui.write("No patches applied\n")
1645 ui.write("No patches applied\n")
1626 return 1
1646 return 1
1627
1647
1628 def next(ui, repo, **opts):
1648 def next(ui, repo, **opts):
1629 """print the name of the next patch"""
1649 """print the name of the next patch"""
1630 q = repo.mq
1650 q = repo.mq
1631 end = q.series_end()
1651 end = q.series_end()
1632 if end == len(q.series):
1652 if end == len(q.series):
1633 ui.write("All patches applied\n")
1653 ui.write("All patches applied\n")
1634 return 1
1654 return 1
1635 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1655 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1636
1656
1637 def prev(ui, repo, **opts):
1657 def prev(ui, repo, **opts):
1638 """print the name of the previous patch"""
1658 """print the name of the previous patch"""
1639 q = repo.mq
1659 q = repo.mq
1640 l = len(q.applied)
1660 l = len(q.applied)
1641 if l == 1:
1661 if l == 1:
1642 ui.write("Only one patch applied\n")
1662 ui.write("Only one patch applied\n")
1643 return 1
1663 return 1
1644 if not l:
1664 if not l:
1645 ui.write("No patches applied\n")
1665 ui.write("No patches applied\n")
1646 return 1
1666 return 1
1647 return q.qseries(repo, start=l-2, length=1, status='A',
1667 return q.qseries(repo, start=l-2, length=1, status='A',
1648 summary=opts.get('summary'))
1668 summary=opts.get('summary'))
1649
1669
1650 def setupheaderopts(ui, opts):
1670 def setupheaderopts(ui, opts):
1651 def do(opt,val):
1671 def do(opt,val):
1652 if not opts[opt] and opts['current' + opt]:
1672 if not opts[opt] and opts['current' + opt]:
1653 opts[opt] = val
1673 opts[opt] = val
1654 do('user', ui.username())
1674 do('user', ui.username())
1655 do('date', "%d %d" % util.makedate())
1675 do('date', "%d %d" % util.makedate())
1656
1676
1657 def new(ui, repo, patch, *args, **opts):
1677 def new(ui, repo, patch, *args, **opts):
1658 """create a new patch
1678 """create a new patch
1659
1679
1660 qnew creates a new patch on top of the currently-applied patch
1680 qnew creates a new patch on top of the currently-applied patch
1661 (if any). It will refuse to run if there are any outstanding
1681 (if any). It will refuse to run if there are any outstanding
1662 changes unless -f is specified, in which case the patch will
1682 changes unless -f is specified, in which case the patch will
1663 be initialised with them. You may also use -I, -X, and/or a list of
1683 be initialised with them. You may also use -I, -X, and/or a list of
1664 files after the patch name to add only changes to matching files
1684 files after the patch name to add only changes to matching files
1665 to the new patch, leaving the rest as uncommitted modifications.
1685 to the new patch, leaving the rest as uncommitted modifications.
1666
1686
1667 -e, -m or -l set the patch header as well as the commit message.
1687 -e, -m or -l set the patch header as well as the commit message.
1668 If none is specified, the patch header is empty and the
1688 If none is specified, the patch header is empty and the
1669 commit message is '[mq]: PATCH'"""
1689 commit message is '[mq]: PATCH'"""
1670 q = repo.mq
1690 q = repo.mq
1671 message = cmdutil.logmessage(opts)
1691 message = cmdutil.logmessage(opts)
1672 if opts['edit']:
1692 if opts['edit']:
1673 message = ui.edit(message, ui.username())
1693 message = ui.edit(message, ui.username())
1674 opts['msg'] = message
1694 opts['msg'] = message
1675 setupheaderopts(ui, opts)
1695 setupheaderopts(ui, opts)
1676 q.new(repo, patch, *args, **opts)
1696 q.new(repo, patch, *args, **opts)
1677 q.save_dirty()
1697 q.save_dirty()
1678 return 0
1698 return 0
1679
1699
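
For illustration, typical qnew calls might look like this; the patch and file names are hypothetical, and -U/-D come from the headeropts table further down:

hg qnew -m 'fix foo' fix-foo.patch          # start an empty patch on top of the stack
hg qnew -f -U -D -m 'wip' wip.patch bar.c   # fold outstanding changes to bar.c into a new patch
                                            # stamped with the current user and date
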
1680 def refresh(ui, repo, *pats, **opts):
1700 def refresh(ui, repo, *pats, **opts):
1681 """update the current patch
1701 """update the current patch
1682
1702
1683 If any file patterns are provided, the refreshed patch will contain only
1703 If any file patterns are provided, the refreshed patch will contain only
1684 the modifications that match those patterns; the remaining modifications
1704 the modifications that match those patterns; the remaining modifications
1685 will remain in the working directory.
1705 will remain in the working directory.
1686
1706
1687 hg add/remove/copy/rename work as usual, though you might want to use
1707 hg add/remove/copy/rename work as usual, though you might want to use
1688 git-style patches (--git or [diff] git=1) to track copies and renames.
1708 git-style patches (--git or [diff] git=1) to track copies and renames.
1689 """
1709 """
1690 q = repo.mq
1710 q = repo.mq
1691 message = cmdutil.logmessage(opts)
1711 message = cmdutil.logmessage(opts)
1692 if opts['edit']:
1712 if opts['edit']:
1693 if not q.applied:
1713 if not q.applied:
1694 ui.write(_("No patches applied\n"))
1714 ui.write(_("No patches applied\n"))
1695 return 1
1715 return 1
1696 if message:
1716 if message:
1697 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1717 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1698 patch = q.applied[-1].name
1718 patch = q.applied[-1].name
1699 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1719 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1700 message = ui.edit('\n'.join(message), user or ui.username())
1720 message = ui.edit('\n'.join(message), user or ui.username())
1701 setupheaderopts(ui, opts)
1721 setupheaderopts(ui, opts)
1702 ret = q.refresh(repo, pats, msg=message, **opts)
1722 ret = q.refresh(repo, pats, msg=message, **opts)
1703 q.save_dirty()
1723 q.save_dirty()
1704 return ret
1724 return ret
1705
1725
1706 def diff(ui, repo, *pats, **opts):
1726 def diff(ui, repo, *pats, **opts):
1707 """diff of the current patch"""
1727 """diff of the current patch"""
1708 repo.mq.diff(repo, pats, opts)
1728 repo.mq.diff(repo, pats, opts)
1709 return 0
1729 return 0
1710
1730
1711 def fold(ui, repo, *files, **opts):
1731 def fold(ui, repo, *files, **opts):
1712 """fold the named patches into the current patch
1732 """fold the named patches into the current patch
1713
1733
1714 Patches must not yet be applied. Each patch will be successively
1734 Patches must not yet be applied. Each patch will be successively
1715 applied to the current patch in the order given. If all the
1735 applied to the current patch in the order given. If all the
1716 patches apply successfully, the current patch will be refreshed
1736 patches apply successfully, the current patch will be refreshed
1717 with the new cumulative patch, and the folded patches will
1737 with the new cumulative patch, and the folded patches will
1718 be deleted. With -k/--keep, the folded patch files will not
1738 be deleted. With -k/--keep, the folded patch files will not
1719 be removed afterwards.
1739 be removed afterwards.
1720
1740
1721 The header for each folded patch will be concatenated with
1741 The header for each folded patch will be concatenated with
1722 the current patch header, separated by a line of '* * *'."""
1742 the current patch header, separated by a line of '* * *'."""
1723
1743
1724 q = repo.mq
1744 q = repo.mq
1725
1745
1726 if not files:
1746 if not files:
1727 raise util.Abort(_('qfold requires at least one patch name'))
1747 raise util.Abort(_('qfold requires at least one patch name'))
1728 if not q.check_toppatch(repo):
1748 if not q.check_toppatch(repo):
1729 raise util.Abort(_('No patches applied'))
1749 raise util.Abort(_('No patches applied'))
1730
1750
1731 message = cmdutil.logmessage(opts)
1751 message = cmdutil.logmessage(opts)
1732 if opts['edit']:
1752 if opts['edit']:
1733 if message:
1753 if message:
1734 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1754 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1735
1755
1736 parent = q.lookup('qtip')
1756 parent = q.lookup('qtip')
1737 patches = []
1757 patches = []
1738 messages = []
1758 messages = []
1739 for f in files:
1759 for f in files:
1740 p = q.lookup(f)
1760 p = q.lookup(f)
1741 if p in patches or p == parent:
1761 if p in patches or p == parent:
1742 ui.warn(_('Skipping already folded patch %s') % p)
1762 ui.warn(_('Skipping already folded patch %s') % p)
1743 if q.isapplied(p):
1763 if q.isapplied(p):
1744 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1764 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1745 patches.append(p)
1765 patches.append(p)
1746
1766
1747 for p in patches:
1767 for p in patches:
1748 if not message:
1768 if not message:
1749 messages.append(q.readheaders(p)[0])
1769 messages.append(q.readheaders(p)[0])
1750 pf = q.join(p)
1770 pf = q.join(p)
1751 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1771 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1752 if not patchsuccess:
1772 if not patchsuccess:
1753 raise util.Abort(_('Error folding patch %s') % p)
1773 raise util.Abort(_('Error folding patch %s') % p)
1754 patch.updatedir(ui, repo, files)
1774 patch.updatedir(ui, repo, files)
1755
1775
1756 if not message:
1776 if not message:
1757 message, comments, user = q.readheaders(parent)[0:3]
1777 message, comments, user = q.readheaders(parent)[0:3]
1758 for msg in messages:
1778 for msg in messages:
1759 message.append('* * *')
1779 message.append('* * *')
1760 message.extend(msg)
1780 message.extend(msg)
1761 message = '\n'.join(message)
1781 message = '\n'.join(message)
1762
1782
1763 if opts['edit']:
1783 if opts['edit']:
1764 message = ui.edit(message, user or ui.username())
1784 message = ui.edit(message, user or ui.username())
1765
1785
1766 q.refresh(repo, msg=message)
1786 q.refresh(repo, msg=message)
1767 q.delete(repo, patches, opts)
1787 q.delete(repo, patches, opts)
1768 q.save_dirty()
1788 q.save_dirty()
1769
1789
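
A short qfold sketch with hypothetical patch names; the named patches must still be unapplied:

hg qfold part2.patch part3.patch   # apply both onto the current patch, refresh it, delete the folded files
hg qfold -e -k part4.patch         # same, but edit the combined header and keep part4.patch around
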
1770 def goto(ui, repo, patch, **opts):
1790 def goto(ui, repo, patch, **opts):
1771 '''push or pop patches until named patch is at top of stack'''
1791 '''push or pop patches until named patch is at top of stack'''
1772 q = repo.mq
1792 q = repo.mq
1773 patch = q.lookup(patch)
1793 patch = q.lookup(patch)
1774 if q.isapplied(patch):
1794 if q.isapplied(patch):
1775 ret = q.pop(repo, patch, force=opts['force'])
1795 ret = q.pop(repo, patch, force=opts['force'])
1776 else:
1796 else:
1777 ret = q.push(repo, patch, force=opts['force'])
1797 ret = q.push(repo, patch, force=opts['force'])
1778 q.save_dirty()
1798 q.save_dirty()
1779 return ret
1799 return ret
1780
1800
1781 def guard(ui, repo, *args, **opts):
1801 def guard(ui, repo, *args, **opts):
1782 '''set or print guards for a patch
1802 '''set or print guards for a patch
1783
1803
1784 Guards control whether a patch can be pushed. A patch with no
1804 Guards control whether a patch can be pushed. A patch with no
1785 guards is always pushed. A patch with a positive guard ("+foo") is
1805 guards is always pushed. A patch with a positive guard ("+foo") is
1786 pushed only if the qselect command has activated it. A patch with
1806 pushed only if the qselect command has activated it. A patch with
1787 a negative guard ("-foo") is never pushed if the qselect command
1807 a negative guard ("-foo") is never pushed if the qselect command
1788 has activated it.
1808 has activated it.
1789
1809
1790 With no arguments, print the currently active guards.
1810 With no arguments, print the currently active guards.
1791 With arguments, set guards for the named patch.
1811 With arguments, set guards for the named patch.
1792
1812
1793 To set a negative guard "-foo" on topmost patch ("--" is needed so
1813 To set a negative guard "-foo" on topmost patch ("--" is needed so
1794 hg will not interpret "-foo" as an option):
1814 hg will not interpret "-foo" as an option):
1795 hg qguard -- -foo
1815 hg qguard -- -foo
1796
1816
1797 To set guards on another patch:
1817 To set guards on another patch:
1798 hg qguard other.patch +2.6.17 -stable
1818 hg qguard other.patch +2.6.17 -stable
1799 '''
1819 '''
1800 def status(idx):
1820 def status(idx):
1801 guards = q.series_guards[idx] or ['unguarded']
1821 guards = q.series_guards[idx] or ['unguarded']
1802 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1822 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1803 q = repo.mq
1823 q = repo.mq
1804 patch = None
1824 patch = None
1805 args = list(args)
1825 args = list(args)
1806 if opts['list']:
1826 if opts['list']:
1807 if args or opts['none']:
1827 if args or opts['none']:
1808 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1828 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1809 for i in xrange(len(q.series)):
1829 for i in xrange(len(q.series)):
1810 status(i)
1830 status(i)
1811 return
1831 return
1812 if not args or args[0][0:1] in '-+':
1832 if not args or args[0][0:1] in '-+':
1813 if not q.applied:
1833 if not q.applied:
1814 raise util.Abort(_('no patches applied'))
1834 raise util.Abort(_('no patches applied'))
1815 patch = q.applied[-1].name
1835 patch = q.applied[-1].name
1816 if patch is None and args[0][0:1] not in '-+':
1836 if patch is None and args[0][0:1] not in '-+':
1817 patch = args.pop(0)
1837 patch = args.pop(0)
1818 if patch is None:
1838 if patch is None:
1819 raise util.Abort(_('no patch to work with'))
1839 raise util.Abort(_('no patch to work with'))
1820 if args or opts['none']:
1840 if args or opts['none']:
1821 idx = q.find_series(patch)
1841 idx = q.find_series(patch)
1822 if idx is None:
1842 if idx is None:
1823 raise util.Abort(_('no patch named %s') % patch)
1843 raise util.Abort(_('no patch named %s') % patch)
1824 q.set_guards(idx, args)
1844 q.set_guards(idx, args)
1825 q.save_dirty()
1845 q.save_dirty()
1826 else:
1846 else:
1827 status(q.series.index(q.lookup(patch)))
1847 status(q.series.index(q.lookup(patch)))
1828
1848
1829 def header(ui, repo, patch=None):
1849 def header(ui, repo, patch=None):
1830 """Print the header of the topmost or specified patch"""
1850 """Print the header of the topmost or specified patch"""
1831 q = repo.mq
1851 q = repo.mq
1832
1852
1833 if patch:
1853 if patch:
1834 patch = q.lookup(patch)
1854 patch = q.lookup(patch)
1835 else:
1855 else:
1836 if not q.applied:
1856 if not q.applied:
1837 ui.write('No patches applied\n')
1857 ui.write('No patches applied\n')
1838 return 1
1858 return 1
1839 patch = q.lookup('qtip')
1859 patch = q.lookup('qtip')
1840 message = repo.mq.readheaders(patch)[0]
1860 message = repo.mq.readheaders(patch)[0]
1841
1861
1842 ui.write('\n'.join(message) + '\n')
1862 ui.write('\n'.join(message) + '\n')
1843
1863
1844 def lastsavename(path):
1864 def lastsavename(path):
1845 (directory, base) = os.path.split(path)
1865 (directory, base) = os.path.split(path)
1846 names = os.listdir(directory)
1866 names = os.listdir(directory)
1847 namere = re.compile("%s.([0-9]+)" % base)
1867 namere = re.compile("%s.([0-9]+)" % base)
1848 maxindex = None
1868 maxindex = None
1849 maxname = None
1869 maxname = None
1850 for f in names:
1870 for f in names:
1851 m = namere.match(f)
1871 m = namere.match(f)
1852 if m:
1872 if m:
1853 index = int(m.group(1))
1873 index = int(m.group(1))
1854 if maxindex == None or index > maxindex:
1874 if maxindex == None or index > maxindex:
1855 maxindex = index
1875 maxindex = index
1856 maxname = f
1876 maxname = f
1857 if maxname:
1877 if maxname:
1858 return (os.path.join(directory, maxname), maxindex)
1878 return (os.path.join(directory, maxname), maxindex)
1859 return (None, None)
1879 return (None, None)
1860
1880
1861 def savename(path):
1881 def savename(path):
1862 (last, index) = lastsavename(path)
1882 (last, index) = lastsavename(path)
1863 if last is None:
1883 if last is None:
1864 index = 0
1884 index = 0
1865 newpath = path + ".%d" % (index + 1)
1885 newpath = path + ".%d" % (index + 1)
1866 return newpath
1886 return newpath
1867
1887
1868 def push(ui, repo, patch=None, **opts):
1888 def push(ui, repo, patch=None, **opts):
1869 """push the next patch onto the stack"""
1889 """push the next patch onto the stack"""
1870 q = repo.mq
1890 q = repo.mq
1871 mergeq = None
1891 mergeq = None
1872
1892
1873 if opts['all']:
1893 if opts['all']:
1874 if not q.series:
1894 if not q.series:
1875 ui.warn(_('no patches in series\n'))
1895 ui.warn(_('no patches in series\n'))
1876 return 0
1896 return 0
1877 patch = q.series[-1]
1897 patch = q.series[-1]
1878 if opts['merge']:
1898 if opts['merge']:
1879 if opts['name']:
1899 if opts['name']:
1880 newpath = opts['name']
1900 newpath = opts['name']
1881 else:
1901 else:
1882 newpath, i = lastsavename(q.path)
1902 newpath, i = lastsavename(q.path)
1883 if not newpath:
1903 if not newpath:
1884 ui.warn("no saved queues found, please use -n\n")
1904 ui.warn("no saved queues found, please use -n\n")
1885 return 1
1905 return 1
1886 mergeq = queue(ui, repo.join(""), newpath)
1906 mergeq = queue(ui, repo.join(""), newpath)
1887 ui.warn("merging with queue at: %s\n" % mergeq.path)
1907 ui.warn("merging with queue at: %s\n" % mergeq.path)
1888 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1908 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1889 mergeq=mergeq)
1909 mergeq=mergeq)
1890 return ret
1910 return ret
1891
1911
1892 def pop(ui, repo, patch=None, **opts):
1912 def pop(ui, repo, patch=None, **opts):
1893 """pop the current patch off the stack"""
1913 """pop the current patch off the stack"""
1894 localupdate = True
1914 localupdate = True
1895 if opts['name']:
1915 if opts['name']:
1896 q = queue(ui, repo.join(""), repo.join(opts['name']))
1916 q = queue(ui, repo.join(""), repo.join(opts['name']))
1897 ui.warn('using patch queue: %s\n' % q.path)
1917 ui.warn('using patch queue: %s\n' % q.path)
1898 localupdate = False
1918 localupdate = False
1899 else:
1919 else:
1900 q = repo.mq
1920 q = repo.mq
1901 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1921 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1902 all=opts['all'])
1922 all=opts['all'])
1903 q.save_dirty()
1923 q.save_dirty()
1904 return ret
1924 return ret
1905
1925
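
For reference, a sketch of stack movement and of the save/merge cycle that the --merge and --name options above support; when -n is omitted, qpush -m falls back to the queue directory that lastsavename() finds:

hg qpush          # apply the next patch in the series
hg qpush -a       # apply all remaining patches
hg qpop -a        # unapply everything
hg qsave -e -c    # snapshot the queue, then pull or update the underlying repo
hg qpush -a -m    # reapply all patches, merging against the saved queue
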
1906 def rename(ui, repo, patch, name=None, **opts):
1926 def rename(ui, repo, patch, name=None, **opts):
1907 """rename a patch
1927 """rename a patch
1908
1928
1909 With one argument, renames the current patch to PATCH1.
1929 With one argument, renames the current patch to PATCH1.
1910 With two arguments, renames PATCH1 to PATCH2."""
1930 With two arguments, renames PATCH1 to PATCH2."""
1911
1931
1912 q = repo.mq
1932 q = repo.mq
1913
1933
1914 if not name:
1934 if not name:
1915 name = patch
1935 name = patch
1916 patch = None
1936 patch = None
1917
1937
1918 if patch:
1938 if patch:
1919 patch = q.lookup(patch)
1939 patch = q.lookup(patch)
1920 else:
1940 else:
1921 if not q.applied:
1941 if not q.applied:
1922 ui.write(_('No patches applied\n'))
1942 ui.write(_('No patches applied\n'))
1923 return
1943 return
1924 patch = q.lookup('qtip')
1944 patch = q.lookup('qtip')
1925 absdest = q.join(name)
1945 absdest = q.join(name)
1926 if os.path.isdir(absdest):
1946 if os.path.isdir(absdest):
1927 name = normname(os.path.join(name, os.path.basename(patch)))
1947 name = normname(os.path.join(name, os.path.basename(patch)))
1928 absdest = q.join(name)
1948 absdest = q.join(name)
1929 if os.path.exists(absdest):
1949 if os.path.exists(absdest):
1930 raise util.Abort(_('%s already exists') % absdest)
1950 raise util.Abort(_('%s already exists') % absdest)
1931
1951
1932 if name in q.series:
1952 if name in q.series:
1933 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1953 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1934
1954
1935 if ui.verbose:
1955 if ui.verbose:
1936 ui.write('Renaming %s to %s\n' % (patch, name))
1956 ui.write('Renaming %s to %s\n' % (patch, name))
1937 i = q.find_series(patch)
1957 i = q.find_series(patch)
1938 guards = q.guard_re.findall(q.full_series[i])
1958 guards = q.guard_re.findall(q.full_series[i])
1939 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1959 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1940 q.parse_series()
1960 q.parse_series()
1941 q.series_dirty = 1
1961 q.series_dirty = 1
1942
1962
1943 info = q.isapplied(patch)
1963 info = q.isapplied(patch)
1944 if info:
1964 if info:
1945 q.applied[info[0]] = statusentry(info[1], name)
1965 q.applied[info[0]] = statusentry(info[1], name)
1946 q.applied_dirty = 1
1966 q.applied_dirty = 1
1947
1967
1948 util.rename(q.join(patch), absdest)
1968 util.rename(q.join(patch), absdest)
1949 r = q.qrepo()
1969 r = q.qrepo()
1950 if r:
1970 if r:
1951 wlock = r.wlock()
1971 wlock = r.wlock()
1952 try:
1972 try:
1953 if r.dirstate[name] == 'r':
1973 if r.dirstate[name] == 'r':
1954 r.undelete([name])
1974 r.undelete([name])
1955 r.copy(patch, name)
1975 r.copy(patch, name)
1956 r.remove([patch], False)
1976 r.remove([patch], False)
1957 finally:
1977 finally:
1958 del wlock
1978 del wlock
1959
1979
1960 q.save_dirty()
1980 q.save_dirty()
1961
1981
1962 def restore(ui, repo, rev, **opts):
1982 def restore(ui, repo, rev, **opts):
1963 """restore the queue state saved by a rev"""
1983 """restore the queue state saved by a rev"""
1964 rev = repo.lookup(rev)
1984 rev = repo.lookup(rev)
1965 q = repo.mq
1985 q = repo.mq
1966 q.restore(repo, rev, delete=opts['delete'],
1986 q.restore(repo, rev, delete=opts['delete'],
1967 qupdate=opts['update'])
1987 qupdate=opts['update'])
1968 q.save_dirty()
1988 q.save_dirty()
1969 return 0
1989 return 0
1970
1990
1971 def save(ui, repo, **opts):
1991 def save(ui, repo, **opts):
1972 """save current queue state"""
1992 """save current queue state"""
1973 q = repo.mq
1993 q = repo.mq
1974 message = cmdutil.logmessage(opts)
1994 message = cmdutil.logmessage(opts)
1975 ret = q.save(repo, msg=message)
1995 ret = q.save(repo, msg=message)
1976 if ret:
1996 if ret:
1977 return ret
1997 return ret
1978 q.save_dirty()
1998 q.save_dirty()
1979 if opts['copy']:
1999 if opts['copy']:
1980 path = q.path
2000 path = q.path
1981 if opts['name']:
2001 if opts['name']:
1982 newpath = os.path.join(q.basepath, opts['name'])
2002 newpath = os.path.join(q.basepath, opts['name'])
1983 if os.path.exists(newpath):
2003 if os.path.exists(newpath):
1984 if not os.path.isdir(newpath):
2004 if not os.path.isdir(newpath):
1985 raise util.Abort(_('destination %s exists and is not '
2005 raise util.Abort(_('destination %s exists and is not '
1986 'a directory') % newpath)
2006 'a directory') % newpath)
1987 if not opts['force']:
2007 if not opts['force']:
1988 raise util.Abort(_('destination %s exists, '
2008 raise util.Abort(_('destination %s exists, '
1989 'use -f to force') % newpath)
2009 'use -f to force') % newpath)
1990 else:
2010 else:
1991 newpath = savename(path)
2011 newpath = savename(path)
1992 ui.warn("copy %s to %s\n" % (path, newpath))
2012 ui.warn("copy %s to %s\n" % (path, newpath))
1993 util.copyfiles(path, newpath)
2013 util.copyfiles(path, newpath)
1994 if opts['empty']:
2014 if opts['empty']:
1995 try:
2015 try:
1996 os.unlink(q.join(q.status_path))
2016 os.unlink(q.join(q.status_path))
1997 except:
2017 except:
1998 pass
2018 pass
1999 return 0
2019 return 0
2000
2020
2001 def strip(ui, repo, rev, **opts):
2021 def strip(ui, repo, rev, **opts):
2002 """strip a revision and all later revs on the same branch"""
2022 """strip a revision and all later revs on the same branch"""
2003 rev = repo.lookup(rev)
2023 rev = repo.lookup(rev)
2004 backup = 'all'
2024 backup = 'all'
2005 if opts['backup']:
2025 if opts['backup']:
2006 backup = 'strip'
2026 backup = 'strip'
2007 elif opts['nobackup']:
2027 elif opts['nobackup']:
2008 backup = 'none'
2028 backup = 'none'
2009 update = repo.dirstate.parents()[0] != revlog.nullid
2029 update = repo.dirstate.parents()[0] != revlog.nullid
2010 repo.mq.strip(repo, rev, backup=backup, update=update)
2030 repo.mq.strip(repo, rev, backup=backup, update=update)
2011 return 0
2031 return 0
2012
2032
2013 def select(ui, repo, *args, **opts):
2033 def select(ui, repo, *args, **opts):
2014 '''set or print guarded patches to push
2034 '''set or print guarded patches to push
2015
2035
2016 Use the qguard command to set or print guards on a patch, then use
2036 Use the qguard command to set or print guards on a patch, then use
2017 qselect to tell mq which guards to use. A patch will be pushed if it
2037 qselect to tell mq which guards to use. A patch will be pushed if it
2018 has no guards or any positive guards match the currently selected guard,
2038 has no guards or any positive guards match the currently selected guard,
2019 but will not be pushed if any negative guards match the current guard.
2039 but will not be pushed if any negative guards match the current guard.
2020 For example:
2040 For example:
2021
2041
2022 qguard foo.patch -stable (negative guard)
2042 qguard foo.patch -stable (negative guard)
2023 qguard bar.patch +stable (positive guard)
2043 qguard bar.patch +stable (positive guard)
2024 qselect stable
2044 qselect stable
2025
2045
2026 This activates the "stable" guard. mq will skip foo.patch (because
2046 This activates the "stable" guard. mq will skip foo.patch (because
2027 it has a negative match) but push bar.patch (because it
2047 it has a negative match) but push bar.patch (because it
2028 has a positive match).
2048 has a positive match).
2029
2049
2030 With no arguments, prints the currently active guards.
2050 With no arguments, prints the currently active guards.
2031 With one argument, sets the active guard.
2051 With one argument, sets the active guard.
2032
2052
2033 Use -n/--none to deactivate guards (no other arguments needed).
2053 Use -n/--none to deactivate guards (no other arguments needed).
2034 When no guards are active, patches with positive guards are skipped
2054 When no guards are active, patches with positive guards are skipped
2035 and patches with negative guards are pushed.
2055 and patches with negative guards are pushed.
2036
2056
2037 qselect can change the guards on applied patches. It does not pop
2057 qselect can change the guards on applied patches. It does not pop
2038 guarded patches by default. Use --pop to pop back to the last applied
2058 guarded patches by default. Use --pop to pop back to the last applied
2039 patch that is not guarded. Use --reapply (which implies --pop) to push
2059 patch that is not guarded. Use --reapply (which implies --pop) to push
2040 back to the current patch afterwards, but skip guarded patches.
2060 back to the current patch afterwards, but skip guarded patches.
2041
2061
2042 Use -s/--series to print a list of all guards in the series file (no
2062 Use -s/--series to print a list of all guards in the series file (no
2043 other arguments needed). Use -v for more information.'''
2063 other arguments needed). Use -v for more information.'''
2044
2064
2045 q = repo.mq
2065 q = repo.mq
2046 guards = q.active()
2066 guards = q.active()
2047 if args or opts['none']:
2067 if args or opts['none']:
2048 old_unapplied = q.unapplied(repo)
2068 old_unapplied = q.unapplied(repo)
2049 old_guarded = [i for i in xrange(len(q.applied)) if
2069 old_guarded = [i for i in xrange(len(q.applied)) if
2050 not q.pushable(i)[0]]
2070 not q.pushable(i)[0]]
2051 q.set_active(args)
2071 q.set_active(args)
2052 q.save_dirty()
2072 q.save_dirty()
2053 if not args:
2073 if not args:
2054 ui.status(_('guards deactivated\n'))
2074 ui.status(_('guards deactivated\n'))
2055 if not opts['pop'] and not opts['reapply']:
2075 if not opts['pop'] and not opts['reapply']:
2056 unapplied = q.unapplied(repo)
2076 unapplied = q.unapplied(repo)
2057 guarded = [i for i in xrange(len(q.applied))
2077 guarded = [i for i in xrange(len(q.applied))
2058 if not q.pushable(i)[0]]
2078 if not q.pushable(i)[0]]
2059 if len(unapplied) != len(old_unapplied):
2079 if len(unapplied) != len(old_unapplied):
2060 ui.status(_('number of unguarded, unapplied patches has '
2080 ui.status(_('number of unguarded, unapplied patches has '
2061 'changed from %d to %d\n') %
2081 'changed from %d to %d\n') %
2062 (len(old_unapplied), len(unapplied)))
2082 (len(old_unapplied), len(unapplied)))
2063 if len(guarded) != len(old_guarded):
2083 if len(guarded) != len(old_guarded):
2064 ui.status(_('number of guarded, applied patches has changed '
2084 ui.status(_('number of guarded, applied patches has changed '
2065 'from %d to %d\n') %
2085 'from %d to %d\n') %
2066 (len(old_guarded), len(guarded)))
2086 (len(old_guarded), len(guarded)))
2067 elif opts['series']:
2087 elif opts['series']:
2068 guards = {}
2088 guards = {}
2069 noguards = 0
2089 noguards = 0
2070 for gs in q.series_guards:
2090 for gs in q.series_guards:
2071 if not gs:
2091 if not gs:
2072 noguards += 1
2092 noguards += 1
2073 for g in gs:
2093 for g in gs:
2074 guards.setdefault(g, 0)
2094 guards.setdefault(g, 0)
2075 guards[g] += 1
2095 guards[g] += 1
2076 if ui.verbose:
2096 if ui.verbose:
2077 guards['NONE'] = noguards
2097 guards['NONE'] = noguards
2078 guards = guards.items()
2098 guards = guards.items()
2079 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2099 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2080 if guards:
2100 if guards:
2081 ui.note(_('guards in series file:\n'))
2101 ui.note(_('guards in series file:\n'))
2082 for guard, count in guards:
2102 for guard, count in guards:
2083 ui.note('%2d ' % count)
2103 ui.note('%2d ' % count)
2084 ui.write(guard, '\n')
2104 ui.write(guard, '\n')
2085 else:
2105 else:
2086 ui.note(_('no guards in series file\n'))
2106 ui.note(_('no guards in series file\n'))
2087 else:
2107 else:
2088 if guards:
2108 if guards:
2089 ui.note(_('active guards:\n'))
2109 ui.note(_('active guards:\n'))
2090 for g in guards:
2110 for g in guards:
2091 ui.write(g, '\n')
2111 ui.write(g, '\n')
2092 else:
2112 else:
2093 ui.write(_('no active guards\n'))
2113 ui.write(_('no active guards\n'))
2094 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2114 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2095 popped = False
2115 popped = False
2096 if opts['pop'] or opts['reapply']:
2116 if opts['pop'] or opts['reapply']:
2097 for i in xrange(len(q.applied)):
2117 for i in xrange(len(q.applied)):
2098 pushable, reason = q.pushable(i)
2118 pushable, reason = q.pushable(i)
2099 if not pushable:
2119 if not pushable:
2100 ui.status(_('popping guarded patches\n'))
2120 ui.status(_('popping guarded patches\n'))
2101 popped = True
2121 popped = True
2102 if i == 0:
2122 if i == 0:
2103 q.pop(repo, all=True)
2123 q.pop(repo, all=True)
2104 else:
2124 else:
2105 q.pop(repo, i-1)
2125 q.pop(repo, i-1)
2106 break
2126 break
2107 if popped:
2127 if popped:
2108 try:
2128 try:
2109 if reapply:
2129 if reapply:
2110 ui.status(_('reapplying unguarded patches\n'))
2130 ui.status(_('reapplying unguarded patches\n'))
2111 q.push(repo, reapply)
2131 q.push(repo, reapply)
2112 finally:
2132 finally:
2113 q.save_dirty()
2133 q.save_dirty()
2114
2134
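
The guard machinery described in the qguard and qselect docstrings above can be exercised as follows (patch and guard names are hypothetical):

hg qguard -- foo.patch -stable   # negative guard: skip foo.patch while 'stable' is selected
hg qguard bar.patch +stable      # positive guard: push bar.patch only while 'stable' is selected
hg qselect stable                # activate the 'stable' guard
hg qpush -a                      # pushes bar.patch and skips foo.patch
hg qselect -s -v                 # summarize every guard used in the series file
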
2115 def reposetup(ui, repo):
2135 def reposetup(ui, repo):
2116 class mqrepo(repo.__class__):
2136 class mqrepo(repo.__class__):
2117 def abort_if_wdir_patched(self, errmsg, force=False):
2137 def abort_if_wdir_patched(self, errmsg, force=False):
2118 if self.mq.applied and not force:
2138 if self.mq.applied and not force:
2119 parent = revlog.hex(self.dirstate.parents()[0])
2139 parent = revlog.hex(self.dirstate.parents()[0])
2120 if parent in [s.rev for s in self.mq.applied]:
2140 if parent in [s.rev for s in self.mq.applied]:
2121 raise util.Abort(errmsg)
2141 raise util.Abort(errmsg)
2122
2142
2123 def commit(self, *args, **opts):
2143 def commit(self, *args, **opts):
2124 if len(args) >= 6:
2144 if len(args) >= 6:
2125 force = args[5]
2145 force = args[5]
2126 else:
2146 else:
2127 force = opts.get('force')
2147 force = opts.get('force')
2128 self.abort_if_wdir_patched(
2148 self.abort_if_wdir_patched(
2129 _('cannot commit over an applied mq patch'),
2149 _('cannot commit over an applied mq patch'),
2130 force)
2150 force)
2131
2151
2132 return super(mqrepo, self).commit(*args, **opts)
2152 return super(mqrepo, self).commit(*args, **opts)
2133
2153
2134 def push(self, remote, force=False, revs=None):
2154 def push(self, remote, force=False, revs=None):
2135 if self.mq.applied and not force and not revs:
2155 if self.mq.applied and not force and not revs:
2136 raise util.Abort(_('source has mq patches applied'))
2156 raise util.Abort(_('source has mq patches applied'))
2137 return super(mqrepo, self).push(remote, force, revs)
2157 return super(mqrepo, self).push(remote, force, revs)
2138
2158
2139 def tags(self):
2159 def tags(self):
2140 if self.tagscache:
2160 if self.tagscache:
2141 return self.tagscache
2161 return self.tagscache
2142
2162
2143 tagscache = super(mqrepo, self).tags()
2163 tagscache = super(mqrepo, self).tags()
2144
2164
2145 q = self.mq
2165 q = self.mq
2146 if not q.applied:
2166 if not q.applied:
2147 return tagscache
2167 return tagscache
2148
2168
2149 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2169 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2170
2171 if mqtags[-1][0] not in self.changelog.nodemap:
2172 self.ui.warn('mq status file refers to unknown node %s\n'
2173 % revlog.short(mqtags[-1][0]))
2174 return tagscache
2175
2150 mqtags.append((mqtags[-1][0], 'qtip'))
2176 mqtags.append((mqtags[-1][0], 'qtip'))
2151 mqtags.append((mqtags[0][0], 'qbase'))
2177 mqtags.append((mqtags[0][0], 'qbase'))
2152 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2178 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2153 for patch in mqtags:
2179 for patch in mqtags:
2154 if patch[1] in tagscache:
2180 if patch[1] in tagscache:
2155 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2181 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2156 else:
2182 else:
2157 tagscache[patch[1]] = patch[0]
2183 tagscache[patch[1]] = patch[0]
2158
2184
2159 return tagscache
2185 return tagscache
2160
2186
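
Because the tags() override above publishes qtip, qbase, qparent and one tag per applied patch, ordinary commands can address the patch stack directly; a small sketch:

hg log -r qparent:qtip --template '#rev# #desc#\n'   # last regular changeset plus every applied patch
hg diff -r qparent -r qtip                           # cumulative diff of the whole applied stack
hg update -C qtip                                    # return to the top applied patch
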
2161 def _branchtags(self):
2187 def _branchtags(self):
2162 q = self.mq
2188 q = self.mq
2163 if not q.applied:
2189 if not q.applied:
2164 return super(mqrepo, self)._branchtags()
2190 return super(mqrepo, self)._branchtags()
2165
2191
2192 cl = self.changelog
2193 qbasenode = revlog.bin(q.applied[0].rev)
2194 if qbasenode not in cl.nodemap:
2195 self.ui.warn('mq status file refers to unknown node %s\n'
2196 % revlog.short(qbasenode))
2197 return super(mqrepo, self)._branchtags()
2198
2166 self.branchcache = {} # avoid recursion in changectx
2199 self.branchcache = {} # avoid recursion in changectx
2167 cl = self.changelog
2168 partial, last, lrev = self._readbranchcache()
2200 partial, last, lrev = self._readbranchcache()
2169
2201
2170 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2202 qbase = cl.rev(qbasenode)
2171 start = lrev + 1
2203 start = lrev + 1
2172 if start < qbase:
2204 if start < qbase:
2173 # update the cache (excluding the patches) and save it
2205 # update the cache (excluding the patches) and save it
2174 self._updatebranchcache(partial, lrev+1, qbase)
2206 self._updatebranchcache(partial, lrev+1, qbase)
2175 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2207 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2176 start = qbase
2208 start = qbase
2177 # if start = qbase, the cache is as updated as it should be.
2209 # if start = qbase, the cache is as updated as it should be.
2178 # if start > qbase, the cache includes (part of) the patches.
2210 # if start > qbase, the cache includes (part of) the patches.
2179 # we might as well use it, but we won't save it.
2211 # we might as well use it, but we won't save it.
2180
2212
2181 # update the cache up to the tip
2213 # update the cache up to the tip
2182 self._updatebranchcache(partial, start, cl.count())
2214 self._updatebranchcache(partial, start, cl.count())
2183
2215
2184 return partial
2216 return partial
2185
2217
2186 if repo.local():
2218 if repo.local():
2187 repo.__class__ = mqrepo
2219 repo.__class__ = mqrepo
2188 repo.mq = queue(ui, repo.join(""))
2220 repo.mq = queue(ui, repo.join(""))
2189
2221
2190 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2222 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2191
2223
2192 headeropts = [
2224 headeropts = [
2193 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2225 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2194 ('u', 'user', '', _('add "From: <given user>" to patch')),
2226 ('u', 'user', '', _('add "From: <given user>" to patch')),
2195 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2227 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2196 ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2228 ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2197
2229
2198 cmdtable = {
2230 cmdtable = {
2199 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2231 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2200 "qclone":
2232 "qclone":
2201 (clone,
2233 (clone,
2202 [('', 'pull', None, _('use pull protocol to copy metadata')),
2234 [('', 'pull', None, _('use pull protocol to copy metadata')),
2203 ('U', 'noupdate', None, _('do not update the new working directories')),
2235 ('U', 'noupdate', None, _('do not update the new working directories')),
2204 ('', 'uncompressed', None,
2236 ('', 'uncompressed', None,
2205 _('use uncompressed transfer (fast over LAN)')),
2237 _('use uncompressed transfer (fast over LAN)')),
2206 ('p', 'patches', '', _('location of source patch repo')),
2238 ('p', 'patches', '', _('location of source patch repo')),
2207 ] + commands.remoteopts,
2239 ] + commands.remoteopts,
2208 _('hg qclone [OPTION]... SOURCE [DEST]')),
2240 _('hg qclone [OPTION]... SOURCE [DEST]')),
2209 "qcommit|qci":
2241 "qcommit|qci":
2210 (commit,
2242 (commit,
2211 commands.table["^commit|ci"][1],
2243 commands.table["^commit|ci"][1],
2212 _('hg qcommit [OPTION]... [FILE]...')),
2244 _('hg qcommit [OPTION]... [FILE]...')),
2213 "^qdiff":
2245 "^qdiff":
2214 (diff,
2246 (diff,
2215 [('g', 'git', None, _('use git extended diff format')),
2247 [('g', 'git', None, _('use git extended diff format')),
2216 ] + commands.walkopts,
2248 ] + commands.walkopts,
2217 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2249 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2218 "qdelete|qremove|qrm":
2250 "qdelete|qremove|qrm":
2219 (delete,
2251 (delete,
2220 [('k', 'keep', None, _('keep patch file')),
2252 [('k', 'keep', None, _('keep patch file')),
2221 ('r', 'rev', [], _('stop managing a revision'))],
2253 ('r', 'rev', [], _('stop managing a revision'))],
2222 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2254 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2223 'qfold':
2255 'qfold':
2224 (fold,
2256 (fold,
2225 [('e', 'edit', None, _('edit patch header')),
2257 [('e', 'edit', None, _('edit patch header')),
2226 ('k', 'keep', None, _('keep folded patch files')),
2258 ('k', 'keep', None, _('keep folded patch files')),
2227 ] + commands.commitopts,
2259 ] + commands.commitopts,
2228 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2260 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2229 'qgoto':
2261 'qgoto':
2230 (goto,
2262 (goto,
2231 [('f', 'force', None, _('overwrite any local changes'))],
2263 [('f', 'force', None, _('overwrite any local changes'))],
2232 _('hg qgoto [OPTION]... PATCH')),
2264 _('hg qgoto [OPTION]... PATCH')),
2233 'qguard':
2265 'qguard':
2234 (guard,
2266 (guard,
2235 [('l', 'list', None, _('list all patches and guards')),
2267 [('l', 'list', None, _('list all patches and guards')),
2236 ('n', 'none', None, _('drop all guards'))],
2268 ('n', 'none', None, _('drop all guards'))],
2237 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2269 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2238 'qheader': (header, [], _('hg qheader [PATCH]')),
2270 'qheader': (header, [], _('hg qheader [PATCH]')),
2239 "^qimport":
2271 "^qimport":
2240 (qimport,
2272 (qimport,
2241 [('e', 'existing', None, 'import file in patch dir'),
2273 [('e', 'existing', None, 'import file in patch dir'),
2242 ('n', 'name', '', 'patch file name'),
2274 ('n', 'name', '', 'patch file name'),
2243 ('f', 'force', None, 'overwrite existing files'),
2275 ('f', 'force', None, 'overwrite existing files'),
2244 ('r', 'rev', [], 'place existing revisions under mq control'),
2276 ('r', 'rev', [], 'place existing revisions under mq control'),
2245 ('g', 'git', None, _('use git extended diff format'))],
2277 ('g', 'git', None, _('use git extended diff format'))],
2246 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2278 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2247 "^qinit":
2279 "^qinit":
2248 (init,
2280 (init,
2249 [('c', 'create-repo', None, 'create queue repository')],
2281 [('c', 'create-repo', None, 'create queue repository')],
2250 _('hg qinit [-c]')),
2282 _('hg qinit [-c]')),
2251 "qnew":
2283 "qnew":
2252 (new,
2284 (new,
2253 [('e', 'edit', None, _('edit commit message')),
2285 [('e', 'edit', None, _('edit commit message')),
2254 ('f', 'force', None, _('import uncommitted changes into patch')),
2286 ('f', 'force', None, _('import uncommitted changes into patch')),
2255 ('g', 'git', None, _('use git extended diff format')),
2287 ('g', 'git', None, _('use git extended diff format')),
2256 ] + commands.walkopts + commands.commitopts + headeropts,
2288 ] + commands.walkopts + commands.commitopts + headeropts,
2257 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2289 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2258 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2290 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2259 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2291 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2260 "^qpop":
2292 "^qpop":
2261 (pop,
2293 (pop,
2262 [('a', 'all', None, _('pop all patches')),
2294 [('a', 'all', None, _('pop all patches')),
2263 ('n', 'name', '', _('queue name to pop')),
2295 ('n', 'name', '', _('queue name to pop')),
2264 ('f', 'force', None, _('forget any local changes'))],
2296 ('f', 'force', None, _('forget any local changes'))],
2265 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2297 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2266 "^qpush":
2298 "^qpush":
2267 (push,
2299 (push,
2268 [('f', 'force', None, _('apply if the patch has rejects')),
2300 [('f', 'force', None, _('apply if the patch has rejects')),
2269 ('l', 'list', None, _('list patch name in commit text')),
2301 ('l', 'list', None, _('list patch name in commit text')),
2270 ('a', 'all', None, _('apply all patches')),
2302 ('a', 'all', None, _('apply all patches')),
2271 ('m', 'merge', None, _('merge from another queue')),
2303 ('m', 'merge', None, _('merge from another queue')),
2272 ('n', 'name', '', _('merge queue name'))],
2304 ('n', 'name', '', _('merge queue name'))],
2273 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2305 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2274 "^qrefresh":
2306 "^qrefresh":
2275 (refresh,
2307 (refresh,
2276 [('e', 'edit', None, _('edit commit message')),
2308 [('e', 'edit', None, _('edit commit message')),
2277 ('g', 'git', None, _('use git extended diff format')),
2309 ('g', 'git', None, _('use git extended diff format')),
2278 ('s', 'short', None, _('refresh only files already in the patch')),
2310 ('s', 'short', None, _('refresh only files already in the patch')),
2279 ] + commands.walkopts + commands.commitopts + headeropts,
2311 ] + commands.walkopts + commands.commitopts + headeropts,
2280 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2312 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2281 'qrename|qmv':
2313 'qrename|qmv':
2282 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2314 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2283 "qrestore":
2315 "qrestore":
2284 (restore,
2316 (restore,
2285 [('d', 'delete', None, _('delete save entry')),
2317 [('d', 'delete', None, _('delete save entry')),
2286 ('u', 'update', None, _('update queue working dir'))],
2318 ('u', 'update', None, _('update queue working dir'))],
2287 _('hg qrestore [-d] [-u] REV')),
2319 _('hg qrestore [-d] [-u] REV')),
2288 "qsave":
2320 "qsave":
2289 (save,
2321 (save,
2290 [('c', 'copy', None, _('copy patch directory')),
2322 [('c', 'copy', None, _('copy patch directory')),
2291 ('n', 'name', '', _('copy directory name')),
2323 ('n', 'name', '', _('copy directory name')),
2292 ('e', 'empty', None, _('clear queue status file')),
2324 ('e', 'empty', None, _('clear queue status file')),
2293 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2325 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2294 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2326 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2295 "qselect":
2327 "qselect":
2296 (select,
2328 (select,
2297 [('n', 'none', None, _('disable all guards')),
2329 [('n', 'none', None, _('disable all guards')),
2298 ('s', 'series', None, _('list all guards in series file')),
2330 ('s', 'series', None, _('list all guards in series file')),
2299 ('', 'pop', None, _('pop to before first guarded applied patch')),
2331 ('', 'pop', None, _('pop to before first guarded applied patch')),
2300 ('', 'reapply', None, _('pop, then reapply patches'))],
2332 ('', 'reapply', None, _('pop, then reapply patches'))],
2301 _('hg qselect [OPTION]... [GUARD]...')),
2333 _('hg qselect [OPTION]... [GUARD]...')),
2302 "qseries":
2334 "qseries":
2303 (series,
2335 (series,
2304 [('m', 'missing', None, _('print patches not in series')),
2336 [('m', 'missing', None, _('print patches not in series')),
2305 ] + seriesopts,
2337 ] + seriesopts,
2306 _('hg qseries [-ms]')),
2338 _('hg qseries [-ms]')),
2307 "^strip":
2339 "^strip":
2308 (strip,
2340 (strip,
2309 [('f', 'force', None, _('force multi-head removal')),
2341 [('f', 'force', None, _('force multi-head removal')),
2310 ('b', 'backup', None, _('bundle unrelated changesets')),
2342 ('b', 'backup', None, _('bundle unrelated changesets')),
2311 ('n', 'nobackup', None, _('no backups'))],
2343 ('n', 'nobackup', None, _('no backups'))],
2312 _('hg strip [-f] [-b] [-n] REV')),
2344 _('hg strip [-f] [-b] [-n] REV')),
2313 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2345 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2314 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2346 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2315 }
2347 }
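The command table above is what wires the mq commands into hg. A minimal patch-queue session using only the commands and options registered there might look like the following sketch (patch and file names are illustrative, not taken from the source):

hg qinit                      # set up the patch queue
hg qnew fix-typo.patch        # start a new patch on top of the queue
echo fixed >> somefile
hg qrefresh -m 'fix a typo'   # fold working-dir changes into the patch
hg qpop                       # unapply the top patch
hg qpush                      # reapply it
hg qseries -s                 # list patches with their summaries
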
@@ -1,3111 +1,3114 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, re, sys, urllib
10 import os, re, sys, urllib
11 import hg, util, revlog, bundlerepo, extensions
11 import hg, util, revlog, bundlerepo, extensions
12 import difflib, patch, time, help, mdiff, tempfile
12 import difflib, patch, time, help, mdiff, tempfile
13 import errno, version, socket
13 import errno, version, socket
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15
15
16 # Commands start here, listed alphabetically
16 # Commands start here, listed alphabetically
17
17
18 def add(ui, repo, *pats, **opts):
18 def add(ui, repo, *pats, **opts):
19 """add the specified files on the next commit
19 """add the specified files on the next commit
20
20
21 Schedule files to be version controlled and added to the repository.
21 Schedule files to be version controlled and added to the repository.
22
22
23 The files will be added to the repository at the next commit. To
23 The files will be added to the repository at the next commit. To
24 undo an add before that, see hg revert.
24 undo an add before that, see hg revert.
25
25
26 If no names are given, add all files in the repository.
26 If no names are given, add all files in the repository.
27 """
27 """
28
28
29 rejected = None
29 rejected = None
30 exacts = {}
30 exacts = {}
31 names = []
31 names = []
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
33 badmatch=util.always):
33 badmatch=util.always):
34 if exact:
34 if exact:
35 if ui.verbose:
35 if ui.verbose:
36 ui.status(_('adding %s\n') % rel)
36 ui.status(_('adding %s\n') % rel)
37 names.append(abs)
37 names.append(abs)
38 exacts[abs] = 1
38 exacts[abs] = 1
39 elif abs not in repo.dirstate:
39 elif abs not in repo.dirstate:
40 ui.status(_('adding %s\n') % rel)
40 ui.status(_('adding %s\n') % rel)
41 names.append(abs)
41 names.append(abs)
42 if not opts.get('dry_run'):
42 if not opts.get('dry_run'):
43 rejected = repo.add(names)
43 rejected = repo.add(names)
44 rejected = [p for p in rejected if p in exacts]
44 rejected = [p for p in rejected if p in exacts]
45 return rejected and 1 or 0
45 return rejected and 1 or 0
46
46
47 def addremove(ui, repo, *pats, **opts):
47 def addremove(ui, repo, *pats, **opts):
48 """add all new files, delete all missing files
48 """add all new files, delete all missing files
49
49
50 Add all new files and remove all missing files from the repository.
50 Add all new files and remove all missing files from the repository.
51
51
52 New files are ignored if they match any of the patterns in .hgignore. As
52 New files are ignored if they match any of the patterns in .hgignore. As
53 with add, these changes take effect at the next commit.
53 with add, these changes take effect at the next commit.
54
54
55 Use the -s option to detect renamed files. With a parameter > 0,
55 Use the -s option to detect renamed files. With a parameter > 0,
56 this compares every removed file with every added file and records
56 this compares every removed file with every added file and records
57 those similar enough as renames. This option takes a percentage
57 those similar enough as renames. This option takes a percentage
58 between 0 (disabled) and 100 (files must be identical) as its
58 between 0 (disabled) and 100 (files must be identical) as its
59 parameter. Detecting renamed files this way can be expensive.
59 parameter. Detecting renamed files this way can be expensive.
60 """
60 """
61 try:
61 try:
62 sim = float(opts.get('similarity') or 0)
62 sim = float(opts.get('similarity') or 0)
63 except ValueError:
63 except ValueError:
64 raise util.Abort(_('similarity must be a number'))
64 raise util.Abort(_('similarity must be a number'))
65 if sim < 0 or sim > 100:
65 if sim < 0 or sim > 100:
66 raise util.Abort(_('similarity must be between 0 and 100'))
66 raise util.Abort(_('similarity must be between 0 and 100'))
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68
68
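As a sketch of the similarity option described in the docstring above (file names are illustrative, and the -C flag of hg status for showing copy sources is an assumption, not shown in this file):

mv old.c new.c            # rename performed outside of hg
hg addremove -s 90        # record it as a rename if contents are >= 90% similar
hg status -C              # the copy source should be listed under new.c
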
69 def annotate(ui, repo, *pats, **opts):
69 def annotate(ui, repo, *pats, **opts):
70 """show changeset information per file line
70 """show changeset information per file line
71
71
72 List changes in files, showing the revision id responsible for each line
72 List changes in files, showing the revision id responsible for each line
73
73
74 This command is useful to discover who did a change or when a change took
74 This command is useful to discover who did a change or when a change took
75 place.
75 place.
76
76
77 Without the -a option, annotate will avoid processing files it
77 Without the -a option, annotate will avoid processing files it
78 detects as binary. With -a, annotate will generate an annotation
78 detects as binary. With -a, annotate will generate an annotation
79 anyway, probably with undesirable results.
79 anyway, probably with undesirable results.
80 """
80 """
81 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
81 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
82
82
83 if not pats:
83 if not pats:
84 raise util.Abort(_('at least one file name or pattern required'))
84 raise util.Abort(_('at least one file name or pattern required'))
85
85
86 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
86 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
87 ('number', lambda x: str(x[0].rev())),
87 ('number', lambda x: str(x[0].rev())),
88 ('changeset', lambda x: short(x[0].node())),
88 ('changeset', lambda x: short(x[0].node())),
89 ('date', getdate),
89 ('date', getdate),
90 ('follow', lambda x: x[0].path()),
90 ('follow', lambda x: x[0].path()),
91 ]
91 ]
92
92
93 if (not opts['user'] and not opts['changeset'] and not opts['date']
93 if (not opts['user'] and not opts['changeset'] and not opts['date']
94 and not opts['follow']):
94 and not opts['follow']):
95 opts['number'] = 1
95 opts['number'] = 1
96
96
97 linenumber = opts.get('line_number') is not None
97 linenumber = opts.get('line_number') is not None
98 if (linenumber and (not opts['changeset']) and (not opts['number'])):
98 if (linenumber and (not opts['changeset']) and (not opts['number'])):
99 raise util.Abort(_('at least one of -n/-c is required for -l'))
99 raise util.Abort(_('at least one of -n/-c is required for -l'))
100
100
101 funcmap = [func for op, func in opmap if opts.get(op)]
101 funcmap = [func for op, func in opmap if opts.get(op)]
102 if linenumber:
102 if linenumber:
103 lastfunc = funcmap[-1]
103 lastfunc = funcmap[-1]
104 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
104 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
105
105
106 ctx = repo.changectx(opts['rev'])
106 ctx = repo.changectx(opts['rev'])
107
107
108 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
108 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
109 node=ctx.node()):
109 node=ctx.node()):
110 fctx = ctx.filectx(abs)
110 fctx = ctx.filectx(abs)
111 if not opts['text'] and util.binary(fctx.data()):
111 if not opts['text'] and util.binary(fctx.data()):
112 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
112 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
113 continue
113 continue
114
114
115 lines = fctx.annotate(follow=opts.get('follow'),
115 lines = fctx.annotate(follow=opts.get('follow'),
116 linenumber=linenumber)
116 linenumber=linenumber)
117 pieces = []
117 pieces = []
118
118
119 for f in funcmap:
119 for f in funcmap:
120 l = [f(n) for n, dummy in lines]
120 l = [f(n) for n, dummy in lines]
121 if l:
121 if l:
122 m = max(map(len, l))
122 m = max(map(len, l))
123 pieces.append(["%*s" % (m, x) for x in l])
123 pieces.append(["%*s" % (m, x) for x in l])
124
124
125 if pieces:
125 if pieces:
126 for p, l in zip(zip(*pieces), lines):
126 for p, l in zip(zip(*pieces), lines):
127 ui.write("%s: %s" % (" ".join(p), l[1]))
127 ui.write("%s: %s" % (" ".join(p), l[1]))
128
128
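A typical invocation of the options handled above, assuming the usual short flags -u (user), -n (revision number) and -l (line number), which are not visible in this hunk:

hg annotate -u -n -l somefile.py    # user, revision and originating line per line
hg annotate -a somebinary           # force annotation of a file detected as binary
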
129 def archive(ui, repo, dest, **opts):
129 def archive(ui, repo, dest, **opts):
130 '''create unversioned archive of a repository revision
130 '''create unversioned archive of a repository revision
131
131
132 By default, the revision used is the parent of the working
132 By default, the revision used is the parent of the working
133 directory; use "-r" to specify a different revision.
133 directory; use "-r" to specify a different revision.
134
134
135 To specify the type of archive to create, use "-t". Valid
135 To specify the type of archive to create, use "-t". Valid
136 types are:
136 types are:
137
137
138 "files" (default): a directory full of files
138 "files" (default): a directory full of files
139 "tar": tar archive, uncompressed
139 "tar": tar archive, uncompressed
140 "tbz2": tar archive, compressed using bzip2
140 "tbz2": tar archive, compressed using bzip2
141 "tgz": tar archive, compressed using gzip
141 "tgz": tar archive, compressed using gzip
142 "uzip": zip archive, uncompressed
142 "uzip": zip archive, uncompressed
143 "zip": zip archive, compressed using deflate
143 "zip": zip archive, compressed using deflate
144
144
145 The exact name of the destination archive or directory is given
145 The exact name of the destination archive or directory is given
146 using a format string; see "hg help export" for details.
146 using a format string; see "hg help export" for details.
147
147
148 Each member added to an archive file has a directory prefix
148 Each member added to an archive file has a directory prefix
149 prepended. Use "-p" to specify a format string for the prefix.
149 prepended. Use "-p" to specify a format string for the prefix.
150 The default is the basename of the archive, with suffixes removed.
150 The default is the basename of the archive, with suffixes removed.
151 '''
151 '''
152
152
153 ctx = repo.changectx(opts['rev'])
153 ctx = repo.changectx(opts['rev'])
154 if not ctx:
154 if not ctx:
155 raise util.Abort(_('repository has no revisions'))
155 raise util.Abort(_('repository has no revisions'))
156 node = ctx.node()
156 node = ctx.node()
157 dest = cmdutil.make_filename(repo, dest, node)
157 dest = cmdutil.make_filename(repo, dest, node)
158 if os.path.realpath(dest) == repo.root:
158 if os.path.realpath(dest) == repo.root:
159 raise util.Abort(_('repository root cannot be destination'))
159 raise util.Abort(_('repository root cannot be destination'))
160 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
160 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
161 kind = opts.get('type') or 'files'
161 kind = opts.get('type') or 'files'
162 prefix = opts['prefix']
162 prefix = opts['prefix']
163 if dest == '-':
163 if dest == '-':
164 if kind == 'files':
164 if kind == 'files':
165 raise util.Abort(_('cannot archive plain files to stdout'))
165 raise util.Abort(_('cannot archive plain files to stdout'))
166 dest = sys.stdout
166 dest = sys.stdout
167 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
167 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
168 prefix = cmdutil.make_filename(repo, prefix, node)
168 prefix = cmdutil.make_filename(repo, prefix, node)
169 archival.archive(repo, dest, node, kind, not opts['no_decode'],
169 archival.archive(repo, dest, node, kind, not opts['no_decode'],
170 matchfn, prefix)
170 matchfn, prefix)
171
171
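For example, to pack a tagged revision into a gzipped tarball, or to override the default 'basename-%h' member prefix built above (tag and file names are illustrative):

hg archive -t tgz -r 1.0 ../project-1.0.tar.gz
hg archive -t zip -p 'project-%h' snapshot.zip   # explicit member prefix
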
172 def backout(ui, repo, node=None, rev=None, **opts):
172 def backout(ui, repo, node=None, rev=None, **opts):
173 '''reverse effect of earlier changeset
173 '''reverse effect of earlier changeset
174
174
175 Commit the backed out changes as a new changeset. The new
175 Commit the backed out changes as a new changeset. The new
176 changeset is a child of the backed out changeset.
176 changeset is a child of the backed out changeset.
177
177
178 If you back out a changeset other than the tip, a new head is
178 If you back out a changeset other than the tip, a new head is
179 created. This head is the parent of the working directory. If
179 created. This head is the parent of the working directory. If
180 you back out an old changeset, your working directory will appear
180 you back out an old changeset, your working directory will appear
181 old after the backout. You should merge the backout changeset
181 old after the backout. You should merge the backout changeset
182 with another head.
182 with another head.
183
183
184 The --merge option remembers the parent of the working directory
184 The --merge option remembers the parent of the working directory
185 before starting the backout, then merges the new head with that
185 before starting the backout, then merges the new head with that
186 changeset afterwards. This saves you from doing the merge by
186 changeset afterwards. This saves you from doing the merge by
187 hand. The result of this merge is not committed, as for a normal
187 hand. The result of this merge is not committed, as for a normal
188 merge.'''
188 merge.'''
189 if rev and node:
189 if rev and node:
190 raise util.Abort(_("please specify just one revision"))
190 raise util.Abort(_("please specify just one revision"))
191
191
192 if not rev:
192 if not rev:
193 rev = node
193 rev = node
194
194
195 if not rev:
195 if not rev:
196 raise util.Abort(_("please specify a revision to backout"))
196 raise util.Abort(_("please specify a revision to backout"))
197
197
198 cmdutil.bail_if_changed(repo)
198 cmdutil.bail_if_changed(repo)
199 node = repo.lookup(rev)
199 node = repo.lookup(rev)
200
200
201 op1, op2 = repo.dirstate.parents()
201 op1, op2 = repo.dirstate.parents()
202 a = repo.changelog.ancestor(op1, node)
202 a = repo.changelog.ancestor(op1, node)
203 if a != node:
203 if a != node:
204 raise util.Abort(_('cannot back out change on a different branch'))
204 raise util.Abort(_('cannot back out change on a different branch'))
205
205
206 p1, p2 = repo.changelog.parents(node)
206 p1, p2 = repo.changelog.parents(node)
207 if p1 == nullid:
207 if p1 == nullid:
208 raise util.Abort(_('cannot back out a change with no parents'))
208 raise util.Abort(_('cannot back out a change with no parents'))
209 if p2 != nullid:
209 if p2 != nullid:
210 if not opts['parent']:
210 if not opts['parent']:
211 raise util.Abort(_('cannot back out a merge changeset without '
211 raise util.Abort(_('cannot back out a merge changeset without '
212 '--parent'))
212 '--parent'))
213 p = repo.lookup(opts['parent'])
213 p = repo.lookup(opts['parent'])
214 if p not in (p1, p2):
214 if p not in (p1, p2):
215 raise util.Abort(_('%s is not a parent of %s') %
215 raise util.Abort(_('%s is not a parent of %s') %
216 (short(p), short(node)))
216 (short(p), short(node)))
217 parent = p
217 parent = p
218 else:
218 else:
219 if opts['parent']:
219 if opts['parent']:
220 raise util.Abort(_('cannot use --parent on non-merge changeset'))
220 raise util.Abort(_('cannot use --parent on non-merge changeset'))
221 parent = p1
221 parent = p1
222
222
223 hg.clean(repo, node, show_stats=False)
223 hg.clean(repo, node, show_stats=False)
224 revert_opts = opts.copy()
224 revert_opts = opts.copy()
225 revert_opts['date'] = None
225 revert_opts['date'] = None
226 revert_opts['all'] = True
226 revert_opts['all'] = True
227 revert_opts['rev'] = hex(parent)
227 revert_opts['rev'] = hex(parent)
228 revert(ui, repo, **revert_opts)
228 revert(ui, repo, **revert_opts)
229 commit_opts = opts.copy()
229 commit_opts = opts.copy()
230 commit_opts['addremove'] = False
230 commit_opts['addremove'] = False
231 if not commit_opts['message'] and not commit_opts['logfile']:
231 if not commit_opts['message'] and not commit_opts['logfile']:
232 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
232 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
233 commit_opts['force_editor'] = True
233 commit_opts['force_editor'] = True
234 commit(ui, repo, **commit_opts)
234 commit(ui, repo, **commit_opts)
235 def nice(node):
235 def nice(node):
236 return '%d:%s' % (repo.changelog.rev(node), short(node))
236 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 ui.status(_('changeset %s backs out changeset %s\n') %
237 ui.status(_('changeset %s backs out changeset %s\n') %
238 (nice(repo.changelog.tip()), nice(node)))
238 (nice(repo.changelog.tip()), nice(node)))
239 if op1 != node:
239 if op1 != node:
240 if opts['merge']:
240 if opts['merge']:
241 ui.status(_('merging with changeset %s\n') % nice(op1))
241 ui.status(_('merging with changeset %s\n') % nice(op1))
242 hg.merge(repo, hex(op1))
242 hg.merge(repo, hex(op1))
243 else:
243 else:
244 ui.status(_('the backout changeset is a new head - '
244 ui.status(_('the backout changeset is a new head - '
245 'do not forget to merge\n'))
245 'do not forget to merge\n'))
246 ui.status(_('(use "backout --merge" '
246 ui.status(_('(use "backout --merge" '
247 'if you want to auto-merge)\n'))
247 'if you want to auto-merge)\n'))
248
248
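A sketch of the two workflows described in the docstring (revision numbers and the commit message are illustrative):

hg backout 12                 # commit a changeset reversing revision 12,
hg merge                      # then merge the resulting head by hand...
hg commit -m 'merge backout of 12'

hg backout --merge 12         # ...or let --merge perform the merge right away
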
249 def bisect(ui, repo, rev=None, extra=None,
249 def bisect(ui, repo, rev=None, extra=None,
250 reset=None, good=None, bad=None, skip=None, noupdate=None):
250 reset=None, good=None, bad=None, skip=None, noupdate=None):
251 """subdivision search of changesets
251 """subdivision search of changesets
252
252
253 This command helps to find changesets which introduce problems.
253 This command helps to find changesets which introduce problems.
254 To use, mark the earliest changeset you know exhibits the problem
254 To use, mark the earliest changeset you know exhibits the problem
255 as bad, then mark the latest changeset which is free from the
255 as bad, then mark the latest changeset which is free from the
256 problem as good. Bisect will update your working directory to a
256 problem as good. Bisect will update your working directory to a
257 revision for testing. Once you have performed tests, mark the
257 revision for testing. Once you have performed tests, mark the
258 working directory as bad or good and bisect will either update to
258 working directory as bad or good and bisect will either update to
259 another candidate changeset or announce that it has found the bad
259 another candidate changeset or announce that it has found the bad
260 revision.
260 revision.
261 """
261 """
262 # backward compatibility
262 # backward compatibility
263 if rev in "good bad reset init".split():
263 if rev in "good bad reset init".split():
264 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
264 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
265 cmd, rev, extra = rev, extra, None
265 cmd, rev, extra = rev, extra, None
266 if cmd == "good":
266 if cmd == "good":
267 good = True
267 good = True
268 elif cmd == "bad":
268 elif cmd == "bad":
269 bad = True
269 bad = True
270 else:
270 else:
271 reset = True
271 reset = True
272 elif extra or good + bad + skip + reset > 1:
272 elif extra or good + bad + skip + reset > 1:
273 raise util.Abort("Incompatible arguments")
273 raise util.Abort("Incompatible arguments")
274
274
275 if reset:
275 if reset:
276 p = repo.join("bisect.state")
276 p = repo.join("bisect.state")
277 if os.path.exists(p):
277 if os.path.exists(p):
278 os.unlink(p)
278 os.unlink(p)
279 return
279 return
280
280
281 # load state
281 # load state
282 state = {'good': [], 'bad': [], 'skip': []}
282 state = {'good': [], 'bad': [], 'skip': []}
283 if os.path.exists(repo.join("bisect.state")):
283 if os.path.exists(repo.join("bisect.state")):
284 for l in repo.opener("bisect.state"):
284 for l in repo.opener("bisect.state"):
285 kind, node = l[:-1].split()
285 kind, node = l[:-1].split()
286 node = repo.lookup(node)
286 node = repo.lookup(node)
287 if kind not in state:
287 if kind not in state:
288 raise util.Abort(_("unknown bisect kind %s") % kind)
288 raise util.Abort(_("unknown bisect kind %s") % kind)
289 state[kind].append(node)
289 state[kind].append(node)
290
290
291 # update state
291 # update state
292 node = repo.lookup(rev or '.')
292 node = repo.lookup(rev or '.')
293 if good:
293 if good:
294 state['good'].append(node)
294 state['good'].append(node)
295 elif bad:
295 elif bad:
296 state['bad'].append(node)
296 state['bad'].append(node)
297 elif skip:
297 elif skip:
298 state['skip'].append(node)
298 state['skip'].append(node)
299
299
300 # save state
300 # save state
301 f = repo.opener("bisect.state", "w", atomictemp=True)
301 f = repo.opener("bisect.state", "w", atomictemp=True)
302 wlock = repo.wlock()
302 wlock = repo.wlock()
303 try:
303 try:
304 for kind in state:
304 for kind in state:
305 for node in state[kind]:
305 for node in state[kind]:
306 f.write("%s %s\n" % (kind, hg.hex(node)))
306 f.write("%s %s\n" % (kind, hg.hex(node)))
307 f.rename()
307 f.rename()
308 finally:
308 finally:
309 del wlock
309 del wlock
310
310
311 if not state['good'] or not state['bad']:
311 if not state['good'] or not state['bad']:
312 return
312 return
313
313
314 # actually bisect
314 # actually bisect
315 node, changesets, good = hbisect.bisect(repo.changelog, state)
315 node, changesets, good = hbisect.bisect(repo.changelog, state)
316 if changesets == 0:
316 if changesets == 0:
317 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
317 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
318 displayer = cmdutil.show_changeset(ui, repo, {})
318 displayer = cmdutil.show_changeset(ui, repo, {})
319 displayer.show(changenode=node)
319 displayer.show(changenode=node)
320 elif node is not None:
320 elif node is not None:
321 # compute the approximate number of remaining tests
321 # compute the approximate number of remaining tests
322 tests, size = 0, 2
322 tests, size = 0, 2
323 while size <= changesets:
323 while size <= changesets:
324 tests, size = tests + 1, size * 2
324 tests, size = tests + 1, size * 2
325 rev = repo.changelog.rev(node)
325 rev = repo.changelog.rev(node)
326 ui.write(_("Testing changeset %s:%s "
326 ui.write(_("Testing changeset %s:%s "
327 "(%s changesets remaining, ~%s tests)\n")
327 "(%s changesets remaining, ~%s tests)\n")
328 % (rev, hg.short(node), changesets, tests))
328 % (rev, hg.short(node), changesets, tests))
329 if not noupdate:
329 if not noupdate:
330 cmdutil.bail_if_changed(repo)
330 cmdutil.bail_if_changed(repo)
331 return hg.clean(repo, node)
331 return hg.clean(repo, node)
332
332
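A typical search driven by the state handling above (revision numbers are illustrative):

hg bisect --reset             # clear any previous bisect.state
hg bisect --bad  127          # recent revision known to be broken
hg bisect --good 100          # older revision known to be fine
# build and test the revision bisect checked out, then repeat:
hg bisect --good              # or --bad / --skip for the working dir parent
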
333 def branch(ui, repo, label=None, **opts):
333 def branch(ui, repo, label=None, **opts):
334 """set or show the current branch name
334 """set or show the current branch name
335
335
336 With no argument, show the current branch name. With one argument,
336 With no argument, show the current branch name. With one argument,
337 set the working directory branch name (the branch does not exist in
337 set the working directory branch name (the branch does not exist in
338 the repository until the next commit).
338 the repository until the next commit).
339
339
340 Unless --force is specified, branch will not let you set a
340 Unless --force is specified, branch will not let you set a
341 branch name that shadows an existing branch.
341 branch name that shadows an existing branch.
342
342
343 Use the command 'hg update' to switch to an existing branch.
343 Use the command 'hg update' to switch to an existing branch.
344 """
344 """
345
345
346 if label:
346 if label:
347 if not opts.get('force') and label in repo.branchtags():
347 if not opts.get('force') and label in repo.branchtags():
348 if label not in [p.branch() for p in repo.workingctx().parents()]:
348 if label not in [p.branch() for p in repo.workingctx().parents()]:
349 raise util.Abort(_('a branch of the same name already exists'
349 raise util.Abort(_('a branch of the same name already exists'
350 ' (use --force to override)'))
350 ' (use --force to override)'))
351 repo.dirstate.setbranch(util.fromlocal(label))
351 repo.dirstate.setbranch(util.fromlocal(label))
352 ui.status(_('marked working directory as branch %s\n') % label)
352 ui.status(_('marked working directory as branch %s\n') % label)
353 else:
353 else:
354 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
354 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
355
355
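For instance (the branch name is illustrative), a sketch of naming a branch and switching away from it again:

hg branch stable-fixes                        # name the branch for the next commit
hg commit -m 'open the stable-fixes branch'
hg update default                             # switch back to an existing branch
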
356 def branches(ui, repo, active=False):
356 def branches(ui, repo, active=False):
357 """list repository named branches
357 """list repository named branches
358
358
359 List the repository's named branches, indicating which ones are
359 List the repository's named branches, indicating which ones are
360 inactive. If active is specified, only show active branches.
360 inactive. If active is specified, only show active branches.
361
361
362 A branch is considered active if it contains unmerged heads.
362 A branch is considered active if it contains unmerged heads.
363
363
364 Use the command 'hg update' to switch to an existing branch.
364 Use the command 'hg update' to switch to an existing branch.
365 """
365 """
366 b = repo.branchtags()
366 b = repo.branchtags()
367 heads = dict.fromkeys(repo.heads(), 1)
367 heads = dict.fromkeys(repo.heads(), 1)
368 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
368 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
369 l.sort()
369 l.sort()
370 l.reverse()
370 l.reverse()
371 for ishead, r, n, t in l:
371 for ishead, r, n, t in l:
372 if active and not ishead:
372 if active and not ishead:
373 # If we're only displaying active branches, abort the loop on
373 # If we're only displaying active branches, abort the loop on
374 # encountering the first inactive head
374 # encountering the first inactive head
375 break
375 break
376 else:
376 else:
377 hexfunc = ui.debugflag and hex or short
377 hexfunc = ui.debugflag and hex or short
378 if ui.quiet:
378 if ui.quiet:
379 ui.write("%s\n" % t)
379 ui.write("%s\n" % t)
380 else:
380 else:
381 spaces = " " * (30 - util.locallen(t))
381 spaces = " " * (30 - util.locallen(t))
382 # The code only gets here if inactive branches are being
382 # The code only gets here if inactive branches are being
383 # displayed or the branch is active.
383 # displayed or the branch is active.
384 isinactive = ((not ishead) and " (inactive)") or ''
384 isinactive = ((not ishead) and " (inactive)") or ''
385 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
385 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
386
386
387 def bundle(ui, repo, fname, dest=None, **opts):
387 def bundle(ui, repo, fname, dest=None, **opts):
388 """create a changegroup file
388 """create a changegroup file
389
389
390 Generate a compressed changegroup file collecting changesets not
390 Generate a compressed changegroup file collecting changesets not
391 found in the other repository.
391 found in the other repository.
392
392
393 If no destination repository is specified the destination is assumed
393 If no destination repository is specified the destination is assumed
394 to have all the nodes specified by one or more --base parameters.
394 to have all the nodes specified by one or more --base parameters.
395 To create a bundle containing all changesets, use --base null.
395 To create a bundle containing all changesets, use --base null.
396
396
397 The bundle file can then be transferred using conventional means and
397 The bundle file can then be transferred using conventional means and
398 applied to another repository with the unbundle or pull command.
398 applied to another repository with the unbundle or pull command.
399 This is useful when direct push and pull are not available or when
399 This is useful when direct push and pull are not available or when
400 exporting an entire repository is undesirable.
400 exporting an entire repository is undesirable.
401
401
402 Applying bundles preserves all changeset contents including
402 Applying bundles preserves all changeset contents including
403 permissions, copy/rename information, and revision history.
403 permissions, copy/rename information, and revision history.
404 """
404 """
405 revs = opts.get('rev') or None
405 revs = opts.get('rev') or None
406 if revs:
406 if revs:
407 revs = [repo.lookup(rev) for rev in revs]
407 revs = [repo.lookup(rev) for rev in revs]
408 base = opts.get('base')
408 base = opts.get('base')
409 if base:
409 if base:
410 if dest:
410 if dest:
411 raise util.Abort(_("--base is incompatible with specifiying "
411 raise util.Abort(_("--base is incompatible with specifiying "
412 "a destination"))
412 "a destination"))
413 base = [repo.lookup(rev) for rev in base]
413 base = [repo.lookup(rev) for rev in base]
414 # create the right base
414 # create the right base
415 # XXX: nodesbetween / changegroup* should be "fixed" instead
415 # XXX: nodesbetween / changegroup* should be "fixed" instead
416 o = []
416 o = []
417 has = {nullid: None}
417 has = {nullid: None}
418 for n in base:
418 for n in base:
419 has.update(repo.changelog.reachable(n))
419 has.update(repo.changelog.reachable(n))
420 if revs:
420 if revs:
421 visit = list(revs)
421 visit = list(revs)
422 else:
422 else:
423 visit = repo.changelog.heads()
423 visit = repo.changelog.heads()
424 seen = {}
424 seen = {}
425 while visit:
425 while visit:
426 n = visit.pop(0)
426 n = visit.pop(0)
427 parents = [p for p in repo.changelog.parents(n) if p not in has]
427 parents = [p for p in repo.changelog.parents(n) if p not in has]
428 if len(parents) == 0:
428 if len(parents) == 0:
429 o.insert(0, n)
429 o.insert(0, n)
430 else:
430 else:
431 for p in parents:
431 for p in parents:
432 if p not in seen:
432 if p not in seen:
433 seen[p] = 1
433 seen[p] = 1
434 visit.append(p)
434 visit.append(p)
435 else:
435 else:
436 cmdutil.setremoteconfig(ui, opts)
436 cmdutil.setremoteconfig(ui, opts)
437 dest, revs, checkout = hg.parseurl(
437 dest, revs, checkout = hg.parseurl(
438 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
438 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
439 other = hg.repository(ui, dest)
439 other = hg.repository(ui, dest)
440 o = repo.findoutgoing(other, force=opts['force'])
440 o = repo.findoutgoing(other, force=opts['force'])
441
441
442 if revs:
442 if revs:
443 cg = repo.changegroupsubset(o, revs, 'bundle')
443 cg = repo.changegroupsubset(o, revs, 'bundle')
444 else:
444 else:
445 cg = repo.changegroup(o, 'bundle')
445 cg = repo.changegroup(o, 'bundle')
446 changegroup.writebundle(cg, fname, "HG10BZ")
446 changegroup.writebundle(cg, fname, "HG10BZ")
447
447
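Two common invocations of the logic above (bundle file names are illustrative):

hg bundle outgoing.hg                 # changesets missing from the default push target
hg bundle --base null full-repo.hg    # the complete history, no remote repository needed
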
448 def cat(ui, repo, file1, *pats, **opts):
448 def cat(ui, repo, file1, *pats, **opts):
449 """output the current or given revision of files
449 """output the current or given revision of files
450
450
451 Print the specified files as they were at the given revision.
451 Print the specified files as they were at the given revision.
452 If no revision is given, the parent of the working directory is used,
452 If no revision is given, the parent of the working directory is used,
453 or tip if no revision is checked out.
453 or tip if no revision is checked out.
454
454
455 Output may be to a file, in which case the name of the file is
455 Output may be to a file, in which case the name of the file is
456 given using a format string. The formatting rules are the same as
456 given using a format string. The formatting rules are the same as
457 for the export command, with the following additions:
457 for the export command, with the following additions:
458
458
459 %s basename of file being printed
459 %s basename of file being printed
460 %d dirname of file being printed, or '.' if in repo root
460 %d dirname of file being printed, or '.' if in repo root
461 %p root-relative path name of file being printed
461 %p root-relative path name of file being printed
462 """
462 """
463 ctx = repo.changectx(opts['rev'])
463 ctx = repo.changectx(opts['rev'])
464 err = 1
464 err = 1
465 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
465 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
466 ctx.node()):
466 ctx.node()):
467 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
467 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
468 fp.write(ctx.filectx(abs).data())
468 fp.write(ctx.filectx(abs).data())
469 err = 0
469 err = 0
470 return err
470 return err
471
471
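For example, assuming the usual short flags -r (revision) and -o (output format string) and using the %p addition documented above:

hg cat -r tip -o '%p.orig' file1.c file2.c   # one output file per input file
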
472 def clone(ui, source, dest=None, **opts):
472 def clone(ui, source, dest=None, **opts):
473 """make a copy of an existing repository
473 """make a copy of an existing repository
474
474
475 Create a copy of an existing repository in a new directory.
475 Create a copy of an existing repository in a new directory.
476
476
477 If no destination directory name is specified, it defaults to the
477 If no destination directory name is specified, it defaults to the
478 basename of the source.
478 basename of the source.
479
479
480 The location of the source is added to the new repository's
480 The location of the source is added to the new repository's
481 .hg/hgrc file, as the default to be used for future pulls.
481 .hg/hgrc file, as the default to be used for future pulls.
482
482
483 For efficiency, hardlinks are used for cloning whenever the source
483 For efficiency, hardlinks are used for cloning whenever the source
484 and destination are on the same filesystem (note this applies only
484 and destination are on the same filesystem (note this applies only
485 to the repository data, not to the checked out files). Some
485 to the repository data, not to the checked out files). Some
486 filesystems, such as AFS, implement hardlinking incorrectly, but
486 filesystems, such as AFS, implement hardlinking incorrectly, but
487 do not report errors. In these cases, use the --pull option to
487 do not report errors. In these cases, use the --pull option to
488 avoid hardlinking.
488 avoid hardlinking.
489
489
490 You can safely clone repositories and checked out files using full
490 You can safely clone repositories and checked out files using full
491 hardlinks with
491 hardlinks with
492
492
493 $ cp -al REPO REPOCLONE
493 $ cp -al REPO REPOCLONE
494
494
495 which is the fastest way to clone. However, the operation is not
495 which is the fastest way to clone. However, the operation is not
496 atomic (making sure REPO is not modified during the operation is
496 atomic (making sure REPO is not modified during the operation is
497 up to you) and you have to make sure your editor breaks hardlinks
497 up to you) and you have to make sure your editor breaks hardlinks
498 (Emacs and most Linux Kernel tools do so).
498 (Emacs and most Linux Kernel tools do so).
499
499
500 If you use the -r option to clone up to a specific revision, no
500 If you use the -r option to clone up to a specific revision, no
501 subsequent revisions will be present in the cloned repository.
501 subsequent revisions will be present in the cloned repository.
502 This option implies --pull, even on local repositories.
502 This option implies --pull, even on local repositories.
503
503
504 See pull for valid source format details.
504 See pull for valid source format details.
505
505
506 It is possible to specify an ssh:// URL as the destination, but no
506 It is possible to specify an ssh:// URL as the destination, but no
507 .hg/hgrc and working directory will be created on the remote side.
507 .hg/hgrc and working directory will be created on the remote side.
508 Look at the help text for the pull command for important details
508 Look at the help text for the pull command for important details
509 about ssh:// URLs.
509 about ssh:// URLs.
510 """
510 """
511 cmdutil.setremoteconfig(ui, opts)
511 cmdutil.setremoteconfig(ui, opts)
512 hg.clone(ui, source, dest,
512 hg.clone(ui, source, dest,
513 pull=opts['pull'],
513 pull=opts['pull'],
514 stream=opts['uncompressed'],
514 stream=opts['uncompressed'],
515 rev=opts['rev'],
515 rev=opts['rev'],
516 update=not opts['noupdate'])
516 update=not opts['noupdate'])
517
517
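Typical uses of the options passed through above (URLs and paths are illustrative):

hg clone -r 1000 http://example.com/hg/project project-old   # history up to rev 1000, implies --pull
hg clone --pull /shared/repo ~/repo                          # avoid hardlinks, e.g. on AFS
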
518 def commit(ui, repo, *pats, **opts):
518 def commit(ui, repo, *pats, **opts):
519 """commit the specified files or all outstanding changes
519 """commit the specified files or all outstanding changes
520
520
521 Commit changes to the given files into the repository.
521 Commit changes to the given files into the repository.
522
522
523 If a list of files is omitted, all changes reported by "hg status"
523 If a list of files is omitted, all changes reported by "hg status"
524 will be committed.
524 will be committed.
525
525
526 If no commit message is specified, the configured editor is started to
526 If no commit message is specified, the configured editor is started to
527 enter a message.
527 enter a message.
528 """
528 """
529 def commitfunc(ui, repo, files, message, match, opts):
529 def commitfunc(ui, repo, files, message, match, opts):
530 return repo.commit(files, message, opts['user'], opts['date'], match,
530 return repo.commit(files, message, opts['user'], opts['date'], match,
531 force_editor=opts.get('force_editor'))
531 force_editor=opts.get('force_editor'))
532 cmdutil.commit(ui, repo, commitfunc, pats, opts)
532 cmdutil.commit(ui, repo, commitfunc, pats, opts)
533
533
534 def copy(ui, repo, *pats, **opts):
534 def copy(ui, repo, *pats, **opts):
535 """mark files as copied for the next commit
535 """mark files as copied for the next commit
536
536
537 Mark dest as having copies of source files. If dest is a
537 Mark dest as having copies of source files. If dest is a
538 directory, copies are put in that directory. If dest is a file,
538 directory, copies are put in that directory. If dest is a file,
539 there can only be one source.
539 there can only be one source.
540
540
541 By default, this command copies the contents of files as they
541 By default, this command copies the contents of files as they
542 stand in the working directory. If invoked with --after, the
542 stand in the working directory. If invoked with --after, the
543 operation is recorded, but no copying is performed.
543 operation is recorded, but no copying is performed.
544
544
545 This command takes effect in the next commit. To undo a copy
545 This command takes effect in the next commit. To undo a copy
546 before that, see hg revert.
546 before that, see hg revert.
547 """
547 """
548 wlock = repo.wlock(False)
548 wlock = repo.wlock(False)
549 try:
549 try:
550 return cmdutil.copy(ui, repo, pats, opts)
550 return cmdutil.copy(ui, repo, pats, opts)
551 finally:
551 finally:
552 del wlock
552 del wlock
553
553
554 def debugancestor(ui, index, rev1, rev2):
554 def debugancestor(ui, index, rev1, rev2):
555 """find the ancestor revision of two revisions in a given index"""
555 """find the ancestor revision of two revisions in a given index"""
556 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
556 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
557 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
557 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
558 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
558 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
559
559
560 def debugcomplete(ui, cmd='', **opts):
560 def debugcomplete(ui, cmd='', **opts):
561 """returns the completion list associated with the given command"""
561 """returns the completion list associated with the given command"""
562
562
563 if opts['options']:
563 if opts['options']:
564 options = []
564 options = []
565 otables = [globalopts]
565 otables = [globalopts]
566 if cmd:
566 if cmd:
567 aliases, entry = cmdutil.findcmd(ui, cmd, table)
567 aliases, entry = cmdutil.findcmd(ui, cmd, table)
568 otables.append(entry[1])
568 otables.append(entry[1])
569 for t in otables:
569 for t in otables:
570 for o in t:
570 for o in t:
571 if o[0]:
571 if o[0]:
572 options.append('-%s' % o[0])
572 options.append('-%s' % o[0])
573 options.append('--%s' % o[1])
573 options.append('--%s' % o[1])
574 ui.write("%s\n" % "\n".join(options))
574 ui.write("%s\n" % "\n".join(options))
575 return
575 return
576
576
577 clist = cmdutil.findpossible(ui, cmd, table).keys()
577 clist = cmdutil.findpossible(ui, cmd, table).keys()
578 clist.sort()
578 clist.sort()
579 ui.write("%s\n" % "\n".join(clist))
579 ui.write("%s\n" % "\n".join(clist))
580
580
581 def debugfsinfo(ui, path = "."):
581 def debugfsinfo(ui, path = "."):
582 file('.debugfsinfo', 'w').write('')
582 file('.debugfsinfo', 'w').write('')
583 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
583 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
584 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
584 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
585 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
585 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
586 and 'yes' or 'no'))
586 and 'yes' or 'no'))
587 os.unlink('.debugfsinfo')
587 os.unlink('.debugfsinfo')
588
588
589 def debugrebuildstate(ui, repo, rev=""):
589 def debugrebuildstate(ui, repo, rev=""):
590 """rebuild the dirstate as it would look like for the given revision"""
590 """rebuild the dirstate as it would look like for the given revision"""
591 if rev == "":
591 if rev == "":
592 rev = repo.changelog.tip()
592 rev = repo.changelog.tip()
593 ctx = repo.changectx(rev)
593 ctx = repo.changectx(rev)
594 files = ctx.manifest()
594 files = ctx.manifest()
595 wlock = repo.wlock()
595 wlock = repo.wlock()
596 try:
596 try:
597 repo.dirstate.rebuild(rev, files)
597 repo.dirstate.rebuild(rev, files)
598 finally:
598 finally:
599 del wlock
599 del wlock
600
600
601 def debugcheckstate(ui, repo):
601 def debugcheckstate(ui, repo):
602 """validate the correctness of the current dirstate"""
602 """validate the correctness of the current dirstate"""
603 parent1, parent2 = repo.dirstate.parents()
603 parent1, parent2 = repo.dirstate.parents()
604 m1 = repo.changectx(parent1).manifest()
604 m1 = repo.changectx(parent1).manifest()
605 m2 = repo.changectx(parent2).manifest()
605 m2 = repo.changectx(parent2).manifest()
606 errors = 0
606 errors = 0
607 for f in repo.dirstate:
607 for f in repo.dirstate:
608 state = repo.dirstate[f]
608 state = repo.dirstate[f]
609 if state in "nr" and f not in m1:
609 if state in "nr" and f not in m1:
610 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
610 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
611 errors += 1
611 errors += 1
612 if state in "a" and f in m1:
612 if state in "a" and f in m1:
613 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
613 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
614 errors += 1
614 errors += 1
615 if state in "m" and f not in m1 and f not in m2:
615 if state in "m" and f not in m1 and f not in m2:
616 ui.warn(_("%s in state %s, but not in either manifest\n") %
616 ui.warn(_("%s in state %s, but not in either manifest\n") %
617 (f, state))
617 (f, state))
618 errors += 1
618 errors += 1
619 for f in m1:
619 for f in m1:
620 state = repo.dirstate[f]
620 state = repo.dirstate[f]
621 if state not in "nrm":
621 if state not in "nrm":
622 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
622 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
623 errors += 1
623 errors += 1
624 if errors:
624 if errors:
625 error = _(".hg/dirstate inconsistent with current parent's manifest")
625 error = _(".hg/dirstate inconsistent with current parent's manifest")
626 raise util.Abort(error)
626 raise util.Abort(error)
627
627
628 def showconfig(ui, repo, *values, **opts):
628 def showconfig(ui, repo, *values, **opts):
629 """show combined config settings from all hgrc files
629 """show combined config settings from all hgrc files
630
630
631 With no args, print names and values of all config items.
631 With no args, print names and values of all config items.
632
632
633 With one arg of the form section.name, print just the value of
633 With one arg of the form section.name, print just the value of
634 that config item.
634 that config item.
635
635
636 With multiple args, print names and values of all config items
636 With multiple args, print names and values of all config items
637 with matching section names."""
637 with matching section names."""
638
638
639 untrusted = bool(opts.get('untrusted'))
639 untrusted = bool(opts.get('untrusted'))
640 if values:
640 if values:
641 if len([v for v in values if '.' in v]) > 1:
641 if len([v for v in values if '.' in v]) > 1:
642 raise util.Abort(_('only one config item permitted'))
642 raise util.Abort(_('only one config item permitted'))
643 for section, name, value in ui.walkconfig(untrusted=untrusted):
643 for section, name, value in ui.walkconfig(untrusted=untrusted):
644 sectname = section + '.' + name
644 sectname = section + '.' + name
645 if values:
645 if values:
646 for v in values:
646 for v in values:
647 if v == section:
647 if v == section:
648 ui.write('%s=%s\n' % (sectname, value))
648 ui.write('%s=%s\n' % (sectname, value))
649 elif v == sectname:
649 elif v == sectname:
650 ui.write(value, '\n')
650 ui.write(value, '\n')
651 else:
651 else:
652 ui.write('%s=%s\n' % (sectname, value))
652 ui.write('%s=%s\n' % (sectname, value))
653
653
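The three forms described in the docstring, as a quick sketch:

hg showconfig                 # every section.name=value pair
hg showconfig ui.username     # just one value
hg showconfig paths           # all items whose section name matches
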
654 def debugsetparents(ui, repo, rev1, rev2=None):
654 def debugsetparents(ui, repo, rev1, rev2=None):
655 """manually set the parents of the current working directory
655 """manually set the parents of the current working directory
656
656
657 This is useful for writing repository conversion tools, but should
657 This is useful for writing repository conversion tools, but should
658 be used with care.
658 be used with care.
659 """
659 """
660
660
661 if not rev2:
661 if not rev2:
662 rev2 = hex(nullid)
662 rev2 = hex(nullid)
663
663
664 wlock = repo.wlock()
664 wlock = repo.wlock()
665 try:
665 try:
666 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
666 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
667 finally:
667 finally:
668 del wlock
668 del wlock
669
669
670 def debugstate(ui, repo):
670 def debugstate(ui, repo):
671 """show the contents of the current dirstate"""
671 """show the contents of the current dirstate"""
672 k = repo.dirstate._map.items()
672 k = repo.dirstate._map.items()
673 k.sort()
673 k.sort()
674 for file_, ent in k:
674 for file_, ent in k:
675 if ent[3] == -1:
675 if ent[3] == -1:
676 # Pad or slice to locale representation
676 # Pad or slice to locale representation
677 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
677 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
678 timestr = 'unset'
678 timestr = 'unset'
679 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
679 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
680 else:
680 else:
681 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
681 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
682 if ent[1] & 020000:
682 if ent[1] & 020000:
683 mode = 'lnk'
683 mode = 'lnk'
684 else:
684 else:
685 mode = '%3o' % (ent[1] & 0777)
685 mode = '%3o' % (ent[1] & 0777)
686 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
686 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
687 for f in repo.dirstate.copies():
687 for f in repo.dirstate.copies():
688 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
688 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
689
689
690 def debugdata(ui, file_, rev):
690 def debugdata(ui, file_, rev):
691 """dump the contents of a data file revision"""
691 """dump the contents of a data file revision"""
692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
693 try:
693 try:
694 ui.write(r.revision(r.lookup(rev)))
694 ui.write(r.revision(r.lookup(rev)))
695 except KeyError:
695 except KeyError:
696 raise util.Abort(_('invalid revision identifier %s') % rev)
696 raise util.Abort(_('invalid revision identifier %s') % rev)
697
697
698 def debugdate(ui, date, range=None, **opts):
698 def debugdate(ui, date, range=None, **opts):
699 """parse and display a date"""
699 """parse and display a date"""
700 if opts["extended"]:
700 if opts["extended"]:
701 d = util.parsedate(date, util.extendeddateformats)
701 d = util.parsedate(date, util.extendeddateformats)
702 else:
702 else:
703 d = util.parsedate(date)
703 d = util.parsedate(date)
704 ui.write("internal: %s %s\n" % d)
704 ui.write("internal: %s %s\n" % d)
705 ui.write("standard: %s\n" % util.datestr(d))
705 ui.write("standard: %s\n" % util.datestr(d))
706 if range:
706 if range:
707 m = util.matchdate(range)
707 m = util.matchdate(range)
708 ui.write("match: %s\n" % m(d[0]))
708 ui.write("match: %s\n" % m(d[0]))
709
709
710 def debugindex(ui, file_):
710 def debugindex(ui, file_):
711 """dump the contents of an index file"""
711 """dump the contents of an index file"""
712 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
712 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
713 ui.write(" rev offset length base linkrev" +
713 ui.write(" rev offset length base linkrev" +
714 " nodeid p1 p2\n")
714 " nodeid p1 p2\n")
715 for i in xrange(r.count()):
715 for i in xrange(r.count()):
716 node = r.node(i)
716 node = r.node(i)
717 try:
717 try:
718 pp = r.parents(node)
718 pp = r.parents(node)
719 except:
719 except:
720 pp = [nullid, nullid]
720 pp = [nullid, nullid]
721 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
721 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
722 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
722 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
723 short(node), short(pp[0]), short(pp[1])))
723 short(node), short(pp[0]), short(pp[1])))
724
724
725 def debugindexdot(ui, file_):
725 def debugindexdot(ui, file_):
726 """dump an index DAG as a .dot file"""
726 """dump an index DAG as a .dot file"""
727 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
727 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
728 ui.write("digraph G {\n")
728 ui.write("digraph G {\n")
729 for i in xrange(r.count()):
729 for i in xrange(r.count()):
730 node = r.node(i)
730 node = r.node(i)
731 pp = r.parents(node)
731 pp = r.parents(node)
732 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
732 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
733 if pp[1] != nullid:
733 if pp[1] != nullid:
734 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
734 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
735 ui.write("}\n")
735 ui.write("}\n")
736
736
737 def debuginstall(ui):
737 def debuginstall(ui):
738 '''test Mercurial installation'''
738 '''test Mercurial installation'''
739
739
740 def writetemp(contents):
740 def writetemp(contents):
741 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
741 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
742 f = os.fdopen(fd, "wb")
742 f = os.fdopen(fd, "wb")
743 f.write(contents)
743 f.write(contents)
744 f.close()
744 f.close()
745 return name
745 return name
746
746
747 problems = 0
747 problems = 0
748
748
749 # encoding
749 # encoding
750 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
750 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
751 try:
751 try:
752 util.fromlocal("test")
752 util.fromlocal("test")
753 except util.Abort, inst:
753 except util.Abort, inst:
754 ui.write(" %s\n" % inst)
754 ui.write(" %s\n" % inst)
755 ui.write(_(" (check that your locale is properly set)\n"))
755 ui.write(_(" (check that your locale is properly set)\n"))
756 problems += 1
756 problems += 1
757
757
758 # compiled modules
758 # compiled modules
759 ui.status(_("Checking extensions...\n"))
759 ui.status(_("Checking extensions...\n"))
760 try:
760 try:
761 import bdiff, mpatch, base85
761 import bdiff, mpatch, base85
762 except Exception, inst:
762 except Exception, inst:
763 ui.write(" %s\n" % inst)
763 ui.write(" %s\n" % inst)
764 ui.write(_(" One or more extensions could not be found"))
764 ui.write(_(" One or more extensions could not be found"))
765 ui.write(_(" (check that you compiled the extensions)\n"))
765 ui.write(_(" (check that you compiled the extensions)\n"))
766 problems += 1
766 problems += 1
767
767
768 # templates
768 # templates
769 ui.status(_("Checking templates...\n"))
769 ui.status(_("Checking templates...\n"))
770 try:
770 try:
771 import templater
771 import templater
772 t = templater.templater(templater.templatepath("map-cmdline.default"))
772 t = templater.templater(templater.templatepath("map-cmdline.default"))
773 except Exception, inst:
773 except Exception, inst:
774 ui.write(" %s\n" % inst)
774 ui.write(" %s\n" % inst)
775 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
775 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
776 problems += 1
776 problems += 1
777
777
778 # patch
778 # patch
779 ui.status(_("Checking patch...\n"))
779 ui.status(_("Checking patch...\n"))
780 patchproblems = 0
780 patchproblems = 0
781 a = "1\n2\n3\n4\n"
781 a = "1\n2\n3\n4\n"
782 b = "1\n2\n3\ninsert\n4\n"
782 b = "1\n2\n3\ninsert\n4\n"
783 fa = writetemp(a)
783 fa = writetemp(a)
784 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
784 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
785 os.path.basename(fa))
785 os.path.basename(fa))
786 fd = writetemp(d)
786 fd = writetemp(d)
787
787
788 files = {}
788 files = {}
789 try:
789 try:
790 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
790 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
791 except util.Abort, e:
791 except util.Abort, e:
792 ui.write(_(" patch call failed:\n"))
792 ui.write(_(" patch call failed:\n"))
793 ui.write(" " + str(e) + "\n")
793 ui.write(" " + str(e) + "\n")
794 patchproblems += 1
794 patchproblems += 1
795 else:
795 else:
796 if list(files) != [os.path.basename(fa)]:
796 if list(files) != [os.path.basename(fa)]:
797 ui.write(_(" unexpected patch output!\n"))
797 ui.write(_(" unexpected patch output!\n"))
798 patchproblems += 1
798 patchproblems += 1
799 a = file(fa).read()
799 a = file(fa).read()
800 if a != b:
800 if a != b:
801 ui.write(_(" patch test failed!\n"))
801 ui.write(_(" patch test failed!\n"))
802 patchproblems += 1
802 patchproblems += 1
803
803
804 if patchproblems:
804 if patchproblems:
805 if ui.config('ui', 'patch'):
805 if ui.config('ui', 'patch'):
806 ui.write(_(" (Current patch tool may be incompatible with patch,"
806 ui.write(_(" (Current patch tool may be incompatible with patch,"
807 " or misconfigured. Please check your .hgrc file)\n"))
807 " or misconfigured. Please check your .hgrc file)\n"))
808 else:
808 else:
809 ui.write(_(" Internal patcher failure, please report this error"
809 ui.write(_(" Internal patcher failure, please report this error"
810 " to http://www.selenic.com/mercurial/bts\n"))
810 " to http://www.selenic.com/mercurial/bts\n"))
811 problems += patchproblems
811 problems += patchproblems
812
812
813 os.unlink(fa)
813 os.unlink(fa)
814 os.unlink(fd)
814 os.unlink(fd)
815
815
816 # merge helper
816 # merge helper
817 ui.status(_("Checking merge helper...\n"))
817 ui.status(_("Checking merge helper...\n"))
818 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
818 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
819 or "hgmerge")
819 or "hgmerge")
820 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
820 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
821 if not cmdpath:
821 if not cmdpath:
822 if cmd == 'hgmerge':
822 if cmd == 'hgmerge':
823 ui.write(_(" No merge helper set and can't find default"
823 ui.write(_(" No merge helper set and can't find default"
824 " hgmerge script in PATH\n"))
824 " hgmerge script in PATH\n"))
825 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
825 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
826 else:
826 else:
827 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
827 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
828 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
828 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
829 problems += 1
829 problems += 1
830 else:
830 else:
831 # actually attempt a patch here
831 # actually attempt a patch here
832 fa = writetemp("1\n2\n3\n4\n")
832 fa = writetemp("1\n2\n3\n4\n")
833 fl = writetemp("1\n2\n3\ninsert\n4\n")
833 fl = writetemp("1\n2\n3\ninsert\n4\n")
834 fr = writetemp("begin\n1\n2\n3\n4\n")
834 fr = writetemp("begin\n1\n2\n3\n4\n")
835 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
835 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
836 if r:
836 if r:
837 ui.write(_(" Got unexpected merge error %d!\n") % r)
837 ui.write(_(" Got unexpected merge error %d!\n") % r)
838 problems += 1
838 problems += 1
839 m = file(fl).read()
839 m = file(fl).read()
840 if m != "begin\n1\n2\n3\ninsert\n4\n":
840 if m != "begin\n1\n2\n3\ninsert\n4\n":
841 ui.write(_(" Got unexpected merge results!\n"))
841 ui.write(_(" Got unexpected merge results!\n"))
842 ui.write(_(" (your merge helper may have the"
842 ui.write(_(" (your merge helper may have the"
843 " wrong argument order)\n"))
843 " wrong argument order)\n"))
844 ui.write(_(" Result: %r\n") % m)
844 ui.write(_(" Result: %r\n") % m)
845 problems += 1
845 problems += 1
846 os.unlink(fa)
846 os.unlink(fa)
847 os.unlink(fl)
847 os.unlink(fl)
848 os.unlink(fr)
848 os.unlink(fr)
849
849
850 # editor
850 # editor
851 ui.status(_("Checking commit editor...\n"))
851 ui.status(_("Checking commit editor...\n"))
852 editor = ui.geteditor()
852 editor = ui.geteditor()
853 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
853 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
854 if not cmdpath:
854 if not cmdpath:
855 if editor == 'vi':
855 if editor == 'vi':
856 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
856 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
857 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
857 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
858 else:
858 else:
859 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
859 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
860 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
860 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
861 problems += 1
861 problems += 1
862
862
863 # check username
863 # check username
864 ui.status(_("Checking username...\n"))
864 ui.status(_("Checking username...\n"))
865 user = os.environ.get("HGUSER")
865 user = os.environ.get("HGUSER")
866 if user is None:
866 if user is None:
867 user = ui.config("ui", "username")
867 user = ui.config("ui", "username")
868 if user is None:
868 if user is None:
869 user = os.environ.get("EMAIL")
869 user = os.environ.get("EMAIL")
870 if not user:
870 if not user:
871 ui.warn(" ")
871 ui.warn(" ")
872 ui.username()
872 ui.username()
873 ui.write(_(" (specify a username in your .hgrc file)\n"))
873 ui.write(_(" (specify a username in your .hgrc file)\n"))
874
874
875 if not problems:
875 if not problems:
876 ui.status(_("No problems detected\n"))
876 ui.status(_("No problems detected\n"))
877 else:
877 else:
878 ui.write(_("%s problems detected,"
878 ui.write(_("%s problems detected,"
879 " please check your install!\n") % problems)
879 " please check your install!\n") % problems)
880
880
881 return problems
881 return problems
882
882
883 def debugrename(ui, repo, file1, *pats, **opts):
883 def debugrename(ui, repo, file1, *pats, **opts):
884 """dump rename information"""
884 """dump rename information"""
885
885
886 ctx = repo.changectx(opts.get('rev', 'tip'))
886 ctx = repo.changectx(opts.get('rev', 'tip'))
887 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
887 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
888 ctx.node()):
888 ctx.node()):
889 fctx = ctx.filectx(abs)
889 fctx = ctx.filectx(abs)
890 m = fctx.filelog().renamed(fctx.filenode())
890 m = fctx.filelog().renamed(fctx.filenode())
891 if m:
891 if m:
892 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
892 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
893 else:
893 else:
894 ui.write(_("%s not renamed\n") % rel)
894 ui.write(_("%s not renamed\n") % rel)
895
895
896 def debugwalk(ui, repo, *pats, **opts):
896 def debugwalk(ui, repo, *pats, **opts):
897 """show how files match on given patterns"""
897 """show how files match on given patterns"""
898 items = list(cmdutil.walk(repo, pats, opts))
898 items = list(cmdutil.walk(repo, pats, opts))
899 if not items:
899 if not items:
900 return
900 return
901 fmt = '%%s %%-%ds %%-%ds %%s' % (
901 fmt = '%%s %%-%ds %%-%ds %%s' % (
902 max([len(abs) for (src, abs, rel, exact) in items]),
902 max([len(abs) for (src, abs, rel, exact) in items]),
903 max([len(rel) for (src, abs, rel, exact) in items]))
903 max([len(rel) for (src, abs, rel, exact) in items]))
904 for src, abs, rel, exact in items:
904 for src, abs, rel, exact in items:
905 line = fmt % (src, abs, rel, exact and 'exact' or '')
905 line = fmt % (src, abs, rel, exact and 'exact' or '')
906 ui.write("%s\n" % line.rstrip())
906 ui.write("%s\n" % line.rstrip())
907
907
908 def diff(ui, repo, *pats, **opts):
908 def diff(ui, repo, *pats, **opts):
909 """diff repository (or selected files)
909 """diff repository (or selected files)
910
910
911 Show differences between revisions for the specified files.
911 Show differences between revisions for the specified files.
912
912
913 Differences between files are shown using the unified diff format.
913 Differences between files are shown using the unified diff format.
914
914
915 NOTE: diff may generate unexpected results for merges, as it will
915 NOTE: diff may generate unexpected results for merges, as it will
916 default to comparing against the working directory's first parent
916 default to comparing against the working directory's first parent
917 changeset if no revisions are specified.
917 changeset if no revisions are specified.
918
918
919 When two revision arguments are given, then changes are shown
919 When two revision arguments are given, then changes are shown
920 between those revisions. If only one revision is specified then
920 between those revisions. If only one revision is specified then
921 that revision is compared to the working directory, and, when no
921 that revision is compared to the working directory, and, when no
922 revisions are specified, the working directory files are compared
922 revisions are specified, the working directory files are compared
923 to its parent.
923 to its parent.
924
924
925 Without the -a option, diff will avoid generating diffs of files
925 Without the -a option, diff will avoid generating diffs of files
926 it detects as binary. With -a, diff will generate a diff anyway,
926 it detects as binary. With -a, diff will generate a diff anyway,
927 probably with undesirable results.
927 probably with undesirable results.
928 """
928 """
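# Editorial sketch (not part of this changeset): typical invocations of the
# behaviour described in the docstring above, with hypothetical revision
# numbers and file names:
#
#   hg diff                        # working directory vs. its first parent
#   hg diff -r 10 some/file.py     # rev 10 vs. the working directory
#   hg diff -r 10 -r 12            # changes between revs 10 and 12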
929 node1, node2 = cmdutil.revpair(repo, opts['rev'])
929 node1, node2 = cmdutil.revpair(repo, opts['rev'])
930
930
931 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
931 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
932
932
933 patch.diff(repo, node1, node2, fns, match=matchfn,
933 patch.diff(repo, node1, node2, fns, match=matchfn,
934 opts=patch.diffopts(ui, opts))
934 opts=patch.diffopts(ui, opts))
935
935
936 def export(ui, repo, *changesets, **opts):
936 def export(ui, repo, *changesets, **opts):
937 """dump the header and diffs for one or more changesets
937 """dump the header and diffs for one or more changesets
938
938
939 Print the changeset header and diffs for one or more revisions.
939 Print the changeset header and diffs for one or more revisions.
940
940
941 The information shown in the changeset header is: author,
941 The information shown in the changeset header is: author,
942 changeset hash, parent(s) and commit comment.
942 changeset hash, parent(s) and commit comment.
943
943
944 NOTE: export may generate unexpected diff output for merge changesets,
944 NOTE: export may generate unexpected diff output for merge changesets,
945 as it will compare the merge changeset against its first parent only.
945 as it will compare the merge changeset against its first parent only.
946
946
947 Output may be to a file, in which case the name of the file is
947 Output may be to a file, in which case the name of the file is
948 given using a format string. The formatting rules are as follows:
948 given using a format string. The formatting rules are as follows:
949
949
950 %% literal "%" character
950 %% literal "%" character
951 %H changeset hash (40 bytes of hexadecimal)
951 %H changeset hash (40 bytes of hexadecimal)
952 %N number of patches being generated
952 %N number of patches being generated
953 %R changeset revision number
953 %R changeset revision number
954 %b basename of the exporting repository
954 %b basename of the exporting repository
955 %h short-form changeset hash (12 bytes of hexadecimal)
955 %h short-form changeset hash (12 bytes of hexadecimal)
956 %n zero-padded sequence number, starting at 1
956 %n zero-padded sequence number, starting at 1
957 %r zero-padded changeset revision number
957 %r zero-padded changeset revision number
958
958
959 Without the -a option, export will avoid generating diffs of files
959 Without the -a option, export will avoid generating diffs of files
960 it detects as binary. With -a, export will generate a diff anyway,
960 it detects as binary. With -a, export will generate a diff anyway,
961 probably with undesirable results.
961 probably with undesirable results.
962
962
963 With the --switch-parent option, the diff will be against the second
963 With the --switch-parent option, the diff will be against the second
964 parent. It can be useful to review a merge.
964 parent. It can be useful to review a merge.
965 """
965 """
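# Editorial sketch (not part of this changeset): the format keys listed in the
# docstring above drive the output file names; for a hypothetical repository
# whose basename is "work", something like
#
#   hg export --output '%b-r%R-%h.patch' 123 124
#
# might produce work-r123-9a3b5c7d9e1f.patch and work-r124-0c2e4a6b8d0f.patch
# (the hash values here are made up for illustration).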
966 if not changesets:
966 if not changesets:
967 raise util.Abort(_("export requires at least one changeset"))
967 raise util.Abort(_("export requires at least one changeset"))
968 revs = cmdutil.revrange(repo, changesets)
968 revs = cmdutil.revrange(repo, changesets)
969 if len(revs) > 1:
969 if len(revs) > 1:
970 ui.note(_('exporting patches:\n'))
970 ui.note(_('exporting patches:\n'))
971 else:
971 else:
972 ui.note(_('exporting patch:\n'))
972 ui.note(_('exporting patch:\n'))
973 patch.export(repo, revs, template=opts['output'],
973 patch.export(repo, revs, template=opts['output'],
974 switch_parent=opts['switch_parent'],
974 switch_parent=opts['switch_parent'],
975 opts=patch.diffopts(ui, opts))
975 opts=patch.diffopts(ui, opts))
976
976
977 def grep(ui, repo, pattern, *pats, **opts):
977 def grep(ui, repo, pattern, *pats, **opts):
978 """search for a pattern in specified files and revisions
978 """search for a pattern in specified files and revisions
979
979
980 Search revisions of files for a regular expression.
980 Search revisions of files for a regular expression.
981
981
982 This command behaves differently than Unix grep. It only accepts
982 This command behaves differently than Unix grep. It only accepts
983 Python/Perl regexps. It searches repository history, not the
983 Python/Perl regexps. It searches repository history, not the
984 working directory. It always prints the revision number in which
984 working directory. It always prints the revision number in which
985 a match appears.
985 a match appears.
986
986
987 By default, grep only prints output for the first revision of a
987 By default, grep only prints output for the first revision of a
988 file in which it finds a match. To get it to print every revision
988 file in which it finds a match. To get it to print every revision
989 that contains a change in match status ("-" for a match that
989 that contains a change in match status ("-" for a match that
990 becomes a non-match, or "+" for a non-match that becomes a match),
990 becomes a non-match, or "+" for a non-match that becomes a match),
991 use the --all flag.
991 use the --all flag.
992 """
992 """
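# Editorial sketch (not part of this changeset): example of the --all behaviour
# described above, with a hypothetical pattern and file name:
#
#   hg grep --all --line-number TODO src/module.py
#
# which prints one line per revision where the match status changes, marked
# "+" when the pattern appears and "-" when it disappears.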
993 reflags = 0
993 reflags = 0
994 if opts['ignore_case']:
994 if opts['ignore_case']:
995 reflags |= re.I
995 reflags |= re.I
996 try:
996 try:
997 regexp = re.compile(pattern, reflags)
997 regexp = re.compile(pattern, reflags)
998 except Exception, inst:
998 except Exception, inst:
999 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
999 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1000 return None
1000 return None
1001 sep, eol = ':', '\n'
1001 sep, eol = ':', '\n'
1002 if opts['print0']:
1002 if opts['print0']:
1003 sep = eol = '\0'
1003 sep = eol = '\0'
1004
1004
1005 fcache = {}
1005 fcache = {}
1006 def getfile(fn):
1006 def getfile(fn):
1007 if fn not in fcache:
1007 if fn not in fcache:
1008 fcache[fn] = repo.file(fn)
1008 fcache[fn] = repo.file(fn)
1009 return fcache[fn]
1009 return fcache[fn]
1010
1010
1011 def matchlines(body):
1011 def matchlines(body):
1012 begin = 0
1012 begin = 0
1013 linenum = 0
1013 linenum = 0
1014 while True:
1014 while True:
1015 match = regexp.search(body, begin)
1015 match = regexp.search(body, begin)
1016 if not match:
1016 if not match:
1017 break
1017 break
1018 mstart, mend = match.span()
1018 mstart, mend = match.span()
1019 linenum += body.count('\n', begin, mstart) + 1
1019 linenum += body.count('\n', begin, mstart) + 1
1020 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1020 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1021 lend = body.find('\n', mend)
1021 lend = body.find('\n', mend)
1022 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1022 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1023 begin = lend + 1
1023 begin = lend + 1
1024
1024
1025 class linestate(object):
1025 class linestate(object):
1026 def __init__(self, line, linenum, colstart, colend):
1026 def __init__(self, line, linenum, colstart, colend):
1027 self.line = line
1027 self.line = line
1028 self.linenum = linenum
1028 self.linenum = linenum
1029 self.colstart = colstart
1029 self.colstart = colstart
1030 self.colend = colend
1030 self.colend = colend
1031
1031
1032 def __eq__(self, other):
1032 def __eq__(self, other):
1033 return self.line == other.line
1033 return self.line == other.line
1034
1034
1035 matches = {}
1035 matches = {}
1036 copies = {}
1036 copies = {}
1037 def grepbody(fn, rev, body):
1037 def grepbody(fn, rev, body):
1038 matches[rev].setdefault(fn, [])
1038 matches[rev].setdefault(fn, [])
1039 m = matches[rev][fn]
1039 m = matches[rev][fn]
1040 for lnum, cstart, cend, line in matchlines(body):
1040 for lnum, cstart, cend, line in matchlines(body):
1041 s = linestate(line, lnum, cstart, cend)
1041 s = linestate(line, lnum, cstart, cend)
1042 m.append(s)
1042 m.append(s)
1043
1043
1044 def difflinestates(a, b):
1044 def difflinestates(a, b):
1045 sm = difflib.SequenceMatcher(None, a, b)
1045 sm = difflib.SequenceMatcher(None, a, b)
1046 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1046 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1047 if tag == 'insert':
1047 if tag == 'insert':
1048 for i in xrange(blo, bhi):
1048 for i in xrange(blo, bhi):
1049 yield ('+', b[i])
1049 yield ('+', b[i])
1050 elif tag == 'delete':
1050 elif tag == 'delete':
1051 for i in xrange(alo, ahi):
1051 for i in xrange(alo, ahi):
1052 yield ('-', a[i])
1052 yield ('-', a[i])
1053 elif tag == 'replace':
1053 elif tag == 'replace':
1054 for i in xrange(alo, ahi):
1054 for i in xrange(alo, ahi):
1055 yield ('-', a[i])
1055 yield ('-', a[i])
1056 for i in xrange(blo, bhi):
1056 for i in xrange(blo, bhi):
1057 yield ('+', b[i])
1057 yield ('+', b[i])
1058
1058
1059 prev = {}
1059 prev = {}
1060 def display(fn, rev, states, prevstates):
1060 def display(fn, rev, states, prevstates):
1061 found = False
1061 found = False
1062 filerevmatches = {}
1062 filerevmatches = {}
1063 r = prev.get(fn, -1)
1063 r = prev.get(fn, -1)
1064 if opts['all']:
1064 if opts['all']:
1065 iter = difflinestates(states, prevstates)
1065 iter = difflinestates(states, prevstates)
1066 else:
1066 else:
1067 iter = [('', l) for l in prevstates]
1067 iter = [('', l) for l in prevstates]
1068 for change, l in iter:
1068 for change, l in iter:
1069 cols = [fn, str(r)]
1069 cols = [fn, str(r)]
1070 if opts['line_number']:
1070 if opts['line_number']:
1071 cols.append(str(l.linenum))
1071 cols.append(str(l.linenum))
1072 if opts['all']:
1072 if opts['all']:
1073 cols.append(change)
1073 cols.append(change)
1074 if opts['user']:
1074 if opts['user']:
1075 cols.append(ui.shortuser(get(r)[1]))
1075 cols.append(ui.shortuser(get(r)[1]))
1076 if opts['files_with_matches']:
1076 if opts['files_with_matches']:
1077 c = (fn, r)
1077 c = (fn, r)
1078 if c in filerevmatches:
1078 if c in filerevmatches:
1079 continue
1079 continue
1080 filerevmatches[c] = 1
1080 filerevmatches[c] = 1
1081 else:
1081 else:
1082 cols.append(l.line)
1082 cols.append(l.line)
1083 ui.write(sep.join(cols), eol)
1083 ui.write(sep.join(cols), eol)
1084 found = True
1084 found = True
1085 return found
1085 return found
1086
1086
1087 fstate = {}
1087 fstate = {}
1088 skip = {}
1088 skip = {}
1089 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1089 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1090 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1090 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1091 found = False
1091 found = False
1092 follow = opts.get('follow')
1092 follow = opts.get('follow')
1093 for st, rev, fns in changeiter:
1093 for st, rev, fns in changeiter:
1094 if st == 'window':
1094 if st == 'window':
1095 matches.clear()
1095 matches.clear()
1096 elif st == 'add':
1096 elif st == 'add':
1097 mf = repo.changectx(rev).manifest()
1097 mf = repo.changectx(rev).manifest()
1098 matches[rev] = {}
1098 matches[rev] = {}
1099 for fn in fns:
1099 for fn in fns:
1100 if fn in skip:
1100 if fn in skip:
1101 continue
1101 continue
1102 try:
1102 try:
1103 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1103 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1104 fstate.setdefault(fn, [])
1104 fstate.setdefault(fn, [])
1105 if follow:
1105 if follow:
1106 copied = getfile(fn).renamed(mf[fn])
1106 copied = getfile(fn).renamed(mf[fn])
1107 if copied:
1107 if copied:
1108 copies.setdefault(rev, {})[fn] = copied[0]
1108 copies.setdefault(rev, {})[fn] = copied[0]
1109 except KeyError:
1109 except KeyError:
1110 pass
1110 pass
1111 elif st == 'iter':
1111 elif st == 'iter':
1112 states = matches[rev].items()
1112 states = matches[rev].items()
1113 states.sort()
1113 states.sort()
1114 for fn, m in states:
1114 for fn, m in states:
1115 copy = copies.get(rev, {}).get(fn)
1115 copy = copies.get(rev, {}).get(fn)
1116 if fn in skip:
1116 if fn in skip:
1117 if copy:
1117 if copy:
1118 skip[copy] = True
1118 skip[copy] = True
1119 continue
1119 continue
1120 if fn in prev or fstate[fn]:
1120 if fn in prev or fstate[fn]:
1121 r = display(fn, rev, m, fstate[fn])
1121 r = display(fn, rev, m, fstate[fn])
1122 found = found or r
1122 found = found or r
1123 if r and not opts['all']:
1123 if r and not opts['all']:
1124 skip[fn] = True
1124 skip[fn] = True
1125 if copy:
1125 if copy:
1126 skip[copy] = True
1126 skip[copy] = True
1127 fstate[fn] = m
1127 fstate[fn] = m
1128 if copy:
1128 if copy:
1129 fstate[copy] = m
1129 fstate[copy] = m
1130 prev[fn] = rev
1130 prev[fn] = rev
1131
1131
1132 fstate = fstate.items()
1132 fstate = fstate.items()
1133 fstate.sort()
1133 fstate.sort()
1134 for fn, state in fstate:
1134 for fn, state in fstate:
1135 if fn in skip:
1135 if fn in skip:
1136 continue
1136 continue
1137 if fn not in copies.get(prev[fn], {}):
1137 if fn not in copies.get(prev[fn], {}):
1138 found = display(fn, rev, {}, state) or found
1138 found = display(fn, rev, {}, state) or found
1139 return (not found and 1) or 0
1139 return (not found and 1) or 0
1140
1140
1141 def heads(ui, repo, *branchrevs, **opts):
1141 def heads(ui, repo, *branchrevs, **opts):
1142 """show current repository heads or show branch heads
1142 """show current repository heads or show branch heads
1143
1143
1144 With no arguments, show all repository head changesets.
1144 With no arguments, show all repository head changesets.
1145
1145
1146 If branch or revision names are given, this will show the heads of
1146 If branch or revision names are given, this will show the heads of
1147 the specified branches or the branches those revisions are tagged
1147 the specified branches or the branches those revisions are tagged
1148 with.
1148 with.
1149
1149
1150 Repository "heads" are changesets that don't have child
1150 Repository "heads" are changesets that don't have child
1151 changesets. They are where development generally takes place and
1151 changesets. They are where development generally takes place and
1152 are the usual targets for update and merge operations.
1152 are the usual targets for update and merge operations.
1153
1153
1154 Branch heads are changesets that have a given branch tag, but have
1154 Branch heads are changesets that have a given branch tag, but have
1155 no child changesets with that tag. They are usually where
1155 no child changesets with that tag. They are usually where
1156 development on the given branch takes place.
1156 development on the given branch takes place.
1157 """
1157 """
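# Editorial sketch (not part of this changeset): hypothetical usage of the two
# modes described above:
#
#   hg heads             # all repository heads
#   hg heads mybranch    # only the heads of the branch named "mybranch"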
1158 if opts['rev']:
1158 if opts['rev']:
1159 start = repo.lookup(opts['rev'])
1159 start = repo.lookup(opts['rev'])
1160 else:
1160 else:
1161 start = None
1161 start = None
1162 if not branchrevs:
1162 if not branchrevs:
1163 # Assume we're looking for repo-wide heads if no revs were specified.
1163 # Assume we're looking for repo-wide heads if no revs were specified.
1164 heads = repo.heads(start)
1164 heads = repo.heads(start)
1165 else:
1165 else:
1166 heads = []
1166 heads = []
1167 visitedset = util.set()
1167 visitedset = util.set()
1168 for branchrev in branchrevs:
1168 for branchrev in branchrevs:
1169 branch = repo.changectx(branchrev).branch()
1169 branch = repo.changectx(branchrev).branch()
1170 if branch in visitedset:
1170 if branch in visitedset:
1171 continue
1171 continue
1172 visitedset.add(branch)
1172 visitedset.add(branch)
1173 bheads = repo.branchheads(branch, start)
1173 bheads = repo.branchheads(branch, start)
1174 if not bheads:
1174 if not bheads:
1175 if branch != branchrev:
1175 if branch != branchrev:
1176 ui.warn(_("no changes on branch %s containing %s are "
1176 ui.warn(_("no changes on branch %s containing %s are "
1177 "reachable from %s\n")
1177 "reachable from %s\n")
1178 % (branch, branchrev, opts['rev']))
1178 % (branch, branchrev, opts['rev']))
1179 else:
1179 else:
1180 ui.warn(_("no changes on branch %s are reachable from %s\n")
1180 ui.warn(_("no changes on branch %s are reachable from %s\n")
1181 % (branch, opts['rev']))
1181 % (branch, opts['rev']))
1182 heads.extend(bheads)
1182 heads.extend(bheads)
1183 if not heads:
1183 if not heads:
1184 return 1
1184 return 1
1185 displayer = cmdutil.show_changeset(ui, repo, opts)
1185 displayer = cmdutil.show_changeset(ui, repo, opts)
1186 for n in heads:
1186 for n in heads:
1187 displayer.show(changenode=n)
1187 displayer.show(changenode=n)
1188
1188
1189 def help_(ui, name=None, with_version=False):
1189 def help_(ui, name=None, with_version=False):
1190 """show help for a command, extension, or list of commands
1190 """show help for a command, extension, or list of commands
1191
1191
1192 With no arguments, print a list of commands and short help.
1192 With no arguments, print a list of commands and short help.
1193
1193
1194 Given a command name, print help for that command.
1194 Given a command name, print help for that command.
1195
1195
1196 Given an extension name, print help for that extension, and the
1196 Given an extension name, print help for that extension, and the
1197 commands it provides."""
1197 commands it provides."""
1198 option_lists = []
1198 option_lists = []
1199
1199
1200 def addglobalopts(aliases):
1200 def addglobalopts(aliases):
1201 if ui.verbose:
1201 if ui.verbose:
1202 option_lists.append((_("global options:"), globalopts))
1202 option_lists.append((_("global options:"), globalopts))
1203 if name == 'shortlist':
1203 if name == 'shortlist':
1204 option_lists.append((_('use "hg help" for the full list '
1204 option_lists.append((_('use "hg help" for the full list '
1205 'of commands'), ()))
1205 'of commands'), ()))
1206 else:
1206 else:
1207 if name == 'shortlist':
1207 if name == 'shortlist':
1208 msg = _('use "hg help" for the full list of commands '
1208 msg = _('use "hg help" for the full list of commands '
1209 'or "hg -v" for details')
1209 'or "hg -v" for details')
1210 elif aliases:
1210 elif aliases:
1211 msg = _('use "hg -v help%s" to show aliases and '
1211 msg = _('use "hg -v help%s" to show aliases and '
1212 'global options') % (name and " " + name or "")
1212 'global options') % (name and " " + name or "")
1213 else:
1213 else:
1214 msg = _('use "hg -v help %s" to show global options') % name
1214 msg = _('use "hg -v help %s" to show global options') % name
1215 option_lists.append((msg, ()))
1215 option_lists.append((msg, ()))
1216
1216
1217 def helpcmd(name):
1217 def helpcmd(name):
1218 if with_version:
1218 if with_version:
1219 version_(ui)
1219 version_(ui)
1220 ui.write('\n')
1220 ui.write('\n')
1221 aliases, i = cmdutil.findcmd(ui, name, table)
1221 aliases, i = cmdutil.findcmd(ui, name, table)
1222 # synopsis
1222 # synopsis
1223 ui.write("%s\n" % i[2])
1223 ui.write("%s\n" % i[2])
1224
1224
1225 # aliases
1225 # aliases
1226 if not ui.quiet and len(aliases) > 1:
1226 if not ui.quiet and len(aliases) > 1:
1227 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1227 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1228
1228
1229 # description
1229 # description
1230 doc = i[0].__doc__
1230 doc = i[0].__doc__
1231 if not doc:
1231 if not doc:
1232 doc = _("(No help text available)")
1232 doc = _("(No help text available)")
1233 if ui.quiet:
1233 if ui.quiet:
1234 doc = doc.splitlines(0)[0]
1234 doc = doc.splitlines(0)[0]
1235 ui.write("\n%s\n" % doc.rstrip())
1235 ui.write("\n%s\n" % doc.rstrip())
1236
1236
1237 if not ui.quiet:
1237 if not ui.quiet:
1238 # options
1238 # options
1239 if i[1]:
1239 if i[1]:
1240 option_lists.append((_("options:\n"), i[1]))
1240 option_lists.append((_("options:\n"), i[1]))
1241
1241
1242 addglobalopts(False)
1242 addglobalopts(False)
1243
1243
1244 def helplist(header, select=None):
1244 def helplist(header, select=None):
1245 h = {}
1245 h = {}
1246 cmds = {}
1246 cmds = {}
1247 for c, e in table.items():
1247 for c, e in table.items():
1248 f = c.split("|", 1)[0]
1248 f = c.split("|", 1)[0]
1249 if select and not select(f):
1249 if select and not select(f):
1250 continue
1250 continue
1251 if name == "shortlist" and not f.startswith("^"):
1251 if name == "shortlist" and not f.startswith("^"):
1252 continue
1252 continue
1253 f = f.lstrip("^")
1253 f = f.lstrip("^")
1254 if not ui.debugflag and f.startswith("debug"):
1254 if not ui.debugflag and f.startswith("debug"):
1255 continue
1255 continue
1256 doc = e[0].__doc__
1256 doc = e[0].__doc__
1257 if not doc:
1257 if not doc:
1258 doc = _("(No help text available)")
1258 doc = _("(No help text available)")
1259 h[f] = doc.splitlines(0)[0].rstrip()
1259 h[f] = doc.splitlines(0)[0].rstrip()
1260 cmds[f] = c.lstrip("^")
1260 cmds[f] = c.lstrip("^")
1261
1261
1262 if not h:
1262 if not h:
1263 ui.status(_('no commands defined\n'))
1263 ui.status(_('no commands defined\n'))
1264 return
1264 return
1265
1265
1266 ui.status(header)
1266 ui.status(header)
1267 fns = h.keys()
1267 fns = h.keys()
1268 fns.sort()
1268 fns.sort()
1269 m = max(map(len, fns))
1269 m = max(map(len, fns))
1270 for f in fns:
1270 for f in fns:
1271 if ui.verbose:
1271 if ui.verbose:
1272 commands = cmds[f].replace("|",", ")
1272 commands = cmds[f].replace("|",", ")
1273 ui.write(" %s:\n %s\n"%(commands, h[f]))
1273 ui.write(" %s:\n %s\n"%(commands, h[f]))
1274 else:
1274 else:
1275 ui.write(' %-*s %s\n' % (m, f, h[f]))
1275 ui.write(' %-*s %s\n' % (m, f, h[f]))
1276
1276
1277 if not ui.quiet:
1277 if not ui.quiet:
1278 addglobalopts(True)
1278 addglobalopts(True)
1279
1279
1280 def helptopic(name):
1280 def helptopic(name):
1281 v = None
1281 v = None
1282 for i in help.helptable:
1282 for i in help.helptable:
1283 l = i.split('|')
1283 l = i.split('|')
1284 if name in l:
1284 if name in l:
1285 v = i
1285 v = i
1286 header = l[-1]
1286 header = l[-1]
1287 if not v:
1287 if not v:
1288 raise cmdutil.UnknownCommand(name)
1288 raise cmdutil.UnknownCommand(name)
1289
1289
1290 # description
1290 # description
1291 doc = help.helptable[v]
1291 doc = help.helptable[v]
1292 if not doc:
1292 if not doc:
1293 doc = _("(No help text available)")
1293 doc = _("(No help text available)")
1294 if callable(doc):
1294 if callable(doc):
1295 doc = doc()
1295 doc = doc()
1296
1296
1297 ui.write("%s\n" % header)
1297 ui.write("%s\n" % header)
1298 ui.write("%s\n" % doc.rstrip())
1298 ui.write("%s\n" % doc.rstrip())
1299
1299
1300 def helpext(name):
1300 def helpext(name):
1301 try:
1301 try:
1302 mod = extensions.find(name)
1302 mod = extensions.find(name)
1303 except KeyError:
1303 except KeyError:
1304 raise cmdutil.UnknownCommand(name)
1304 raise cmdutil.UnknownCommand(name)
1305
1305
1306 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1306 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1307 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1307 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1308 for d in doc[1:]:
1308 for d in doc[1:]:
1309 ui.write(d, '\n')
1309 ui.write(d, '\n')
1310
1310
1311 ui.status('\n')
1311 ui.status('\n')
1312
1312
1313 try:
1313 try:
1314 ct = mod.cmdtable
1314 ct = mod.cmdtable
1315 except AttributeError:
1315 except AttributeError:
1316 ct = {}
1316 ct = {}
1317
1317
1318 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1318 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1319 helplist(_('list of commands:\n\n'), modcmds.has_key)
1319 helplist(_('list of commands:\n\n'), modcmds.has_key)
1320
1320
1321 if name and name != 'shortlist':
1321 if name and name != 'shortlist':
1322 i = None
1322 i = None
1323 for f in (helpcmd, helptopic, helpext):
1323 for f in (helpcmd, helptopic, helpext):
1324 try:
1324 try:
1325 f(name)
1325 f(name)
1326 i = None
1326 i = None
1327 break
1327 break
1328 except cmdutil.UnknownCommand, inst:
1328 except cmdutil.UnknownCommand, inst:
1329 i = inst
1329 i = inst
1330 if i:
1330 if i:
1331 raise i
1331 raise i
1332
1332
1333 else:
1333 else:
1334 # program name
1334 # program name
1335 if ui.verbose or with_version:
1335 if ui.verbose or with_version:
1336 version_(ui)
1336 version_(ui)
1337 else:
1337 else:
1338 ui.status(_("Mercurial Distributed SCM\n"))
1338 ui.status(_("Mercurial Distributed SCM\n"))
1339 ui.status('\n')
1339 ui.status('\n')
1340
1340
1341 # list of commands
1341 # list of commands
1342 if name == "shortlist":
1342 if name == "shortlist":
1343 header = _('basic commands:\n\n')
1343 header = _('basic commands:\n\n')
1344 else:
1344 else:
1345 header = _('list of commands:\n\n')
1345 header = _('list of commands:\n\n')
1346
1346
1347 helplist(header)
1347 helplist(header)
1348
1348
1349 # list all option lists
1349 # list all option lists
1350 opt_output = []
1350 opt_output = []
1351 for title, options in option_lists:
1351 for title, options in option_lists:
1352 opt_output.append(("\n%s" % title, None))
1352 opt_output.append(("\n%s" % title, None))
1353 for shortopt, longopt, default, desc in options:
1353 for shortopt, longopt, default, desc in options:
1354 if "DEPRECATED" in desc and not ui.verbose: continue
1354 if "DEPRECATED" in desc and not ui.verbose: continue
1355 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1355 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1356 longopt and " --%s" % longopt),
1356 longopt and " --%s" % longopt),
1357 "%s%s" % (desc,
1357 "%s%s" % (desc,
1358 default
1358 default
1359 and _(" (default: %s)") % default
1359 and _(" (default: %s)") % default
1360 or "")))
1360 or "")))
1361
1361
1362 if opt_output:
1362 if opt_output:
1363 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1363 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1364 for first, second in opt_output:
1364 for first, second in opt_output:
1365 if second:
1365 if second:
1366 ui.write(" %-*s %s\n" % (opts_len, first, second))
1366 ui.write(" %-*s %s\n" % (opts_len, first, second))
1367 else:
1367 else:
1368 ui.write("%s\n" % first)
1368 ui.write("%s\n" % first)
1369
1369
1370 def identify(ui, repo, source=None,
1370 def identify(ui, repo, source=None,
1371 rev=None, num=None, id=None, branch=None, tags=None):
1371 rev=None, num=None, id=None, branch=None, tags=None):
1372 """identify the working copy or specified revision
1372 """identify the working copy or specified revision
1373
1373
1374 With no revision, print a summary of the current state of the repo.
1374 With no revision, print a summary of the current state of the repo.
1375
1375
1376 With a path, do a lookup in another repository.
1376 With a path, do a lookup in another repository.
1377
1377
1378 This summary identifies the repository state using one or two parent
1378 This summary identifies the repository state using one or two parent
1379 hash identifiers, followed by a "+" if there are uncommitted changes
1379 hash identifiers, followed by a "+" if there are uncommitted changes
1380 in the working directory, a list of tags for this revision and a branch
1380 in the working directory, a list of tags for this revision and a branch
1381 name for non-default branches.
1381 name for non-default branches.
1382 """
1382 """
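# Editorial sketch (not part of this changeset): the summary described above
# might look like this for a hypothetical dirty working directory on a named
# branch:
#
#   $ hg identify
#   9a3b5c7d9e1f+ (stable) tip
#
# short hash, "+" for uncommitted changes, branch name, then tags.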
1383
1383
1384 if not repo and not source:
1384 if not repo and not source:
1385 raise util.Abort(_("There is no Mercurial repository here "
1385 raise util.Abort(_("There is no Mercurial repository here "
1386 "(.hg not found)"))
1386 "(.hg not found)"))
1387
1387
1388 hexfunc = ui.debugflag and hex or short
1388 hexfunc = ui.debugflag and hex or short
1389 default = not (num or id or branch or tags)
1389 default = not (num or id or branch or tags)
1390 output = []
1390 output = []
1391
1391
1392 if source:
1392 if source:
1393 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1393 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1394 srepo = hg.repository(ui, source)
1394 srepo = hg.repository(ui, source)
1395 if not rev and revs:
1395 if not rev and revs:
1396 rev = revs[0]
1396 rev = revs[0]
1397 if not rev:
1397 if not rev:
1398 rev = "tip"
1398 rev = "tip"
1399 if num or branch or tags:
1399 if num or branch or tags:
1400 raise util.Abort(
1400 raise util.Abort(
1401 "can't query remote revision number, branch, or tags")
1401 "can't query remote revision number, branch, or tags")
1402 output = [hexfunc(srepo.lookup(rev))]
1402 output = [hexfunc(srepo.lookup(rev))]
1403 elif not rev:
1403 elif not rev:
1404 ctx = repo.workingctx()
1404 ctx = repo.workingctx()
1405 parents = ctx.parents()
1405 parents = ctx.parents()
1406 changed = False
1406 changed = False
1407 if default or id or num:
1407 if default or id or num:
1408 changed = ctx.files() + ctx.deleted()
1408 changed = ctx.files() + ctx.deleted()
1409 if default or id:
1409 if default or id:
1410 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1410 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1411 (changed) and "+" or "")]
1411 (changed) and "+" or "")]
1412 if num:
1412 if num:
1413 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1413 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1414 (changed) and "+" or ""))
1414 (changed) and "+" or ""))
1415 else:
1415 else:
1416 ctx = repo.changectx(rev)
1416 ctx = repo.changectx(rev)
1417 if default or id:
1417 if default or id:
1418 output = [hexfunc(ctx.node())]
1418 output = [hexfunc(ctx.node())]
1419 if num:
1419 if num:
1420 output.append(str(ctx.rev()))
1420 output.append(str(ctx.rev()))
1421
1421
1422 if not source and default and not ui.quiet:
1422 if not source and default and not ui.quiet:
1423 b = util.tolocal(ctx.branch())
1423 b = util.tolocal(ctx.branch())
1424 if b != 'default':
1424 if b != 'default':
1425 output.append("(%s)" % b)
1425 output.append("(%s)" % b)
1426
1426
1427 # multiple tags for a single parent separated by '/'
1427 # multiple tags for a single parent separated by '/'
1428 t = "/".join(ctx.tags())
1428 t = "/".join(ctx.tags())
1429 if t:
1429 if t:
1430 output.append(t)
1430 output.append(t)
1431
1431
1432 if branch:
1432 if branch:
1433 output.append(util.tolocal(ctx.branch()))
1433 output.append(util.tolocal(ctx.branch()))
1434
1434
1435 if tags:
1435 if tags:
1436 output.extend(ctx.tags())
1436 output.extend(ctx.tags())
1437
1437
1438 ui.write("%s\n" % ' '.join(output))
1438 ui.write("%s\n" % ' '.join(output))
1439
1439
1440 def import_(ui, repo, patch1, *patches, **opts):
1440 def import_(ui, repo, patch1, *patches, **opts):
1441 """import an ordered set of patches
1441 """import an ordered set of patches
1442
1442
1443 Import a list of patches and commit them individually.
1443 Import a list of patches and commit them individually.
1444
1444
1445 If there are outstanding changes in the working directory, import
1445 If there are outstanding changes in the working directory, import
1446 will abort unless given the -f flag.
1446 will abort unless given the -f flag.
1447
1447
1448 You can import a patch straight from a mail message. Even patches
1448 You can import a patch straight from a mail message. Even patches
1449 as attachments work (body part must be type text/plain or
1449 as attachments work (body part must be type text/plain or
1450 text/x-patch to be used). The From and Subject headers of the email
1450 text/x-patch to be used). The From and Subject headers of the email
1451 message are used as the default committer and commit message. All
1451 message are used as the default committer and commit message. All
1452 text/plain body parts before the first diff are added to the commit
1452 text/plain body parts before the first diff are added to the commit
1453 message.
1453 message.
1454
1454
1455 If the imported patch was generated by hg export, user and description
1455 If the imported patch was generated by hg export, user and description
1456 from patch override values from message headers and body. Values
1456 from patch override values from message headers and body. Values
1457 given on command line with -m and -u override these.
1457 given on command line with -m and -u override these.
1458
1458
1459 If --exact is specified, import will set the working directory
1459 If --exact is specified, import will set the working directory
1460 to the parent of each patch before applying it, and will abort
1460 to the parent of each patch before applying it, and will abort
1461 if the resulting changeset has a different ID than the one
1461 if the resulting changeset has a different ID than the one
1462 recorded in the patch. This may happen due to character set
1462 recorded in the patch. This may happen due to character set
1463 problems or other deficiencies in the text patch format.
1463 problems or other deficiencies in the text patch format.
1464
1464
1465 To read a patch from standard input, use patch name "-".
1465 To read a patch from standard input, use patch name "-".
1466 """
1466 """
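# Editorial sketch (not part of this changeset): hypothetical invocations of
# the modes described above:
#
#   hg import fix-bug.patch             # apply and commit a patch file
#   hg import --exact exported.patch    # reproduce the exact original changeset
#   hg export tip | hg import -         # read a patch from standard input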
1467 patches = (patch1,) + patches
1467 patches = (patch1,) + patches
1468
1468
1469 if opts.get('exact') or not opts['force']:
1469 if opts.get('exact') or not opts['force']:
1470 cmdutil.bail_if_changed(repo)
1470 cmdutil.bail_if_changed(repo)
1471
1471
1472 d = opts["base"]
1472 d = opts["base"]
1473 strip = opts["strip"]
1473 strip = opts["strip"]
1474 wlock = lock = None
1474 wlock = lock = None
1475 try:
1475 try:
1476 wlock = repo.wlock()
1476 wlock = repo.wlock()
1477 lock = repo.lock()
1477 lock = repo.lock()
1478 for p in patches:
1478 for p in patches:
1479 pf = os.path.join(d, p)
1479 pf = os.path.join(d, p)
1480
1480
1481 if pf == '-':
1481 if pf == '-':
1482 ui.status(_("applying patch from stdin\n"))
1482 ui.status(_("applying patch from stdin\n"))
1483 data = patch.extract(ui, sys.stdin)
1483 data = patch.extract(ui, sys.stdin)
1484 else:
1484 else:
1485 ui.status(_("applying %s\n") % p)
1485 ui.status(_("applying %s\n") % p)
1486 if os.path.exists(pf):
1486 if os.path.exists(pf):
1487 data = patch.extract(ui, file(pf, 'rb'))
1487 data = patch.extract(ui, file(pf, 'rb'))
1488 else:
1488 else:
1489 data = patch.extract(ui, urllib.urlopen(pf))
1489 data = patch.extract(ui, urllib.urlopen(pf))
1490 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1490 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1491
1491
1492 if tmpname is None:
1492 if tmpname is None:
1493 raise util.Abort(_('no diffs found'))
1493 raise util.Abort(_('no diffs found'))
1494
1494
1495 try:
1495 try:
1496 cmdline_message = cmdutil.logmessage(opts)
1496 cmdline_message = cmdutil.logmessage(opts)
1497 if cmdline_message:
1497 if cmdline_message:
1498 # pickup the cmdline msg
1498 # pickup the cmdline msg
1499 message = cmdline_message
1499 message = cmdline_message
1500 elif message:
1500 elif message:
1501 # pickup the patch msg
1501 # pickup the patch msg
1502 message = message.strip()
1502 message = message.strip()
1503 else:
1503 else:
1504 # launch the editor
1504 # launch the editor
1505 message = None
1505 message = None
1506 ui.debug(_('message:\n%s\n') % message)
1506 ui.debug(_('message:\n%s\n') % message)
1507
1507
1508 wp = repo.workingctx().parents()
1508 wp = repo.workingctx().parents()
1509 if opts.get('exact'):
1509 if opts.get('exact'):
1510 if not nodeid or not p1:
1510 if not nodeid or not p1:
1511 raise util.Abort(_('not a mercurial patch'))
1511 raise util.Abort(_('not a mercurial patch'))
1512 p1 = repo.lookup(p1)
1512 p1 = repo.lookup(p1)
1513 p2 = repo.lookup(p2 or hex(nullid))
1513 p2 = repo.lookup(p2 or hex(nullid))
1514
1514
1515 if p1 != wp[0].node():
1515 if p1 != wp[0].node():
1516 hg.clean(repo, p1)
1516 hg.clean(repo, p1)
1517 repo.dirstate.setparents(p1, p2)
1517 repo.dirstate.setparents(p1, p2)
1518 elif p2:
1518 elif p2:
1519 try:
1519 try:
1520 p1 = repo.lookup(p1)
1520 p1 = repo.lookup(p1)
1521 p2 = repo.lookup(p2)
1521 p2 = repo.lookup(p2)
1522 if p1 == wp[0].node():
1522 if p1 == wp[0].node():
1523 repo.dirstate.setparents(p1, p2)
1523 repo.dirstate.setparents(p1, p2)
1524 except hg.RepoError:
1524 except hg.RepoError:
1525 pass
1525 pass
1526 if opts.get('exact') or opts.get('import_branch'):
1526 if opts.get('exact') or opts.get('import_branch'):
1527 repo.dirstate.setbranch(branch or 'default')
1527 repo.dirstate.setbranch(branch or 'default')
1528
1528
1529 files = {}
1529 files = {}
1530 try:
1530 try:
1531 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1531 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1532 files=files)
1532 files=files)
1533 finally:
1533 finally:
1534 files = patch.updatedir(ui, repo, files)
1534 files = patch.updatedir(ui, repo, files)
1535 if not opts.get('no_commit'):
1535 if not opts.get('no_commit'):
1536 n = repo.commit(files, message, opts.get('user') or user,
1536 n = repo.commit(files, message, opts.get('user') or user,
1537 opts.get('date') or date)
1537 opts.get('date') or date)
1538 if opts.get('exact'):
1538 if opts.get('exact'):
1539 if hex(n) != nodeid:
1539 if hex(n) != nodeid:
1540 repo.rollback()
1540 repo.rollback()
1541 raise util.Abort(_('patch is damaged'
1541 raise util.Abort(_('patch is damaged'
1542 ' or loses information'))
1542 ' or loses information'))
1543 # Force a dirstate write so that the next transaction
1544 # backs up an up-to-date file.
1545 repo.dirstate.write()
1543 finally:
1546 finally:
1544 os.unlink(tmpname)
1547 os.unlink(tmpname)
1545 finally:
1548 finally:
1546 del lock, wlock
1549 del lock, wlock
1547
1550
1548 def incoming(ui, repo, source="default", **opts):
1551 def incoming(ui, repo, source="default", **opts):
1549 """show new changesets found in source
1552 """show new changesets found in source
1550
1553
1551 Show new changesets found in the specified path/URL or the default
1554 Show new changesets found in the specified path/URL or the default
1552 pull location. These are the changesets that would be pulled if a pull
1555 pull location. These are the changesets that would be pulled if a pull
1553 was requested.
1556 was requested.
1554
1557
1555 For a remote repository, using --bundle avoids downloading the changesets
1558 For a remote repository, using --bundle avoids downloading the changesets
1556 twice if the incoming is followed by a pull.
1559 twice if the incoming is followed by a pull.
1557
1560
1558 See pull for valid source format details.
1561 See pull for valid source format details.
1559 """
1562 """
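# Editorial sketch (not part of this changeset): the --bundle usage suggested
# in the docstring above, with a hypothetical remote URL:
#
#   hg incoming --bundle incoming.hg http://example.com/repo
#   hg pull incoming.hg     # re-uses the already-downloaded changesets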
1560 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1563 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1561 cmdutil.setremoteconfig(ui, opts)
1564 cmdutil.setremoteconfig(ui, opts)
1562
1565
1563 other = hg.repository(ui, source)
1566 other = hg.repository(ui, source)
1564 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1567 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1565 if revs:
1568 if revs:
1566 revs = [other.lookup(rev) for rev in revs]
1569 revs = [other.lookup(rev) for rev in revs]
1567 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1570 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1568 if not incoming:
1571 if not incoming:
1569 try:
1572 try:
1570 os.unlink(opts["bundle"])
1573 os.unlink(opts["bundle"])
1571 except:
1574 except:
1572 pass
1575 pass
1573 ui.status(_("no changes found\n"))
1576 ui.status(_("no changes found\n"))
1574 return 1
1577 return 1
1575
1578
1576 cleanup = None
1579 cleanup = None
1577 try:
1580 try:
1578 fname = opts["bundle"]
1581 fname = opts["bundle"]
1579 if fname or not other.local():
1582 if fname or not other.local():
1580 # create a bundle (uncompressed if other repo is not local)
1583 # create a bundle (uncompressed if other repo is not local)
1581 if revs is None:
1584 if revs is None:
1582 cg = other.changegroup(incoming, "incoming")
1585 cg = other.changegroup(incoming, "incoming")
1583 else:
1586 else:
1584 cg = other.changegroupsubset(incoming, revs, 'incoming')
1587 cg = other.changegroupsubset(incoming, revs, 'incoming')
1585 bundletype = other.local() and "HG10BZ" or "HG10UN"
1588 bundletype = other.local() and "HG10BZ" or "HG10UN"
1586 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1589 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1587 # keep written bundle?
1590 # keep written bundle?
1588 if opts["bundle"]:
1591 if opts["bundle"]:
1589 cleanup = None
1592 cleanup = None
1590 if not other.local():
1593 if not other.local():
1591 # use the created uncompressed bundlerepo
1594 # use the created uncompressed bundlerepo
1592 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1595 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1593
1596
1594 o = other.changelog.nodesbetween(incoming, revs)[0]
1597 o = other.changelog.nodesbetween(incoming, revs)[0]
1595 if opts['newest_first']:
1598 if opts['newest_first']:
1596 o.reverse()
1599 o.reverse()
1597 displayer = cmdutil.show_changeset(ui, other, opts)
1600 displayer = cmdutil.show_changeset(ui, other, opts)
1598 for n in o:
1601 for n in o:
1599 parents = [p for p in other.changelog.parents(n) if p != nullid]
1602 parents = [p for p in other.changelog.parents(n) if p != nullid]
1600 if opts['no_merges'] and len(parents) == 2:
1603 if opts['no_merges'] and len(parents) == 2:
1601 continue
1604 continue
1602 displayer.show(changenode=n)
1605 displayer.show(changenode=n)
1603 finally:
1606 finally:
1604 if hasattr(other, 'close'):
1607 if hasattr(other, 'close'):
1605 other.close()
1608 other.close()
1606 if cleanup:
1609 if cleanup:
1607 os.unlink(cleanup)
1610 os.unlink(cleanup)
1608
1611
1609 def init(ui, dest=".", **opts):
1612 def init(ui, dest=".", **opts):
1610 """create a new repository in the given directory
1613 """create a new repository in the given directory
1611
1614
1612 Initialize a new repository in the given directory. If the given
1615 Initialize a new repository in the given directory. If the given
1613 directory does not exist, it is created.
1616 directory does not exist, it is created.
1614
1617
1615 If no directory is given, the current directory is used.
1618 If no directory is given, the current directory is used.
1616
1619
1617 It is possible to specify an ssh:// URL as the destination.
1620 It is possible to specify an ssh:// URL as the destination.
1618 Look at the help text for the pull command for important details
1621 Look at the help text for the pull command for important details
1619 about ssh:// URLs.
1622 about ssh:// URLs.
1620 """
1623 """
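# Editorial sketch (not part of this changeset): hypothetical destinations for
# the command described above:
#
#   hg init                          # initialise the current directory
#   hg init project                  # create ./project and initialise it
#   hg init ssh://user@host/path     # initialise a repository over ssh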
1621 cmdutil.setremoteconfig(ui, opts)
1624 cmdutil.setremoteconfig(ui, opts)
1622 hg.repository(ui, dest, create=1)
1625 hg.repository(ui, dest, create=1)
1623
1626
1624 def locate(ui, repo, *pats, **opts):
1627 def locate(ui, repo, *pats, **opts):
1625 """locate files matching specific patterns
1628 """locate files matching specific patterns
1626
1629
1627 Print all files under Mercurial control whose names match the
1630 Print all files under Mercurial control whose names match the
1628 given patterns.
1631 given patterns.
1629
1632
1630 This command searches the entire repository by default. To search
1633 This command searches the entire repository by default. To search
1631 just the current directory and its subdirectories, use
1634 just the current directory and its subdirectories, use
1632 "--include .".
1635 "--include .".
1633
1636
1634 If no patterns are given to match, this command prints all file
1637 If no patterns are given to match, this command prints all file
1635 names.
1638 names.
1636
1639
1637 If you want to feed the output of this command into the "xargs"
1640 If you want to feed the output of this command into the "xargs"
1638 command, use the "-0" option to both this command and "xargs".
1641 command, use the "-0" option to both this command and "xargs".
1639 This will avoid the problem of "xargs" treating single filenames
1642 This will avoid the problem of "xargs" treating single filenames
1640 that contain white space as multiple filenames.
1643 that contain white space as multiple filenames.
1641 """
1644 """
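# Editorial sketch (not part of this changeset): the xargs pairing described in
# the docstring above, with a hypothetical pattern:
#
#   hg locate -0 '*.orig' | xargs -0 rm
#
# using -0 on both sides keeps file names containing spaces intact.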
1642 end = opts['print0'] and '\0' or '\n'
1645 end = opts['print0'] and '\0' or '\n'
1643 rev = opts['rev']
1646 rev = opts['rev']
1644 if rev:
1647 if rev:
1645 node = repo.lookup(rev)
1648 node = repo.lookup(rev)
1646 else:
1649 else:
1647 node = None
1650 node = None
1648
1651
1649 ret = 1
1652 ret = 1
1650 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1653 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1651 badmatch=util.always,
1654 badmatch=util.always,
1652 default='relglob'):
1655 default='relglob'):
1653 if src == 'b':
1656 if src == 'b':
1654 continue
1657 continue
1655 if not node and abs not in repo.dirstate:
1658 if not node and abs not in repo.dirstate:
1656 continue
1659 continue
1657 if opts['fullpath']:
1660 if opts['fullpath']:
1658 ui.write(os.path.join(repo.root, abs), end)
1661 ui.write(os.path.join(repo.root, abs), end)
1659 else:
1662 else:
1660 ui.write(((pats and rel) or abs), end)
1663 ui.write(((pats and rel) or abs), end)
1661 ret = 0
1664 ret = 0
1662
1665
1663 return ret
1666 return ret
1664
1667
1665 def log(ui, repo, *pats, **opts):
1668 def log(ui, repo, *pats, **opts):
1666 """show revision history of entire repository or files
1669 """show revision history of entire repository or files
1667
1670
1668 Print the revision history of the specified files or the entire
1671 Print the revision history of the specified files or the entire
1669 project.
1672 project.
1670
1673
1671 File history is shown without following rename or copy history of
1674 File history is shown without following rename or copy history of
1672 files. Use -f/--follow with a file name to follow history across
1675 files. Use -f/--follow with a file name to follow history across
1673 renames and copies. --follow without a file name will only show
1676 renames and copies. --follow without a file name will only show
1674 ancestors or descendants of the starting revision. --follow-first
1677 ancestors or descendants of the starting revision. --follow-first
1675 only follows the first parent of merge revisions.
1678 only follows the first parent of merge revisions.
1676
1679
1677 If no revision range is specified, the default is tip:0 unless
1680 If no revision range is specified, the default is tip:0 unless
1678 --follow is set, in which case the working directory parent is
1681 --follow is set, in which case the working directory parent is
1679 used as the starting revision.
1682 used as the starting revision.
1680
1683
1681 By default this command outputs: changeset id and hash, tags,
1684 By default this command outputs: changeset id and hash, tags,
1682 non-trivial parents, user, date and time, and a summary for each
1685 non-trivial parents, user, date and time, and a summary for each
1683 commit. When the -v/--verbose switch is used, the list of changed
1686 commit. When the -v/--verbose switch is used, the list of changed
1684 files and full commit message is shown.
1687 files and full commit message is shown.
1685
1688
1686 NOTE: log -p may generate unexpected diff output for merge
1689 NOTE: log -p may generate unexpected diff output for merge
1687 changesets, as it will compare the merge changeset against its
1690 changesets, as it will compare the merge changeset against its
1688 first parent only. Also, the files: list will only reflect files
1691 first parent only. Also, the files: list will only reflect files
1689 that are different from BOTH parents.
1692 that are different from BOTH parents.
1690
1693
1691 """
1694 """
1692
1695
1693 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1696 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1694 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1697 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1695
1698
1696 if opts['limit']:
1699 if opts['limit']:
1697 try:
1700 try:
1698 limit = int(opts['limit'])
1701 limit = int(opts['limit'])
1699 except ValueError:
1702 except ValueError:
1700 raise util.Abort(_('limit must be a positive integer'))
1703 raise util.Abort(_('limit must be a positive integer'))
1701 if limit <= 0: raise util.Abort(_('limit must be positive'))
1704 if limit <= 0: raise util.Abort(_('limit must be positive'))
1702 else:
1705 else:
1703 limit = sys.maxint
1706 limit = sys.maxint
1704 count = 0
1707 count = 0
1705
1708
1706 if opts['copies'] and opts['rev']:
1709 if opts['copies'] and opts['rev']:
1707 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1710 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1708 else:
1711 else:
1709 endrev = repo.changelog.count()
1712 endrev = repo.changelog.count()
1710 rcache = {}
1713 rcache = {}
1711 ncache = {}
1714 ncache = {}
1712 def getrenamed(fn, rev):
1715 def getrenamed(fn, rev):
1713 '''looks up all renames for a file (up to endrev) the first
1716 '''looks up all renames for a file (up to endrev) the first
1714 time the file is given. It indexes on the changerev and only
1717 time the file is given. It indexes on the changerev and only
1715 parses the manifest if linkrev != changerev.
1718 parses the manifest if linkrev != changerev.
1716 Returns rename info for fn at changerev rev.'''
1719 Returns rename info for fn at changerev rev.'''
1717 if fn not in rcache:
1720 if fn not in rcache:
1718 rcache[fn] = {}
1721 rcache[fn] = {}
1719 ncache[fn] = {}
1722 ncache[fn] = {}
1720 fl = repo.file(fn)
1723 fl = repo.file(fn)
1721 for i in xrange(fl.count()):
1724 for i in xrange(fl.count()):
1722 node = fl.node(i)
1725 node = fl.node(i)
1723 lr = fl.linkrev(node)
1726 lr = fl.linkrev(node)
1724 renamed = fl.renamed(node)
1727 renamed = fl.renamed(node)
1725 rcache[fn][lr] = renamed
1728 rcache[fn][lr] = renamed
1726 if renamed:
1729 if renamed:
1727 ncache[fn][node] = renamed
1730 ncache[fn][node] = renamed
1728 if lr >= endrev:
1731 if lr >= endrev:
1729 break
1732 break
1730 if rev in rcache[fn]:
1733 if rev in rcache[fn]:
1731 return rcache[fn][rev]
1734 return rcache[fn][rev]
1732
1735
1733 # If linkrev != rev (i.e. rev not found in rcache) fall back to
1736 # If linkrev != rev (i.e. rev not found in rcache) fall back to

1734 # filectx logic.
1737 # filectx logic.
1735
1738
1736 try:
1739 try:
1737 return repo.changectx(rev).filectx(fn).renamed()
1740 return repo.changectx(rev).filectx(fn).renamed()
1738 except revlog.LookupError:
1741 except revlog.LookupError:
1739 pass
1742 pass
1740 return None
1743 return None
1741
1744
1742 df = False
1745 df = False
1743 if opts["date"]:
1746 if opts["date"]:
1744 df = util.matchdate(opts["date"])
1747 df = util.matchdate(opts["date"])
1745
1748
1746 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1749 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1747 for st, rev, fns in changeiter:
1750 for st, rev, fns in changeiter:
1748 if st == 'add':
1751 if st == 'add':
1749 changenode = repo.changelog.node(rev)
1752 changenode = repo.changelog.node(rev)
1750 parents = [p for p in repo.changelog.parentrevs(rev)
1753 parents = [p for p in repo.changelog.parentrevs(rev)
1751 if p != nullrev]
1754 if p != nullrev]
1752 if opts['no_merges'] and len(parents) == 2:
1755 if opts['no_merges'] and len(parents) == 2:
1753 continue
1756 continue
1754 if opts['only_merges'] and len(parents) != 2:
1757 if opts['only_merges'] and len(parents) != 2:
1755 continue
1758 continue
1756
1759
1757 if df:
1760 if df:
1758 changes = get(rev)
1761 changes = get(rev)
1759 if not df(changes[2][0]):
1762 if not df(changes[2][0]):
1760 continue
1763 continue
1761
1764
1762 if opts['keyword']:
1765 if opts['keyword']:
1763 changes = get(rev)
1766 changes = get(rev)
1764 miss = 0
1767 miss = 0
1765 for k in [kw.lower() for kw in opts['keyword']]:
1768 for k in [kw.lower() for kw in opts['keyword']]:
1766 if not (k in changes[1].lower() or
1769 if not (k in changes[1].lower() or
1767 k in changes[4].lower() or
1770 k in changes[4].lower() or
1768 k in " ".join(changes[3]).lower()):
1771 k in " ".join(changes[3]).lower()):
1769 miss = 1
1772 miss = 1
1770 break
1773 break
1771 if miss:
1774 if miss:
1772 continue
1775 continue
1773
1776
1774 copies = []
1777 copies = []
1775 if opts.get('copies') and rev:
1778 if opts.get('copies') and rev:
1776 for fn in get(rev)[3]:
1779 for fn in get(rev)[3]:
1777 rename = getrenamed(fn, rev)
1780 rename = getrenamed(fn, rev)
1778 if rename:
1781 if rename:
1779 copies.append((fn, rename[0]))
1782 copies.append((fn, rename[0]))
1780 displayer.show(rev, changenode, copies=copies)
1783 displayer.show(rev, changenode, copies=copies)
1781 elif st == 'iter':
1784 elif st == 'iter':
1782 if count == limit: break
1785 if count == limit: break
1783 if displayer.flush(rev):
1786 if displayer.flush(rev):
1784 count += 1
1787 count += 1
1785
1788
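# Editorial sketch, not part of the original commands.py: the getrenamed()
# helper inside log() above caches rename information per file the first
# time that file is seen, keyed by linkrev.  This hypothetical function
# compresses the same idea, using only the filelog calls visible above.
def _example_rename_cache(repo, fn, revs):
    """Answer 'was fn renamed at rev?' for many revs with one filelog scan."""
    cache = {}
    fl = repo.file(fn)
    for i in xrange(fl.count()):
        node = fl.node(i)
        cache[fl.linkrev(node)] = fl.renamed(node)
    return [cache.get(r) for r in revs]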
1786 def manifest(ui, repo, node=None, rev=None):
1789 def manifest(ui, repo, node=None, rev=None):
1787 """output the current or given revision of the project manifest
1790 """output the current or given revision of the project manifest
1788
1791
1789 Print a list of version controlled files for the given revision.
1792 Print a list of version controlled files for the given revision.
1790 If no revision is given, the parent of the working directory is used,
1793 If no revision is given, the parent of the working directory is used,
1791 or tip if no revision is checked out.
1794 or tip if no revision is checked out.
1792
1795
1793 The manifest is the list of files being version controlled. If no revision
1796 The manifest is the list of files being version controlled. If no revision
1794 is given, the first parent of the working directory is used.
1797 is given, the first parent of the working directory is used.
1795
1798
1796 With -v flag, print file permissions, symlink and executable bits. With
1799 With -v flag, print file permissions, symlink and executable bits. With
1797 --debug flag, print file revision hashes.
1800 --debug flag, print file revision hashes.
1798 """
1801 """
1799
1802
1800 if rev and node:
1803 if rev and node:
1801 raise util.Abort(_("please specify just one revision"))
1804 raise util.Abort(_("please specify just one revision"))
1802
1805
1803 if not node:
1806 if not node:
1804 node = rev
1807 node = rev
1805
1808
1806 m = repo.changectx(node).manifest()
1809 m = repo.changectx(node).manifest()
1807 files = m.keys()
1810 files = m.keys()
1808 files.sort()
1811 files.sort()
1809
1812
1810 for f in files:
1813 for f in files:
1811 if ui.debugflag:
1814 if ui.debugflag:
1812 ui.write("%40s " % hex(m[f]))
1815 ui.write("%40s " % hex(m[f]))
1813 if ui.verbose:
1816 if ui.verbose:
1814 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1817 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1815 perm = m.execf(f) and "755" or "644"
1818 perm = m.execf(f) and "755" or "644"
1816 ui.write("%3s %1s " % (perm, type))
1819 ui.write("%3s %1s " % (perm, type))
1817 ui.write("%s\n" % f)
1820 ui.write("%s\n" % f)
1818
1821
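# Editorial sketch, not part of the original commands.py: with -v,
# manifest() above decorates each entry from the manifest's exec/link
# flags.  A hypothetical helper making that mapping explicit:
def _example_manifest_flags(m, f):
    """Return the verbose-mode line used by manifest(): perm, type, name."""
    type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
    perm = m.execf(f) and "755" or "644"
    return "%3s %1s %s" % (perm, type, f)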
1819 def merge(ui, repo, node=None, force=None, rev=None):
1822 def merge(ui, repo, node=None, force=None, rev=None):
1820 """merge working directory with another revision
1823 """merge working directory with another revision
1821
1824
1822 Merge the contents of the current working directory and the
1825 Merge the contents of the current working directory and the
1823 requested revision. Files that changed between either parent are
1826 requested revision. Files that changed between either parent are
1824 marked as changed for the next commit and a commit must be
1827 marked as changed for the next commit and a commit must be
1825 performed before any further updates are allowed.
1828 performed before any further updates are allowed.
1826
1829
1827 If no revision is specified, the working directory's parent is a
1830 If no revision is specified, the working directory's parent is a
1828 head revision, and the repository contains exactly one other head,
1831 head revision, and the repository contains exactly one other head,
1829 the other head is merged with by default. Otherwise, an explicit
1832 the other head is merged with by default. Otherwise, an explicit
1830 revision to merge with must be provided.
1833 revision to merge with must be provided.
1831 """
1834 """
1832
1835
1833 if rev and node:
1836 if rev and node:
1834 raise util.Abort(_("please specify just one revision"))
1837 raise util.Abort(_("please specify just one revision"))
1835 if not node:
1838 if not node:
1836 node = rev
1839 node = rev
1837
1840
1838 if not node:
1841 if not node:
1839 heads = repo.heads()
1842 heads = repo.heads()
1840 if len(heads) > 2:
1843 if len(heads) > 2:
1841 raise util.Abort(_('repo has %d heads - '
1844 raise util.Abort(_('repo has %d heads - '
1842 'please merge with an explicit rev') %
1845 'please merge with an explicit rev') %
1843 len(heads))
1846 len(heads))
1844 parent = repo.dirstate.parents()[0]
1847 parent = repo.dirstate.parents()[0]
1845 if len(heads) == 1:
1848 if len(heads) == 1:
1846 msg = _('there is nothing to merge')
1849 msg = _('there is nothing to merge')
1847 if parent != repo.lookup(repo.workingctx().branch()):
1850 if parent != repo.lookup(repo.workingctx().branch()):
1848 msg = _('%s - use "hg update" instead') % msg
1851 msg = _('%s - use "hg update" instead') % msg
1849 raise util.Abort(msg)
1852 raise util.Abort(msg)
1850
1853
1851 if parent not in heads:
1854 if parent not in heads:
1852 raise util.Abort(_('working dir not at a head rev - '
1855 raise util.Abort(_('working dir not at a head rev - '
1853 'use "hg update" or merge with an explicit rev'))
1856 'use "hg update" or merge with an explicit rev'))
1854 node = parent == heads[0] and heads[-1] or heads[0]
1857 node = parent == heads[0] and heads[-1] or heads[0]
1855 return hg.merge(repo, node, force=force)
1858 return hg.merge(repo, node, force=force)
1856
1859
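# Editorial sketch, not part of the original commands.py: merge() above
# only picks a target implicitly when there are exactly two heads and the
# working directory parent is one of them.  Hypothetical helper:
def _example_pick_other_head(parent, heads):
    """Return the head to merge with, or None if a revision must be named."""
    if len(heads) != 2 or parent not in heads:
        return None
    return parent == heads[0] and heads[1] or heads[0]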
1857 def outgoing(ui, repo, dest=None, **opts):
1860 def outgoing(ui, repo, dest=None, **opts):
1858 """show changesets not found in destination
1861 """show changesets not found in destination
1859
1862
1860 Show changesets not found in the specified destination repository or
1863 Show changesets not found in the specified destination repository or
1861 the default push location. These are the changesets that would be pushed
1864 the default push location. These are the changesets that would be pushed
1862 if a push was requested.
1865 if a push was requested.
1863
1866
1864 See pull for valid destination format details.
1867 See pull for valid destination format details.
1865 """
1868 """
1866 dest, revs, checkout = hg.parseurl(
1869 dest, revs, checkout = hg.parseurl(
1867 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1870 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1868 cmdutil.setremoteconfig(ui, opts)
1871 cmdutil.setremoteconfig(ui, opts)
1869 if revs:
1872 if revs:
1870 revs = [repo.lookup(rev) for rev in revs]
1873 revs = [repo.lookup(rev) for rev in revs]
1871
1874
1872 other = hg.repository(ui, dest)
1875 other = hg.repository(ui, dest)
1873 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1876 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1874 o = repo.findoutgoing(other, force=opts['force'])
1877 o = repo.findoutgoing(other, force=opts['force'])
1875 if not o:
1878 if not o:
1876 ui.status(_("no changes found\n"))
1879 ui.status(_("no changes found\n"))
1877 return 1
1880 return 1
1878 o = repo.changelog.nodesbetween(o, revs)[0]
1881 o = repo.changelog.nodesbetween(o, revs)[0]
1879 if opts['newest_first']:
1882 if opts['newest_first']:
1880 o.reverse()
1883 o.reverse()
1881 displayer = cmdutil.show_changeset(ui, repo, opts)
1884 displayer = cmdutil.show_changeset(ui, repo, opts)
1882 for n in o:
1885 for n in o:
1883 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1886 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1884 if opts['no_merges'] and len(parents) == 2:
1887 if opts['no_merges'] and len(parents) == 2:
1885 continue
1888 continue
1886 displayer.show(changenode=n)
1889 displayer.show(changenode=n)
1887
1890
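# Editorial sketch, not part of the original commands.py: both log() and
# outgoing() above treat a changeset as a merge when it has two non-null
# parents.  A hypothetical filter expressing the --no-merges test:
def _example_skip_merges(repo, nodes):
    """Drop merge changesets, mirroring the --no-merges check above."""
    kept = []
    for n in nodes:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if len(parents) != 2:
            kept.append(n)
    return kept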
1888 def parents(ui, repo, file_=None, **opts):
1891 def parents(ui, repo, file_=None, **opts):
1889 """show the parents of the working dir or revision
1892 """show the parents of the working dir or revision
1890
1893
1891 Print the working directory's parent revisions. If a
1894 Print the working directory's parent revisions. If a
1892 revision is given via --rev, the parent of that revision
1895 revision is given via --rev, the parent of that revision
1893 will be printed. If a file argument is given, the revision in
1896 will be printed. If a file argument is given, the revision in
1894 which the file was last changed (before the working directory
1897 which the file was last changed (before the working directory
1895 revision or the argument to --rev if given) is printed.
1898 revision or the argument to --rev if given) is printed.
1896 """
1899 """
1897 rev = opts.get('rev')
1900 rev = opts.get('rev')
1898 if rev:
1901 if rev:
1899 ctx = repo.changectx(rev)
1902 ctx = repo.changectx(rev)
1900 else:
1903 else:
1901 ctx = repo.workingctx()
1904 ctx = repo.workingctx()
1902
1905
1903 if file_:
1906 if file_:
1904 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1907 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1905 if anypats or len(files) != 1:
1908 if anypats or len(files) != 1:
1906 raise util.Abort(_('can only specify an explicit file name'))
1909 raise util.Abort(_('can only specify an explicit file name'))
1907 file_ = files[0]
1910 file_ = files[0]
1908 filenodes = []
1911 filenodes = []
1909 for cp in ctx.parents():
1912 for cp in ctx.parents():
1910 if not cp:
1913 if not cp:
1911 continue
1914 continue
1912 try:
1915 try:
1913 filenodes.append(cp.filenode(file_))
1916 filenodes.append(cp.filenode(file_))
1914 except revlog.LookupError:
1917 except revlog.LookupError:
1915 pass
1918 pass
1916 if not filenodes:
1919 if not filenodes:
1917 raise util.Abort(_("'%s' not found in manifest!") % file_)
1920 raise util.Abort(_("'%s' not found in manifest!") % file_)
1918 fl = repo.file(file_)
1921 fl = repo.file(file_)
1919 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1922 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1920 else:
1923 else:
1921 p = [cp.node() for cp in ctx.parents()]
1924 p = [cp.node() for cp in ctx.parents()]
1922
1925
1923 displayer = cmdutil.show_changeset(ui, repo, opts)
1926 displayer = cmdutil.show_changeset(ui, repo, opts)
1924 for n in p:
1927 for n in p:
1925 if n != nullid:
1928 if n != nullid:
1926 displayer.show(changenode=n)
1929 displayer.show(changenode=n)
1927
1930
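# Editorial sketch, not part of the original commands.py: when parents()
# above is given a file, it maps the file node found in each parent
# context back to a changeset through the filelog's linkrev.  Hypothetical
# condensed version using the same calls:
def _example_file_parents(repo, ctx, file_):
    """Changesets that last touched file_, as seen from ctx's parents."""
    fl = repo.file(file_)
    nodes = []
    for cp in ctx.parents():
        try:
            nodes.append(cp.filenode(file_))
        except revlog.LookupError:
            pass
    return [repo.lookup(fl.linkrev(fn)) for fn in nodes]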
1928 def paths(ui, repo, search=None):
1931 def paths(ui, repo, search=None):
1929 """show definition of symbolic path names
1932 """show definition of symbolic path names
1930
1933
1931 Show definition of symbolic path name NAME. If no name is given, show
1934 Show definition of symbolic path name NAME. If no name is given, show
1932 definition of available names.
1935 definition of available names.
1933
1936
1934 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1937 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1935 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1938 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1936 """
1939 """
1937 if search:
1940 if search:
1938 for name, path in ui.configitems("paths"):
1941 for name, path in ui.configitems("paths"):
1939 if name == search:
1942 if name == search:
1940 ui.write("%s\n" % path)
1943 ui.write("%s\n" % path)
1941 return
1944 return
1942 ui.warn(_("not found!\n"))
1945 ui.warn(_("not found!\n"))
1943 return 1
1946 return 1
1944 else:
1947 else:
1945 for name, path in ui.configitems("paths"):
1948 for name, path in ui.configitems("paths"):
1946 ui.write("%s = %s\n" % (name, path))
1949 ui.write("%s = %s\n" % (name, path))
1947
1950
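# Editorial example, not part of the original commands.py: a hypothetical
# [paths] section of the kind paths() above reads via ui.configitems,
# e.g. in $HOME/.hgrc:
#
#     [paths]
#     default      = http://example.com/hg/repo
#     default-push = ssh://example.com//srv/hg/repo
#
# "hg paths default" would then print the first URL; "hg paths" with no
# argument lists both.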
1948 def postincoming(ui, repo, modheads, optupdate, checkout):
1951 def postincoming(ui, repo, modheads, optupdate, checkout):
1949 if modheads == 0:
1952 if modheads == 0:
1950 return
1953 return
1951 if optupdate:
1954 if optupdate:
1952 if modheads <= 1 or checkout:
1955 if modheads <= 1 or checkout:
1953 return hg.update(repo, checkout)
1956 return hg.update(repo, checkout)
1954 else:
1957 else:
1955 ui.status(_("not updating, since new heads added\n"))
1958 ui.status(_("not updating, since new heads added\n"))
1956 if modheads > 1:
1959 if modheads > 1:
1957 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1960 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1958 else:
1961 else:
1959 ui.status(_("(run 'hg update' to get a working copy)\n"))
1962 ui.status(_("(run 'hg update' to get a working copy)\n"))
1960
1963
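# Editorial sketch, not part of the original commands.py: postincoming()
# above only updates the working directory when -u was requested and the
# pull added something without creating extra heads (or an explicit
# checkout was named).  Hypothetical one-liner form of that rule:
def _example_should_update(modheads, optupdate, checkout):
    """Mirror postincoming()'s decision to run hg.update after a pull."""
    return bool(modheads and optupdate and (modheads <= 1 or checkout))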
1961 def pull(ui, repo, source="default", **opts):
1964 def pull(ui, repo, source="default", **opts):
1962 """pull changes from the specified source
1965 """pull changes from the specified source
1963
1966
1964 Pull changes from a remote repository to a local one.
1967 Pull changes from a remote repository to a local one.
1965
1968
1966 This finds all changes from the repository at the specified path
1969 This finds all changes from the repository at the specified path
1967 or URL and adds them to the local repository. By default, this
1970 or URL and adds them to the local repository. By default, this
1968 does not update the copy of the project in the working directory.
1971 does not update the copy of the project in the working directory.
1969
1972
1970 Valid URLs are of the form:
1973 Valid URLs are of the form:
1971
1974
1972 local/filesystem/path (or file://local/filesystem/path)
1975 local/filesystem/path (or file://local/filesystem/path)
1973 http://[user@]host[:port]/[path]
1976 http://[user@]host[:port]/[path]
1974 https://[user@]host[:port]/[path]
1977 https://[user@]host[:port]/[path]
1975 ssh://[user@]host[:port]/[path]
1978 ssh://[user@]host[:port]/[path]
1976 static-http://host[:port]/[path]
1979 static-http://host[:port]/[path]
1977
1980
1978 Paths in the local filesystem can either point to Mercurial
1981 Paths in the local filesystem can either point to Mercurial
1979 repositories or to bundle files (as created by 'hg bundle' or
1982 repositories or to bundle files (as created by 'hg bundle' or
1980 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1983 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1981 allows access to a Mercurial repository where you simply use a web
1984 allows access to a Mercurial repository where you simply use a web
1982 server to publish the .hg directory as static content.
1985 server to publish the .hg directory as static content.
1983
1986
1984 An optional identifier after # indicates a particular branch, tag,
1987 An optional identifier after # indicates a particular branch, tag,
1985 or changeset to pull.
1988 or changeset to pull.
1986
1989
1987 Some notes about using SSH with Mercurial:
1990 Some notes about using SSH with Mercurial:
1988 - SSH requires an accessible shell account on the destination machine
1991 - SSH requires an accessible shell account on the destination machine
1989 and a copy of hg in the remote path or specified with remotecmd.
1992 and a copy of hg in the remote path or specified with remotecmd.
1990 - path is relative to the remote user's home directory by default.
1993 - path is relative to the remote user's home directory by default.
1991 Use an extra slash at the start of a path to specify an absolute path:
1994 Use an extra slash at the start of a path to specify an absolute path:
1992 ssh://example.com//tmp/repository
1995 ssh://example.com//tmp/repository
1993 - Mercurial doesn't use its own compression via SSH; the right thing
1996 - Mercurial doesn't use its own compression via SSH; the right thing
1994 to do is to configure it in your ~/.ssh/config, e.g.:
1997 to do is to configure it in your ~/.ssh/config, e.g.:
1995 Host *.mylocalnetwork.example.com
1998 Host *.mylocalnetwork.example.com
1996 Compression no
1999 Compression no
1997 Host *
2000 Host *
1998 Compression yes
2001 Compression yes
1999 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2002 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2000 with the --ssh command line option.
2003 with the --ssh command line option.
2001 """
2004 """
2002 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2005 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2003 cmdutil.setremoteconfig(ui, opts)
2006 cmdutil.setremoteconfig(ui, opts)
2004
2007
2005 other = hg.repository(ui, source)
2008 other = hg.repository(ui, source)
2006 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2009 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2007 if revs:
2010 if revs:
2008 try:
2011 try:
2009 revs = [other.lookup(rev) for rev in revs]
2012 revs = [other.lookup(rev) for rev in revs]
2010 except repo.NoCapability:
2013 except repo.NoCapability:
2011 error = _("Other repository doesn't support revision lookup, "
2014 error = _("Other repository doesn't support revision lookup, "
2012 "so a rev cannot be specified.")
2015 "so a rev cannot be specified.")
2013 raise util.Abort(error)
2016 raise util.Abort(error)
2014
2017
2015 modheads = repo.pull(other, heads=revs, force=opts['force'])
2018 modheads = repo.pull(other, heads=revs, force=opts['force'])
2016 return postincoming(ui, repo, modheads, opts['update'], checkout)
2019 return postincoming(ui, repo, modheads, opts['update'], checkout)
2017
2020
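# Editorial sketch, not part of the original commands.py: the optional
# '#name' suffix on pull/push URLs names a branch, tag or changeset.
# hg.parseurl() does the real work; this hypothetical helper only shows
# the basic splitting idea behind it.
def _example_split_fragment(url):
    """Split 'http://host/repo#stable' into ('http://host/repo', 'stable')."""
    if '#' not in url:
        return url, None
    url, frag = url.rsplit('#', 1)
    return url, frag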
2018 def push(ui, repo, dest=None, **opts):
2021 def push(ui, repo, dest=None, **opts):
2019 """push changes to the specified destination
2022 """push changes to the specified destination
2020
2023
2021 Push changes from the local repository to the given destination.
2024 Push changes from the local repository to the given destination.
2022
2025
2023 This is the symmetrical operation for pull. It helps to move
2026 This is the symmetrical operation for pull. It helps to move
2024 changes from the current repository to a different one. If the
2027 changes from the current repository to a different one. If the
2025 destination is local, this is identical to a pull in that directory
2028 destination is local, this is identical to a pull in that directory
2026 from the current one.
2029 from the current one.
2027
2030
2028 By default, push will refuse to run if it detects the result would
2031 By default, push will refuse to run if it detects the result would
2029 increase the number of remote heads. This generally indicates that
2032 increase the number of remote heads. This generally indicates that
2030 the client has forgotten to sync and merge before pushing.
2033 the client has forgotten to sync and merge before pushing.
2031
2034
2032 Valid URLs are of the form:
2035 Valid URLs are of the form:
2033
2036
2034 local/filesystem/path (or file://local/filesystem/path)
2037 local/filesystem/path (or file://local/filesystem/path)
2035 ssh://[user@]host[:port]/[path]
2038 ssh://[user@]host[:port]/[path]
2036 http://[user@]host[:port]/[path]
2039 http://[user@]host[:port]/[path]
2037 https://[user@]host[:port]/[path]
2040 https://[user@]host[:port]/[path]
2038
2041
2039 An optional identifier after # indicates a particular branch, tag,
2042 An optional identifier after # indicates a particular branch, tag,
2040 or changeset to push.
2043 or changeset to push.
2041
2044
2042 Look at the help text for the pull command for important details
2045 Look at the help text for the pull command for important details
2043 about ssh:// URLs.
2046 about ssh:// URLs.
2044
2047
2045 Pushing to http:// and https:// URLs is only possible if this
2048 Pushing to http:// and https:// URLs is only possible if this
2046 feature is explicitly enabled on the remote Mercurial server.
2049 feature is explicitly enabled on the remote Mercurial server.
2047 """
2050 """
2048 dest, revs, checkout = hg.parseurl(
2051 dest, revs, checkout = hg.parseurl(
2049 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2052 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2050 cmdutil.setremoteconfig(ui, opts)
2053 cmdutil.setremoteconfig(ui, opts)
2051
2054
2052 other = hg.repository(ui, dest)
2055 other = hg.repository(ui, dest)
2053 ui.status('pushing to %s\n' % util.hidepassword(dest))
2056 ui.status('pushing to %s\n' % util.hidepassword(dest))
2054 if revs:
2057 if revs:
2055 revs = [repo.lookup(rev) for rev in revs]
2058 revs = [repo.lookup(rev) for rev in revs]
2056 r = repo.push(other, opts['force'], revs=revs)
2059 r = repo.push(other, opts['force'], revs=revs)
2057 return r == 0
2060 return r == 0
2058
2061
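# Editorial sketch, not part of the original commands.py: the refusal
# described in push()'s docstring amounts to comparing head counts before
# and after the prospective push; the real check lives inside repo.push.
# Hypothetical, heavily simplified illustration only:
def _example_would_add_heads(heads_after, heads_before):
    """True when a push would leave the remote with more heads than before."""
    return len(heads_after) > len(heads_before)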
2059 def rawcommit(ui, repo, *pats, **opts):
2062 def rawcommit(ui, repo, *pats, **opts):
2060 """raw commit interface (DEPRECATED)
2063 """raw commit interface (DEPRECATED)
2061
2064
2062 (DEPRECATED)
2065 (DEPRECATED)
2063 Low-level commit, for use in helper scripts.
2066 Low-level commit, for use in helper scripts.
2064
2067
2065 This command is not intended to be used by normal users, as it is
2068 This command is not intended to be used by normal users, as it is
2066 primarily useful for importing from other SCMs.
2069 primarily useful for importing from other SCMs.
2067
2070
2068 This command is now deprecated and will be removed in a future
2071 This command is now deprecated and will be removed in a future
2069 release, please use debugsetparents and commit instead.
2072 release, please use debugsetparents and commit instead.
2070 """
2073 """
2071
2074
2072 ui.warn(_("(the rawcommit command is deprecated)\n"))
2075 ui.warn(_("(the rawcommit command is deprecated)\n"))
2073
2076
2074 message = cmdutil.logmessage(opts)
2077 message = cmdutil.logmessage(opts)
2075
2078
2076 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2079 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2077 if opts['files']:
2080 if opts['files']:
2078 files += open(opts['files']).read().splitlines()
2081 files += open(opts['files']).read().splitlines()
2079
2082
2080 parents = [repo.lookup(p) for p in opts['parent']]
2083 parents = [repo.lookup(p) for p in opts['parent']]
2081
2084
2082 try:
2085 try:
2083 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2086 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2084 except ValueError, inst:
2087 except ValueError, inst:
2085 raise util.Abort(str(inst))
2088 raise util.Abort(str(inst))
2086
2089
2087 def recover(ui, repo):
2090 def recover(ui, repo):
2088 """roll back an interrupted transaction
2091 """roll back an interrupted transaction
2089
2092
2090 Recover from an interrupted commit or pull.
2093 Recover from an interrupted commit or pull.
2091
2094
2092 This command tries to fix the repository status after an interrupted
2095 This command tries to fix the repository status after an interrupted
2093 operation. It should only be necessary when Mercurial suggests it.
2096 operation. It should only be necessary when Mercurial suggests it.
2094 """
2097 """
2095 if repo.recover():
2098 if repo.recover():
2096 return hg.verify(repo)
2099 return hg.verify(repo)
2097 return 1
2100 return 1
2098
2101
2099 def remove(ui, repo, *pats, **opts):
2102 def remove(ui, repo, *pats, **opts):
2100 """remove the specified files on the next commit
2103 """remove the specified files on the next commit
2101
2104
2102 Schedule the indicated files for removal from the repository.
2105 Schedule the indicated files for removal from the repository.
2103
2106
2104 This only removes files from the current branch, not from the
2107 This only removes files from the current branch, not from the
2105 entire project history. If the files still exist in the working
2108 entire project history. If the files still exist in the working
2106 directory, they will be deleted from it. If invoked with --after,
2109 directory, they will be deleted from it. If invoked with --after,
2107 files are marked as removed, but not actually unlinked unless --force
2110 files are marked as removed, but not actually unlinked unless --force
2108 is also given. Without exact file names, --after will only mark
2111 is also given. Without exact file names, --after will only mark
2109 files as removed if they are no longer in the working directory.
2112 files as removed if they are no longer in the working directory.
2110
2113
2111 This command schedules the files to be removed at the next commit.
2114 This command schedules the files to be removed at the next commit.
2112 To undo a remove before that, see hg revert.
2115 To undo a remove before that, see hg revert.
2113
2116
2114 Modified files and added files are not removed by default. To
2117 Modified files and added files are not removed by default. To
2115 remove them, use the -f/--force option.
2118 remove them, use the -f/--force option.
2116 """
2119 """
2117 if not opts['after'] and not pats:
2120 if not opts['after'] and not pats:
2118 raise util.Abort(_('no files specified'))
2121 raise util.Abort(_('no files specified'))
2119 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2122 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2120 exact = dict.fromkeys(files)
2123 exact = dict.fromkeys(files)
2121 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2124 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2122 modified, added, removed, deleted, unknown = mardu
2125 modified, added, removed, deleted, unknown = mardu
2123 remove, forget = [], []
2126 remove, forget = [], []
2124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2127 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2125 reason = None
2128 reason = None
2126 if abs in modified and not opts['force']:
2129 if abs in modified and not opts['force']:
2127 reason = _('is modified (use -f to force removal)')
2130 reason = _('is modified (use -f to force removal)')
2128 elif abs in added:
2131 elif abs in added:
2129 if opts['force']:
2132 if opts['force']:
2130 forget.append(abs)
2133 forget.append(abs)
2131 continue
2134 continue
2132 reason = _('has been marked for add (use -f to force removal)')
2135 reason = _('has been marked for add (use -f to force removal)')
2133 exact = 1 # force the message
2136 exact = 1 # force the message
2134 elif abs not in repo.dirstate:
2137 elif abs not in repo.dirstate:
2135 reason = _('is not managed')
2138 reason = _('is not managed')
2136 elif opts['after'] and not exact and abs not in deleted:
2139 elif opts['after'] and not exact and abs not in deleted:
2137 continue
2140 continue
2138 elif abs in removed:
2141 elif abs in removed:
2139 continue
2142 continue
2140 if reason:
2143 if reason:
2141 if exact:
2144 if exact:
2142 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2145 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2143 else:
2146 else:
2144 if ui.verbose or not exact:
2147 if ui.verbose or not exact:
2145 ui.status(_('removing %s\n') % rel)
2148 ui.status(_('removing %s\n') % rel)
2146 remove.append(abs)
2149 remove.append(abs)
2147 repo.forget(forget)
2150 repo.forget(forget)
2148 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2151 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2149
2152
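# Editorial sketch, not part of the original commands.py: remove() above
# decides per file whether to refuse, merely forget, or schedule removal.
# A hypothetical, simplified summary of that chain (the exact-match and
# already-removed cases are left out):
def _example_remove_action(abs, modified, added, deleted, force, after):
    """Return 'refuse', 'forget', 'remove' or None (skip) for one file."""
    if abs in modified and not force:
        return 'refuse'
    if abs in added:
        return force and 'forget' or 'refuse'
    if after and abs not in deleted:
        return None
    return 'remove'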
2150 def rename(ui, repo, *pats, **opts):
2153 def rename(ui, repo, *pats, **opts):
2151 """rename files; equivalent of copy + remove
2154 """rename files; equivalent of copy + remove
2152
2155
2153 Mark dest as copies of sources; mark sources for deletion. If
2156 Mark dest as copies of sources; mark sources for deletion. If
2154 dest is a directory, copies are put in that directory. If dest is
2157 dest is a directory, copies are put in that directory. If dest is
2155 a file, there can only be one source.
2158 a file, there can only be one source.
2156
2159
2157 By default, this command copies the contents of files as they
2160 By default, this command copies the contents of files as they
2158 stand in the working directory. If invoked with --after, the
2161 stand in the working directory. If invoked with --after, the
2159 operation is recorded, but no copying is performed.
2162 operation is recorded, but no copying is performed.
2160
2163
2161 This command takes effect in the next commit. To undo a rename
2164 This command takes effect in the next commit. To undo a rename
2162 before that, see hg revert.
2165 before that, see hg revert.
2163 """
2166 """
2164 wlock = repo.wlock(False)
2167 wlock = repo.wlock(False)
2165 try:
2168 try:
2166 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2169 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2167 finally:
2170 finally:
2168 del wlock
2171 del wlock
2169
2172
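# Editorial sketch, not part of the original commands.py: rename() above
# wraps the copy+remove operation in the working-directory lock and lets
# the lock go by dropping the reference.  Hypothetical reusable form:
def _example_with_wlock(repo, func):
    """Run func() while holding the repository's working-directory lock."""
    wlock = repo.wlock(False)
    try:
        return func()
    finally:
        del wlock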
2170 def revert(ui, repo, *pats, **opts):
2173 def revert(ui, repo, *pats, **opts):
2171 """restore individual files or dirs to an earlier state
2174 """restore individual files or dirs to an earlier state
2172
2175
2173 (use update -r to check out earlier revisions; revert does not
2176 (use update -r to check out earlier revisions; revert does not
2174 change the working dir parents)
2177 change the working dir parents)
2175
2178
2176 With no revision specified, revert the named files or directories
2179 With no revision specified, revert the named files or directories
2177 to the contents they had in the parent of the working directory.
2180 to the contents they had in the parent of the working directory.
2178 This restores the contents of the affected files to an unmodified
2181 This restores the contents of the affected files to an unmodified
2179 state and unschedules adds, removes, copies, and renames. If the
2182 state and unschedules adds, removes, copies, and renames. If the
2180 working directory has two parents, you must explicitly specify the
2183 working directory has two parents, you must explicitly specify the
2181 revision to revert to.
2184 revision to revert to.
2182
2185
2183 Using the -r option, revert the given files or directories to their
2186 Using the -r option, revert the given files or directories to their
2184 contents as of a specific revision. This can be helpful to "roll
2187 contents as of a specific revision. This can be helpful to "roll
2185 back" some or all of an earlier change.
2188 back" some or all of an earlier change.
2186
2189
2187 Revert modifies the working directory. It does not commit any
2190 Revert modifies the working directory. It does not commit any
2188 changes, or change the parent of the working directory. If you
2191 changes, or change the parent of the working directory. If you
2189 revert to a revision other than the parent of the working
2192 revert to a revision other than the parent of the working
2190 directory, the reverted files will thus appear modified
2193 directory, the reverted files will thus appear modified
2191 afterwards.
2194 afterwards.
2192
2195
2193 If a file has been deleted, it is restored. If the executable
2196 If a file has been deleted, it is restored. If the executable
2194 mode of a file was changed, it is reset.
2197 mode of a file was changed, it is reset.
2195
2198
2196 If names are given, all files matching the names are reverted.
2199 If names are given, all files matching the names are reverted.
2197
2200
2198 If no arguments are given, no files are reverted.
2201 If no arguments are given, no files are reverted.
2199
2202
2200 Modified files are saved with a .orig suffix before reverting.
2203 Modified files are saved with a .orig suffix before reverting.
2201 To disable these backups, use --no-backup.
2204 To disable these backups, use --no-backup.
2202 """
2205 """
2203
2206
2204 if opts["date"]:
2207 if opts["date"]:
2205 if opts["rev"]:
2208 if opts["rev"]:
2206 raise util.Abort(_("you can't specify a revision and a date"))
2209 raise util.Abort(_("you can't specify a revision and a date"))
2207 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2210 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2208
2211
2209 if not pats and not opts['all']:
2212 if not pats and not opts['all']:
2210 raise util.Abort(_('no files or directories specified; '
2213 raise util.Abort(_('no files or directories specified; '
2211 'use --all to revert the whole repo'))
2214 'use --all to revert the whole repo'))
2212
2215
2213 parent, p2 = repo.dirstate.parents()
2216 parent, p2 = repo.dirstate.parents()
2214 if not opts['rev'] and p2 != nullid:
2217 if not opts['rev'] and p2 != nullid:
2215 raise util.Abort(_('uncommitted merge - please provide a '
2218 raise util.Abort(_('uncommitted merge - please provide a '
2216 'specific revision'))
2219 'specific revision'))
2217 ctx = repo.changectx(opts['rev'])
2220 ctx = repo.changectx(opts['rev'])
2218 node = ctx.node()
2221 node = ctx.node()
2219 mf = ctx.manifest()
2222 mf = ctx.manifest()
2220 if node == parent:
2223 if node == parent:
2221 pmf = mf
2224 pmf = mf
2222 else:
2225 else:
2223 pmf = None
2226 pmf = None
2224
2227
2225 # need all matching names in dirstate and manifest of target rev,
2228 # need all matching names in dirstate and manifest of target rev,
2226 # so have to walk both. do not print errors if files exist in one
2229 # so have to walk both. do not print errors if files exist in one
2227 # but not other.
2230 # but not other.
2228
2231
2229 names = {}
2232 names = {}
2230 target_only = {}
2233 target_only = {}
2231
2234
2232 wlock = repo.wlock()
2235 wlock = repo.wlock()
2233 try:
2236 try:
2234 # walk dirstate.
2237 # walk dirstate.
2235 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2238 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2236 badmatch=mf.has_key):
2239 badmatch=mf.has_key):
2237 names[abs] = (rel, exact)
2240 names[abs] = (rel, exact)
2238 if src == 'b':
2241 if src == 'b':
2239 target_only[abs] = True
2242 target_only[abs] = True
2240
2243
2241 # walk target manifest.
2244 # walk target manifest.
2242
2245
2243 def badmatch(path):
2246 def badmatch(path):
2244 if path in names:
2247 if path in names:
2245 return True
2248 return True
2246 path_ = path + '/'
2249 path_ = path + '/'
2247 for f in names:
2250 for f in names:
2248 if f.startswith(path_):
2251 if f.startswith(path_):
2249 return True
2252 return True
2250 return False
2253 return False
2251
2254
2252 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2255 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2253 badmatch=badmatch):
2256 badmatch=badmatch):
2254 if abs in names or src == 'b':
2257 if abs in names or src == 'b':
2255 continue
2258 continue
2256 names[abs] = (rel, exact)
2259 names[abs] = (rel, exact)
2257 target_only[abs] = True
2260 target_only[abs] = True
2258
2261
2259 changes = repo.status(match=names.has_key)[:5]
2262 changes = repo.status(match=names.has_key)[:5]
2260 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2263 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2261
2264
2262 # if f is a rename, also revert the source
2265 # if f is a rename, also revert the source
2263 cwd = repo.getcwd()
2266 cwd = repo.getcwd()
2264 for f in added:
2267 for f in added:
2265 src = repo.dirstate.copied(f)
2268 src = repo.dirstate.copied(f)
2266 if src and src not in names and repo.dirstate[src] == 'r':
2269 if src and src not in names and repo.dirstate[src] == 'r':
2267 removed[src] = None
2270 removed[src] = None
2268 names[src] = (repo.pathto(src, cwd), True)
2271 names[src] = (repo.pathto(src, cwd), True)
2269
2272
2270 revert = ([], _('reverting %s\n'))
2273 revert = ([], _('reverting %s\n'))
2271 add = ([], _('adding %s\n'))
2274 add = ([], _('adding %s\n'))
2272 remove = ([], _('removing %s\n'))
2275 remove = ([], _('removing %s\n'))
2273 forget = ([], _('forgetting %s\n'))
2276 forget = ([], _('forgetting %s\n'))
2274 undelete = ([], _('undeleting %s\n'))
2277 undelete = ([], _('undeleting %s\n'))
2275 update = {}
2278 update = {}
2276
2279
2277 disptable = (
2280 disptable = (
2278 # dispatch table:
2281 # dispatch table:
2279 # file state
2282 # file state
2280 # action if in target manifest
2283 # action if in target manifest
2281 # action if not in target manifest
2284 # action if not in target manifest
2282 # make backup if in target manifest
2285 # make backup if in target manifest
2283 # make backup if not in target manifest
2286 # make backup if not in target manifest
2284 (modified, revert, remove, True, True),
2287 (modified, revert, remove, True, True),
2285 (added, revert, forget, True, False),
2288 (added, revert, forget, True, False),
2286 (removed, undelete, None, False, False),
2289 (removed, undelete, None, False, False),
2287 (deleted, revert, remove, False, False),
2290 (deleted, revert, remove, False, False),
2288 (unknown, add, None, True, False),
2291 (unknown, add, None, True, False),
2289 (target_only, add, None, False, False),
2292 (target_only, add, None, False, False),
2290 )
2293 )
2291
2294
2292 entries = names.items()
2295 entries = names.items()
2293 entries.sort()
2296 entries.sort()
2294
2297
2295 for abs, (rel, exact) in entries:
2298 for abs, (rel, exact) in entries:
2296 mfentry = mf.get(abs)
2299 mfentry = mf.get(abs)
2297 target = repo.wjoin(abs)
2300 target = repo.wjoin(abs)
2298 def handle(xlist, dobackup):
2301 def handle(xlist, dobackup):
2299 xlist[0].append(abs)
2302 xlist[0].append(abs)
2300 update[abs] = 1
2303 update[abs] = 1
2301 if dobackup and not opts['no_backup'] and util.lexists(target):
2304 if dobackup and not opts['no_backup'] and util.lexists(target):
2302 bakname = "%s.orig" % rel
2305 bakname = "%s.orig" % rel
2303 ui.note(_('saving current version of %s as %s\n') %
2306 ui.note(_('saving current version of %s as %s\n') %
2304 (rel, bakname))
2307 (rel, bakname))
2305 if not opts.get('dry_run'):
2308 if not opts.get('dry_run'):
2306 util.copyfile(target, bakname)
2309 util.copyfile(target, bakname)
2307 if ui.verbose or not exact:
2310 if ui.verbose or not exact:
2308 ui.status(xlist[1] % rel)
2311 ui.status(xlist[1] % rel)
2309 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2312 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2310 if abs not in table: continue
2313 if abs not in table: continue
2311 # file has changed in dirstate
2314 # file has changed in dirstate
2312 if mfentry:
2315 if mfentry:
2313 handle(hitlist, backuphit)
2316 handle(hitlist, backuphit)
2314 elif misslist is not None:
2317 elif misslist is not None:
2315 handle(misslist, backupmiss)
2318 handle(misslist, backupmiss)
2316 else:
2319 else:
2317 if exact: ui.warn(_('file not managed: %s\n') % rel)
2320 if exact: ui.warn(_('file not managed: %s\n') % rel)
2318 break
2321 break
2319 else:
2322 else:
2320 # file has not changed in dirstate
2323 # file has not changed in dirstate
2321 if node == parent:
2324 if node == parent:
2322 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2325 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2323 continue
2326 continue
2324 if pmf is None:
2327 if pmf is None:
2325 # only need parent manifest in this unlikely case,
2328 # only need parent manifest in this unlikely case,
2326 # so do not read by default
2329 # so do not read by default
2327 pmf = repo.changectx(parent).manifest()
2330 pmf = repo.changectx(parent).manifest()
2328 if abs in pmf:
2331 if abs in pmf:
2329 if mfentry:
2332 if mfentry:
2330 # if version of file is same in parent and target
2333 # if version of file is same in parent and target
2331 # manifests, do nothing
2334 # manifests, do nothing
2332 if pmf[abs] != mfentry:
2335 if pmf[abs] != mfentry:
2333 handle(revert, False)
2336 handle(revert, False)
2334 else:
2337 else:
2335 handle(remove, False)
2338 handle(remove, False)
2336
2339
2337 if not opts.get('dry_run'):
2340 if not opts.get('dry_run'):
2338 for f in forget[0]:
2341 for f in forget[0]:
2339 repo.dirstate.forget(f)
2342 repo.dirstate.forget(f)
2340 r = hg.revert(repo, node, update.has_key)
2343 r = hg.revert(repo, node, update.has_key)
2341 for f in add[0]:
2344 for f in add[0]:
2342 repo.dirstate.add(f)
2345 repo.dirstate.add(f)
2343 for f in undelete[0]:
2346 for f in undelete[0]:
2344 repo.dirstate.normal(f)
2347 repo.dirstate.normal(f)
2345 for f in remove[0]:
2348 for f in remove[0]:
2346 repo.dirstate.remove(f)
2349 repo.dirstate.remove(f)
2347 return r
2350 return r
2348 finally:
2351 finally:
2349 del wlock
2352 del wlock
2350
2353
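# Editorial sketch, not part of the original commands.py: revert() above
# is table-driven -- each row pairs a file-state dict with the action to
# take.  A hypothetical stripped-down version of that lookup:
def _example_dispatch(disptable, abs):
    """Return the action from the first row whose state dict contains abs."""
    for statedict, action in disptable:
        if abs in statedict:
            return action
    return None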
2351 def rollback(ui, repo):
2354 def rollback(ui, repo):
2352 """roll back the last transaction
2355 """roll back the last transaction
2353
2356
2354 This command should be used with care. There is only one level of
2357 This command should be used with care. There is only one level of
2355 rollback, and there is no way to undo a rollback. It will also
2358 rollback, and there is no way to undo a rollback. It will also
2356 restore the dirstate at the time of the last transaction, losing
2359 restore the dirstate at the time of the last transaction, losing
2357 any dirstate changes since that time.
2360 any dirstate changes since that time.
2358
2361
2359 Transactions are used to encapsulate the effects of all commands
2362 Transactions are used to encapsulate the effects of all commands
2360 that create new changesets or propagate existing changesets into a
2363 that create new changesets or propagate existing changesets into a
2361 repository. For example, the following commands are transactional,
2364 repository. For example, the following commands are transactional,
2362 and their effects can be rolled back:
2365 and their effects can be rolled back:
2363
2366
2364 commit
2367 commit
2365 import
2368 import
2366 pull
2369 pull
2367 push (with this repository as destination)
2370 push (with this repository as destination)
2368 unbundle
2371 unbundle
2369
2372
2370 This command is not intended for use on public repositories. Once
2373 This command is not intended for use on public repositories. Once
2371 changes are visible for pull by other users, rolling a transaction
2374 changes are visible for pull by other users, rolling a transaction
2372 back locally is ineffective (someone else may already have pulled
2375 back locally is ineffective (someone else may already have pulled
2373 the changes). Furthermore, a race is possible with readers of the
2376 the changes). Furthermore, a race is possible with readers of the
2374 repository; for example an in-progress pull from the repository
2377 repository; for example an in-progress pull from the repository
2375 may fail if a rollback is performed.
2378 may fail if a rollback is performed.
2376 """
2379 """
2377 repo.rollback()
2380 repo.rollback()
2378
2381
2379 def root(ui, repo):
2382 def root(ui, repo):
2380 """print the root (top) of the current working dir
2383 """print the root (top) of the current working dir
2381
2384
2382 Print the root directory of the current repository.
2385 Print the root directory of the current repository.
2383 """
2386 """
2384 ui.write(repo.root + "\n")
2387 ui.write(repo.root + "\n")
2385
2388
2386 def serve(ui, repo, **opts):
2389 def serve(ui, repo, **opts):
2387 """export the repository via HTTP
2390 """export the repository via HTTP
2388
2391
2389 Start a local HTTP repository browser and pull server.
2392 Start a local HTTP repository browser and pull server.
2390
2393
2391 By default, the server logs accesses to stdout and errors to
2394 By default, the server logs accesses to stdout and errors to
2392 stderr. Use the "-A" and "-E" options to log to files.
2395 stderr. Use the "-A" and "-E" options to log to files.
2393 """
2396 """
2394
2397
2395 if opts["stdio"]:
2398 if opts["stdio"]:
2396 if repo is None:
2399 if repo is None:
2397 raise hg.RepoError(_("There is no Mercurial repository here"
2400 raise hg.RepoError(_("There is no Mercurial repository here"
2398 " (.hg not found)"))
2401 " (.hg not found)"))
2399 s = sshserver.sshserver(ui, repo)
2402 s = sshserver.sshserver(ui, repo)
2400 s.serve_forever()
2403 s.serve_forever()
2401
2404
2402 parentui = ui.parentui or ui
2405 parentui = ui.parentui or ui
2403 optlist = ("name templates style address port prefix ipv6"
2406 optlist = ("name templates style address port prefix ipv6"
2404 " accesslog errorlog webdir_conf certificate")
2407 " accesslog errorlog webdir_conf certificate")
2405 for o in optlist.split():
2408 for o in optlist.split():
2406 if opts[o]:
2409 if opts[o]:
2407 parentui.setconfig("web", o, str(opts[o]))
2410 parentui.setconfig("web", o, str(opts[o]))
2408 if (repo is not None) and (repo.ui != parentui):
2411 if (repo is not None) and (repo.ui != parentui):
2409 repo.ui.setconfig("web", o, str(opts[o]))
2412 repo.ui.setconfig("web", o, str(opts[o]))
2410
2413
2411 if repo is None and not ui.config("web", "webdir_conf"):
2414 if repo is None and not ui.config("web", "webdir_conf"):
2412 raise hg.RepoError(_("There is no Mercurial repository here"
2415 raise hg.RepoError(_("There is no Mercurial repository here"
2413 " (.hg not found)"))
2416 " (.hg not found)"))
2414
2417
2415 class service:
2418 class service:
2416 def init(self):
2419 def init(self):
2417 util.set_signal_handler()
2420 util.set_signal_handler()
2418 try:
2421 try:
2419 self.httpd = hgweb.server.create_server(parentui, repo)
2422 self.httpd = hgweb.server.create_server(parentui, repo)
2420 except socket.error, inst:
2423 except socket.error, inst:
2421 raise util.Abort(_('cannot start server: ') + inst.args[1])
2424 raise util.Abort(_('cannot start server: ') + inst.args[1])
2422
2425
2423 if not ui.verbose: return
2426 if not ui.verbose: return
2424
2427
2425 if self.httpd.prefix:
2428 if self.httpd.prefix:
2426 prefix = self.httpd.prefix.strip('/') + '/'
2429 prefix = self.httpd.prefix.strip('/') + '/'
2427 else:
2430 else:
2428 prefix = ''
2431 prefix = ''
2429
2432
2430 if self.httpd.port != 80:
2433 if self.httpd.port != 80:
2431 ui.status(_('listening at http://%s:%d/%s\n') %
2434 ui.status(_('listening at http://%s:%d/%s\n') %
2432 (self.httpd.addr, self.httpd.port, prefix))
2435 (self.httpd.addr, self.httpd.port, prefix))
2433 else:
2436 else:
2434 ui.status(_('listening at http://%s/%s\n') %
2437 ui.status(_('listening at http://%s/%s\n') %
2435 (self.httpd.addr, prefix))
2438 (self.httpd.addr, prefix))
2436
2439
2437 def run(self):
2440 def run(self):
2438 self.httpd.serve_forever()
2441 self.httpd.serve_forever()
2439
2442
2440 service = service()
2443 service = service()
2441
2444
2442 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2445 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2443
2446
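# Editorial sketch, not part of the original commands.py: serve() above
# copies selected command-line options into the [web] configuration
# section before starting the HTTP server.  Hypothetical condensed form:
def _example_web_overrides(ui, opts, names):
    """Push non-empty options into the [web] config section."""
    for o in names:
        if opts.get(o):
            ui.setconfig("web", o, str(opts[o]))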
2444 def status(ui, repo, *pats, **opts):
2447 def status(ui, repo, *pats, **opts):
2445 """show changed files in the working directory
2448 """show changed files in the working directory
2446
2449
2447 Show status of files in the repository. If names are given, only
2450 Show status of files in the repository. If names are given, only
2448 files that match are shown. Files that are clean, ignored, or the
2451 files that match are shown. Files that are clean, ignored, or the
2449 source of a copy/move operation are not listed unless -c (clean),
2452 source of a copy/move operation are not listed unless -c (clean),
2450 -i (ignored), -C (copies) or -A is given. Unless options described
2453 -i (ignored), -C (copies) or -A is given. Unless options described
2451 with "show only ..." are given, the options -mardu are used.
2454 with "show only ..." are given, the options -mardu are used.
2452
2455
2453 NOTE: status may appear to disagree with diff if permissions have
2456 NOTE: status may appear to disagree with diff if permissions have
2454 changed or a merge has occurred. The standard diff format does not
2457 changed or a merge has occurred. The standard diff format does not
2455 report permission changes and diff only reports changes relative
2458 report permission changes and diff only reports changes relative
2456 to one merge parent.
2459 to one merge parent.
2457
2460
2458 If one revision is given, it is used as the base revision.
2461 If one revision is given, it is used as the base revision.
2459 If two revisions are given, the difference between them is shown.
2462 If two revisions are given, the difference between them is shown.
2460
2463
2461 The codes used to show the status of files are:
2464 The codes used to show the status of files are:
2462 M = modified
2465 M = modified
2463 A = added
2466 A = added
2464 R = removed
2467 R = removed
2465 C = clean
2468 C = clean
2466 ! = deleted, but still tracked
2469 ! = deleted, but still tracked
2467 ? = not tracked
2470 ? = not tracked
2468 I = ignored
2471 I = ignored
2469 = the previous added file was copied from here
2472 = the previous added file was copied from here
2470 """
2473 """
2471
2474
2472 all = opts['all']
2475 all = opts['all']
2473 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2476 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2474
2477
2475 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2478 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2476 cwd = (pats and repo.getcwd()) or ''
2479 cwd = (pats and repo.getcwd()) or ''
2477 modified, added, removed, deleted, unknown, ignored, clean = [
2480 modified, added, removed, deleted, unknown, ignored, clean = [
2478 n for n in repo.status(node1=node1, node2=node2, files=files,
2481 n for n in repo.status(node1=node1, node2=node2, files=files,
2479 match=matchfn,
2482 match=matchfn,
2480 list_ignored=all or opts['ignored'],
2483 list_ignored=all or opts['ignored'],
2481 list_clean=all or opts['clean'])]
2484 list_clean=all or opts['clean'])]
2482
2485
2483 changetypes = (('modified', 'M', modified),
2486 changetypes = (('modified', 'M', modified),
2484 ('added', 'A', added),
2487 ('added', 'A', added),
2485 ('removed', 'R', removed),
2488 ('removed', 'R', removed),
2486 ('deleted', '!', deleted),
2489 ('deleted', '!', deleted),
2487 ('unknown', '?', unknown),
2490 ('unknown', '?', unknown),
2488 ('ignored', 'I', ignored))
2491 ('ignored', 'I', ignored))
2489
2492
2490 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2493 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2491
2494
2492 end = opts['print0'] and '\0' or '\n'
2495 end = opts['print0'] and '\0' or '\n'
2493
2496
2494 for opt, char, changes in ([ct for ct in explicit_changetypes
2497 for opt, char, changes in ([ct for ct in explicit_changetypes
2495 if all or opts[ct[0]]]
2498 if all or opts[ct[0]]]
2496 or changetypes):
2499 or changetypes):
2497 if opts['no_status']:
2500 if opts['no_status']:
2498 format = "%%s%s" % end
2501 format = "%%s%s" % end
2499 else:
2502 else:
2500 format = "%s %%s%s" % (char, end)
2503 format = "%s %%s%s" % (char, end)
2501
2504
2502 for f in changes:
2505 for f in changes:
2503 ui.write(format % repo.pathto(f, cwd))
2506 ui.write(format % repo.pathto(f, cwd))
2504 if ((all or opts.get('copies')) and not opts.get('no_status')):
2507 if ((all or opts.get('copies')) and not opts.get('no_status')):
2505 copied = repo.dirstate.copied(f)
2508 copied = repo.dirstate.copied(f)
2506 if copied:
2509 if copied:
2507 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2510 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2508
2511
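# Editorial sketch, not part of the original commands.py: status() above
# renders each entry as an optional single-letter state code, the path,
# and either a newline or a NUL terminator for -0 consumers.  Hypothetical
# per-line form of that formatting:
def _example_status_line(char, name, no_status=False, print0=False):
    """Format one status entry the way status() writes it."""
    end = print0 and '\0' or '\n'
    if no_status:
        return "%s%s" % (name, end)
    return "%s %s%s" % (char, name, end)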
2509 def tag(ui, repo, name, rev_=None, **opts):
2512 def tag(ui, repo, name, rev_=None, **opts):
2510 """add a tag for the current or given revision
2513 """add a tag for the current or given revision
2511
2514
2512 Name a particular revision using <name>.
2515 Name a particular revision using <name>.
2513
2516
2514 Tags are used to name particular revisions of the repository and are
2517 Tags are used to name particular revisions of the repository and are
2515 very useful to compare different revisions, to go back to significant
2518 very useful to compare different revisions, to go back to significant
2516 earlier versions or to mark branch points as releases, etc.
2519 earlier versions or to mark branch points as releases, etc.
2517
2520
2518 If no revision is given, the parent of the working directory is used,
2521 If no revision is given, the parent of the working directory is used,
2519 or tip if no revision is checked out.
2522 or tip if no revision is checked out.
2520
2523
2521 To facilitate version control, distribution, and merging of tags,
2524 To facilitate version control, distribution, and merging of tags,
2522 they are stored as a file named ".hgtags" which is managed
2525 they are stored as a file named ".hgtags" which is managed
2523 similarly to other project files and can be hand-edited if
2526 similarly to other project files and can be hand-edited if
2524 necessary. The file '.hg/localtags' is used for local tags (not
2527 necessary. The file '.hg/localtags' is used for local tags (not
2525 shared among repositories).
2528 shared among repositories).
2526 """
2529 """
2527 if name in ['tip', '.', 'null']:
2530 if name in ['tip', '.', 'null']:
2528 raise util.Abort(_("the name '%s' is reserved") % name)
2531 raise util.Abort(_("the name '%s' is reserved") % name)
2529 if rev_ is not None:
2532 if rev_ is not None:
2530 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2533 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2531 "please use 'hg tag [-r REV] NAME' instead\n"))
2534 "please use 'hg tag [-r REV] NAME' instead\n"))
2532 if opts['rev']:
2535 if opts['rev']:
2533 raise util.Abort(_("use only one form to specify the revision"))
2536 raise util.Abort(_("use only one form to specify the revision"))
2534 if opts['rev'] and opts['remove']:
2537 if opts['rev'] and opts['remove']:
2535 raise util.Abort(_("--rev and --remove are incompatible"))
2538 raise util.Abort(_("--rev and --remove are incompatible"))
2536 if opts['rev']:
2539 if opts['rev']:
2537 rev_ = opts['rev']
2540 rev_ = opts['rev']
2538 message = opts['message']
2541 message = opts['message']
2539 if opts['remove']:
2542 if opts['remove']:
2540 tagtype = repo.tagtype(name)
2543 tagtype = repo.tagtype(name)
2541
2544
2542 if not tagtype:
2545 if not tagtype:
2543 raise util.Abort(_('tag %s does not exist') % name)
2546 raise util.Abort(_('tag %s does not exist') % name)
2544 if opts['local'] and tagtype == 'global':
2547 if opts['local'] and tagtype == 'global':
2545 raise util.Abort(_('%s tag is global') % name)
2548 raise util.Abort(_('%s tag is global') % name)
2546 if not opts['local'] and tagtype == 'local':
2549 if not opts['local'] and tagtype == 'local':
2547 raise util.Abort(_('%s tag is local') % name)
2550 raise util.Abort(_('%s tag is local') % name)
2548
2551
2549 rev_ = nullid
2552 rev_ = nullid
2550 if not message:
2553 if not message:
2551 message = _('Removed tag %s') % name
2554 message = _('Removed tag %s') % name
2552 elif name in repo.tags() and not opts['force']:
2555 elif name in repo.tags() and not opts['force']:
2553 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2556 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2554 % name)
2557 % name)
2555 if not rev_ and repo.dirstate.parents()[1] != nullid:
2558 if not rev_ and repo.dirstate.parents()[1] != nullid:
2556 raise util.Abort(_('uncommitted merge - please provide a '
2559 raise util.Abort(_('uncommitted merge - please provide a '
2557 'specific revision'))
2560 'specific revision'))
2558 r = repo.changectx(rev_).node()
2561 r = repo.changectx(rev_).node()
2559
2562
2560 if not message:
2563 if not message:
2561 message = _('Added tag %s for changeset %s') % (name, short(r))
2564 message = _('Added tag %s for changeset %s') % (name, short(r))
2562
2565
2563 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2566 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2564
2567
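# (Added note, not part of the original module.)  Illustrative invocations
# matching the synopsis registered for this command further down:
#
#     hg tag v1.0                # tag the working directory's parent revision
#     hg tag -r 42 v1.0          # tag revision 42 explicitly
#     hg tag -l wip              # repository-local tag, kept in .hg/localtags
#     hg tag --remove v1.0       # remove an existing tag
#
# The positional 'hg tag NAME REV' form still works but, as the warning
# above shows, it is deprecated in favour of 'hg tag -r REV NAME'.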
2565 def tags(ui, repo):
2568 def tags(ui, repo):
2566 """list repository tags
2569 """list repository tags
2567
2570
2568 List the repository tags.
2571 List the repository tags.
2569
2572
2570 This lists both regular and local tags. When the -v/--verbose switch
2573 This lists both regular and local tags. When the -v/--verbose switch
2571 is used, a third column "local" is printed for local tags.
2574 is used, a third column "local" is printed for local tags.
2572 """
2575 """
2573
2576
2574 l = repo.tagslist()
2577 l = repo.tagslist()
2575 l.reverse()
2578 l.reverse()
2576 hexfunc = ui.debugflag and hex or short
2579 hexfunc = ui.debugflag and hex or short
2577 tagtype = ""
2580 tagtype = ""
2578
2581
2579 for t, n in l:
2582 for t, n in l:
2580 if ui.quiet:
2583 if ui.quiet:
2581 ui.write("%s\n" % t)
2584 ui.write("%s\n" % t)
2582 continue
2585 continue
2583
2586
2584 try:
2587 try:
2585 hn = hexfunc(n)
2588 hn = hexfunc(n)
2586 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2589 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2587 except revlog.LookupError:
2590 except revlog.LookupError:
2588 r = " ?:%s" % hn
2591 r = " ?:%s" % hn
2589 else:
2592 else:
2590 spaces = " " * (30 - util.locallen(t))
2593 spaces = " " * (30 - util.locallen(t))
2591 if ui.verbose:
2594 if ui.verbose:
2592 if repo.tagtype(t) == 'local':
2595 if repo.tagtype(t) == 'local':
2593 tagtype = " local"
2596 tagtype = " local"
2594 else:
2597 else:
2595 tagtype = ""
2598 tagtype = ""
2596 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2599 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2597
2600
2598 def tip(ui, repo, **opts):
2601 def tip(ui, repo, **opts):
2599 """show the tip revision
2602 """show the tip revision
2600
2603
2601 Show the tip revision.
2604 Show the tip revision.
2602 """
2605 """
2603 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2606 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2604
2607
2605 def unbundle(ui, repo, fname1, *fnames, **opts):
2608 def unbundle(ui, repo, fname1, *fnames, **opts):
2606 """apply one or more changegroup files
2609 """apply one or more changegroup files
2607
2610
2608 Apply one or more compressed changegroup files generated by the
2611 Apply one or more compressed changegroup files generated by the
2609 bundle command.
2612 bundle command.
2610 """
2613 """
2611 fnames = (fname1,) + fnames
2614 fnames = (fname1,) + fnames
2612 for fname in fnames:
2615 for fname in fnames:
2613 if os.path.exists(fname):
2616 if os.path.exists(fname):
2614 f = open(fname, "rb")
2617 f = open(fname, "rb")
2615 else:
2618 else:
2616 f = urllib.urlopen(fname)
2619 f = urllib.urlopen(fname)
2617 gen = changegroup.readbundle(f, fname)
2620 gen = changegroup.readbundle(f, fname)
2618 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2621 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2619
2622
2620 return postincoming(ui, repo, modheads, opts['update'], None)
2623 return postincoming(ui, repo, modheads, opts['update'], None)
2621
2624
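# (Added note, not part of the original module.)  Each FILE argument may be
# a local path or a URL; the branch above opens local files directly and
# falls back to urllib for anything else, e.g. (URL is illustrative):
#
#     hg unbundle changes.hg
#     hg unbundle -u http://example.com/nightly/changes.hg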
2622 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2625 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2623 """update working directory
2626 """update working directory
2624
2627
2625 Update the working directory to the specified revision, or the
2628 Update the working directory to the specified revision, or the
2626 tip of the current branch if none is specified.
2629 tip of the current branch if none is specified.
2627
2630
2628 If there are no outstanding changes in the working directory and
2631 If there are no outstanding changes in the working directory and
2629 there is a linear relationship between the current version and the
2632 there is a linear relationship between the current version and the
2630 requested version, the result is the requested version.
2633 requested version, the result is the requested version.
2631
2634
2632 To merge the working directory with another revision, use the
2635 To merge the working directory with another revision, use the
2633 merge command.
2636 merge command.
2634
2637
2635 By default, update will refuse to run if doing so would require
2638 By default, update will refuse to run if doing so would require
2636 discarding local changes.
2639 discarding local changes.
2637 """
2640 """
2638 if rev and node:
2641 if rev and node:
2639 raise util.Abort(_("please specify just one revision"))
2642 raise util.Abort(_("please specify just one revision"))
2640
2643
2641 if not rev:
2644 if not rev:
2642 rev = node
2645 rev = node
2643
2646
2644 if date:
2647 if date:
2645 if rev:
2648 if rev:
2646 raise util.Abort(_("you can't specify a revision and a date"))
2649 raise util.Abort(_("you can't specify a revision and a date"))
2647 rev = cmdutil.finddate(ui, repo, date)
2650 rev = cmdutil.finddate(ui, repo, date)
2648
2651
2649 if clean:
2652 if clean:
2650 return hg.clean(repo, rev)
2653 return hg.clean(repo, rev)
2651 else:
2654 else:
2652 return hg.update(repo, rev)
2655 return hg.update(repo, rev)
2653
2656
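# (Added note, not part of the original module.)  Illustrative invocations;
# --rev, --date and --clean correspond to the checks above:
#
#     hg update                   # move to the tip of the current branch
#     hg update -r 1.0            # move to a specific revision (tag or rev)
#     hg update -d 2008-01-01     # tipmost revision matching the date
#     hg update -C                # discard local changes while updating
#
# Passing both a revision and a date aborts, as does giving two revisions.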
2654 def verify(ui, repo):
2657 def verify(ui, repo):
2655 """verify the integrity of the repository
2658 """verify the integrity of the repository
2656
2659
2657 Verify the integrity of the current repository.
2660 Verify the integrity of the current repository.
2658
2661
2659 This will perform an extensive check of the repository's
2662 This will perform an extensive check of the repository's
2660 integrity, validating the hashes and checksums of each entry in
2663 integrity, validating the hashes and checksums of each entry in
2661 the changelog, manifest, and tracked files, as well as the
2664 the changelog, manifest, and tracked files, as well as the
2662 integrity of their crosslinks and indices.
2665 integrity of their crosslinks and indices.
2663 """
2666 """
2664 return hg.verify(repo)
2667 return hg.verify(repo)
2665
2668
2666 def version_(ui):
2669 def version_(ui):
2667 """output version and copyright information"""
2670 """output version and copyright information"""
2668 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2671 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2669 % version.get_version())
2672 % version.get_version())
2670 ui.status(_(
2673 ui.status(_(
2671 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2674 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2672 "This is free software; see the source for copying conditions. "
2675 "This is free software; see the source for copying conditions. "
2673 "There is NO\nwarranty; "
2676 "There is NO\nwarranty; "
2674 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2677 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2675 ))
2678 ))
2676
2679
2677 # Command options and aliases are listed here, alphabetically
2680 # Command options and aliases are listed here, alphabetically
2678
2681
2679 globalopts = [
2682 globalopts = [
2680 ('R', 'repository', '',
2683 ('R', 'repository', '',
2681 _('repository root directory or symbolic path name')),
2684 _('repository root directory or symbolic path name')),
2682 ('', 'cwd', '', _('change working directory')),
2685 ('', 'cwd', '', _('change working directory')),
2683 ('y', 'noninteractive', None,
2686 ('y', 'noninteractive', None,
2684 _('do not prompt, assume \'yes\' for any required answers')),
2687 _('do not prompt, assume \'yes\' for any required answers')),
2685 ('q', 'quiet', None, _('suppress output')),
2688 ('q', 'quiet', None, _('suppress output')),
2686 ('v', 'verbose', None, _('enable additional output')),
2689 ('v', 'verbose', None, _('enable additional output')),
2687 ('', 'config', [], _('set/override config option')),
2690 ('', 'config', [], _('set/override config option')),
2688 ('', 'debug', None, _('enable debugging output')),
2691 ('', 'debug', None, _('enable debugging output')),
2689 ('', 'debugger', None, _('start debugger')),
2692 ('', 'debugger', None, _('start debugger')),
2690 ('', 'encoding', util._encoding, _('set the charset encoding')),
2693 ('', 'encoding', util._encoding, _('set the charset encoding')),
2691 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2694 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2692 ('', 'lsprof', None, _('print improved command execution profile')),
2695 ('', 'lsprof', None, _('print improved command execution profile')),
2693 ('', 'traceback', None, _('print traceback on exception')),
2696 ('', 'traceback', None, _('print traceback on exception')),
2694 ('', 'time', None, _('time how long the command takes')),
2697 ('', 'time', None, _('time how long the command takes')),
2695 ('', 'profile', None, _('print command execution profile')),
2698 ('', 'profile', None, _('print command execution profile')),
2696 ('', 'version', None, _('output version information and exit')),
2699 ('', 'version', None, _('output version information and exit')),
2697 ('h', 'help', None, _('display help and exit')),
2700 ('h', 'help', None, _('display help and exit')),
2698 ]
2701 ]
2699
2702
2700 dryrunopts = [('n', 'dry-run', None,
2703 dryrunopts = [('n', 'dry-run', None,
2701 _('do not perform actions, just print output'))]
2704 _('do not perform actions, just print output'))]
2702
2705
2703 remoteopts = [
2706 remoteopts = [
2704 ('e', 'ssh', '', _('specify ssh command to use')),
2707 ('e', 'ssh', '', _('specify ssh command to use')),
2705 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2708 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2706 ]
2709 ]
2707
2710
2708 walkopts = [
2711 walkopts = [
2709 ('I', 'include', [], _('include names matching the given patterns')),
2712 ('I', 'include', [], _('include names matching the given patterns')),
2710 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2713 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2711 ]
2714 ]
2712
2715
2713 commitopts = [
2716 commitopts = [
2714 ('m', 'message', '', _('use <text> as commit message')),
2717 ('m', 'message', '', _('use <text> as commit message')),
2715 ('l', 'logfile', '', _('read commit message from <file>')),
2718 ('l', 'logfile', '', _('read commit message from <file>')),
2716 ]
2719 ]
2717
2720
2718 commitopts2 = [
2721 commitopts2 = [
2719 ('d', 'date', '', _('record datecode as commit date')),
2722 ('d', 'date', '', _('record datecode as commit date')),
2720 ('u', 'user', '', _('record user as committer')),
2723 ('u', 'user', '', _('record user as committer')),
2721 ]
2724 ]
2722
2725
2723 table = {
2726 table = {
2724 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2727 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2725 "addremove":
2728 "addremove":
2726 (addremove,
2729 (addremove,
2727 [('s', 'similarity', '',
2730 [('s', 'similarity', '',
2728 _('guess renamed files by similarity (0<=s<=100)')),
2731 _('guess renamed files by similarity (0<=s<=100)')),
2729 ] + walkopts + dryrunopts,
2732 ] + walkopts + dryrunopts,
2730 _('hg addremove [OPTION]... [FILE]...')),
2733 _('hg addremove [OPTION]... [FILE]...')),
2731 "^annotate":
2734 "^annotate":
2732 (annotate,
2735 (annotate,
2733 [('r', 'rev', '', _('annotate the specified revision')),
2736 [('r', 'rev', '', _('annotate the specified revision')),
2734 ('f', 'follow', None, _('follow file copies and renames')),
2737 ('f', 'follow', None, _('follow file copies and renames')),
2735 ('a', 'text', None, _('treat all files as text')),
2738 ('a', 'text', None, _('treat all files as text')),
2736 ('u', 'user', None, _('list the author')),
2739 ('u', 'user', None, _('list the author')),
2737 ('d', 'date', None, _('list the date')),
2740 ('d', 'date', None, _('list the date')),
2738 ('n', 'number', None, _('list the revision number (default)')),
2741 ('n', 'number', None, _('list the revision number (default)')),
2739 ('c', 'changeset', None, _('list the changeset')),
2742 ('c', 'changeset', None, _('list the changeset')),
2740 ('l', 'line-number', None,
2743 ('l', 'line-number', None,
2741 _('show line number at the first appearance'))
2744 _('show line number at the first appearance'))
2742 ] + walkopts,
2745 ] + walkopts,
2743 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2746 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2744 "archive":
2747 "archive":
2745 (archive,
2748 (archive,
2746 [('', 'no-decode', None, _('do not pass files through decoders')),
2749 [('', 'no-decode', None, _('do not pass files through decoders')),
2747 ('p', 'prefix', '', _('directory prefix for files in archive')),
2750 ('p', 'prefix', '', _('directory prefix for files in archive')),
2748 ('r', 'rev', '', _('revision to distribute')),
2751 ('r', 'rev', '', _('revision to distribute')),
2749 ('t', 'type', '', _('type of distribution to create')),
2752 ('t', 'type', '', _('type of distribution to create')),
2750 ] + walkopts,
2753 ] + walkopts,
2751 _('hg archive [OPTION]... DEST')),
2754 _('hg archive [OPTION]... DEST')),
2752 "backout":
2755 "backout":
2753 (backout,
2756 (backout,
2754 [('', 'merge', None,
2757 [('', 'merge', None,
2755 _('merge with old dirstate parent after backout')),
2758 _('merge with old dirstate parent after backout')),
2756 ('', 'parent', '', _('parent to choose when backing out merge')),
2759 ('', 'parent', '', _('parent to choose when backing out merge')),
2757 ('r', 'rev', '', _('revision to backout')),
2760 ('r', 'rev', '', _('revision to backout')),
2758 ] + walkopts + commitopts + commitopts2,
2761 ] + walkopts + commitopts + commitopts2,
2759 _('hg backout [OPTION]... [-r] REV')),
2762 _('hg backout [OPTION]... [-r] REV')),
2760 "bisect":
2763 "bisect":
2761 (bisect,
2764 (bisect,
2762 [('r', 'reset', False, _('reset bisect state')),
2765 [('r', 'reset', False, _('reset bisect state')),
2763 ('g', 'good', False, _('mark changeset good')),
2766 ('g', 'good', False, _('mark changeset good')),
2764 ('b', 'bad', False, _('mark changeset bad')),
2767 ('b', 'bad', False, _('mark changeset bad')),
2765 ('s', 'skip', False, _('skip testing changeset')),
2768 ('s', 'skip', False, _('skip testing changeset')),
2766 ('U', 'noupdate', False, _('do not update to target'))],
2769 ('U', 'noupdate', False, _('do not update to target'))],
2767 _("hg bisect [-gbsr] [REV]")),
2770 _("hg bisect [-gbsr] [REV]")),
2768 "branch":
2771 "branch":
2769 (branch,
2772 (branch,
2770 [('f', 'force', None,
2773 [('f', 'force', None,
2771 _('set branch name even if it shadows an existing branch'))],
2774 _('set branch name even if it shadows an existing branch'))],
2772 _('hg branch [-f] [NAME]')),
2775 _('hg branch [-f] [NAME]')),
2773 "branches":
2776 "branches":
2774 (branches,
2777 (branches,
2775 [('a', 'active', False,
2778 [('a', 'active', False,
2776 _('show only branches that have unmerged heads'))],
2779 _('show only branches that have unmerged heads'))],
2777 _('hg branches [-a]')),
2780 _('hg branches [-a]')),
2778 "bundle":
2781 "bundle":
2779 (bundle,
2782 (bundle,
2780 [('f', 'force', None,
2783 [('f', 'force', None,
2781 _('run even when remote repository is unrelated')),
2784 _('run even when remote repository is unrelated')),
2782 ('r', 'rev', [],
2785 ('r', 'rev', [],
2783 _('a changeset you would like to bundle')),
2786 _('a changeset you would like to bundle')),
2784 ('', 'base', [],
2787 ('', 'base', [],
2785 _('a base changeset to specify instead of a destination')),
2788 _('a base changeset to specify instead of a destination')),
2786 ] + remoteopts,
2789 ] + remoteopts,
2787 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2790 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2788 "cat":
2791 "cat":
2789 (cat,
2792 (cat,
2790 [('o', 'output', '', _('print output to file with formatted name')),
2793 [('o', 'output', '', _('print output to file with formatted name')),
2791 ('r', 'rev', '', _('print the given revision')),
2794 ('r', 'rev', '', _('print the given revision')),
2792 ] + walkopts,
2795 ] + walkopts,
2793 _('hg cat [OPTION]... FILE...')),
2796 _('hg cat [OPTION]... FILE...')),
2794 "^clone":
2797 "^clone":
2795 (clone,
2798 (clone,
2796 [('U', 'noupdate', None, _('do not update the new working directory')),
2799 [('U', 'noupdate', None, _('do not update the new working directory')),
2797 ('r', 'rev', [],
2800 ('r', 'rev', [],
2798 _('a changeset you would like to have after cloning')),
2801 _('a changeset you would like to have after cloning')),
2799 ('', 'pull', None, _('use pull protocol to copy metadata')),
2802 ('', 'pull', None, _('use pull protocol to copy metadata')),
2800 ('', 'uncompressed', None,
2803 ('', 'uncompressed', None,
2801 _('use uncompressed transfer (fast over LAN)')),
2804 _('use uncompressed transfer (fast over LAN)')),
2802 ] + remoteopts,
2805 ] + remoteopts,
2803 _('hg clone [OPTION]... SOURCE [DEST]')),
2806 _('hg clone [OPTION]... SOURCE [DEST]')),
2804 "^commit|ci":
2807 "^commit|ci":
2805 (commit,
2808 (commit,
2806 [('A', 'addremove', None,
2809 [('A', 'addremove', None,
2807 _('mark new/missing files as added/removed before committing')),
2810 _('mark new/missing files as added/removed before committing')),
2808 ] + walkopts + commitopts + commitopts2,
2811 ] + walkopts + commitopts + commitopts2,
2809 _('hg commit [OPTION]... [FILE]...')),
2812 _('hg commit [OPTION]... [FILE]...')),
2810 "copy|cp":
2813 "copy|cp":
2811 (copy,
2814 (copy,
2812 [('A', 'after', None, _('record a copy that has already occurred')),
2815 [('A', 'after', None, _('record a copy that has already occurred')),
2813 ('f', 'force', None,
2816 ('f', 'force', None,
2814 _('forcibly copy over an existing managed file')),
2817 _('forcibly copy over an existing managed file')),
2815 ] + walkopts + dryrunopts,
2818 ] + walkopts + dryrunopts,
2816 _('hg copy [OPTION]... [SOURCE]... DEST')),
2819 _('hg copy [OPTION]... [SOURCE]... DEST')),
2817 "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
2820 "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
2818 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2821 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2819 "debugcomplete":
2822 "debugcomplete":
2820 (debugcomplete,
2823 (debugcomplete,
2821 [('o', 'options', None, _('show the command options'))],
2824 [('o', 'options', None, _('show the command options'))],
2822 _('hg debugcomplete [-o] CMD')),
2825 _('hg debugcomplete [-o] CMD')),
2823 "debugdate":
2826 "debugdate":
2824 (debugdate,
2827 (debugdate,
2825 [('e', 'extended', None, _('try extended date formats'))],
2828 [('e', 'extended', None, _('try extended date formats'))],
2826 _('hg debugdate [-e] DATE [RANGE]')),
2829 _('hg debugdate [-e] DATE [RANGE]')),
2827 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2830 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2828 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2831 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2829 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2832 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2830 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2833 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2831 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2834 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2832 "debugrawcommit|rawcommit":
2835 "debugrawcommit|rawcommit":
2833 (rawcommit,
2836 (rawcommit,
2834 [('p', 'parent', [], _('parent')),
2837 [('p', 'parent', [], _('parent')),
2835 ('F', 'files', '', _('file list'))
2838 ('F', 'files', '', _('file list'))
2836 ] + commitopts + commitopts2,
2839 ] + commitopts + commitopts2,
2837 _('hg debugrawcommit [OPTION]... [FILE]...')),
2840 _('hg debugrawcommit [OPTION]... [FILE]...')),
2838 "debugrebuildstate":
2841 "debugrebuildstate":
2839 (debugrebuildstate,
2842 (debugrebuildstate,
2840 [('r', 'rev', '', _('revision to rebuild to'))],
2843 [('r', 'rev', '', _('revision to rebuild to'))],
2841 _('hg debugrebuildstate [-r REV] [REV]')),
2844 _('hg debugrebuildstate [-r REV] [REV]')),
2842 "debugrename":
2845 "debugrename":
2843 (debugrename,
2846 (debugrename,
2844 [('r', 'rev', '', _('revision to debug'))],
2847 [('r', 'rev', '', _('revision to debug'))],
2845 _('hg debugrename [-r REV] FILE')),
2848 _('hg debugrename [-r REV] FILE')),
2846 "debugsetparents":
2849 "debugsetparents":
2847 (debugsetparents,
2850 (debugsetparents,
2848 [],
2851 [],
2849 _('hg debugsetparents REV1 [REV2]')),
2852 _('hg debugsetparents REV1 [REV2]')),
2850 "debugstate": (debugstate, [], _('hg debugstate')),
2853 "debugstate": (debugstate, [], _('hg debugstate')),
2851 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2854 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2852 "^diff":
2855 "^diff":
2853 (diff,
2856 (diff,
2854 [('r', 'rev', [], _('revision')),
2857 [('r', 'rev', [], _('revision')),
2855 ('a', 'text', None, _('treat all files as text')),
2858 ('a', 'text', None, _('treat all files as text')),
2856 ('p', 'show-function', None,
2859 ('p', 'show-function', None,
2857 _('show which function each change is in')),
2860 _('show which function each change is in')),
2858 ('g', 'git', None, _('use git extended diff format')),
2861 ('g', 'git', None, _('use git extended diff format')),
2859 ('', 'nodates', None, _("don't include dates in diff headers")),
2862 ('', 'nodates', None, _("don't include dates in diff headers")),
2860 ('w', 'ignore-all-space', None,
2863 ('w', 'ignore-all-space', None,
2861 _('ignore white space when comparing lines')),
2864 _('ignore white space when comparing lines')),
2862 ('b', 'ignore-space-change', None,
2865 ('b', 'ignore-space-change', None,
2863 _('ignore changes in the amount of white space')),
2866 _('ignore changes in the amount of white space')),
2864 ('B', 'ignore-blank-lines', None,
2867 ('B', 'ignore-blank-lines', None,
2865 _('ignore changes whose lines are all blank')),
2868 _('ignore changes whose lines are all blank')),
2866 ] + walkopts,
2869 ] + walkopts,
2867 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2870 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2868 "^export":
2871 "^export":
2869 (export,
2872 (export,
2870 [('o', 'output', '', _('print output to file with formatted name')),
2873 [('o', 'output', '', _('print output to file with formatted name')),
2871 ('a', 'text', None, _('treat all files as text')),
2874 ('a', 'text', None, _('treat all files as text')),
2872 ('g', 'git', None, _('use git extended diff format')),
2875 ('g', 'git', None, _('use git extended diff format')),
2873 ('', 'nodates', None, _("don't include dates in diff headers")),
2876 ('', 'nodates', None, _("don't include dates in diff headers")),
2874 ('', 'switch-parent', None, _('diff against the second parent'))],
2877 ('', 'switch-parent', None, _('diff against the second parent'))],
2875 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2878 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2876 "grep":
2879 "grep":
2877 (grep,
2880 (grep,
2878 [('0', 'print0', None, _('end fields with NUL')),
2881 [('0', 'print0', None, _('end fields with NUL')),
2879 ('', 'all', None, _('print all revisions that match')),
2882 ('', 'all', None, _('print all revisions that match')),
2880 ('f', 'follow', None,
2883 ('f', 'follow', None,
2881 _('follow changeset history, or file history across copies and renames')),
2884 _('follow changeset history, or file history across copies and renames')),
2882 ('i', 'ignore-case', None, _('ignore case when matching')),
2885 ('i', 'ignore-case', None, _('ignore case when matching')),
2883 ('l', 'files-with-matches', None,
2886 ('l', 'files-with-matches', None,
2884 _('print only filenames and revs that match')),
2887 _('print only filenames and revs that match')),
2885 ('n', 'line-number', None, _('print matching line numbers')),
2888 ('n', 'line-number', None, _('print matching line numbers')),
2886 ('r', 'rev', [], _('search in given revision range')),
2889 ('r', 'rev', [], _('search in given revision range')),
2887 ('u', 'user', None, _('print user who committed change')),
2890 ('u', 'user', None, _('print user who committed change')),
2888 ] + walkopts,
2891 ] + walkopts,
2889 _('hg grep [OPTION]... PATTERN [FILE]...')),
2892 _('hg grep [OPTION]... PATTERN [FILE]...')),
2890 "heads":
2893 "heads":
2891 (heads,
2894 (heads,
2892 [('', 'style', '', _('display using template map file')),
2895 [('', 'style', '', _('display using template map file')),
2893 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2896 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2894 ('', 'template', '', _('display with template'))],
2897 ('', 'template', '', _('display with template'))],
2895 _('hg heads [-r REV] [REV]...')),
2898 _('hg heads [-r REV] [REV]...')),
2896 "help": (help_, [], _('hg help [COMMAND]')),
2899 "help": (help_, [], _('hg help [COMMAND]')),
2897 "identify|id":
2900 "identify|id":
2898 (identify,
2901 (identify,
2899 [('r', 'rev', '', _('identify the specified rev')),
2902 [('r', 'rev', '', _('identify the specified rev')),
2900 ('n', 'num', None, _('show local revision number')),
2903 ('n', 'num', None, _('show local revision number')),
2901 ('i', 'id', None, _('show global revision id')),
2904 ('i', 'id', None, _('show global revision id')),
2902 ('b', 'branch', None, _('show branch')),
2905 ('b', 'branch', None, _('show branch')),
2903 ('t', 'tags', None, _('show tags'))],
2906 ('t', 'tags', None, _('show tags'))],
2904 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2907 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2905 "import|patch":
2908 "import|patch":
2906 (import_,
2909 (import_,
2907 [('p', 'strip', 1,
2910 [('p', 'strip', 1,
2908 _('directory strip option for patch. This has the same\n'
2911 _('directory strip option for patch. This has the same\n'
2909 'meaning as the corresponding patch option')),
2912 'meaning as the corresponding patch option')),
2910 ('b', 'base', '', _('base path')),
2913 ('b', 'base', '', _('base path')),
2911 ('f', 'force', None,
2914 ('f', 'force', None,
2912 _('skip check for outstanding uncommitted changes')),
2915 _('skip check for outstanding uncommitted changes')),
2913 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2916 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2914 ('', 'exact', None,
2917 ('', 'exact', None,
2915 _('apply patch to the nodes from which it was generated')),
2918 _('apply patch to the nodes from which it was generated')),
2916 ('', 'import-branch', None,
2919 ('', 'import-branch', None,
2917 _('Use any branch information in patch (implied by --exact)'))] +
2920 _('Use any branch information in patch (implied by --exact)'))] +
2918 commitopts + commitopts2,
2921 commitopts + commitopts2,
2919 _('hg import [OPTION]... PATCH...')),
2922 _('hg import [OPTION]... PATCH...')),
2920 "incoming|in":
2923 "incoming|in":
2921 (incoming,
2924 (incoming,
2922 [('M', 'no-merges', None, _('do not show merges')),
2925 [('M', 'no-merges', None, _('do not show merges')),
2923 ('f', 'force', None,
2926 ('f', 'force', None,
2924 _('run even when remote repository is unrelated')),
2927 _('run even when remote repository is unrelated')),
2925 ('', 'style', '', _('display using template map file')),
2928 ('', 'style', '', _('display using template map file')),
2926 ('n', 'newest-first', None, _('show newest record first')),
2929 ('n', 'newest-first', None, _('show newest record first')),
2927 ('', 'bundle', '', _('file to store the bundles into')),
2930 ('', 'bundle', '', _('file to store the bundles into')),
2928 ('p', 'patch', None, _('show patch')),
2931 ('p', 'patch', None, _('show patch')),
2929 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2932 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2930 ('', 'template', '', _('display with template')),
2933 ('', 'template', '', _('display with template')),
2931 ] + remoteopts,
2934 ] + remoteopts,
2932 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2935 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2933 ' [--bundle FILENAME] [SOURCE]')),
2936 ' [--bundle FILENAME] [SOURCE]')),
2934 "^init":
2937 "^init":
2935 (init,
2938 (init,
2936 remoteopts,
2939 remoteopts,
2937 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2940 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2938 "locate":
2941 "locate":
2939 (locate,
2942 (locate,
2940 [('r', 'rev', '', _('search the repository as it stood at rev')),
2943 [('r', 'rev', '', _('search the repository as it stood at rev')),
2941 ('0', 'print0', None,
2944 ('0', 'print0', None,
2942 _('end filenames with NUL, for use with xargs')),
2945 _('end filenames with NUL, for use with xargs')),
2943 ('f', 'fullpath', None,
2946 ('f', 'fullpath', None,
2944 _('print complete paths from the filesystem root')),
2947 _('print complete paths from the filesystem root')),
2945 ] + walkopts,
2948 ] + walkopts,
2946 _('hg locate [OPTION]... [PATTERN]...')),
2949 _('hg locate [OPTION]... [PATTERN]...')),
2947 "^log|history":
2950 "^log|history":
2948 (log,
2951 (log,
2949 [('f', 'follow', None,
2952 [('f', 'follow', None,
2950 _('follow changeset history, or file history across copies and renames')),
2953 _('follow changeset history, or file history across copies and renames')),
2951 ('', 'follow-first', None,
2954 ('', 'follow-first', None,
2952 _('only follow the first parent of merge changesets')),
2955 _('only follow the first parent of merge changesets')),
2953 ('d', 'date', '', _('show revs matching date spec')),
2956 ('d', 'date', '', _('show revs matching date spec')),
2954 ('C', 'copies', None, _('show copied files')),
2957 ('C', 'copies', None, _('show copied files')),
2955 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2958 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2956 ('l', 'limit', '', _('limit number of changes displayed')),
2959 ('l', 'limit', '', _('limit number of changes displayed')),
2957 ('r', 'rev', [], _('show the specified revision or range')),
2960 ('r', 'rev', [], _('show the specified revision or range')),
2958 ('', 'removed', None, _('include revs where files were removed')),
2961 ('', 'removed', None, _('include revs where files were removed')),
2959 ('M', 'no-merges', None, _('do not show merges')),
2962 ('M', 'no-merges', None, _('do not show merges')),
2960 ('', 'style', '', _('display using template map file')),
2963 ('', 'style', '', _('display using template map file')),
2961 ('m', 'only-merges', None, _('show only merges')),
2964 ('m', 'only-merges', None, _('show only merges')),
2962 ('p', 'patch', None, _('show patch')),
2965 ('p', 'patch', None, _('show patch')),
2963 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2966 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2964 ('', 'template', '', _('display with template')),
2967 ('', 'template', '', _('display with template')),
2965 ] + walkopts,
2968 ] + walkopts,
2966 _('hg log [OPTION]... [FILE]')),
2969 _('hg log [OPTION]... [FILE]')),
2967 "manifest":
2970 "manifest":
2968 (manifest,
2971 (manifest,
2969 [('r', 'rev', '', _('revision to display'))],
2972 [('r', 'rev', '', _('revision to display'))],
2970 _('hg manifest [-r REV]')),
2973 _('hg manifest [-r REV]')),
2971 "^merge":
2974 "^merge":
2972 (merge,
2975 (merge,
2973 [('f', 'force', None, _('force a merge with outstanding changes')),
2976 [('f', 'force', None, _('force a merge with outstanding changes')),
2974 ('r', 'rev', '', _('revision to merge')),
2977 ('r', 'rev', '', _('revision to merge')),
2975 ],
2978 ],
2976 _('hg merge [-f] [[-r] REV]')),
2979 _('hg merge [-f] [[-r] REV]')),
2977 "outgoing|out":
2980 "outgoing|out":
2978 (outgoing,
2981 (outgoing,
2979 [('M', 'no-merges', None, _('do not show merges')),
2982 [('M', 'no-merges', None, _('do not show merges')),
2980 ('f', 'force', None,
2983 ('f', 'force', None,
2981 _('run even when remote repository is unrelated')),
2984 _('run even when remote repository is unrelated')),
2982 ('p', 'patch', None, _('show patch')),
2985 ('p', 'patch', None, _('show patch')),
2983 ('', 'style', '', _('display using template map file')),
2986 ('', 'style', '', _('display using template map file')),
2984 ('r', 'rev', [], _('a specific revision you would like to push')),
2987 ('r', 'rev', [], _('a specific revision you would like to push')),
2985 ('n', 'newest-first', None, _('show newest record first')),
2988 ('n', 'newest-first', None, _('show newest record first')),
2986 ('', 'template', '', _('display with template')),
2989 ('', 'template', '', _('display with template')),
2987 ] + remoteopts,
2990 ] + remoteopts,
2988 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2991 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2989 "^parents":
2992 "^parents":
2990 (parents,
2993 (parents,
2991 [('r', 'rev', '', _('show parents from the specified rev')),
2994 [('r', 'rev', '', _('show parents from the specified rev')),
2992 ('', 'style', '', _('display using template map file')),
2995 ('', 'style', '', _('display using template map file')),
2993 ('', 'template', '', _('display with template'))],
2996 ('', 'template', '', _('display with template'))],
2994 _('hg parents [-r REV] [FILE]')),
2997 _('hg parents [-r REV] [FILE]')),
2995 "paths": (paths, [], _('hg paths [NAME]')),
2998 "paths": (paths, [], _('hg paths [NAME]')),
2996 "^pull":
2999 "^pull":
2997 (pull,
3000 (pull,
2998 [('u', 'update', None,
3001 [('u', 'update', None,
2999 _('update to new tip if changesets were pulled')),
3002 _('update to new tip if changesets were pulled')),
3000 ('f', 'force', None,
3003 ('f', 'force', None,
3001 _('run even when remote repository is unrelated')),
3004 _('run even when remote repository is unrelated')),
3002 ('r', 'rev', [],
3005 ('r', 'rev', [],
3003 _('a specific revision up to which you would like to pull')),
3006 _('a specific revision up to which you would like to pull')),
3004 ] + remoteopts,
3007 ] + remoteopts,
3005 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3008 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3006 "^push":
3009 "^push":
3007 (push,
3010 (push,
3008 [('f', 'force', None, _('force push')),
3011 [('f', 'force', None, _('force push')),
3009 ('r', 'rev', [], _('a specific revision you would like to push')),
3012 ('r', 'rev', [], _('a specific revision you would like to push')),
3010 ] + remoteopts,
3013 ] + remoteopts,
3011 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3014 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3012 "recover": (recover, [], _('hg recover')),
3015 "recover": (recover, [], _('hg recover')),
3013 "^remove|rm":
3016 "^remove|rm":
3014 (remove,
3017 (remove,
3015 [('A', 'after', None, _('record remove without deleting')),
3018 [('A', 'after', None, _('record remove without deleting')),
3016 ('f', 'force', None, _('remove file even if modified')),
3019 ('f', 'force', None, _('remove file even if modified')),
3017 ] + walkopts,
3020 ] + walkopts,
3018 _('hg remove [OPTION]... FILE...')),
3021 _('hg remove [OPTION]... FILE...')),
3019 "rename|mv":
3022 "rename|mv":
3020 (rename,
3023 (rename,
3021 [('A', 'after', None, _('record a rename that has already occurred')),
3024 [('A', 'after', None, _('record a rename that has already occurred')),
3022 ('f', 'force', None,
3025 ('f', 'force', None,
3023 _('forcibly copy over an existing managed file')),
3026 _('forcibly copy over an existing managed file')),
3024 ] + walkopts + dryrunopts,
3027 ] + walkopts + dryrunopts,
3025 _('hg rename [OPTION]... SOURCE... DEST')),
3028 _('hg rename [OPTION]... SOURCE... DEST')),
3026 "revert":
3029 "revert":
3027 (revert,
3030 (revert,
3028 [('a', 'all', None, _('revert all changes when no arguments given')),
3031 [('a', 'all', None, _('revert all changes when no arguments given')),
3029 ('d', 'date', '', _('tipmost revision matching date')),
3032 ('d', 'date', '', _('tipmost revision matching date')),
3030 ('r', 'rev', '', _('revision to revert to')),
3033 ('r', 'rev', '', _('revision to revert to')),
3031 ('', 'no-backup', None, _('do not save backup copies of files')),
3034 ('', 'no-backup', None, _('do not save backup copies of files')),
3032 ] + walkopts + dryrunopts,
3035 ] + walkopts + dryrunopts,
3033 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3036 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3034 "rollback": (rollback, [], _('hg rollback')),
3037 "rollback": (rollback, [], _('hg rollback')),
3035 "root": (root, [], _('hg root')),
3038 "root": (root, [], _('hg root')),
3036 "^serve":
3039 "^serve":
3037 (serve,
3040 (serve,
3038 [('A', 'accesslog', '', _('name of access log file to write to')),
3041 [('A', 'accesslog', '', _('name of access log file to write to')),
3039 ('d', 'daemon', None, _('run server in background')),
3042 ('d', 'daemon', None, _('run server in background')),
3040 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3043 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3041 ('E', 'errorlog', '', _('name of error log file to write to')),
3044 ('E', 'errorlog', '', _('name of error log file to write to')),
3042 ('p', 'port', 0, _('port to use (default: 8000)')),
3045 ('p', 'port', 0, _('port to use (default: 8000)')),
3043 ('a', 'address', '', _('address to use')),
3046 ('a', 'address', '', _('address to use')),
3044 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3047 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3045 ('n', 'name', '',
3048 ('n', 'name', '',
3046 _('name to show in web pages (default: working dir)')),
3049 _('name to show in web pages (default: working dir)')),
3047 ('', 'webdir-conf', '', _('name of the webdir config file'
3050 ('', 'webdir-conf', '', _('name of the webdir config file'
3048 ' (serve more than one repo)')),
3051 ' (serve more than one repo)')),
3049 ('', 'pid-file', '', _('name of file to write process ID to')),
3052 ('', 'pid-file', '', _('name of file to write process ID to')),
3050 ('', 'stdio', None, _('for remote clients')),
3053 ('', 'stdio', None, _('for remote clients')),
3051 ('t', 'templates', '', _('web templates to use')),
3054 ('t', 'templates', '', _('web templates to use')),
3052 ('', 'style', '', _('template style to use')),
3055 ('', 'style', '', _('template style to use')),
3053 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3056 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3054 ('', 'certificate', '', _('SSL certificate file'))],
3057 ('', 'certificate', '', _('SSL certificate file'))],
3055 _('hg serve [OPTION]...')),
3058 _('hg serve [OPTION]...')),
3056 "showconfig|debugconfig":
3059 "showconfig|debugconfig":
3057 (showconfig,
3060 (showconfig,
3058 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3061 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3059 _('hg showconfig [-u] [NAME]...')),
3062 _('hg showconfig [-u] [NAME]...')),
3060 "^status|st":
3063 "^status|st":
3061 (status,
3064 (status,
3062 [('A', 'all', None, _('show status of all files')),
3065 [('A', 'all', None, _('show status of all files')),
3063 ('m', 'modified', None, _('show only modified files')),
3066 ('m', 'modified', None, _('show only modified files')),
3064 ('a', 'added', None, _('show only added files')),
3067 ('a', 'added', None, _('show only added files')),
3065 ('r', 'removed', None, _('show only removed files')),
3068 ('r', 'removed', None, _('show only removed files')),
3066 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3069 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3067 ('c', 'clean', None, _('show only files without changes')),
3070 ('c', 'clean', None, _('show only files without changes')),
3068 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3071 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3069 ('i', 'ignored', None, _('show only ignored files')),
3072 ('i', 'ignored', None, _('show only ignored files')),
3070 ('n', 'no-status', None, _('hide status prefix')),
3073 ('n', 'no-status', None, _('hide status prefix')),
3071 ('C', 'copies', None, _('show source of copied files')),
3074 ('C', 'copies', None, _('show source of copied files')),
3072 ('0', 'print0', None,
3075 ('0', 'print0', None,
3073 _('end filenames with NUL, for use with xargs')),
3076 _('end filenames with NUL, for use with xargs')),
3074 ('', 'rev', [], _('show difference from revision')),
3077 ('', 'rev', [], _('show difference from revision')),
3075 ] + walkopts,
3078 ] + walkopts,
3076 _('hg status [OPTION]... [FILE]...')),
3079 _('hg status [OPTION]... [FILE]...')),
3077 "tag":
3080 "tag":
3078 (tag,
3081 (tag,
3079 [('f', 'force', None, _('replace existing tag')),
3082 [('f', 'force', None, _('replace existing tag')),
3080 ('l', 'local', None, _('make the tag local')),
3083 ('l', 'local', None, _('make the tag local')),
3081 ('r', 'rev', '', _('revision to tag')),
3084 ('r', 'rev', '', _('revision to tag')),
3082 ('', 'remove', None, _('remove a tag')),
3085 ('', 'remove', None, _('remove a tag')),
3083 # -l/--local is already there, commitopts cannot be used
3086 # -l/--local is already there, commitopts cannot be used
3084 ('m', 'message', '', _('use <text> as commit message')),
3087 ('m', 'message', '', _('use <text> as commit message')),
3085 ] + commitopts2,
3088 ] + commitopts2,
3086 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3089 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3087 "tags": (tags, [], _('hg tags')),
3090 "tags": (tags, [], _('hg tags')),
3088 "tip":
3091 "tip":
3089 (tip,
3092 (tip,
3090 [('', 'style', '', _('display using template map file')),
3093 [('', 'style', '', _('display using template map file')),
3091 ('p', 'patch', None, _('show patch')),
3094 ('p', 'patch', None, _('show patch')),
3092 ('', 'template', '', _('display with template'))],
3095 ('', 'template', '', _('display with template'))],
3093 _('hg tip [-p]')),
3096 _('hg tip [-p]')),
3094 "unbundle":
3097 "unbundle":
3095 (unbundle,
3098 (unbundle,
3096 [('u', 'update', None,
3099 [('u', 'update', None,
3097 _('update to new tip if changesets were unbundled'))],
3100 _('update to new tip if changesets were unbundled'))],
3098 _('hg unbundle [-u] FILE...')),
3101 _('hg unbundle [-u] FILE...')),
3099 "^update|up|checkout|co":
3102 "^update|up|checkout|co":
3100 (update,
3103 (update,
3101 [('C', 'clean', None, _('overwrite locally modified files')),
3104 [('C', 'clean', None, _('overwrite locally modified files')),
3102 ('d', 'date', '', _('tipmost revision matching date')),
3105 ('d', 'date', '', _('tipmost revision matching date')),
3103 ('r', 'rev', '', _('revision'))],
3106 ('r', 'rev', '', _('revision'))],
3104 _('hg update [-C] [-d DATE] [[-r] REV]')),
3107 _('hg update [-C] [-d DATE] [[-r] REV]')),
3105 "verify": (verify, [], _('hg verify')),
3108 "verify": (verify, [], _('hg verify')),
3106 "version": (version_, [], _('hg version')),
3109 "version": (version_, [], _('hg version')),
3107 }
3110 }
3108
3111
3109 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3112 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3110 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3113 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3111 optionalrepo = ("identify paths serve showconfig")
3114 optionalrepo = ("identify paths serve showconfig")
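# (Added sketch, not part of the original module.)  Each value in the table
# above is a (function, option list, synopsis) triple; the key lists the
# command name and its aliases separated by '|', and a leading '^' marks
# commands shown in the short help.  A minimal lookup over that structure
# might look like this (the helper name is an assumption for the example):
#
#     def _lookupcmd(name):
#         for key, (func, options, synopsis) in table.items():
#             if name in key.lstrip("^").split("|"):
#                 return func, options, synopsis
#         raise KeyError(name)
#
#     _lookupcmd("co")   # resolves the alias to the update command entry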
@@ -1,462 +1,458 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from remoterepo import *
10 from remoterepo import *
11 from i18n import _
11 from i18n import _
12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 import errno, keepalive, tempfile, socket, changegroup
13 import errno, keepalive, tempfile, socket, changegroup
14
14
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 def __init__(self, ui):
16 def __init__(self, ui):
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 self.ui = ui
18 self.ui = ui
19
19
20 def find_user_password(self, realm, authuri):
20 def find_user_password(self, realm, authuri):
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 self, realm, authuri)
22 self, realm, authuri)
23 user, passwd = authinfo
23 user, passwd = authinfo
24 if user and passwd:
24 if user and passwd:
25 return (user, passwd)
25 return (user, passwd)
26
26
27 if not self.ui.interactive:
27 if not self.ui.interactive:
28 raise util.Abort(_('http authorization required'))
28 raise util.Abort(_('http authorization required'))
29
29
30 self.ui.write(_("http authorization required\n"))
30 self.ui.write(_("http authorization required\n"))
31 self.ui.status(_("realm: %s\n") % realm)
31 self.ui.status(_("realm: %s\n") % realm)
32 if user:
32 if user:
33 self.ui.status(_("user: %s\n") % user)
33 self.ui.status(_("user: %s\n") % user)
34 else:
34 else:
35 user = self.ui.prompt(_("user:"), default=None)
35 user = self.ui.prompt(_("user:"), default=None)
36
36
37 if not passwd:
37 if not passwd:
38 passwd = self.ui.getpass()
38 passwd = self.ui.getpass()
39
39
40 self.add_password(realm, authuri, user, passwd)
40 self.add_password(realm, authuri, user, passwd)
41 return (user, passwd)
41 return (user, passwd)
42
42
43 def netlocsplit(netloc):
43 def netlocsplit(netloc):
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45
45
46 a = netloc.find('@')
46 a = netloc.find('@')
47 if a == -1:
47 if a == -1:
48 user, passwd = None, None
48 user, passwd = None, None
49 else:
49 else:
50 userpass, netloc = netloc[:a], netloc[a+1:]
50 userpass, netloc = netloc[:a], netloc[a+1:]
51 c = userpass.find(':')
51 c = userpass.find(':')
52 if c == -1:
52 if c == -1:
53 user, passwd = urllib.unquote(userpass), None
53 user, passwd = urllib.unquote(userpass), None
54 else:
54 else:
55 user = urllib.unquote(userpass[:c])
55 user = urllib.unquote(userpass[:c])
56 passwd = urllib.unquote(userpass[c+1:])
56 passwd = urllib.unquote(userpass[c+1:])
57 c = netloc.find(':')
57 c = netloc.find(':')
58 if c == -1:
58 if c == -1:
59 host, port = netloc, None
59 host, port = netloc, None
60 else:
60 else:
61 host, port = netloc[:c], netloc[c+1:]
61 host, port = netloc[:c], netloc[c+1:]
62 return host, port, user, passwd
62 return host, port, user, passwd
63
63
64 def netlocunsplit(host, port, user=None, passwd=None):
64 def netlocunsplit(host, port, user=None, passwd=None):
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 if port:
66 if port:
67 hostport = host + ':' + port
67 hostport = host + ':' + port
68 else:
68 else:
69 hostport = host
69 hostport = host
70 if user:
70 if user:
71 if passwd:
71 if passwd:
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 else:
73 else:
74 userpass = urllib.quote(user)
74 userpass = urllib.quote(user)
75 return userpass + '@' + hostport
75 return userpass + '@' + hostport
76 return hostport
76 return hostport
77
77
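# (Added note, not part of the original module.)  Illustrative round trip
# through the two helpers above, with made-up credentials:
#
#     netlocsplit('alice:s3cret@example.com:8080')
#         -> ('example.com', '8080', 'alice', 's3cret')
#     netlocsplit('example.com')
#         -> ('example.com', None, None, None)
#     netlocunsplit('example.com', '8080', 'alice', 's3cret')
#         -> 'alice:s3cret@example.com:8080'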
78 # work around a bug in Python < 2.4.2
78 # work around a bug in Python < 2.4.2
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 class request(urllib2.Request):
80 class request(urllib2.Request):
81 def add_header(self, key, val):
81 def add_header(self, key, val):
82 if key.lower() == 'proxy-authorization':
82 if key.lower() == 'proxy-authorization':
83 val = val.strip()
83 val = val.strip()
84 return urllib2.Request.add_header(self, key, val)
84 return urllib2.Request.add_header(self, key, val)
85
85
86 class httpsendfile(file):
86 class httpsendfile(file):
87 def __len__(self):
87 def __len__(self):
88 return os.fstat(self.fileno()).st_size
88 return os.fstat(self.fileno()).st_size
89
89
90 def _gen_sendfile(connection):
90 def _gen_sendfile(connection):
91 def _sendfile(self, data):
91 def _sendfile(self, data):
92 # send a file
92 # send a file
93 if isinstance(data, httpsendfile):
93 if isinstance(data, httpsendfile):
94 # if auth required, some data sent twice, so rewind here
94 # if auth required, some data sent twice, so rewind here
95 data.seek(0)
95 data.seek(0)
96 for chunk in util.filechunkiter(data):
96 for chunk in util.filechunkiter(data):
97 connection.send(self, chunk)
97 connection.send(self, chunk)
98 else:
98 else:
99 connection.send(self, data)
99 connection.send(self, data)
100 return _sendfile
100 return _sendfile
101
101
102 class httpconnection(keepalive.HTTPConnection):
102 class httpconnection(keepalive.HTTPConnection):
103 # must be able to send big bundle as stream.
103 # must be able to send big bundle as stream.
104 send = _gen_sendfile(keepalive.HTTPConnection)
104 send = _gen_sendfile(keepalive.HTTPConnection)
105
105
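# (Added note, not part of the original module.)  httpsendfile and the
# generated send() above let a large bundle be streamed: urllib2 derives the
# Content-Length header from len(data), which __len__ supplies via fstat,
# while _sendfile pushes the file out in filechunkiter-sized pieces instead
# of reading it into memory.  Rough usage, with an assumed path and URL:
#
#     data = httpsendfile('/tmp/outgoing.hg', 'rb')
#     req = request('http://hg.example.com/repo?cmd=unbundle', data)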
106 class basehttphandler(keepalive.HTTPHandler):
106 class httphandler(keepalive.HTTPHandler):
107 def http_open(self, req):
107 def http_open(self, req):
108 return self.do_open(httpconnection, req)
108 return self.do_open(httpconnection, req)
109
109
110 def __del__(self):
111 self.close_all()
112
110 has_https = hasattr(urllib2, 'HTTPSHandler')
113 has_https = hasattr(urllib2, 'HTTPSHandler')
111 if has_https:
114 if has_https:
112 class httpsconnection(httplib.HTTPSConnection):
115 class httpsconnection(httplib.HTTPSConnection):
113 response_class = keepalive.HTTPResponse
116 response_class = keepalive.HTTPResponse
114 # must be able to send big bundle as stream.
117 # must be able to send big bundle as stream.
115 send = _gen_sendfile(httplib.HTTPSConnection)
118 send = _gen_sendfile(httplib.HTTPSConnection)
116
119
117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
118 def https_open(self, req):
121 def https_open(self, req):
119 return self.do_open(httpsconnection, req)
122 return self.do_open(httpsconnection, req)
120 else:
121 class httphandler(basehttphandler):
122 pass
123
123
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 # it doesn't know about the auth type requested. This can happen if
125 # it doesn't know about the auth type requested. This can happen if
126 # somebody is using BasicAuth and types a bad password.
126 # somebody is using BasicAuth and types a bad password.
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 try:
129 try:
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 self, auth_header, host, req, headers)
131 self, auth_header, host, req, headers)
132 except ValueError, inst:
132 except ValueError, inst:
133 arg = inst.args[0]
133 arg = inst.args[0]
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 return
135 return
136 raise
136 raise
137
137
138 def zgenerator(f):
138 def zgenerator(f):
139 zd = zlib.decompressobj()
139 zd = zlib.decompressobj()
140 try:
140 try:
141 for chunk in util.filechunkiter(f):
141 for chunk in util.filechunkiter(f):
142 yield zd.decompress(chunk)
142 yield zd.decompress(chunk)
143 except httplib.HTTPException, inst:
143 except httplib.HTTPException, inst:
144 raise IOError(None, _('connection ended unexpectedly'))
144 raise IOError(None, _('connection ended unexpectedly'))
145 yield zd.flush()
145 yield zd.flush()
146
146
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 '0123456789' '_.-/')
149 '0123456789' '_.-/')
150 _safeset = None
150 _safeset = None
151 _hex = None
151 _hex = None
152 def quotepath(path):
152 def quotepath(path):
153 '''quote the path part of a URL
153 '''quote the path part of a URL
154
154
155 This is similar to urllib.quote, but it also tries to avoid
155 This is similar to urllib.quote, but it also tries to avoid
156 quoting things twice (inspired by wget):
156 quoting things twice (inspired by wget):
157
157
158 >>> quotepath('abc def')
158 >>> quotepath('abc def')
159 'abc%20def'
159 'abc%20def'
160 >>> quotepath('abc%20def')
160 >>> quotepath('abc%20def')
161 'abc%20def'
161 'abc%20def'
162 >>> quotepath('abc%20 def')
162 >>> quotepath('abc%20 def')
163 'abc%20%20def'
163 'abc%20%20def'
164 >>> quotepath('abc def%20')
164 >>> quotepath('abc def%20')
165 'abc%20def%20'
165 'abc%20def%20'
166 >>> quotepath('abc def%2')
166 >>> quotepath('abc def%2')
167 'abc%20def%252'
167 'abc%20def%252'
168 >>> quotepath('abc def%')
168 >>> quotepath('abc def%')
169 'abc%20def%25'
169 'abc%20def%25'
170 '''
170 '''
171 global _safeset, _hex
171 global _safeset, _hex
172 if _safeset is None:
172 if _safeset is None:
173 _safeset = util.set(_safe)
173 _safeset = util.set(_safe)
174 _hex = util.set('abcdefABCDEF0123456789')
174 _hex = util.set('abcdefABCDEF0123456789')
175 l = list(path)
175 l = list(path)
176 for i in xrange(len(l)):
176 for i in xrange(len(l)):
177 c = l[i]
177 c = l[i]
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 pass
179 pass
180 elif c not in _safeset:
180 elif c not in _safeset:
181 l[i] = '%%%02X' % ord(c)
181 l[i] = '%%%02X' % ord(c)
182 return ''.join(l)
182 return ''.join(l)
183
183
184 class httprepository(remoterepository):
184 class httprepository(remoterepository):
185 def __init__(self, ui, path):
185 def __init__(self, ui, path):
186 self.path = path
186 self.path = path
187 self.caps = None
187 self.caps = None
188 self.handler = None
188 self.handler = None
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
190 if query or frag:
190 if query or frag:
191 raise util.Abort(_('unsupported URL component: "%s"') %
191 raise util.Abort(_('unsupported URL component: "%s"') %
192 (query or frag))
192 (query or frag))
193 if not urlpath:
193 if not urlpath:
194 urlpath = '/'
194 urlpath = '/'
195 urlpath = quotepath(urlpath)
195 urlpath = quotepath(urlpath)
196 host, port, user, passwd = netlocsplit(netloc)
196 host, port, user, passwd = netlocsplit(netloc)
197
197
198 # urllib cannot handle URLs with embedded user or passwd
198 # urllib cannot handle URLs with embedded user or passwd
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
200 urlpath, '', ''))
200 urlpath, '', ''))
201 self.ui = ui
201 self.ui = ui
202 self.ui.debug(_('using %s\n') % self._url)
202 self.ui.debug(_('using %s\n') % self._url)
203
203
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
205 # XXX proxyauthinfo = None
205 # XXX proxyauthinfo = None
206 self.handler = httphandler()
206 handlers = [httphandler()]
207 handlers = [self.handler]
207 if has_https:
208 handlers.append(httpshandler())
208
209
209 if proxyurl:
210 if proxyurl:
210 # proxy can be proper url or host[:port]
211 # proxy can be proper url or host[:port]
211 if not (proxyurl.startswith('http:') or
212 if not (proxyurl.startswith('http:') or
212 proxyurl.startswith('https:')):
213 proxyurl.startswith('https:')):
213 proxyurl = 'http://' + proxyurl + '/'
214 proxyurl = 'http://' + proxyurl + '/'
214 snpqf = urlparse.urlsplit(proxyurl)
215 snpqf = urlparse.urlsplit(proxyurl)
215 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
216 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
216 hpup = netlocsplit(proxynetloc)
217 hpup = netlocsplit(proxynetloc)
217
218
218 proxyhost, proxyport, proxyuser, proxypasswd = hpup
219 proxyhost, proxyport, proxyuser, proxypasswd = hpup
219 if not proxyuser:
220 if not proxyuser:
220 proxyuser = ui.config("http_proxy", "user")
221 proxyuser = ui.config("http_proxy", "user")
221 proxypasswd = ui.config("http_proxy", "passwd")
222 proxypasswd = ui.config("http_proxy", "passwd")
222
223
223 # see if we should use a proxy for this url
224 # see if we should use a proxy for this url
224 no_list = [ "localhost", "127.0.0.1" ]
225 no_list = [ "localhost", "127.0.0.1" ]
225 no_list.extend([p.lower() for
226 no_list.extend([p.lower() for
226 p in ui.configlist("http_proxy", "no")])
227 p in ui.configlist("http_proxy", "no")])
227 no_list.extend([p.strip().lower() for
228 no_list.extend([p.strip().lower() for
228 p in os.getenv("no_proxy", '').split(',')
229 p in os.getenv("no_proxy", '').split(',')
229 if p.strip()])
230 if p.strip()])
230 # "http_proxy.always" config is for running tests on localhost
231 # "http_proxy.always" config is for running tests on localhost
231 if (not ui.configbool("http_proxy", "always") and
232 if (not ui.configbool("http_proxy", "always") and
232 host.lower() in no_list):
233 host.lower() in no_list):
233 # avoid auto-detection of proxy settings by appending
234 # avoid auto-detection of proxy settings by appending
234 # a ProxyHandler with no proxies defined.
235 # a ProxyHandler with no proxies defined.
235 handlers.append(urllib2.ProxyHandler({}))
236 handlers.append(urllib2.ProxyHandler({}))
236 ui.debug(_('disabling proxy for %s\n') % host)
237 ui.debug(_('disabling proxy for %s\n') % host)
237 else:
238 else:
238 proxyurl = urlparse.urlunsplit((
239 proxyurl = urlparse.urlunsplit((
239 proxyscheme, netlocunsplit(proxyhost, proxyport,
240 proxyscheme, netlocunsplit(proxyhost, proxyport,
240 proxyuser, proxypasswd or ''),
241 proxyuser, proxypasswd or ''),
241 proxypath, proxyquery, proxyfrag))
242 proxypath, proxyquery, proxyfrag))
242 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
243 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
243 ui.debug(_('proxying through http://%s:%s\n') %
244 ui.debug(_('proxying through http://%s:%s\n') %
244 (proxyhost, proxyport))
245 (proxyhost, proxyport))
245
246
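The proxy lookups above consult the [http_proxy] section of the user's configuration. A hypothetical hgrc snippet showing the keys that are read (host, user, passwd, no, always); the values are examples only:

    [http_proxy]
    host = proxy.example.com:3128
    user = alice
    passwd = secret
    no = localhost, 127.0.0.1
    always = false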
246 # urllib2 takes proxy values from the environment and those
247 # urllib2 takes proxy values from the environment and those
247 # will take precedence if found, so drop them
248 # will take precedence if found, so drop them
248 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
249 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
249 try:
250 try:
250 if env in os.environ:
251 if env in os.environ:
251 del os.environ[env]
252 del os.environ[env]
252 except OSError:
253 except OSError:
253 pass
254 pass
254
255
255 passmgr = passwordmgr(ui)
256 passmgr = passwordmgr(ui)
256 if user:
257 if user:
257 ui.debug(_('http auth: user %s, password %s\n') %
258 ui.debug(_('http auth: user %s, password %s\n') %
258 (user, passwd and '*' * len(passwd) or 'not set'))
259 (user, passwd and '*' * len(passwd) or 'not set'))
259 netloc = host
260 netloc = host
260 if port:
261 if port:
261 netloc += ':' + port
262 netloc += ':' + port
262 # Python < 2.4.3 uses only the netloc to search for a password
263 # Python < 2.4.3 uses only the netloc to search for a password
263 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
264 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
264
265
265 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
266 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
266 httpdigestauthhandler(passmgr)))
267 httpdigestauthhandler(passmgr)))
267 opener = urllib2.build_opener(*handlers)
268 opener = urllib2.build_opener(*handlers)
268
269
269 # 1.0 here is the _protocol_ version
270 # 1.0 here is the _protocol_ version
270 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
271 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
271 urllib2.install_opener(opener)
272 urllib2.install_opener(opener)
272
273
273 def __del__(self):
274 if self.handler:
275 self.handler.close_all()
276 self.handler = None
277
278 def url(self):
274 def url(self):
279 return self.path
275 return self.path
280
276
281 # look up capabilities only when needed
277 # look up capabilities only when needed
282
278
283 def get_caps(self):
279 def get_caps(self):
284 if self.caps is None:
280 if self.caps is None:
285 try:
281 try:
286 self.caps = util.set(self.do_read('capabilities').split())
282 self.caps = util.set(self.do_read('capabilities').split())
287 except repo.RepoError:
283 except repo.RepoError:
288 self.caps = util.set()
284 self.caps = util.set()
289 self.ui.debug(_('capabilities: %s\n') %
285 self.ui.debug(_('capabilities: %s\n') %
290 (' '.join(self.caps or ['none'])))
286 (' '.join(self.caps or ['none'])))
291 return self.caps
287 return self.caps
292
288
293 capabilities = property(get_caps)
289 capabilities = property(get_caps)
294
290
295 def lock(self):
291 def lock(self):
296 raise util.Abort(_('operation not supported over http'))
292 raise util.Abort(_('operation not supported over http'))
297
293
298 def do_cmd(self, cmd, **args):
294 def do_cmd(self, cmd, **args):
299 data = args.pop('data', None)
295 data = args.pop('data', None)
300 headers = args.pop('headers', {})
296 headers = args.pop('headers', {})
301 self.ui.debug(_("sending %s command\n") % cmd)
297 self.ui.debug(_("sending %s command\n") % cmd)
302 q = {"cmd": cmd}
298 q = {"cmd": cmd}
303 q.update(args)
299 q.update(args)
304 qs = '?%s' % urllib.urlencode(q)
300 qs = '?%s' % urllib.urlencode(q)
305 cu = "%s%s" % (self._url, qs)
301 cu = "%s%s" % (self._url, qs)
306 try:
302 try:
307 if data:
303 if data:
308 self.ui.debug(_("sending %s bytes\n") % len(data))
304 self.ui.debug(_("sending %s bytes\n") % len(data))
309 resp = urllib2.urlopen(request(cu, data, headers))
305 resp = urllib2.urlopen(request(cu, data, headers))
310 except urllib2.HTTPError, inst:
306 except urllib2.HTTPError, inst:
311 if inst.code == 401:
307 if inst.code == 401:
312 raise util.Abort(_('authorization failed'))
308 raise util.Abort(_('authorization failed'))
313 raise
309 raise
314 except httplib.HTTPException, inst:
310 except httplib.HTTPException, inst:
315 self.ui.debug(_('http error while sending %s command\n') % cmd)
311 self.ui.debug(_('http error while sending %s command\n') % cmd)
316 self.ui.print_exc()
312 self.ui.print_exc()
317 raise IOError(None, inst)
313 raise IOError(None, inst)
318 except IndexError:
314 except IndexError:
319 # this only happens with Python 2.3, later versions raise URLError
315 # this only happens with Python 2.3, later versions raise URLError
320 raise util.Abort(_('http error, possibly caused by proxy setting'))
316 raise util.Abort(_('http error, possibly caused by proxy setting'))
321 # record the url we got redirected to
317 # record the url we got redirected to
322 resp_url = resp.geturl()
318 resp_url = resp.geturl()
323 if resp_url.endswith(qs):
319 if resp_url.endswith(qs):
324 resp_url = resp_url[:-len(qs)]
320 resp_url = resp_url[:-len(qs)]
325 if self._url != resp_url:
321 if self._url != resp_url:
326 self.ui.status(_('real URL is %s\n') % resp_url)
322 self.ui.status(_('real URL is %s\n') % resp_url)
327 self._url = resp_url
323 self._url = resp_url
328 try:
324 try:
329 proto = resp.getheader('content-type')
325 proto = resp.getheader('content-type')
330 except AttributeError:
326 except AttributeError:
331 proto = resp.headers['content-type']
327 proto = resp.headers['content-type']
332
328
333 # accept old "text/plain" and "application/hg-changegroup" for now
329 # accept old "text/plain" and "application/hg-changegroup" for now
334 if not (proto.startswith('application/mercurial-') or
330 if not (proto.startswith('application/mercurial-') or
335 proto.startswith('text/plain') or
331 proto.startswith('text/plain') or
336 proto.startswith('application/hg-changegroup')):
332 proto.startswith('application/hg-changegroup')):
337 self.ui.debug(_("Requested URL: '%s'\n") % cu)
333 self.ui.debug(_("Requested URL: '%s'\n") % cu)
338 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
334 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
339 % self._url)
335 % self._url)
340
336
341 if proto.startswith('application/mercurial-'):
337 if proto.startswith('application/mercurial-'):
342 try:
338 try:
343 version = proto.split('-', 1)[1]
339 version = proto.split('-', 1)[1]
344 version_info = tuple([int(n) for n in version.split('.')])
340 version_info = tuple([int(n) for n in version.split('.')])
345 except ValueError:
341 except ValueError:
346 raise repo.RepoError(_("'%s' sent a broken Content-Type "
342 raise repo.RepoError(_("'%s' sent a broken Content-Type "
347 "header (%s)") % (self._url, proto))
343 "header (%s)") % (self._url, proto))
348 if version_info > (0, 1):
344 if version_info > (0, 1):
349 raise repo.RepoError(_("'%s' uses newer protocol %s") %
345 raise repo.RepoError(_("'%s' uses newer protocol %s") %
350 (self._url, version))
346 (self._url, version))
351
347
352 return resp
348 return resp
353
349
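A compact sketch of the Content-Type checks above, showing how the advertised protocol version is extracted and compared; the function name and header strings are illustrative, and the ValueError handling of the real code is omitted:

def classify(proto):
    # application/mercurial-X.Y responses are version-checked; the old
    # text/plain and application/hg-changegroup headers are still accepted
    if proto.startswith('application/mercurial-'):
        version = proto.split('-', 1)[1]
        version_info = tuple([int(n) for n in version.split('.')])
        if version_info > (0, 1):
            return 'server speaks a newer protocol'
        return 'ok'
    if proto.startswith('text/plain') or proto.startswith('application/hg-changegroup'):
        return 'ok (legacy header)'
    return 'not an hg repository'

# classify('application/mercurial-0.1') -> 'ok'
# classify('text/html')                 -> 'not an hg repository'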
354 def do_read(self, cmd, **args):
350 def do_read(self, cmd, **args):
355 fp = self.do_cmd(cmd, **args)
351 fp = self.do_cmd(cmd, **args)
356 try:
352 try:
357 return fp.read()
353 return fp.read()
358 finally:
354 finally:
359 # if using keepalive, allow connection to be reused
355 # if using keepalive, allow connection to be reused
360 fp.close()
356 fp.close()
361
357
362 def lookup(self, key):
358 def lookup(self, key):
363 self.requirecap('lookup', _('look up remote revision'))
359 self.requirecap('lookup', _('look up remote revision'))
364 d = self.do_cmd("lookup", key = key).read()
360 d = self.do_cmd("lookup", key = key).read()
365 success, data = d[:-1].split(' ', 1)
361 success, data = d[:-1].split(' ', 1)
366 if int(success):
362 if int(success):
367 return bin(data)
363 return bin(data)
368 raise repo.RepoError(data)
364 raise repo.RepoError(data)
369
365
370 def heads(self):
366 def heads(self):
371 d = self.do_read("heads")
367 d = self.do_read("heads")
372 try:
368 try:
373 return map(bin, d[:-1].split(" "))
369 return map(bin, d[:-1].split(" "))
374 except:
370 except:
375 raise util.UnexpectedOutput(_("unexpected response:"), d)
371 raise util.UnexpectedOutput(_("unexpected response:"), d)
376
372
377 def branches(self, nodes):
373 def branches(self, nodes):
378 n = " ".join(map(hex, nodes))
374 n = " ".join(map(hex, nodes))
379 d = self.do_read("branches", nodes=n)
375 d = self.do_read("branches", nodes=n)
380 try:
376 try:
381 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
377 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
382 return br
378 return br
383 except:
379 except:
384 raise util.UnexpectedOutput(_("unexpected response:"), d)
380 raise util.UnexpectedOutput(_("unexpected response:"), d)
385
381
386 def between(self, pairs):
382 def between(self, pairs):
387 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
383 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
388 d = self.do_read("between", pairs=n)
384 d = self.do_read("between", pairs=n)
389 try:
385 try:
390 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
386 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
391 return p
387 return p
392 except:
388 except:
393 raise util.UnexpectedOutput(_("unexpected response:"), d)
389 raise util.UnexpectedOutput(_("unexpected response:"), d)
394
390
395 def changegroup(self, nodes, kind):
391 def changegroup(self, nodes, kind):
396 n = " ".join(map(hex, nodes))
392 n = " ".join(map(hex, nodes))
397 f = self.do_cmd("changegroup", roots=n)
393 f = self.do_cmd("changegroup", roots=n)
398 return util.chunkbuffer(zgenerator(f))
394 return util.chunkbuffer(zgenerator(f))
399
395
400 def changegroupsubset(self, bases, heads, source):
396 def changegroupsubset(self, bases, heads, source):
401 self.requirecap('changegroupsubset', _('look up remote changes'))
397 self.requirecap('changegroupsubset', _('look up remote changes'))
402 baselst = " ".join([hex(n) for n in bases])
398 baselst = " ".join([hex(n) for n in bases])
403 headlst = " ".join([hex(n) for n in heads])
399 headlst = " ".join([hex(n) for n in heads])
404 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
400 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
405 return util.chunkbuffer(zgenerator(f))
401 return util.chunkbuffer(zgenerator(f))
406
402
407 def unbundle(self, cg, heads, source):
403 def unbundle(self, cg, heads, source):
408 # have to stream bundle to a temp file because we do not have
404 # have to stream bundle to a temp file because we do not have
409 # http 1.1 chunked transfer.
405 # http 1.1 chunked transfer.
410
406
411 type = ""
407 type = ""
412 types = self.capable('unbundle')
408 types = self.capable('unbundle')
413 # servers older than d1b16a746db6 will send 'unbundle' as a
409 # servers older than d1b16a746db6 will send 'unbundle' as a
414 # boolean capability
410 # boolean capability
415 try:
411 try:
416 types = types.split(',')
412 types = types.split(',')
417 except AttributeError:
413 except AttributeError:
418 types = [""]
414 types = [""]
419 if types:
415 if types:
420 for x in types:
416 for x in types:
421 if x in changegroup.bundletypes:
417 if x in changegroup.bundletypes:
422 type = x
418 type = x
423 break
419 break
424
420
425 tempname = changegroup.writebundle(cg, None, type)
421 tempname = changegroup.writebundle(cg, None, type)
426 fp = httpsendfile(tempname, "rb")
422 fp = httpsendfile(tempname, "rb")
427 try:
423 try:
428 try:
424 try:
429 rfp = self.do_cmd(
425 rfp = self.do_cmd(
430 'unbundle', data=fp,
426 'unbundle', data=fp,
431 headers={'Content-Type': 'application/octet-stream'},
427 headers={'Content-Type': 'application/octet-stream'},
432 heads=' '.join(map(hex, heads)))
428 heads=' '.join(map(hex, heads)))
433 try:
429 try:
434 ret = int(rfp.readline())
430 ret = int(rfp.readline())
435 self.ui.write(rfp.read())
431 self.ui.write(rfp.read())
436 return ret
432 return ret
437 finally:
433 finally:
438 rfp.close()
434 rfp.close()
439 except socket.error, err:
435 except socket.error, err:
440 if err[0] in (errno.ECONNRESET, errno.EPIPE):
436 if err[0] in (errno.ECONNRESET, errno.EPIPE):
441 raise util.Abort(_('push failed: %s') % err[1])
437 raise util.Abort(_('push failed: %s') % err[1])
442 raise util.Abort(err[1])
438 raise util.Abort(err[1])
443 finally:
439 finally:
444 fp.close()
440 fp.close()
445 os.unlink(tempname)
441 os.unlink(tempname)
446
442
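The bundle-type negotiation in unbundle() boils down to a few lines; a sketch assuming the server's 'unbundle' capability is either a comma-separated list of bundle types (newer servers) or a bare boolean (servers older than d1b16a746db6):

def pick_bundle_type(cap, supported):
    # 'cap' is the server's unbundle capability value; 'supported' plays
    # the role of changegroup.bundletypes on the client
    try:
        types = cap.split(',')      # e.g. 'HG10GZ,HG10BZ,HG10UN'
    except AttributeError:
        types = ['']                # boolean capability from an old server
    for t in types:
        if t in supported:
            return t
    return ''

# pick_bundle_type('HG10GZ,HG10UN', ['HG10UN', 'HG10GZ', 'HG10BZ']) -> 'HG10GZ'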
447 def stream_out(self):
443 def stream_out(self):
448 return self.do_cmd('stream_out')
444 return self.do_cmd('stream_out')
449
445
450 class httpsrepository(httprepository):
446 class httpsrepository(httprepository):
451 def __init__(self, ui, path):
447 def __init__(self, ui, path):
452 if not has_https:
448 if not has_https:
453 raise util.Abort(_('Python support for SSL and HTTPS '
449 raise util.Abort(_('Python support for SSL and HTTPS '
454 'is not installed'))
450 'is not installed'))
455 httprepository.__init__(self, ui, path)
451 httprepository.__init__(self, ui, path)
456
452
457 def instance(ui, path, create):
453 def instance(ui, path, create):
458 if create:
454 if create:
459 raise util.Abort(_('cannot create new http repository'))
455 raise util.Abort(_('cannot create new http repository'))
460 if path.startswith('https:'):
456 if path.startswith('https:'):
461 return httpsrepository(ui, path)
457 return httpsrepository(ui, path)
462 return httprepository(ui, path)
458 return httprepository(ui, path)
@@ -1,579 +1,582 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, write to the
12 # License along with this library; if not, write to the
13 # Free Software Foundation, Inc.,
13 # Free Software Foundation, Inc.,
14 # 59 Temple Place, Suite 330,
14 # 59 Temple Place, Suite 330,
15 # Boston, MA 02111-1307 USA
15 # Boston, MA 02111-1307 USA
16
16
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
19
19
20 # Modified by Benoit Boissinot:
20 # Modified by Benoit Boissinot:
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
22
22
23 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
23 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
24
24
25 >>> import urllib2
25 >>> import urllib2
26 >>> from keepalive import HTTPHandler
26 >>> from keepalive import HTTPHandler
27 >>> keepalive_handler = HTTPHandler()
27 >>> keepalive_handler = HTTPHandler()
28 >>> opener = urllib2.build_opener(keepalive_handler)
28 >>> opener = urllib2.build_opener(keepalive_handler)
29 >>> urllib2.install_opener(opener)
29 >>> urllib2.install_opener(opener)
30 >>>
30 >>>
31 >>> fo = urllib2.urlopen('http://www.python.org')
31 >>> fo = urllib2.urlopen('http://www.python.org')
32
32
33 If a connection to a given host is requested, and all of the existing
33 If a connection to a given host is requested, and all of the existing
34 connections are still in use, another connection will be opened. If
34 connections are still in use, another connection will be opened. If
35 the handler tries to use an existing connection but it fails in some
35 the handler tries to use an existing connection but it fails in some
36 way, it will be closed and removed from the pool.
36 way, it will be closed and removed from the pool.
37
37
38 To remove the handler, simply re-run build_opener with no arguments, and
38 To remove the handler, simply re-run build_opener with no arguments, and
39 install that opener.
39 install that opener.
40
40
41 You can explicitly close connections by using the close_connection()
41 You can explicitly close connections by using the close_connection()
42 method of the returned file-like object (described below) or you can
42 method of the returned file-like object (described below) or you can
43 use the handler methods:
43 use the handler methods:
44
44
45 close_connection(host)
45 close_connection(host)
46 close_all()
46 close_all()
47 open_connections()
47 open_connections()
48
48
49 NOTE: using the close_connection and close_all methods of the handler
49 NOTE: using the close_connection and close_all methods of the handler
50 should be done with care when using multiple threads.
50 should be done with care when using multiple threads.
51 * there is nothing that prevents another thread from creating new
51 * there is nothing that prevents another thread from creating new
52 connections immediately after connections are closed
52 connections immediately after connections are closed
53 * no checks are done to prevent in-use connections from being closed
53 * no checks are done to prevent in-use connections from being closed
54
54
55 >>> keepalive_handler.close_all()
55 >>> keepalive_handler.close_all()
56
56
57 EXTRA ATTRIBUTES AND METHODS
57 EXTRA ATTRIBUTES AND METHODS
58
58
59 Upon a status of 200, the object returned has a few additional
59 Upon a status of 200, the object returned has a few additional
60 attributes and methods, which should not be used if you want to
60 attributes and methods, which should not be used if you want to
61 remain consistent with the normal urllib2-returned objects:
61 remain consistent with the normal urllib2-returned objects:
62
62
63 close_connection() - close the connection to the host
63 close_connection() - close the connection to the host
64 readlines() - you know, readlines()
64 readlines() - you know, readlines()
65 status - the return status (ie 404)
65 status - the return status (ie 404)
66 reason - english translation of status (ie 'File not found')
66 reason - english translation of status (ie 'File not found')
67
67
68 If you want the best of both worlds, use this inside an
68 If you want the best of both worlds, use this inside an
69 AttributeError-catching try:
69 AttributeError-catching try:
70
70
71 >>> try: status = fo.status
71 >>> try: status = fo.status
72 >>> except AttributeError: status = None
72 >>> except AttributeError: status = None
73
73
74 Unfortunately, these are ONLY there if status == 200, so it's not
74 Unfortunately, these are ONLY there if status == 200, so it's not
75 easy to distinguish between non-200 responses. The reason is that
75 easy to distinguish between non-200 responses. The reason is that
76 urllib2 tries to do clever things with error codes 301, 302, 401,
76 urllib2 tries to do clever things with error codes 301, 302, 401,
77 and 407, and it wraps the object upon return.
77 and 407, and it wraps the object upon return.
78
78
79 For python versions earlier than 2.4, you can avoid this fancy error
79 For python versions earlier than 2.4, you can avoid this fancy error
80 handling by setting the module-level global HANDLE_ERRORS to zero.
80 handling by setting the module-level global HANDLE_ERRORS to zero.
81 You see, prior to 2.4, it's the HTTP Handler's job to determine what
81 You see, prior to 2.4, it's the HTTP Handler's job to determine what
82 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
82 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
83 means "pass everything up". In python 2.4, however, this job no
83 means "pass everything up". In python 2.4, however, this job no
84 longer belongs to the HTTP Handler and is now done by a NEW handler,
84 longer belongs to the HTTP Handler and is now done by a NEW handler,
85 HTTPErrorProcessor. Here's the bottom line:
85 HTTPErrorProcessor. Here's the bottom line:
86
86
87 python version < 2.4
87 python version < 2.4
88 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
88 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
89 errors
89 errors
90 HANDLE_ERRORS == 0 pass everything up, error processing is
90 HANDLE_ERRORS == 0 pass everything up, error processing is
91 left to the calling code
91 left to the calling code
92 python version >= 2.4
92 python version >= 2.4
93 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
93 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
94 HANDLE_ERRORS == 0 (default) pass everything up, let the
94 HANDLE_ERRORS == 0 (default) pass everything up, let the
95 other handlers (specifically,
95 other handlers (specifically,
96 HTTPErrorProcessor) decide what to do
96 HTTPErrorProcessor) decide what to do
97
97
98 In practice, setting the variable either way makes little difference
98 In practice, setting the variable either way makes little difference
99 in python 2.4, so for the most consistent behavior across versions,
99 in python 2.4, so for the most consistent behavior across versions,
100 you probably just want to use the defaults, which will give you
100 you probably just want to use the defaults, which will give you
101 exceptions on errors.
101 exceptions on errors.
102
102
103 """
103 """
104
104
105 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
105 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
106
106
107 import urllib2
107 import urllib2
108 import httplib
108 import httplib
109 import socket
109 import socket
110 import thread
110 import thread
111
111
112 DEBUG = None
112 DEBUG = None
113
113
114 import sys
114 import sys
115 if sys.version_info < (2, 4): HANDLE_ERRORS = 1
115 if sys.version_info < (2, 4): HANDLE_ERRORS = 1
116 else: HANDLE_ERRORS = 0
116 else: HANDLE_ERRORS = 0
117
117
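A short sketch of how a caller can pin HANDLE_ERRORS for consistent behaviour across Python versions, assuming the module is importable as 'keepalive' (as in the docstring above):

import urllib2
import keepalive

# force the pre-2.4 style: non-200 responses go through urllib2's error
# machinery instead of being returned as ordinary response objects
keepalive.HANDLE_ERRORS = 1
opener = urllib2.build_opener(keepalive.HTTPHandler())
urllib2.install_opener(opener)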
118 class ConnectionManager:
118 class ConnectionManager:
119 """
119 """
120 The connection manager must be able to:
120 The connection manager must be able to:
121 * keep track of all existing connections
121 * keep track of all existing connections
122 """
122 """
123 def __init__(self):
123 def __init__(self):
124 self._lock = thread.allocate_lock()
124 self._lock = thread.allocate_lock()
125 self._hostmap = {} # map hosts to a list of connections
125 self._hostmap = {} # map hosts to a list of connections
126 self._connmap = {} # map connections to host
126 self._connmap = {} # map connections to host
127 self._readymap = {} # map connection to ready state
127 self._readymap = {} # map connection to ready state
128
128
129 def add(self, host, connection, ready):
129 def add(self, host, connection, ready):
130 self._lock.acquire()
130 self._lock.acquire()
131 try:
131 try:
132 if not host in self._hostmap: self._hostmap[host] = []
132 if not host in self._hostmap: self._hostmap[host] = []
133 self._hostmap[host].append(connection)
133 self._hostmap[host].append(connection)
134 self._connmap[connection] = host
134 self._connmap[connection] = host
135 self._readymap[connection] = ready
135 self._readymap[connection] = ready
136 finally:
136 finally:
137 self._lock.release()
137 self._lock.release()
138
138
139 def remove(self, connection):
139 def remove(self, connection):
140 self._lock.acquire()
140 self._lock.acquire()
141 try:
141 try:
142 try:
142 try:
143 host = self._connmap[connection]
143 host = self._connmap[connection]
144 except KeyError:
144 except KeyError:
145 pass
145 pass
146 else:
146 else:
147 del self._connmap[connection]
147 del self._connmap[connection]
148 del self._readymap[connection]
148 del self._readymap[connection]
149 self._hostmap[host].remove(connection)
149 self._hostmap[host].remove(connection)
150 if not self._hostmap[host]: del self._hostmap[host]
150 if not self._hostmap[host]: del self._hostmap[host]
151 finally:
151 finally:
152 self._lock.release()
152 self._lock.release()
153
153
154 def set_ready(self, connection, ready):
154 def set_ready(self, connection, ready):
155 try: self._readymap[connection] = ready
155 try: self._readymap[connection] = ready
156 except KeyError: pass
156 except KeyError: pass
157
157
158 def get_ready_conn(self, host):
158 def get_ready_conn(self, host):
159 conn = None
159 conn = None
160 self._lock.acquire()
160 self._lock.acquire()
161 try:
161 try:
162 if host in self._hostmap:
162 if host in self._hostmap:
163 for c in self._hostmap[host]:
163 for c in self._hostmap[host]:
164 if self._readymap[c]:
164 if self._readymap[c]:
165 self._readymap[c] = 0
165 self._readymap[c] = 0
166 conn = c
166 conn = c
167 break
167 break
168 finally:
168 finally:
169 self._lock.release()
169 self._lock.release()
170 return conn
170 return conn
171
171
172 def get_all(self, host=None):
172 def get_all(self, host=None):
173 if host:
173 if host:
174 return list(self._hostmap.get(host, []))
174 return list(self._hostmap.get(host, []))
175 else:
175 else:
176 return dict(self._hostmap)
176 return dict(self._hostmap)
177
177
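To make the bookkeeping above concrete, a small illustration of the ConnectionManager API; plain strings stand in for connection objects, which is enough since the manager only stores and returns them:

cm = ConnectionManager()
cm.add('example.com:80', 'conn-1', 1)        # idle connection, ready for reuse
cm.add('example.com:80', 'conn-2', 0)        # currently busy with a request

print cm.get_ready_conn('example.com:80')    # 'conn-1' (and it is now marked busy)
print cm.get_ready_conn('example.com:80')    # None, nothing is ready any more

cm.set_ready('conn-2', 1)                    # that request finished
print cm.get_all('example.com:80')           # ['conn-1', 'conn-2']
cm.remove('conn-1')                          # forget a dead connection entirely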
178 class HTTPHandler(urllib2.HTTPHandler):
178 class KeepAliveHandler:
179 def __init__(self):
179 def __init__(self):
180 self._cm = ConnectionManager()
180 self._cm = ConnectionManager()
181
181
182 #### Connection Management
182 #### Connection Management
183 def open_connections(self):
183 def open_connections(self):
184 """return a list of connected hosts and the number of connections
184 """return a list of connected hosts and the number of connections
185 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
185 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
186 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
186 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
187
187
188 def close_connection(self, host):
188 def close_connection(self, host):
189 """close connection(s) to <host>
189 """close connection(s) to <host>
190 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
190 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
191 no error occurs if there is no connection to that host."""
191 no error occurs if there is no connection to that host."""
192 for h in self._cm.get_all(host):
192 for h in self._cm.get_all(host):
193 self._cm.remove(h)
193 self._cm.remove(h)
194 h.close()
194 h.close()
195
195
196 def close_all(self):
196 def close_all(self):
197 """close all open connections"""
197 """close all open connections"""
198 for host, conns in self._cm.get_all().items():
198 for host, conns in self._cm.get_all().items():
199 for h in conns:
199 for h in conns:
200 self._cm.remove(h)
200 self._cm.remove(h)
201 h.close()
201 h.close()
202
202
203 def _request_closed(self, request, host, connection):
203 def _request_closed(self, request, host, connection):
204 """tells us that this request is now closed and the
204 """tells us that this request is now closed and the
205 connection is ready for another request"""
205 connection is ready for another request"""
206 self._cm.set_ready(connection, 1)
206 self._cm.set_ready(connection, 1)
207
207
208 def _remove_connection(self, host, connection, close=0):
208 def _remove_connection(self, host, connection, close=0):
209 if close: connection.close()
209 if close: connection.close()
210 self._cm.remove(connection)
210 self._cm.remove(connection)
211
211
212 #### Transaction Execution
212 #### Transaction Execution
213 def http_open(self, req):
213 def http_open(self, req):
214 return self.do_open(HTTPConnection, req)
214 return self.do_open(HTTPConnection, req)
215
215
216 def do_open(self, http_class, req):
216 def do_open(self, http_class, req):
217 host = req.get_host()
217 host = req.get_host()
218 if not host:
218 if not host:
219 raise urllib2.URLError('no host given')
219 raise urllib2.URLError('no host given')
220
220
221 try:
221 try:
222 h = self._cm.get_ready_conn(host)
222 h = self._cm.get_ready_conn(host)
223 while h:
223 while h:
224 r = self._reuse_connection(h, req, host)
224 r = self._reuse_connection(h, req, host)
225
225
226 # if this response is non-None, then it worked and we're
226 # if this response is non-None, then it worked and we're
227 # done. Break out, skipping the else block.
227 # done. Break out, skipping the else block.
228 if r: break
228 if r: break
229
229
230 # connection is bad - possibly closed by server
230 # connection is bad - possibly closed by server
231 # discard it and ask for the next free connection
231 # discard it and ask for the next free connection
232 h.close()
232 h.close()
233 self._cm.remove(h)
233 self._cm.remove(h)
234 h = self._cm.get_ready_conn(host)
234 h = self._cm.get_ready_conn(host)
235 else:
235 else:
236 # no (working) free connections were found. Create a new one.
236 # no (working) free connections were found. Create a new one.
237 h = http_class(host)
237 h = http_class(host)
238 if DEBUG: DEBUG.info("creating new connection to %s (%d)",
238 if DEBUG: DEBUG.info("creating new connection to %s (%d)",
239 host, id(h))
239 host, id(h))
240 self._cm.add(host, h, 0)
240 self._cm.add(host, h, 0)
241 self._start_transaction(h, req)
241 self._start_transaction(h, req)
242 r = h.getresponse()
242 r = h.getresponse()
243 except (socket.error, httplib.HTTPException), err:
243 except (socket.error, httplib.HTTPException), err:
244 raise urllib2.URLError(err)
244 raise urllib2.URLError(err)
245
245
246 # if not a persistent connection, don't try to reuse it
246 # if not a persistent connection, don't try to reuse it
247 if r.will_close: self._cm.remove(h)
247 if r.will_close: self._cm.remove(h)
248
248
249 if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
249 if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
250 r._handler = self
250 r._handler = self
251 r._host = host
251 r._host = host
252 r._url = req.get_full_url()
252 r._url = req.get_full_url()
253 r._connection = h
253 r._connection = h
254 r.code = r.status
254 r.code = r.status
255 r.headers = r.msg
255 r.headers = r.msg
256 r.msg = r.reason
256 r.msg = r.reason
257
257
258 if r.status == 200 or not HANDLE_ERRORS:
258 if r.status == 200 or not HANDLE_ERRORS:
259 return r
259 return r
260 else:
260 else:
261 return self.parent.error('http', req, r,
261 return self.parent.error('http', req, r,
262 r.status, r.msg, r.headers)
262 r.status, r.msg, r.headers)
263
263
264 def _reuse_connection(self, h, req, host):
264 def _reuse_connection(self, h, req, host):
265 """start the transaction with a re-used connection
265 """start the transaction with a re-used connection
266 return a response object (r) upon success or None on failure.
266 return a response object (r) upon success or None on failure.
267 This does NOT close or remove bad connections in cases where
267 This does NOT close or remove bad connections in cases where
268 it returns. However, if an unexpected exception occurs, it
268 it returns. However, if an unexpected exception occurs, it
269 will close and remove the connection before re-raising.
269 will close and remove the connection before re-raising.
270 """
270 """
271 try:
271 try:
272 self._start_transaction(h, req)
272 self._start_transaction(h, req)
273 r = h.getresponse()
273 r = h.getresponse()
274 # note: just because we got something back doesn't mean it
274 # note: just because we got something back doesn't mean it
275 # worked. We'll check the version below, too.
275 # worked. We'll check the version below, too.
276 except (socket.error, httplib.HTTPException):
276 except (socket.error, httplib.HTTPException):
277 r = None
277 r = None
278 except:
278 except:
279 # adding this block just in case we've missed
279 # adding this block just in case we've missed
280 # something. We will still raise the exception, but
280 # something. We will still raise the exception, but
281 # let's try to close the connection and remove it
281 # let's try to close the connection and remove it
282 # first. We previously got into a nasty loop
282 # first. We previously got into a nasty loop
283 # where an exception was uncaught, and so the
283 # where an exception was uncaught, and so the
284 # connection stayed open. On the next try, the
284 # connection stayed open. On the next try, the
285 # same exception was raised, etc. The tradeoff is
285 # same exception was raised, etc. The tradeoff is
286 # that it's now possible this call will raise
286 # that it's now possible this call will raise
287 # a DIFFERENT exception
287 # a DIFFERENT exception
288 if DEBUG: DEBUG.error("unexpected exception - closing " + \
288 if DEBUG: DEBUG.error("unexpected exception - closing " + \
289 "connection to %s (%d)", host, id(h))
289 "connection to %s (%d)", host, id(h))
290 self._cm.remove(h)
290 self._cm.remove(h)
291 h.close()
291 h.close()
292 raise
292 raise
293
293
294 if r is None or r.version == 9:
294 if r is None or r.version == 9:
295 # httplib falls back to assuming HTTP 0.9 if it gets a
295 # httplib falls back to assuming HTTP 0.9 if it gets a
296 # bad header back. This is most likely to happen if
296 # bad header back. This is most likely to happen if
297 # the socket has been closed by the server since we
297 # the socket has been closed by the server since we
298 # last used the connection.
298 # last used the connection.
299 if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
299 if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
300 host, id(h))
300 host, id(h))
301 r = None
301 r = None
302 else:
302 else:
303 if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
303 if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
304
304
305 return r
305 return r
306
306
307 def _start_transaction(self, h, req):
307 def _start_transaction(self, h, req):
308 headers = req.headers.copy()
308 headers = req.headers.copy()
309 body = req.data
309 body = req.data
310 if sys.version_info >= (2, 4):
310 if sys.version_info >= (2, 4):
311 headers.update(req.unredirected_hdrs)
311 headers.update(req.unredirected_hdrs)
312 try:
312 try:
313 h.request(req.get_method(), req.get_selector(), body, headers)
313 h.request(req.get_method(), req.get_selector(), body, headers)
314 except socket.error, err: # XXX what error?
314 except socket.error, err: # XXX what error?
315 raise urllib2.URLError(err)
315 raise urllib2.URLError(err)
316
316
317 class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
318 pass
319
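One way to watch the reuse decisions made by do_open() and _reuse_connection() is to install a logger in the module-level DEBUG hook; a sketch modelled on the FakeLogger used by test_timeout() further down:

import keepalive

class StdoutLogger:
    # the handler only ever calls debug/info/error/warning with printf-style args
    def debug(self, msg, *args):
        print msg % args
    info = warning = error = debug

keepalive.DEBUG = StdoutLogger()
# requests made through HTTPHandler() now report lines such as
# "creating new connection to ..." and "re-using connection to ..."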
317 class HTTPResponse(httplib.HTTPResponse):
320 class HTTPResponse(httplib.HTTPResponse):
318 # we need to subclass HTTPResponse in order to
321 # we need to subclass HTTPResponse in order to
319 # 1) add readline() and readlines() methods
322 # 1) add readline() and readlines() methods
320 # 2) add close_connection() methods
323 # 2) add close_connection() methods
321 # 3) add info() and geturl() methods
324 # 3) add info() and geturl() methods
322
325
323 # in order to add readline(), read must be modified to deal with a
326 # in order to add readline(), read must be modified to deal with a
324 # buffer. example: readline must read a buffer and then spit back
327 # buffer. example: readline must read a buffer and then spit back
325 # one line at a time. The only real alternative is to read one
328 # one line at a time. The only real alternative is to read one
326 # BYTE at a time (ick). Once something has been read, it can't be
329 # BYTE at a time (ick). Once something has been read, it can't be
327 # put back (ok, maybe it can, but that's even uglier than this),
330 # put back (ok, maybe it can, but that's even uglier than this),
328 # so if you THEN do a normal read, you must first take stuff from
331 # so if you THEN do a normal read, you must first take stuff from
329 # the buffer.
332 # the buffer.
330
333
331 # the read method wraps the original to accommodate buffering,
334 # the read method wraps the original to accommodate buffering,
332 # although read() never adds to the buffer.
335 # although read() never adds to the buffer.
333 # Both readline and readlines have been stolen with almost no
336 # Both readline and readlines have been stolen with almost no
334 # modification from socket.py
337 # modification from socket.py
335
338
336
339
337 def __init__(self, sock, debuglevel=0, strict=0, method=None):
340 def __init__(self, sock, debuglevel=0, strict=0, method=None):
338 if method: # the httplib in python 2.3 uses the method arg
341 if method: # the httplib in python 2.3 uses the method arg
339 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
342 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
340 else: # 2.2 doesn't
343 else: # 2.2 doesn't
341 httplib.HTTPResponse.__init__(self, sock, debuglevel)
344 httplib.HTTPResponse.__init__(self, sock, debuglevel)
342 self.fileno = sock.fileno
345 self.fileno = sock.fileno
343 self.code = None
346 self.code = None
344 self._rbuf = ''
347 self._rbuf = ''
345 self._rbufsize = 8096
348 self._rbufsize = 8096
346 self._handler = None # inserted by the handler later
349 self._handler = None # inserted by the handler later
347 self._host = None # (same)
350 self._host = None # (same)
348 self._url = None # (same)
351 self._url = None # (same)
349 self._connection = None # (same)
352 self._connection = None # (same)
350
353
351 _raw_read = httplib.HTTPResponse.read
354 _raw_read = httplib.HTTPResponse.read
352
355
353 def close(self):
356 def close(self):
354 if self.fp:
357 if self.fp:
355 self.fp.close()
358 self.fp.close()
356 self.fp = None
359 self.fp = None
357 if self._handler:
360 if self._handler:
358 self._handler._request_closed(self, self._host,
361 self._handler._request_closed(self, self._host,
359 self._connection)
362 self._connection)
360
363
361 def close_connection(self):
364 def close_connection(self):
362 self._handler._remove_connection(self._host, self._connection, close=1)
365 self._handler._remove_connection(self._host, self._connection, close=1)
363 self.close()
366 self.close()
364
367
365 def info(self):
368 def info(self):
366 return self.headers
369 return self.headers
367
370
368 def geturl(self):
371 def geturl(self):
369 return self._url
372 return self._url
370
373
371 def read(self, amt=None):
374 def read(self, amt=None):
372 # the _rbuf test is only in this first if for speed. It's not
375 # the _rbuf test is only in this first if for speed. It's not
373 # logically necessary
376 # logically necessary
374 if self._rbuf and not amt is None:
377 if self._rbuf and not amt is None:
375 L = len(self._rbuf)
378 L = len(self._rbuf)
376 if amt > L:
379 if amt > L:
377 amt -= L
380 amt -= L
378 else:
381 else:
379 s = self._rbuf[:amt]
382 s = self._rbuf[:amt]
380 self._rbuf = self._rbuf[amt:]
383 self._rbuf = self._rbuf[amt:]
381 return s
384 return s
382
385
383 s = self._rbuf + self._raw_read(amt)
386 s = self._rbuf + self._raw_read(amt)
384 self._rbuf = ''
387 self._rbuf = ''
385 return s
388 return s
386
389
387 def readline(self, limit=-1):
390 def readline(self, limit=-1):
388 data = ""
391 data = ""
389 i = self._rbuf.find('\n')
392 i = self._rbuf.find('\n')
390 while i < 0 and not (0 < limit <= len(self._rbuf)):
393 while i < 0 and not (0 < limit <= len(self._rbuf)):
391 new = self._raw_read(self._rbufsize)
394 new = self._raw_read(self._rbufsize)
392 if not new: break
395 if not new: break
393 i = new.find('\n')
396 i = new.find('\n')
394 if i >= 0: i = i + len(self._rbuf)
397 if i >= 0: i = i + len(self._rbuf)
395 self._rbuf = self._rbuf + new
398 self._rbuf = self._rbuf + new
396 if i < 0: i = len(self._rbuf)
399 if i < 0: i = len(self._rbuf)
397 else: i = i+1
400 else: i = i+1
398 if 0 <= limit < len(self._rbuf): i = limit
401 if 0 <= limit < len(self._rbuf): i = limit
399 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
402 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
400 return data
403 return data
401
404
402 def readlines(self, sizehint = 0):
405 def readlines(self, sizehint = 0):
403 total = 0
406 total = 0
404 list = []
407 list = []
405 while 1:
408 while 1:
406 line = self.readline()
409 line = self.readline()
407 if not line: break
410 if not line: break
408 list.append(line)
411 list.append(line)
409 total += len(line)
412 total += len(line)
410 if sizehint and total >= sizehint:
413 if sizehint and total >= sizehint:
411 break
414 break
412 return list
415 return list
413
416
414
417
415 class HTTPConnection(httplib.HTTPConnection):
418 class HTTPConnection(httplib.HTTPConnection):
416 # use the modified response class
419 # use the modified response class
417 response_class = HTTPResponse
420 response_class = HTTPResponse
418
421
419 #########################################################################
422 #########################################################################
420 ##### TEST FUNCTIONS
423 ##### TEST FUNCTIONS
421 #########################################################################
424 #########################################################################
422
425
423 def error_handler(url):
426 def error_handler(url):
424 global HANDLE_ERRORS
427 global HANDLE_ERRORS
425 orig = HANDLE_ERRORS
428 orig = HANDLE_ERRORS
426 keepalive_handler = HTTPHandler()
429 keepalive_handler = HTTPHandler()
427 opener = urllib2.build_opener(keepalive_handler)
430 opener = urllib2.build_opener(keepalive_handler)
428 urllib2.install_opener(opener)
431 urllib2.install_opener(opener)
429 pos = {0: 'off', 1: 'on'}
432 pos = {0: 'off', 1: 'on'}
430 for i in (0, 1):
433 for i in (0, 1):
431 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
434 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
432 HANDLE_ERRORS = i
435 HANDLE_ERRORS = i
433 try:
436 try:
434 fo = urllib2.urlopen(url)
437 fo = urllib2.urlopen(url)
435 foo = fo.read()
438 foo = fo.read()
436 fo.close()
439 fo.close()
437 try: status, reason = fo.status, fo.reason
440 try: status, reason = fo.status, fo.reason
438 except AttributeError: status, reason = None, None
441 except AttributeError: status, reason = None, None
439 except IOError, e:
442 except IOError, e:
440 print " EXCEPTION: %s" % e
443 print " EXCEPTION: %s" % e
441 raise
444 raise
442 else:
445 else:
443 print " status = %s, reason = %s" % (status, reason)
446 print " status = %s, reason = %s" % (status, reason)
444 HANDLE_ERRORS = orig
447 HANDLE_ERRORS = orig
445 hosts = keepalive_handler.open_connections()
448 hosts = keepalive_handler.open_connections()
446 print "open connections:", hosts
449 print "open connections:", hosts
447 keepalive_handler.close_all()
450 keepalive_handler.close_all()
448
451
449 def continuity(url):
452 def continuity(url):
450 import md5
453 import md5
451 format = '%25s: %s'
454 format = '%25s: %s'
452
455
453 # first fetch the file with the normal http handler
456 # first fetch the file with the normal http handler
454 opener = urllib2.build_opener()
457 opener = urllib2.build_opener()
455 urllib2.install_opener(opener)
458 urllib2.install_opener(opener)
456 fo = urllib2.urlopen(url)
459 fo = urllib2.urlopen(url)
457 foo = fo.read()
460 foo = fo.read()
458 fo.close()
461 fo.close()
459 m = md5.new(foo)
462 m = md5.new(foo)
460 print format % ('normal urllib', m.hexdigest())
463 print format % ('normal urllib', m.hexdigest())
461
464
462 # now install the keepalive handler and try again
465 # now install the keepalive handler and try again
463 opener = urllib2.build_opener(HTTPHandler())
466 opener = urllib2.build_opener(HTTPHandler())
464 urllib2.install_opener(opener)
467 urllib2.install_opener(opener)
465
468
466 fo = urllib2.urlopen(url)
469 fo = urllib2.urlopen(url)
467 foo = fo.read()
470 foo = fo.read()
468 fo.close()
471 fo.close()
469 m = md5.new(foo)
472 m = md5.new(foo)
470 print format % ('keepalive read', m.hexdigest())
473 print format % ('keepalive read', m.hexdigest())
471
474
472 fo = urllib2.urlopen(url)
475 fo = urllib2.urlopen(url)
473 foo = ''
476 foo = ''
474 while 1:
477 while 1:
475 f = fo.readline()
478 f = fo.readline()
476 if f: foo = foo + f
479 if f: foo = foo + f
477 else: break
480 else: break
478 fo.close()
481 fo.close()
479 m = md5.new(foo)
482 m = md5.new(foo)
480 print format % ('keepalive readline', m.hexdigest())
483 print format % ('keepalive readline', m.hexdigest())
481
484
482 def comp(N, url):
485 def comp(N, url):
483 print ' making %i connections to:\n %s' % (N, url)
486 print ' making %i connections to:\n %s' % (N, url)
484
487
485 sys.stdout.write(' first using the normal urllib handlers')
488 sys.stdout.write(' first using the normal urllib handlers')
486 # first use normal opener
489 # first use normal opener
487 opener = urllib2.build_opener()
490 opener = urllib2.build_opener()
488 urllib2.install_opener(opener)
491 urllib2.install_opener(opener)
489 t1 = fetch(N, url)
492 t1 = fetch(N, url)
490 print ' TIME: %.3f s' % t1
493 print ' TIME: %.3f s' % t1
491
494
492 sys.stdout.write(' now using the keepalive handler ')
495 sys.stdout.write(' now using the keepalive handler ')
493 # now install the keepalive handler and try again
496 # now install the keepalive handler and try again
494 opener = urllib2.build_opener(HTTPHandler())
497 opener = urllib2.build_opener(HTTPHandler())
495 urllib2.install_opener(opener)
498 urllib2.install_opener(opener)
496 t2 = fetch(N, url)
499 t2 = fetch(N, url)
497 print ' TIME: %.3f s' % t2
500 print ' TIME: %.3f s' % t2
498 print ' improvement factor: %.2f' % (t1/t2, )
501 print ' improvement factor: %.2f' % (t1/t2, )
499
502
500 def fetch(N, url, delay=0):
503 def fetch(N, url, delay=0):
501 import time
504 import time
502 lens = []
505 lens = []
503 starttime = time.time()
506 starttime = time.time()
504 for i in range(N):
507 for i in range(N):
505 if delay and i > 0: time.sleep(delay)
508 if delay and i > 0: time.sleep(delay)
506 fo = urllib2.urlopen(url)
509 fo = urllib2.urlopen(url)
507 foo = fo.read()
510 foo = fo.read()
508 fo.close()
511 fo.close()
509 lens.append(len(foo))
512 lens.append(len(foo))
510 diff = time.time() - starttime
513 diff = time.time() - starttime
511
514
512 j = 0
515 j = 0
513 for i in lens[1:]:
516 for i in lens[1:]:
514 j = j + 1
517 j = j + 1
515 if not i == lens[0]:
518 if not i == lens[0]:
516 print "WARNING: inconsistent length on read %i: %i" % (j, i)
519 print "WARNING: inconsistent length on read %i: %i" % (j, i)
517
520
518 return diff
521 return diff
519
522
520 def test_timeout(url):
523 def test_timeout(url):
521 global DEBUG
524 global DEBUG
522 dbbackup = DEBUG
525 dbbackup = DEBUG
523 class FakeLogger:
526 class FakeLogger:
524 def debug(self, msg, *args): print msg % args
527 def debug(self, msg, *args): print msg % args
525 info = warning = error = debug
528 info = warning = error = debug
526 DEBUG = FakeLogger()
529 DEBUG = FakeLogger()
527 print " fetching the file to establish a connection"
530 print " fetching the file to establish a connection"
528 fo = urllib2.urlopen(url)
531 fo = urllib2.urlopen(url)
529 data1 = fo.read()
532 data1 = fo.read()
530 fo.close()
533 fo.close()
531
534
532 i = 20
535 i = 20
533 print " waiting %i seconds for the server to close the connection" % i
536 print " waiting %i seconds for the server to close the connection" % i
534 while i > 0:
537 while i > 0:
535 sys.stdout.write('\r %2i' % i)
538 sys.stdout.write('\r %2i' % i)
536 sys.stdout.flush()
539 sys.stdout.flush()
537 time.sleep(1)
540 time.sleep(1)
538 i -= 1
541 i -= 1
539 sys.stderr.write('\r')
542 sys.stderr.write('\r')
540
543
541 print " fetching the file a second time"
544 print " fetching the file a second time"
542 fo = urllib2.urlopen(url)
545 fo = urllib2.urlopen(url)
543 data2 = fo.read()
546 data2 = fo.read()
544 fo.close()
547 fo.close()
545
548
546 if data1 == data2:
549 if data1 == data2:
547 print ' data are identical'
550 print ' data are identical'
548 else:
551 else:
549 print ' ERROR: DATA DIFFER'
552 print ' ERROR: DATA DIFFER'
550
553
551 DEBUG = dbbackup
554 DEBUG = dbbackup
552
555
553
556
554 def test(url, N=10):
557 def test(url, N=10):
555 print "checking error handler (do this on a non-200)"
558 print "checking error handler (do this on a non-200)"
556 try: error_handler(url)
559 try: error_handler(url)
557 except IOError, e:
560 except IOError, e:
558 print "exiting - exception will prevent further tests"
561 print "exiting - exception will prevent further tests"
559 sys.exit()
562 sys.exit()
560 print
563 print
561 print "performing continuity test (making sure stuff isn't corrupted)"
564 print "performing continuity test (making sure stuff isn't corrupted)"
562 continuity(url)
565 continuity(url)
563 print
566 print
564 print "performing speed comparison"
567 print "performing speed comparison"
565 comp(N, url)
568 comp(N, url)
566 print
569 print
567 print "performing dropped-connection check"
570 print "performing dropped-connection check"
568 test_timeout(url)
571 test_timeout(url)
569
572
570 if __name__ == '__main__':
573 if __name__ == '__main__':
571 import time
574 import time
572 import sys
575 import sys
573 try:
576 try:
574 N = int(sys.argv[1])
577 N = int(sys.argv[1])
575 url = sys.argv[2]
578 url = sys.argv[2]
576 except:
579 except:
577 print "%s <integer> <url>" % sys.argv[0]
580 print "%s <integer> <url>" % sys.argv[0]
578 else:
581 else:
579 test(url, N)
582 test(url, N)
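For readers skimming the harness that ends above: the driver fetches the given URL repeatedly, records each body length, and warns when the lengths disagree. A minimal standalone sketch of that consistency loop, using only the Python 2 standard library (the URL below is an illustrative placeholder, not taken from the harness):

    import time
    import urllib2

    def fetch_lengths(url, n, delay=0):
        # fetch url n times, optionally sleeping between requests, and
        # return (elapsed seconds, list of body lengths) for comparison
        lens = []
        start = time.time()
        for i in range(n):
            if delay and i > 0:
                time.sleep(delay)
            fo = urllib2.urlopen(url)
            lens.append(len(fo.read()))
            fo.close()
        return time.time() - start, lens

    if __name__ == '__main__':
        elapsed, lens = fetch_lengths('http://example.com/somefile', 3)
        for j in range(1, len(lens)):
            if lens[j] != lens[0]:
                print "WARNING: inconsistent length on read %i: %i" % (j, lens[j])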
@@ -1,2076 +1,2081 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import re, lock, transaction, tempfile, stat, errno, ui
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = util.set(('lookup', 'changegroupsubset'))
16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.root = os.path.realpath(path)
21 self.root = os.path.realpath(path)
22 self.path = os.path.join(self.root, ".hg")
22 self.path = os.path.join(self.root, ".hg")
23 self.origroot = path
23 self.origroot = path
24 self.opener = util.opener(self.path)
24 self.opener = util.opener(self.path)
25 self.wopener = util.opener(self.root)
25 self.wopener = util.opener(self.root)
26
26
27 if not os.path.isdir(self.path):
27 if not os.path.isdir(self.path):
28 if create:
28 if create:
29 if not os.path.exists(path):
29 if not os.path.exists(path):
30 os.mkdir(path)
30 os.mkdir(path)
31 os.mkdir(self.path)
31 os.mkdir(self.path)
32 requirements = ["revlogv1"]
32 requirements = ["revlogv1"]
33 if parentui.configbool('format', 'usestore', True):
33 if parentui.configbool('format', 'usestore', True):
34 os.mkdir(os.path.join(self.path, "store"))
34 os.mkdir(os.path.join(self.path, "store"))
35 requirements.append("store")
35 requirements.append("store")
36 # create an invalid changelog
36 # create an invalid changelog
37 self.opener("00changelog.i", "a").write(
37 self.opener("00changelog.i", "a").write(
38 '\0\0\0\2' # represents revlogv2
38 '\0\0\0\2' # represents revlogv2
39 ' dummy changelog to prevent using the old repo layout'
39 ' dummy changelog to prevent using the old repo layout'
40 )
40 )
41 reqfile = self.opener("requires", "w")
41 reqfile = self.opener("requires", "w")
42 for r in requirements:
42 for r in requirements:
43 reqfile.write("%s\n" % r)
43 reqfile.write("%s\n" % r)
44 reqfile.close()
44 reqfile.close()
45 else:
45 else:
46 raise repo.RepoError(_("repository %s not found") % path)
46 raise repo.RepoError(_("repository %s not found") % path)
47 elif create:
47 elif create:
48 raise repo.RepoError(_("repository %s already exists") % path)
48 raise repo.RepoError(_("repository %s already exists") % path)
49 else:
49 else:
50 # find requirements
50 # find requirements
51 try:
51 try:
52 requirements = self.opener("requires").read().splitlines()
52 requirements = self.opener("requires").read().splitlines()
53 except IOError, inst:
53 except IOError, inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 requirements = []
56 requirements = []
57 # check them
57 # check them
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61
61
62 # setup store
62 # setup store
63 if "store" in requirements:
63 if "store" in requirements:
64 self.encodefn = util.encodefilename
64 self.encodefn = util.encodefilename
65 self.decodefn = util.decodefilename
65 self.decodefn = util.decodefilename
66 self.spath = os.path.join(self.path, "store")
66 self.spath = os.path.join(self.path, "store")
67 else:
67 else:
68 self.encodefn = lambda x: x
68 self.encodefn = lambda x: x
69 self.decodefn = lambda x: x
69 self.decodefn = lambda x: x
70 self.spath = self.path
70 self.spath = self.path
71 self.sopener = util.encodedopener(util.opener(self.spath),
71 self.sopener = util.encodedopener(util.opener(self.spath),
72 self.encodefn)
72 self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.filterpats = {}
85 self.filterpats = {}
86 self._datafilters = {}
86 self._datafilters = {}
87 self._transref = self._lockref = self._wlockref = None
87 self._transref = self._lockref = self._wlockref = None
88
88
89 def __getattr__(self, name):
89 def __getattr__(self, name):
90 if name == 'changelog':
90 if name == 'changelog':
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 return self.changelog
93 return self.changelog
94 if name == 'manifest':
94 if name == 'manifest':
95 self.changelog
95 self.changelog
96 self.manifest = manifest.manifest(self.sopener)
96 self.manifest = manifest.manifest(self.sopener)
97 return self.manifest
97 return self.manifest
98 if name == 'dirstate':
98 if name == 'dirstate':
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 return self.dirstate
100 return self.dirstate
101 else:
101 else:
102 raise AttributeError, name
102 raise AttributeError, name
103
103
104 def url(self):
104 def url(self):
105 return 'file:' + self.root
105 return 'file:' + self.root
106
106
107 def hook(self, name, throw=False, **args):
107 def hook(self, name, throw=False, **args):
108 return hook.hook(self.ui, self, name, throw, **args)
108 return hook.hook(self.ui, self, name, throw, **args)
109
109
110 tag_disallowed = ':\r\n'
110 tag_disallowed = ':\r\n'
111
111
112 def _tag(self, name, node, message, local, user, date, parent=None,
112 def _tag(self, name, node, message, local, user, date, parent=None,
113 extra={}):
113 extra={}):
114 use_dirstate = parent is None
114 use_dirstate = parent is None
115
115
116 for c in self.tag_disallowed:
116 for c in self.tag_disallowed:
117 if c in name:
117 if c in name:
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119
119
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121
121
122 def writetag(fp, name, munge, prevtags):
122 def writetag(fp, name, munge, prevtags):
123 fp.seek(0, 2)
123 if prevtags and prevtags[-1] != '\n':
124 if prevtags and prevtags[-1] != '\n':
124 fp.write('\n')
125 fp.write('\n')
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 fp.close()
127 fp.close()
127
128
128 prevtags = ''
129 prevtags = ''
129 if local:
130 if local:
130 try:
131 try:
131 fp = self.opener('localtags', 'r+')
132 fp = self.opener('localtags', 'r+')
132 except IOError, err:
133 except IOError, err:
133 fp = self.opener('localtags', 'a')
134 fp = self.opener('localtags', 'a')
134 else:
135 else:
135 prevtags = fp.read()
136 prevtags = fp.read()
136
137
137 # local tags are stored in the current charset
138 # local tags are stored in the current charset
138 writetag(fp, name, None, prevtags)
139 writetag(fp, name, None, prevtags)
139 self.hook('tag', node=hex(node), tag=name, local=local)
140 self.hook('tag', node=hex(node), tag=name, local=local)
140 return
141 return
141
142
142 if use_dirstate:
143 if use_dirstate:
143 try:
144 try:
144 fp = self.wfile('.hgtags', 'rb+')
145 fp = self.wfile('.hgtags', 'rb+')
145 except IOError, err:
146 except IOError, err:
146 fp = self.wfile('.hgtags', 'ab')
147 fp = self.wfile('.hgtags', 'ab')
147 else:
148 else:
148 prevtags = fp.read()
149 prevtags = fp.read()
149 else:
150 else:
150 try:
151 try:
151 prevtags = self.filectx('.hgtags', parent).data()
152 prevtags = self.filectx('.hgtags', parent).data()
152 except revlog.LookupError:
153 except revlog.LookupError:
153 pass
154 pass
154 fp = self.wfile('.hgtags', 'wb')
155 fp = self.wfile('.hgtags', 'wb')
155 if prevtags:
156 if prevtags:
156 fp.write(prevtags)
157 fp.write(prevtags)
157
158
158 # committed tags are stored in UTF-8
159 # committed tags are stored in UTF-8
159 writetag(fp, name, util.fromlocal, prevtags)
160 writetag(fp, name, util.fromlocal, prevtags)
160
161
161 if use_dirstate and '.hgtags' not in self.dirstate:
162 if use_dirstate and '.hgtags' not in self.dirstate:
162 self.add(['.hgtags'])
163 self.add(['.hgtags'])
163
164
164 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
165 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
165 extra=extra)
166 extra=extra)
166
167
167 self.hook('tag', node=hex(node), tag=name, local=local)
168 self.hook('tag', node=hex(node), tag=name, local=local)
168
169
169 return tagnode
170 return tagnode
170
171
171 def tag(self, name, node, message, local, user, date):
172 def tag(self, name, node, message, local, user, date):
172 '''tag a revision with a symbolic name.
173 '''tag a revision with a symbolic name.
173
174
174 if local is True, the tag is stored in a per-repository file.
175 if local is True, the tag is stored in a per-repository file.
175 otherwise, it is stored in the .hgtags file, and a new
176 otherwise, it is stored in the .hgtags file, and a new
176 changeset is committed with the change.
177 changeset is committed with the change.
177
178
178 keyword arguments:
179 keyword arguments:
179
180
180 local: whether to store tag in non-version-controlled file
181 local: whether to store tag in non-version-controlled file
181 (default False)
182 (default False)
182
183
183 message: commit message to use if committing
184 message: commit message to use if committing
184
185
185 user: name of user to use if committing
186 user: name of user to use if committing
186
187
187 date: date tuple to use if committing'''
188 date: date tuple to use if committing'''
188
189
189 for x in self.status()[:5]:
190 for x in self.status()[:5]:
190 if '.hgtags' in x:
191 if '.hgtags' in x:
191 raise util.Abort(_('working copy of .hgtags is changed '
192 raise util.Abort(_('working copy of .hgtags is changed '
192 '(please commit .hgtags manually)'))
193 '(please commit .hgtags manually)'))
193
194
194
195
195 self._tag(name, node, message, local, user, date)
196 self._tag(name, node, message, local, user, date)
196
197
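A hedged usage sketch for the tag()/_tag() pair above, with the same positional arguments the docstring lists; the repository path, tag name, and message are illustrative assumptions, not values from this file:

    from mercurial import hg, ui as uimod

    u = uimod.ui()
    repo = hg.repository(u, '.')      # assumes the current directory is a repository
    node = repo.lookup('tip')
    # local=False records the tag in .hgtags and commits a changeset;
    # local=True only appends to .hg/localtags
    repo.tag('v1.0', node, 'Added tag v1.0', False, None, None)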
197 def tags(self):
198 def tags(self):
198 '''return a mapping of tag to node'''
199 '''return a mapping of tag to node'''
199 if self.tagscache:
200 if self.tagscache:
200 return self.tagscache
201 return self.tagscache
201
202
202 globaltags = {}
203 globaltags = {}
203 tagtypes = {}
204 tagtypes = {}
204
205
205 def readtags(lines, fn, tagtype):
206 def readtags(lines, fn, tagtype):
206 filetags = {}
207 filetags = {}
207 count = 0
208 count = 0
208
209
209 def warn(msg):
210 def warn(msg):
210 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
211 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
211
212
212 for l in lines:
213 for l in lines:
213 count += 1
214 count += 1
214 if not l:
215 if not l:
215 continue
216 continue
216 s = l.split(" ", 1)
217 s = l.split(" ", 1)
217 if len(s) != 2:
218 if len(s) != 2:
218 warn(_("cannot parse entry"))
219 warn(_("cannot parse entry"))
219 continue
220 continue
220 node, key = s
221 node, key = s
221 key = util.tolocal(key.strip()) # stored in UTF-8
222 key = util.tolocal(key.strip()) # stored in UTF-8
222 try:
223 try:
223 bin_n = bin(node)
224 bin_n = bin(node)
224 except TypeError:
225 except TypeError:
225 warn(_("node '%s' is not well formed") % node)
226 warn(_("node '%s' is not well formed") % node)
226 continue
227 continue
227 if bin_n not in self.changelog.nodemap:
228 if bin_n not in self.changelog.nodemap:
228 warn(_("tag '%s' refers to unknown node") % key)
229 warn(_("tag '%s' refers to unknown node") % key)
229 continue
230 continue
230
231
231 h = []
232 h = []
232 if key in filetags:
233 if key in filetags:
233 n, h = filetags[key]
234 n, h = filetags[key]
234 h.append(n)
235 h.append(n)
235 filetags[key] = (bin_n, h)
236 filetags[key] = (bin_n, h)
236
237
237 for k, nh in filetags.items():
238 for k, nh in filetags.items():
238 if k not in globaltags:
239 if k not in globaltags:
239 globaltags[k] = nh
240 globaltags[k] = nh
240 tagtypes[k] = tagtype
241 tagtypes[k] = tagtype
241 continue
242 continue
242
243
243 # we prefer the global tag if:
244 # we prefer the global tag if:
244 # it supersedes us OR
245 # it supersedes us OR
245 # mutual supersedes and it has a higher rank
246 # mutual supersedes and it has a higher rank
246 # otherwise we win because we're tip-most
247 # otherwise we win because we're tip-most
247 an, ah = nh
248 an, ah = nh
248 bn, bh = globaltags[k]
249 bn, bh = globaltags[k]
249 if (bn != an and an in bh and
250 if (bn != an and an in bh and
250 (bn not in ah or len(bh) > len(ah))):
251 (bn not in ah or len(bh) > len(ah))):
251 an = bn
252 an = bn
252 ah.extend([n for n in bh if n not in ah])
253 ah.extend([n for n in bh if n not in ah])
253 globaltags[k] = an, ah
254 globaltags[k] = an, ah
254 tagtypes[k] = tagtype
255 tagtypes[k] = tagtype
255
256
256 # read the tags file from each head, ending with the tip
257 # read the tags file from each head, ending with the tip
257 f = None
258 f = None
258 for rev, node, fnode in self._hgtagsnodes():
259 for rev, node, fnode in self._hgtagsnodes():
259 f = (f and f.filectx(fnode) or
260 f = (f and f.filectx(fnode) or
260 self.filectx('.hgtags', fileid=fnode))
261 self.filectx('.hgtags', fileid=fnode))
261 readtags(f.data().splitlines(), f, "global")
262 readtags(f.data().splitlines(), f, "global")
262
263
263 try:
264 try:
264 data = util.fromlocal(self.opener("localtags").read())
265 data = util.fromlocal(self.opener("localtags").read())
265 # localtags are stored in the local character set
266 # localtags are stored in the local character set
266 # while the internal tag table is stored in UTF-8
267 # while the internal tag table is stored in UTF-8
267 readtags(data.splitlines(), "localtags", "local")
268 readtags(data.splitlines(), "localtags", "local")
268 except IOError:
269 except IOError:
269 pass
270 pass
270
271
271 self.tagscache = {}
272 self.tagscache = {}
272 self._tagstypecache = {}
273 self._tagstypecache = {}
273 for k,nh in globaltags.items():
274 for k,nh in globaltags.items():
274 n = nh[0]
275 n = nh[0]
275 if n != nullid:
276 if n != nullid:
276 self.tagscache[k] = n
277 self.tagscache[k] = n
277 self._tagstypecache[k] = tagtypes[k]
278 self._tagstypecache[k] = tagtypes[k]
278 self.tagscache['tip'] = self.changelog.tip()
279 self.tagscache['tip'] = self.changelog.tip()
279
280
280 return self.tagscache
281 return self.tagscache
281
282
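readtags() above accepts the same line format for both .hgtags and localtags: a 40-character hex node, a single space, then the tag name. A minimal parse of one such line, mirroring the split done above (the hash is a made-up placeholder):

    line = "0123456789abcdef0123456789abcdef01234567 v1.0"
    node_hex, name = line.split(" ", 1)
    name = name.strip()      # tag names may carry surrounding whitespace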
282 def tagtype(self, tagname):
283 def tagtype(self, tagname):
283 '''
284 '''
284 return the type of the given tag. result can be:
285 return the type of the given tag. result can be:
285
286
286 'local' : a local tag
287 'local' : a local tag
287 'global' : a global tag
288 'global' : a global tag
288 None : tag does not exist
289 None : tag does not exist
289 '''
290 '''
290
291
291 self.tags()
292 self.tags()
292
293
293 return self._tagstypecache.get(tagname)
294 return self._tagstypecache.get(tagname)
294
295
295 def _hgtagsnodes(self):
296 def _hgtagsnodes(self):
296 heads = self.heads()
297 heads = self.heads()
297 heads.reverse()
298 heads.reverse()
298 last = {}
299 last = {}
299 ret = []
300 ret = []
300 for node in heads:
301 for node in heads:
301 c = self.changectx(node)
302 c = self.changectx(node)
302 rev = c.rev()
303 rev = c.rev()
303 try:
304 try:
304 fnode = c.filenode('.hgtags')
305 fnode = c.filenode('.hgtags')
305 except revlog.LookupError:
306 except revlog.LookupError:
306 continue
307 continue
307 ret.append((rev, node, fnode))
308 ret.append((rev, node, fnode))
308 if fnode in last:
309 if fnode in last:
309 ret[last[fnode]] = None
310 ret[last[fnode]] = None
310 last[fnode] = len(ret) - 1
311 last[fnode] = len(ret) - 1
311 return [item for item in ret if item]
312 return [item for item in ret if item]
312
313
313 def tagslist(self):
314 def tagslist(self):
314 '''return a list of tags ordered by revision'''
315 '''return a list of tags ordered by revision'''
315 l = []
316 l = []
316 for t, n in self.tags().items():
317 for t, n in self.tags().items():
317 try:
318 try:
318 r = self.changelog.rev(n)
319 r = self.changelog.rev(n)
319 except:
320 except:
320 r = -2 # sort to the beginning of the list if unknown
321 r = -2 # sort to the beginning of the list if unknown
321 l.append((r, t, n))
322 l.append((r, t, n))
322 l.sort()
323 l.sort()
323 return [(t, n) for r, t, n in l]
324 return [(t, n) for r, t, n in l]
324
325
325 def nodetags(self, node):
326 def nodetags(self, node):
326 '''return the tags associated with a node'''
327 '''return the tags associated with a node'''
327 if not self.nodetagscache:
328 if not self.nodetagscache:
328 self.nodetagscache = {}
329 self.nodetagscache = {}
329 for t, n in self.tags().items():
330 for t, n in self.tags().items():
330 self.nodetagscache.setdefault(n, []).append(t)
331 self.nodetagscache.setdefault(n, []).append(t)
331 return self.nodetagscache.get(node, [])
332 return self.nodetagscache.get(node, [])
332
333
333 def _branchtags(self):
334 def _branchtags(self):
334 partial, last, lrev = self._readbranchcache()
335 partial, last, lrev = self._readbranchcache()
335
336
336 tiprev = self.changelog.count() - 1
337 tiprev = self.changelog.count() - 1
337 if lrev != tiprev:
338 if lrev != tiprev:
338 self._updatebranchcache(partial, lrev+1, tiprev+1)
339 self._updatebranchcache(partial, lrev+1, tiprev+1)
339 self._writebranchcache(partial, self.changelog.tip(), tiprev)
340 self._writebranchcache(partial, self.changelog.tip(), tiprev)
340
341
341 return partial
342 return partial
342
343
343 def branchtags(self):
344 def branchtags(self):
344 if self.branchcache is not None:
345 if self.branchcache is not None:
345 return self.branchcache
346 return self.branchcache
346
347
347 self.branchcache = {} # avoid recursion in changectx
348 self.branchcache = {} # avoid recursion in changectx
348 partial = self._branchtags()
349 partial = self._branchtags()
349
350
350 # the branch cache is stored on disk as UTF-8, but in the local
351 # the branch cache is stored on disk as UTF-8, but in the local
351 # charset internally
352 # charset internally
352 for k, v in partial.items():
353 for k, v in partial.items():
353 self.branchcache[util.tolocal(k)] = v
354 self.branchcache[util.tolocal(k)] = v
354 return self.branchcache
355 return self.branchcache
355
356
356 def _readbranchcache(self):
357 def _readbranchcache(self):
357 partial = {}
358 partial = {}
358 try:
359 try:
359 f = self.opener("branch.cache")
360 f = self.opener("branch.cache")
360 lines = f.read().split('\n')
361 lines = f.read().split('\n')
361 f.close()
362 f.close()
362 except (IOError, OSError):
363 except (IOError, OSError):
363 return {}, nullid, nullrev
364 return {}, nullid, nullrev
364
365
365 try:
366 try:
366 last, lrev = lines.pop(0).split(" ", 1)
367 last, lrev = lines.pop(0).split(" ", 1)
367 last, lrev = bin(last), int(lrev)
368 last, lrev = bin(last), int(lrev)
368 if not (lrev < self.changelog.count() and
369 if not (lrev < self.changelog.count() and
369 self.changelog.node(lrev) == last): # sanity check
370 self.changelog.node(lrev) == last): # sanity check
370 # invalidate the cache
371 # invalidate the cache
371 raise ValueError('Invalid branch cache: unknown tip')
372 raise ValueError('Invalid branch cache: unknown tip')
372 for l in lines:
373 for l in lines:
373 if not l: continue
374 if not l: continue
374 node, label = l.split(" ", 1)
375 node, label = l.split(" ", 1)
375 partial[label.strip()] = bin(node)
376 partial[label.strip()] = bin(node)
376 except (KeyboardInterrupt, util.SignalInterrupt):
377 except (KeyboardInterrupt, util.SignalInterrupt):
377 raise
378 raise
378 except Exception, inst:
379 except Exception, inst:
379 if self.ui.debugflag:
380 if self.ui.debugflag:
380 self.ui.warn(str(inst), '\n')
381 self.ui.warn(str(inst), '\n')
381 partial, last, lrev = {}, nullid, nullrev
382 partial, last, lrev = {}, nullid, nullrev
382 return partial, last, lrev
383 return partial, last, lrev
383
384
384 def _writebranchcache(self, branches, tip, tiprev):
385 def _writebranchcache(self, branches, tip, tiprev):
385 try:
386 try:
386 f = self.opener("branch.cache", "w", atomictemp=True)
387 f = self.opener("branch.cache", "w", atomictemp=True)
387 f.write("%s %s\n" % (hex(tip), tiprev))
388 f.write("%s %s\n" % (hex(tip), tiprev))
388 for label, node in branches.iteritems():
389 for label, node in branches.iteritems():
389 f.write("%s %s\n" % (hex(node), label))
390 f.write("%s %s\n" % (hex(node), label))
390 f.rename()
391 f.rename()
391 except (IOError, OSError):
392 except (IOError, OSError):
392 pass
393 pass
393
394
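_readbranchcache() and _writebranchcache() above use a small text format: the first line is "<tip hex> <tip rev>", and every following non-empty line is "<node hex> <branch name>". A standalone parsing sketch of that format, with binascii.unhexlify standing in for bin() and well-formed input assumed:

    from binascii import unhexlify

    def parse_branchcache(text):
        lines = text.split('\n')
        last, lrev = lines.pop(0).split(" ", 1)
        last, lrev = unhexlify(last), int(lrev)
        partial = {}
        for l in lines:
            if not l:
                continue
            node, label = l.split(" ", 1)
            partial[label.strip()] = unhexlify(node)
        return partial, last, lrev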
394 def _updatebranchcache(self, partial, start, end):
395 def _updatebranchcache(self, partial, start, end):
395 for r in xrange(start, end):
396 for r in xrange(start, end):
396 c = self.changectx(r)
397 c = self.changectx(r)
397 b = c.branch()
398 b = c.branch()
398 partial[b] = c.node()
399 partial[b] = c.node()
399
400
400 def lookup(self, key):
401 def lookup(self, key):
401 if key == '.':
402 if key == '.':
402 key, second = self.dirstate.parents()
403 key, second = self.dirstate.parents()
403 if key == nullid:
404 if key == nullid:
404 raise repo.RepoError(_("no revision checked out"))
405 raise repo.RepoError(_("no revision checked out"))
405 if second != nullid:
406 if second != nullid:
406 self.ui.warn(_("warning: working directory has two parents, "
407 self.ui.warn(_("warning: working directory has two parents, "
407 "tag '.' uses the first\n"))
408 "tag '.' uses the first\n"))
408 elif key == 'null':
409 elif key == 'null':
409 return nullid
410 return nullid
410 n = self.changelog._match(key)
411 n = self.changelog._match(key)
411 if n:
412 if n:
412 return n
413 return n
413 if key in self.tags():
414 if key in self.tags():
414 return self.tags()[key]
415 return self.tags()[key]
415 if key in self.branchtags():
416 if key in self.branchtags():
416 return self.branchtags()[key]
417 return self.branchtags()[key]
417 n = self.changelog._partialmatch(key)
418 n = self.changelog._partialmatch(key)
418 if n:
419 if n:
419 return n
420 return n
420 try:
421 try:
421 if len(key) == 20:
422 if len(key) == 20:
422 key = hex(key)
423 key = hex(key)
423 except:
424 except:
424 pass
425 pass
425 raise repo.RepoError(_("unknown revision '%s'") % key)
426 raise repo.RepoError(_("unknown revision '%s'") % key)
426
427
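lookup() above resolves a symbolic key in a fixed order: the special keys '.' and 'null', an exact changelog match (revision number or full hash), tags, branch names, and finally unambiguous hash prefixes. A hedged example, assuming repo is an open localrepository obtained as in the earlier sketch:

    tip = repo.lookup('tip')       # resolved as a tag
    wdp = repo.lookup('.')         # first parent of the working directory
    head = repo.lookup('default')  # resolved as a branch name
    rev0 = repo.lookup('0')        # exact match handled by changelog._match

Each call returns a 20-byte binary node id; an unresolvable key raises repo.RepoError.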
427 def dev(self):
428 def dev(self):
428 return os.lstat(self.path).st_dev
429 return os.lstat(self.path).st_dev
429
430
430 def local(self):
431 def local(self):
431 return True
432 return True
432
433
433 def join(self, f):
434 def join(self, f):
434 return os.path.join(self.path, f)
435 return os.path.join(self.path, f)
435
436
436 def sjoin(self, f):
437 def sjoin(self, f):
437 f = self.encodefn(f)
438 f = self.encodefn(f)
438 return os.path.join(self.spath, f)
439 return os.path.join(self.spath, f)
439
440
440 def wjoin(self, f):
441 def wjoin(self, f):
441 return os.path.join(self.root, f)
442 return os.path.join(self.root, f)
442
443
443 def file(self, f):
444 def file(self, f):
444 if f[0] == '/':
445 if f[0] == '/':
445 f = f[1:]
446 f = f[1:]
446 return filelog.filelog(self.sopener, f)
447 return filelog.filelog(self.sopener, f)
447
448
448 def changectx(self, changeid=None):
449 def changectx(self, changeid=None):
449 return context.changectx(self, changeid)
450 return context.changectx(self, changeid)
450
451
451 def workingctx(self):
452 def workingctx(self):
452 return context.workingctx(self)
453 return context.workingctx(self)
453
454
454 def parents(self, changeid=None):
455 def parents(self, changeid=None):
455 '''
456 '''
456 get list of changectxs for parents of changeid or working directory
457 get list of changectxs for parents of changeid or working directory
457 '''
458 '''
458 if changeid is None:
459 if changeid is None:
459 pl = self.dirstate.parents()
460 pl = self.dirstate.parents()
460 else:
461 else:
461 n = self.changelog.lookup(changeid)
462 n = self.changelog.lookup(changeid)
462 pl = self.changelog.parents(n)
463 pl = self.changelog.parents(n)
463 if pl[1] == nullid:
464 if pl[1] == nullid:
464 return [self.changectx(pl[0])]
465 return [self.changectx(pl[0])]
465 return [self.changectx(pl[0]), self.changectx(pl[1])]
466 return [self.changectx(pl[0]), self.changectx(pl[1])]
466
467
467 def filectx(self, path, changeid=None, fileid=None):
468 def filectx(self, path, changeid=None, fileid=None):
468 """changeid can be a changeset revision, node, or tag.
469 """changeid can be a changeset revision, node, or tag.
469 fileid can be a file revision or node."""
470 fileid can be a file revision or node."""
470 return context.filectx(self, path, changeid, fileid)
471 return context.filectx(self, path, changeid, fileid)
471
472
472 def getcwd(self):
473 def getcwd(self):
473 return self.dirstate.getcwd()
474 return self.dirstate.getcwd()
474
475
475 def pathto(self, f, cwd=None):
476 def pathto(self, f, cwd=None):
476 return self.dirstate.pathto(f, cwd)
477 return self.dirstate.pathto(f, cwd)
477
478
478 def wfile(self, f, mode='r'):
479 def wfile(self, f, mode='r'):
479 return self.wopener(f, mode)
480 return self.wopener(f, mode)
480
481
481 def _link(self, f):
482 def _link(self, f):
482 return os.path.islink(self.wjoin(f))
483 return os.path.islink(self.wjoin(f))
483
484
484 def _filter(self, filter, filename, data):
485 def _filter(self, filter, filename, data):
485 if filter not in self.filterpats:
486 if filter not in self.filterpats:
486 l = []
487 l = []
487 for pat, cmd in self.ui.configitems(filter):
488 for pat, cmd in self.ui.configitems(filter):
488 mf = util.matcher(self.root, "", [pat], [], [])[1]
489 mf = util.matcher(self.root, "", [pat], [], [])[1]
489 fn = None
490 fn = None
490 for name, filterfn in self._datafilters.iteritems():
491 for name, filterfn in self._datafilters.iteritems():
491 if cmd.startswith(name):
492 if cmd.startswith(name):
492 fn = filterfn
493 fn = filterfn
493 break
494 break
494 if not fn:
495 if not fn:
495 fn = lambda s, c, **kwargs: util.filter(s, c)
496 fn = lambda s, c, **kwargs: util.filter(s, c)
496 # Wrap old filters not supporting keyword arguments
497 # Wrap old filters not supporting keyword arguments
497 if not inspect.getargspec(fn)[2]:
498 if not inspect.getargspec(fn)[2]:
498 oldfn = fn
499 oldfn = fn
499 fn = lambda s, c, **kwargs: oldfn(s, c)
500 fn = lambda s, c, **kwargs: oldfn(s, c)
500 l.append((mf, fn, cmd))
501 l.append((mf, fn, cmd))
501 self.filterpats[filter] = l
502 self.filterpats[filter] = l
502
503
503 for mf, fn, cmd in self.filterpats[filter]:
504 for mf, fn, cmd in self.filterpats[filter]:
504 if mf(filename):
505 if mf(filename):
505 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
506 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
506 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
507 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
507 break
508 break
508
509
509 return data
510 return data
510
511
511 def adddatafilter(self, name, filter):
512 def adddatafilter(self, name, filter):
512 self._datafilters[name] = filter
513 self._datafilters[name] = filter
513
514
514 def wread(self, filename):
515 def wread(self, filename):
515 if self._link(filename):
516 if self._link(filename):
516 data = os.readlink(self.wjoin(filename))
517 data = os.readlink(self.wjoin(filename))
517 else:
518 else:
518 data = self.wopener(filename, 'r').read()
519 data = self.wopener(filename, 'r').read()
519 return self._filter("encode", filename, data)
520 return self._filter("encode", filename, data)
520
521
521 def wwrite(self, filename, data, flags):
522 def wwrite(self, filename, data, flags):
522 data = self._filter("decode", filename, data)
523 data = self._filter("decode", filename, data)
523 try:
524 try:
524 os.unlink(self.wjoin(filename))
525 os.unlink(self.wjoin(filename))
525 except OSError:
526 except OSError:
526 pass
527 pass
527 self.wopener(filename, 'w').write(data)
528 self.wopener(filename, 'w').write(data)
528 util.set_flags(self.wjoin(filename), flags)
529 util.set_flags(self.wjoin(filename), flags)
529
530
530 def wwritedata(self, filename, data):
531 def wwritedata(self, filename, data):
531 return self._filter("decode", filename, data)
532 return self._filter("decode", filename, data)
532
533
533 def transaction(self):
534 def transaction(self):
534 if self._transref and self._transref():
535 if self._transref and self._transref():
535 return self._transref().nest()
536 return self._transref().nest()
536
537
537 # abort here if the journal already exists
538 # abort here if the journal already exists
538 if os.path.exists(self.sjoin("journal")):
539 if os.path.exists(self.sjoin("journal")):
539 raise repo.RepoError(_("journal already exists - run hg recover"))
540 raise repo.RepoError(_("journal already exists - run hg recover"))
540
541
541 # save dirstate for rollback
542 # save dirstate for rollback
542 try:
543 try:
543 ds = self.opener("dirstate").read()
544 ds = self.opener("dirstate").read()
544 except IOError:
545 except IOError:
545 ds = ""
546 ds = ""
546 self.opener("journal.dirstate", "w").write(ds)
547 self.opener("journal.dirstate", "w").write(ds)
547 self.opener("journal.branch", "w").write(self.dirstate.branch())
548 self.opener("journal.branch", "w").write(self.dirstate.branch())
548
549
549 renames = [(self.sjoin("journal"), self.sjoin("undo")),
550 renames = [(self.sjoin("journal"), self.sjoin("undo")),
550 (self.join("journal.dirstate"), self.join("undo.dirstate")),
551 (self.join("journal.dirstate"), self.join("undo.dirstate")),
551 (self.join("journal.branch"), self.join("undo.branch"))]
552 (self.join("journal.branch"), self.join("undo.branch"))]
552 tr = transaction.transaction(self.ui.warn, self.sopener,
553 tr = transaction.transaction(self.ui.warn, self.sopener,
553 self.sjoin("journal"),
554 self.sjoin("journal"),
554 aftertrans(renames))
555 aftertrans(renames))
555 self._transref = weakref.ref(tr)
556 self._transref = weakref.ref(tr)
556 return tr
557 return tr
557
558
558 def recover(self):
559 def recover(self):
559 l = self.lock()
560 l = self.lock()
560 try:
561 try:
561 if os.path.exists(self.sjoin("journal")):
562 if os.path.exists(self.sjoin("journal")):
562 self.ui.status(_("rolling back interrupted transaction\n"))
563 self.ui.status(_("rolling back interrupted transaction\n"))
563 transaction.rollback(self.sopener, self.sjoin("journal"))
564 transaction.rollback(self.sopener, self.sjoin("journal"))
564 self.invalidate()
565 self.invalidate()
565 return True
566 return True
566 else:
567 else:
567 self.ui.warn(_("no interrupted transaction available\n"))
568 self.ui.warn(_("no interrupted transaction available\n"))
568 return False
569 return False
569 finally:
570 finally:
570 del l
571 del l
571
572
572 def rollback(self):
573 def rollback(self):
573 wlock = lock = None
574 wlock = lock = None
574 try:
575 try:
575 wlock = self.wlock()
576 wlock = self.wlock()
576 lock = self.lock()
577 lock = self.lock()
577 if os.path.exists(self.sjoin("undo")):
578 if os.path.exists(self.sjoin("undo")):
578 self.ui.status(_("rolling back last transaction\n"))
579 self.ui.status(_("rolling back last transaction\n"))
579 transaction.rollback(self.sopener, self.sjoin("undo"))
580 transaction.rollback(self.sopener, self.sjoin("undo"))
580 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
581 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
581 branch = self.opener("undo.branch").read()
582 branch = self.opener("undo.branch").read()
582 self.dirstate.setbranch(branch)
583 self.dirstate.setbranch(branch)
583 self.invalidate()
584 self.invalidate()
584 self.dirstate.invalidate()
585 self.dirstate.invalidate()
585 else:
586 else:
586 self.ui.warn(_("no rollback information available\n"))
587 self.ui.warn(_("no rollback information available\n"))
587 finally:
588 finally:
588 del lock, wlock
589 del lock, wlock
589
590
590 def invalidate(self):
591 def invalidate(self):
591 for a in "changelog manifest".split():
592 for a in "changelog manifest".split():
592 if hasattr(self, a):
593 if hasattr(self, a):
593 self.__delattr__(a)
594 self.__delattr__(a)
594 self.tagscache = None
595 self.tagscache = None
595 self._tagstypecache = None
596 self._tagstypecache = None
596 self.nodetagscache = None
597 self.nodetagscache = None
597
598
598 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
599 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
599 try:
600 try:
600 l = lock.lock(lockname, 0, releasefn, desc=desc)
601 l = lock.lock(lockname, 0, releasefn, desc=desc)
601 except lock.LockHeld, inst:
602 except lock.LockHeld, inst:
602 if not wait:
603 if not wait:
603 raise
604 raise
604 self.ui.warn(_("waiting for lock on %s held by %r\n") %
605 self.ui.warn(_("waiting for lock on %s held by %r\n") %
605 (desc, inst.locker))
606 (desc, inst.locker))
606 # default to 600 seconds timeout
607 # default to 600 seconds timeout
607 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
608 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
608 releasefn, desc=desc)
609 releasefn, desc=desc)
609 if acquirefn:
610 if acquirefn:
610 acquirefn()
611 acquirefn()
611 return l
612 return l
612
613
613 def lock(self, wait=True):
614 def lock(self, wait=True):
614 if self._lockref and self._lockref():
615 if self._lockref and self._lockref():
615 return self._lockref()
616 return self._lockref()
616
617
617 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
618 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
618 _('repository %s') % self.origroot)
619 _('repository %s') % self.origroot)
619 self._lockref = weakref.ref(l)
620 self._lockref = weakref.ref(l)
620 return l
621 return l
621
622
622 def wlock(self, wait=True):
623 def wlock(self, wait=True):
623 if self._wlockref and self._wlockref():
624 if self._wlockref and self._wlockref():
624 return self._wlockref()
625 return self._wlockref()
625
626
626 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
627 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
627 self.dirstate.invalidate, _('working directory of %s') %
628 self.dirstate.invalidate, _('working directory of %s') %
628 self.origroot)
629 self.origroot)
629 self._wlockref = weakref.ref(l)
630 self._wlockref = weakref.ref(l)
630 return l
631 return l
631
632
632 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
633 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
633 """
634 """
634 commit an individual file as part of a larger transaction
635 commit an individual file as part of a larger transaction
635 """
636 """
636
637
637 t = self.wread(fn)
638 t = self.wread(fn)
638 fl = self.file(fn)
639 fl = self.file(fn)
639 fp1 = manifest1.get(fn, nullid)
640 fp1 = manifest1.get(fn, nullid)
640 fp2 = manifest2.get(fn, nullid)
641 fp2 = manifest2.get(fn, nullid)
641
642
642 meta = {}
643 meta = {}
643 cp = self.dirstate.copied(fn)
644 cp = self.dirstate.copied(fn)
644 if cp:
645 if cp:
645 # Mark the new revision of this file as a copy of another
646 # Mark the new revision of this file as a copy of another
646 # file. This copy data will effectively act as a parent
647 # file. This copy data will effectively act as a parent
647 # of this new revision. If this is a merge, the first
648 # of this new revision. If this is a merge, the first
648 # parent will be the nullid (meaning "look up the copy data")
649 # parent will be the nullid (meaning "look up the copy data")
649 # and the second one will be the other parent. For example:
650 # and the second one will be the other parent. For example:
650 #
651 #
651 # 0 --- 1 --- 3 rev1 changes file foo
652 # 0 --- 1 --- 3 rev1 changes file foo
652 # \ / rev2 renames foo to bar and changes it
653 # \ / rev2 renames foo to bar and changes it
653 # \- 2 -/ rev3 should have bar with all changes and
654 # \- 2 -/ rev3 should have bar with all changes and
654 # should record that bar descends from
655 # should record that bar descends from
655 # bar in rev2 and foo in rev1
656 # bar in rev2 and foo in rev1
656 #
657 #
657 # this allows this merge to succeed:
658 # this allows this merge to succeed:
658 #
659 #
659 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
660 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
660 # \ / merging rev3 and rev4 should use bar@rev2
661 # \ / merging rev3 and rev4 should use bar@rev2
661 # \- 2 --- 4 as the merge base
662 # \- 2 --- 4 as the merge base
662 #
663 #
663 meta["copy"] = cp
664 meta["copy"] = cp
664 if not manifest2: # not a branch merge
665 if not manifest2: # not a branch merge
665 meta["copyrev"] = hex(manifest1.get(cp, nullid))
666 meta["copyrev"] = hex(manifest1.get(cp, nullid))
666 fp2 = nullid
667 fp2 = nullid
667 elif fp2 != nullid: # copied on remote side
668 elif fp2 != nullid: # copied on remote side
668 meta["copyrev"] = hex(manifest1.get(cp, nullid))
669 meta["copyrev"] = hex(manifest1.get(cp, nullid))
669 elif fp1 != nullid: # copied on local side, reversed
670 elif fp1 != nullid: # copied on local side, reversed
670 meta["copyrev"] = hex(manifest2.get(cp))
671 meta["copyrev"] = hex(manifest2.get(cp))
671 fp2 = fp1
672 fp2 = fp1
672 elif cp in manifest2: # directory rename on local side
673 elif cp in manifest2: # directory rename on local side
673 meta["copyrev"] = hex(manifest2[cp])
674 meta["copyrev"] = hex(manifest2[cp])
674 else: # directory rename on remote side
675 else: # directory rename on remote side
675 meta["copyrev"] = hex(manifest1.get(cp, nullid))
676 meta["copyrev"] = hex(manifest1.get(cp, nullid))
676 self.ui.debug(_(" %s: copy %s:%s\n") %
677 self.ui.debug(_(" %s: copy %s:%s\n") %
677 (fn, cp, meta["copyrev"]))
678 (fn, cp, meta["copyrev"]))
678 fp1 = nullid
679 fp1 = nullid
679 elif fp2 != nullid:
680 elif fp2 != nullid:
680 # is one parent an ancestor of the other?
681 # is one parent an ancestor of the other?
681 fpa = fl.ancestor(fp1, fp2)
682 fpa = fl.ancestor(fp1, fp2)
682 if fpa == fp1:
683 if fpa == fp1:
683 fp1, fp2 = fp2, nullid
684 fp1, fp2 = fp2, nullid
684 elif fpa == fp2:
685 elif fpa == fp2:
685 fp2 = nullid
686 fp2 = nullid
686
687
687 # is the file unmodified from the parent? report existing entry
688 # is the file unmodified from the parent? report existing entry
688 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
689 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
689 return fp1
690 return fp1
690
691
691 changelist.append(fn)
692 changelist.append(fn)
692 return fl.add(t, meta, tr, linkrev, fp1, fp2)
693 return fl.add(t, meta, tr, linkrev, fp1, fp2)
693
694
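The long comment in filecommit() above describes how a copy or rename is recorded: the new filelog revision carries "copy" (the source path) and "copyrev" (the hex revision of the source file), and the parent ordering encodes which side the copy came from. The resulting metadata is just a small dict; an illustrative value, with both entries as placeholders:

    meta = {
        "copy": "foo",                                          # path the file was copied/renamed from
        "copyrev": "0123456789abcdef0123456789abcdef01234567",  # hex filelog node of that source
    }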
694 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
695 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
695 if p1 is None:
696 if p1 is None:
696 p1, p2 = self.dirstate.parents()
697 p1, p2 = self.dirstate.parents()
697 return self.commit(files=files, text=text, user=user, date=date,
698 return self.commit(files=files, text=text, user=user, date=date,
698 p1=p1, p2=p2, extra=extra, empty_ok=True)
699 p1=p1, p2=p2, extra=extra, empty_ok=True)
699
700
700 def commit(self, files=None, text="", user=None, date=None,
701 def commit(self, files=None, text="", user=None, date=None,
701 match=util.always, force=False, force_editor=False,
702 match=util.always, force=False, force_editor=False,
702 p1=None, p2=None, extra={}, empty_ok=False):
703 p1=None, p2=None, extra={}, empty_ok=False):
703 wlock = lock = tr = None
704 wlock = lock = tr = None
704 valid = 0 # don't save the dirstate if this isn't set
705 valid = 0 # don't save the dirstate if this isn't set
705 if files:
706 if files:
706 files = util.unique(files)
707 files = util.unique(files)
707 try:
708 try:
708 commit = []
709 commit = []
709 remove = []
710 remove = []
710 changed = []
711 changed = []
711 use_dirstate = (p1 is None) # not rawcommit
712 use_dirstate = (p1 is None) # not rawcommit
712 extra = extra.copy()
713 extra = extra.copy()
713
714
714 if use_dirstate:
715 if use_dirstate:
715 if files:
716 if files:
716 for f in files:
717 for f in files:
717 s = self.dirstate[f]
718 s = self.dirstate[f]
718 if s in 'nma':
719 if s in 'nma':
719 commit.append(f)
720 commit.append(f)
720 elif s == 'r':
721 elif s == 'r':
721 remove.append(f)
722 remove.append(f)
722 else:
723 else:
723 self.ui.warn(_("%s not tracked!\n") % f)
724 self.ui.warn(_("%s not tracked!\n") % f)
724 else:
725 else:
725 changes = self.status(match=match)[:5]
726 changes = self.status(match=match)[:5]
726 modified, added, removed, deleted, unknown = changes
727 modified, added, removed, deleted, unknown = changes
727 commit = modified + added
728 commit = modified + added
728 remove = removed
729 remove = removed
729 else:
730 else:
730 commit = files
731 commit = files
731
732
732 if use_dirstate:
733 if use_dirstate:
733 p1, p2 = self.dirstate.parents()
734 p1, p2 = self.dirstate.parents()
734 update_dirstate = True
735 update_dirstate = True
735 else:
736 else:
736 p1, p2 = p1, p2 or nullid
737 p1, p2 = p1, p2 or nullid
737 update_dirstate = (self.dirstate.parents()[0] == p1)
738 update_dirstate = (self.dirstate.parents()[0] == p1)
738
739
739 c1 = self.changelog.read(p1)
740 c1 = self.changelog.read(p1)
740 c2 = self.changelog.read(p2)
741 c2 = self.changelog.read(p2)
741 m1 = self.manifest.read(c1[0]).copy()
742 m1 = self.manifest.read(c1[0]).copy()
742 m2 = self.manifest.read(c2[0])
743 m2 = self.manifest.read(c2[0])
743
744
744 if use_dirstate:
745 if use_dirstate:
745 branchname = self.workingctx().branch()
746 branchname = self.workingctx().branch()
746 try:
747 try:
747 branchname = branchname.decode('UTF-8').encode('UTF-8')
748 branchname = branchname.decode('UTF-8').encode('UTF-8')
748 except UnicodeDecodeError:
749 except UnicodeDecodeError:
749 raise util.Abort(_('branch name not in UTF-8!'))
750 raise util.Abort(_('branch name not in UTF-8!'))
750 else:
751 else:
751 branchname = ""
752 branchname = ""
752
753
753 if use_dirstate:
754 if use_dirstate:
754 oldname = c1[5].get("branch") # stored in UTF-8
755 oldname = c1[5].get("branch") # stored in UTF-8
755 if (not commit and not remove and not force and p2 == nullid
756 if (not commit and not remove and not force and p2 == nullid
756 and branchname == oldname):
757 and branchname == oldname):
757 self.ui.status(_("nothing changed\n"))
758 self.ui.status(_("nothing changed\n"))
758 return None
759 return None
759
760
760 xp1 = hex(p1)
761 xp1 = hex(p1)
761 if p2 == nullid: xp2 = ''
762 if p2 == nullid: xp2 = ''
762 else: xp2 = hex(p2)
763 else: xp2 = hex(p2)
763
764
764 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
765 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
765
766
766 wlock = self.wlock()
767 wlock = self.wlock()
767 lock = self.lock()
768 lock = self.lock()
768 tr = self.transaction()
769 tr = self.transaction()
769 trp = weakref.proxy(tr)
770 trp = weakref.proxy(tr)
770
771
771 # check in files
772 # check in files
772 new = {}
773 new = {}
773 linkrev = self.changelog.count()
774 linkrev = self.changelog.count()
774 commit.sort()
775 commit.sort()
775 is_exec = util.execfunc(self.root, m1.execf)
776 is_exec = util.execfunc(self.root, m1.execf)
776 is_link = util.linkfunc(self.root, m1.linkf)
777 is_link = util.linkfunc(self.root, m1.linkf)
777 for f in commit:
778 for f in commit:
778 self.ui.note(f + "\n")
779 self.ui.note(f + "\n")
779 try:
780 try:
780 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
781 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
781 new_exec = is_exec(f)
782 new_exec = is_exec(f)
782 new_link = is_link(f)
783 new_link = is_link(f)
783 if ((not changed or changed[-1] != f) and
784 if ((not changed or changed[-1] != f) and
784 m2.get(f) != new[f]):
785 m2.get(f) != new[f]):
785 # mention the file in the changelog if some
786 # mention the file in the changelog if some
786 # flag changed, even if there was no content
787 # flag changed, even if there was no content
787 # change.
788 # change.
788 old_exec = m1.execf(f)
789 old_exec = m1.execf(f)
789 old_link = m1.linkf(f)
790 old_link = m1.linkf(f)
790 if old_exec != new_exec or old_link != new_link:
791 if old_exec != new_exec or old_link != new_link:
791 changed.append(f)
792 changed.append(f)
792 m1.set(f, new_exec, new_link)
793 m1.set(f, new_exec, new_link)
793 if use_dirstate:
794 if use_dirstate:
794 self.dirstate.normal(f)
795 self.dirstate.normal(f)
795
796
796 except (OSError, IOError):
797 except (OSError, IOError):
797 if use_dirstate:
798 if use_dirstate:
798 self.ui.warn(_("trouble committing %s!\n") % f)
799 self.ui.warn(_("trouble committing %s!\n") % f)
799 raise
800 raise
800 else:
801 else:
801 remove.append(f)
802 remove.append(f)
802
803
803 # update manifest
804 # update manifest
804 m1.update(new)
805 m1.update(new)
805 remove.sort()
806 remove.sort()
806 removed = []
807 removed = []
807
808
808 for f in remove:
809 for f in remove:
809 if f in m1:
810 if f in m1:
810 del m1[f]
811 del m1[f]
811 removed.append(f)
812 removed.append(f)
812 elif f in m2:
813 elif f in m2:
813 removed.append(f)
814 removed.append(f)
814 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
815 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
815 (new, removed))
816 (new, removed))
816
817
817 # add changeset
818 # add changeset
818 new = new.keys()
819 new = new.keys()
819 new.sort()
820 new.sort()
820
821
821 user = user or self.ui.username()
822 user = user or self.ui.username()
822 if (not empty_ok and not text) or force_editor:
823 if (not empty_ok and not text) or force_editor:
823 edittext = []
824 edittext = []
824 if text:
825 if text:
825 edittext.append(text)
826 edittext.append(text)
826 edittext.append("")
827 edittext.append("")
827 edittext.append(_("HG: Enter commit message."
828 edittext.append(_("HG: Enter commit message."
828 " Lines beginning with 'HG:' are removed."))
829 " Lines beginning with 'HG:' are removed."))
829 edittext.append("HG: --")
830 edittext.append("HG: --")
830 edittext.append("HG: user: %s" % user)
831 edittext.append("HG: user: %s" % user)
831 if p2 != nullid:
832 if p2 != nullid:
832 edittext.append("HG: branch merge")
833 edittext.append("HG: branch merge")
833 if branchname:
834 if branchname:
834 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
835 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
835 edittext.extend(["HG: changed %s" % f for f in changed])
836 edittext.extend(["HG: changed %s" % f for f in changed])
836 edittext.extend(["HG: removed %s" % f for f in removed])
837 edittext.extend(["HG: removed %s" % f for f in removed])
837 if not changed and not remove:
838 if not changed and not remove:
838 edittext.append("HG: no files changed")
839 edittext.append("HG: no files changed")
839 edittext.append("")
840 edittext.append("")
840 # run editor in the repository root
841 # run editor in the repository root
841 olddir = os.getcwd()
842 olddir = os.getcwd()
842 os.chdir(self.root)
843 os.chdir(self.root)
843 text = self.ui.edit("\n".join(edittext), user)
844 text = self.ui.edit("\n".join(edittext), user)
844 os.chdir(olddir)
845 os.chdir(olddir)
845
846
846 if branchname:
847 if branchname:
847 extra["branch"] = branchname
848 extra["branch"] = branchname
848
849
849 if use_dirstate:
850 if use_dirstate:
850 lines = [line.rstrip() for line in text.rstrip().splitlines()]
851 lines = [line.rstrip() for line in text.rstrip().splitlines()]
851 while lines and not lines[0]:
852 while lines and not lines[0]:
852 del lines[0]
853 del lines[0]
853 if not lines:
854 if not lines:
854 raise util.Abort(_("empty commit message"))
855 raise util.Abort(_("empty commit message"))
855 text = '\n'.join(lines)
856 text = '\n'.join(lines)
856
857
857 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
858 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
858 user, date, extra)
859 user, date, extra)
859 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
860 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
860 parent2=xp2)
861 parent2=xp2)
861 tr.close()
862 tr.close()
862
863
863 if self.branchcache and "branch" in extra:
864 if self.branchcache and "branch" in extra:
864 self.branchcache[util.tolocal(extra["branch"])] = n
865 self.branchcache[util.tolocal(extra["branch"])] = n
865
866
866 if use_dirstate or update_dirstate:
867 if use_dirstate or update_dirstate:
867 self.dirstate.setparents(n)
868 self.dirstate.setparents(n)
868 if use_dirstate:
869 if use_dirstate:
869 for f in removed:
870 for f in removed:
870 self.dirstate.forget(f)
871 self.dirstate.forget(f)
871 valid = 1 # our dirstate updates are complete
872 valid = 1 # our dirstate updates are complete
872
873
873 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
874 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
874 return n
875 return n
875 finally:
876 finally:
876 if not valid: # don't save our updated dirstate
877 if not valid: # don't save our updated dirstate
877 self.dirstate.invalidate()
878 self.dirstate.invalidate()
878 del tr, lock, wlock
879 del tr, lock, wlock
879
880
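commit() above is also the entry point scripts call directly: with files=None it commits whatever the dirstate reports as modified, added, or removed, runs the precommit/pretxncommit/commit hooks, and returns the new changeset node, or None when nothing changed. A hedged call, with the message and user as placeholders and repo assumed to be an open repository:

    node = repo.commit(text='fix the frobnicator',
                       user='Example Hacker <hacker@example.com>')
    if node is None:
        print "nothing changed"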
880 def walk(self, node=None, files=[], match=util.always, badmatch=None):
881 def walk(self, node=None, files=[], match=util.always, badmatch=None):
881 '''
882 '''
882 walk recursively through the directory tree or a given
883 walk recursively through the directory tree or a given
883 changeset, finding all files matched by the match
884 changeset, finding all files matched by the match
884 function
885 function
885
886
886 results are yielded in a tuple (src, filename), where src
887 results are yielded in a tuple (src, filename), where src
887 is one of:
888 is one of:
888 'f' the file was found in the directory tree
889 'f' the file was found in the directory tree
889 'm' the file was only in the dirstate and not in the tree
890 'm' the file was only in the dirstate and not in the tree
890 'b' file was not found and matched badmatch
891 'b' file was not found and matched badmatch
891 '''
892 '''
892
893
893 if node:
894 if node:
894 fdict = dict.fromkeys(files)
895 fdict = dict.fromkeys(files)
895 # for dirstate.walk, files=['.'] means "walk the whole tree".
896 # for dirstate.walk, files=['.'] means "walk the whole tree".
896 # follow that here, too
897 # follow that here, too
897 fdict.pop('.', None)
898 fdict.pop('.', None)
898 mdict = self.manifest.read(self.changelog.read(node)[0])
899 mdict = self.manifest.read(self.changelog.read(node)[0])
899 mfiles = mdict.keys()
900 mfiles = mdict.keys()
900 mfiles.sort()
901 mfiles.sort()
901 for fn in mfiles:
902 for fn in mfiles:
902 for ffn in fdict:
903 for ffn in fdict:
903 # match if the file is the exact name or a directory
904 # match if the file is the exact name or a directory
904 if ffn == fn or fn.startswith("%s/" % ffn):
905 if ffn == fn or fn.startswith("%s/" % ffn):
905 del fdict[ffn]
906 del fdict[ffn]
906 break
907 break
907 if match(fn):
908 if match(fn):
908 yield 'm', fn
909 yield 'm', fn
909 ffiles = fdict.keys()
910 ffiles = fdict.keys()
910 ffiles.sort()
911 ffiles.sort()
911 for fn in ffiles:
912 for fn in ffiles:
912 if badmatch and badmatch(fn):
913 if badmatch and badmatch(fn):
913 if match(fn):
914 if match(fn):
914 yield 'b', fn
915 yield 'b', fn
915 else:
916 else:
916 self.ui.warn(_('%s: No such file in rev %s\n')
917 self.ui.warn(_('%s: No such file in rev %s\n')
917 % (self.pathto(fn), short(node)))
918 % (self.pathto(fn), short(node)))
918 else:
919 else:
919 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
920 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
920 yield src, fn
921 yield src, fn
921
922
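walk() above yields (src, filename) pairs, with src one of 'f' (found in the directory tree), 'm' (only in the dirstate, not the tree), or 'b' (missing but matched by badmatch). A small consumer sketch, again assuming repo is an open repository:

    for src, fn in repo.walk(files=['.']):
        if src == 'b':
            continue                   # only yielded when a badmatch function is supplied
        print "%s %s" % (src, fn)      # 'f' from the tree, 'm' from the dirstate only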
922 def status(self, node1=None, node2=None, files=[], match=util.always,
923 def status(self, node1=None, node2=None, files=[], match=util.always,
923 list_ignored=False, list_clean=False):
924 list_ignored=False, list_clean=False):
924 """return status of files between two nodes or node and working directory
925 """return status of files between two nodes or node and working directory
925
926
926 If node1 is None, use the first dirstate parent instead.
927 If node1 is None, use the first dirstate parent instead.
927 If node2 is None, compare node1 with working directory.
928 If node2 is None, compare node1 with working directory.
928 """
929 """
929
930
930 def fcmp(fn, getnode):
931 def fcmp(fn, getnode):
931 t1 = self.wread(fn)
932 t1 = self.wread(fn)
932 return self.file(fn).cmp(getnode(fn), t1)
933 return self.file(fn).cmp(getnode(fn), t1)
933
934
934 def mfmatches(node):
935 def mfmatches(node):
935 change = self.changelog.read(node)
936 change = self.changelog.read(node)
936 mf = self.manifest.read(change[0]).copy()
937 mf = self.manifest.read(change[0]).copy()
937 for fn in mf.keys():
938 for fn in mf.keys():
938 if not match(fn):
939 if not match(fn):
939 del mf[fn]
940 del mf[fn]
940 return mf
941 return mf
941
942
942 modified, added, removed, deleted, unknown = [], [], [], [], []
943 modified, added, removed, deleted, unknown = [], [], [], [], []
943 ignored, clean = [], []
944 ignored, clean = [], []
944
945
945 compareworking = False
946 compareworking = False
946 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
947 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
947 compareworking = True
948 compareworking = True
948
949
949 if not compareworking:
950 if not compareworking:
950 # read the manifest from node1 before the manifest from node2,
951 # read the manifest from node1 before the manifest from node2,
951 # so that we'll hit the manifest cache if we're going through
952 # so that we'll hit the manifest cache if we're going through
952 # all the revisions in parent->child order.
953 # all the revisions in parent->child order.
953 mf1 = mfmatches(node1)
954 mf1 = mfmatches(node1)
954
955
955 # are we comparing the working directory?
956 # are we comparing the working directory?
956 if not node2:
957 if not node2:
957 (lookup, modified, added, removed, deleted, unknown,
958 (lookup, modified, added, removed, deleted, unknown,
958 ignored, clean) = self.dirstate.status(files, match,
959 ignored, clean) = self.dirstate.status(files, match,
959 list_ignored, list_clean)
960 list_ignored, list_clean)
960
961
961 # are we comparing working dir against its parent?
962 # are we comparing working dir against its parent?
962 if compareworking:
963 if compareworking:
963 if lookup:
964 if lookup:
964 fixup = []
965 fixup = []
965 # do a full compare of any files that might have changed
966 # do a full compare of any files that might have changed
966 ctx = self.changectx()
967 ctx = self.changectx()
967 for f in lookup:
968 for f in lookup:
968 if f not in ctx or ctx[f].cmp(self.wread(f)):
969 if f not in ctx or ctx[f].cmp(self.wread(f)):
969 modified.append(f)
970 modified.append(f)
970 else:
971 else:
971 fixup.append(f)
972 fixup.append(f)
972 if list_clean:
973 if list_clean:
973 clean.append(f)
974 clean.append(f)
974
975
975 # update dirstate for files that are actually clean
976 # update dirstate for files that are actually clean
976 if fixup:
977 if fixup:
977 wlock = None
978 wlock = None
978 try:
979 try:
979 try:
980 try:
980 wlock = self.wlock(False)
981 wlock = self.wlock(False)
981 except lock.LockException:
982 except lock.LockException:
982 pass
983 pass
983 if wlock:
984 if wlock:
984 for f in fixup:
985 for f in fixup:
985 self.dirstate.normal(f)
986 self.dirstate.normal(f)
986 finally:
987 finally:
987 del wlock
988 del wlock
988 else:
989 else:
989 # we are comparing working dir against non-parent
990 # we are comparing working dir against non-parent
990 # generate a pseudo-manifest for the working dir
991 # generate a pseudo-manifest for the working dir
991 # XXX: create it in dirstate.py ?
992 # XXX: create it in dirstate.py ?
992 mf2 = mfmatches(self.dirstate.parents()[0])
993 mf2 = mfmatches(self.dirstate.parents()[0])
993 is_exec = util.execfunc(self.root, mf2.execf)
994 is_exec = util.execfunc(self.root, mf2.execf)
994 is_link = util.linkfunc(self.root, mf2.linkf)
995 is_link = util.linkfunc(self.root, mf2.linkf)
995 for f in lookup + modified + added:
996 for f in lookup + modified + added:
996 mf2[f] = ""
997 mf2[f] = ""
997 mf2.set(f, is_exec(f), is_link(f))
998 mf2.set(f, is_exec(f), is_link(f))
998 for f in removed:
999 for f in removed:
999 if f in mf2:
1000 if f in mf2:
1000 del mf2[f]
1001 del mf2[f]
1001
1002
1002 else:
1003 else:
1003 # we are comparing two revisions
1004 # we are comparing two revisions
1004 mf2 = mfmatches(node2)
1005 mf2 = mfmatches(node2)
1005
1006
1006 if not compareworking:
1007 if not compareworking:
1007 # flush lists from dirstate before comparing manifests
1008 # flush lists from dirstate before comparing manifests
1008 modified, added, clean = [], [], []
1009 modified, added, clean = [], [], []
1009
1010
1010 # make sure to sort the files so we talk to the disk in a
1011 # make sure to sort the files so we talk to the disk in a
1011 # reasonable order
1012 # reasonable order
1012 mf2keys = mf2.keys()
1013 mf2keys = mf2.keys()
1013 mf2keys.sort()
1014 mf2keys.sort()
1014 getnode = lambda fn: mf1.get(fn, nullid)
1015 getnode = lambda fn: mf1.get(fn, nullid)
1015 for fn in mf2keys:
1016 for fn in mf2keys:
1016 if fn in mf1:
1017 if fn in mf1:
1017 if (mf1.flags(fn) != mf2.flags(fn) or
1018 if (mf1.flags(fn) != mf2.flags(fn) or
1018 (mf1[fn] != mf2[fn] and
1019 (mf1[fn] != mf2[fn] and
1019 (mf2[fn] != "" or fcmp(fn, getnode)))):
1020 (mf2[fn] != "" or fcmp(fn, getnode)))):
1020 modified.append(fn)
1021 modified.append(fn)
1021 elif list_clean:
1022 elif list_clean:
1022 clean.append(fn)
1023 clean.append(fn)
1023 del mf1[fn]
1024 del mf1[fn]
1024 else:
1025 else:
1025 added.append(fn)
1026 added.append(fn)
1026
1027
1027 removed = mf1.keys()
1028 removed = mf1.keys()
1028
1029
1029 # sort and return results:
1030 # sort and return results:
1030 for l in modified, added, removed, deleted, unknown, ignored, clean:
1031 for l in modified, added, removed, deleted, unknown, ignored, clean:
1031 l.sort()
1032 l.sort()
1032 return (modified, added, removed, deleted, unknown, ignored, clean)
1033 return (modified, added, removed, deleted, unknown, ignored, clean)
1033
1034
1034 def add(self, list):
1035 def add(self, list):
1035 wlock = self.wlock()
1036 wlock = self.wlock()
1036 try:
1037 try:
1037 rejected = []
1038 rejected = []
1038 for f in list:
1039 for f in list:
1039 p = self.wjoin(f)
1040 p = self.wjoin(f)
1040 try:
1041 try:
1041 st = os.lstat(p)
1042 st = os.lstat(p)
1042 except:
1043 except:
1043 self.ui.warn(_("%s does not exist!\n") % f)
1044 self.ui.warn(_("%s does not exist!\n") % f)
1044 rejected.append(f)
1045 rejected.append(f)
1045 continue
1046 continue
1046 if st.st_size > 10000000:
1047 if st.st_size > 10000000:
1047 self.ui.warn(_("%s: files over 10MB may cause memory and"
1048 self.ui.warn(_("%s: files over 10MB may cause memory and"
1048 " performance problems\n"
1049 " performance problems\n"
1049 "(use 'hg revert %s' to unadd the file)\n")
1050 "(use 'hg revert %s' to unadd the file)\n")
1050 % (f, f))
1051 % (f, f))
1051 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1052 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1052 self.ui.warn(_("%s not added: only files and symlinks "
1053 self.ui.warn(_("%s not added: only files and symlinks "
1053 "supported currently\n") % f)
1054 "supported currently\n") % f)
1054 rejected.append(p)
1055 rejected.append(p)
1055 elif self.dirstate[f] in 'amn':
1056 elif self.dirstate[f] in 'amn':
1056 self.ui.warn(_("%s already tracked!\n") % f)
1057 self.ui.warn(_("%s already tracked!\n") % f)
1057 elif self.dirstate[f] == 'r':
1058 elif self.dirstate[f] == 'r':
1058 self.dirstate.normallookup(f)
1059 self.dirstate.normallookup(f)
1059 else:
1060 else:
1060 self.dirstate.add(f)
1061 self.dirstate.add(f)
1061 return rejected
1062 return rejected
1062 finally:
1063 finally:
1063 del wlock
1064 del wlock
1064
1065
1065 def forget(self, list):
1066 def forget(self, list):
1066 wlock = self.wlock()
1067 wlock = self.wlock()
1067 try:
1068 try:
1068 for f in list:
1069 for f in list:
1069 if self.dirstate[f] != 'a':
1070 if self.dirstate[f] != 'a':
1070 self.ui.warn(_("%s not added!\n") % f)
1071 self.ui.warn(_("%s not added!\n") % f)
1071 else:
1072 else:
1072 self.dirstate.forget(f)
1073 self.dirstate.forget(f)
1073 finally:
1074 finally:
1074 del wlock
1075 del wlock
1075
1076
1076 def remove(self, list, unlink=False):
1077 def remove(self, list, unlink=False):
1077 wlock = None
1078 wlock = None
1078 try:
1079 try:
1079 if unlink:
1080 if unlink:
1080 for f in list:
1081 for f in list:
1081 try:
1082 try:
1082 util.unlink(self.wjoin(f))
1083 util.unlink(self.wjoin(f))
1083 except OSError, inst:
1084 except OSError, inst:
1084 if inst.errno != errno.ENOENT:
1085 if inst.errno != errno.ENOENT:
1085 raise
1086 raise
1086 wlock = self.wlock()
1087 wlock = self.wlock()
1087 for f in list:
1088 for f in list:
1088 if unlink and os.path.exists(self.wjoin(f)):
1089 if unlink and os.path.exists(self.wjoin(f)):
1089 self.ui.warn(_("%s still exists!\n") % f)
1090 self.ui.warn(_("%s still exists!\n") % f)
1090 elif self.dirstate[f] == 'a':
1091 elif self.dirstate[f] == 'a':
1091 self.dirstate.forget(f)
1092 self.dirstate.forget(f)
1092 elif f not in self.dirstate:
1093 elif f not in self.dirstate:
1093 self.ui.warn(_("%s not tracked!\n") % f)
1094 self.ui.warn(_("%s not tracked!\n") % f)
1094 else:
1095 else:
1095 self.dirstate.remove(f)
1096 self.dirstate.remove(f)
1096 finally:
1097 finally:
1097 del wlock
1098 del wlock
1098
1099
1099 def undelete(self, list):
1100 def undelete(self, list):
1100 wlock = None
1101 wlock = None
1101 try:
1102 try:
1102 manifests = [self.manifest.read(self.changelog.read(p)[0])
1103 manifests = [self.manifest.read(self.changelog.read(p)[0])
1103 for p in self.dirstate.parents() if p != nullid]
1104 for p in self.dirstate.parents() if p != nullid]
1104 wlock = self.wlock()
1105 wlock = self.wlock()
1105 for f in list:
1106 for f in list:
1106 if self.dirstate[f] != 'r':
1107 if self.dirstate[f] != 'r':
1107 self.ui.warn("%s not removed!\n" % f)
1108 self.ui.warn("%s not removed!\n" % f)
1108 else:
1109 else:
1109 m = f in manifests[0] and manifests[0] or manifests[1]
1110 m = f in manifests[0] and manifests[0] or manifests[1]
1110 t = self.file(f).read(m[f])
1111 t = self.file(f).read(m[f])
1111 self.wwrite(f, t, m.flags(f))
1112 self.wwrite(f, t, m.flags(f))
1112 self.dirstate.normal(f)
1113 self.dirstate.normal(f)
1113 finally:
1114 finally:
1114 del wlock
1115 del wlock
1115
1116
1116 def copy(self, source, dest):
1117 def copy(self, source, dest):
1117 wlock = None
1118 wlock = None
1118 try:
1119 try:
1119 p = self.wjoin(dest)
1120 p = self.wjoin(dest)
1120 if not (os.path.exists(p) or os.path.islink(p)):
1121 if not (os.path.exists(p) or os.path.islink(p)):
1121 self.ui.warn(_("%s does not exist!\n") % dest)
1122 self.ui.warn(_("%s does not exist!\n") % dest)
1122 elif not (os.path.isfile(p) or os.path.islink(p)):
1123 elif not (os.path.isfile(p) or os.path.islink(p)):
1123 self.ui.warn(_("copy failed: %s is not a file or a "
1124 self.ui.warn(_("copy failed: %s is not a file or a "
1124 "symbolic link\n") % dest)
1125 "symbolic link\n") % dest)
1125 else:
1126 else:
1126 wlock = self.wlock()
1127 wlock = self.wlock()
1127 if dest not in self.dirstate:
1128 if dest not in self.dirstate:
1128 self.dirstate.add(dest)
1129 self.dirstate.add(dest)
1129 self.dirstate.copy(source, dest)
1130 self.dirstate.copy(source, dest)
1130 finally:
1131 finally:
1131 del wlock
1132 del wlock
1132
1133
1133 def heads(self, start=None):
1134 def heads(self, start=None):
1134 heads = self.changelog.heads(start)
1135 heads = self.changelog.heads(start)
1135 # sort the output in rev descending order
1136 # sort the output in rev descending order
1136 heads = [(-self.changelog.rev(h), h) for h in heads]
1137 heads = [(-self.changelog.rev(h), h) for h in heads]
1137 heads.sort()
1138 heads.sort()
1138 return [n for (r, n) in heads]
1139 return [n for (r, n) in heads]
1139
1140
1140 def branchheads(self, branch, start=None):
1141 def branchheads(self, branch, start=None):
1141 branches = self.branchtags()
1142 branches = self.branchtags()
1142 if branch not in branches:
1143 if branch not in branches:
1143 return []
1144 return []
1144 # The basic algorithm is this:
1145 # The basic algorithm is this:
1145 #
1146 #
1146 # Start from the branch tip since there are no later revisions that can
1147 # Start from the branch tip since there are no later revisions that can
1147 # possibly be in this branch, and the tip is a guaranteed head.
1148 # possibly be in this branch, and the tip is a guaranteed head.
1148 #
1149 #
1149 # Remember the tip's parents as the first ancestors, since these by
1150 # Remember the tip's parents as the first ancestors, since these by
1150 # definition are not heads.
1151 # definition are not heads.
1151 #
1152 #
1152 # Step backwards from the branch tip through all the revisions. We are
1153 # Step backwards from the branch tip through all the revisions. We are
1153 # guaranteed by the rules of Mercurial that we will now be visiting the
1154 # guaranteed by the rules of Mercurial that we will now be visiting the
1154 # nodes in reverse topological order (children before parents).
1155 # nodes in reverse topological order (children before parents).
1155 #
1156 #
1156 # If a revision is one of the ancestors of a head then we can toss it
1157 # If a revision is one of the ancestors of a head then we can toss it
1157 # out of the ancestors set (we've already found it and won't be
1158 # out of the ancestors set (we've already found it and won't be
1158 # visiting it again) and put its parents in the ancestors set.
1159 # visiting it again) and put its parents in the ancestors set.
1159 #
1160 #
1160 # Otherwise, if a revision is in the branch it's another head, since it
1161 # Otherwise, if a revision is in the branch it's another head, since it
1161 # wasn't in the ancestor list of an existing head. So add it to the
1162 # wasn't in the ancestor list of an existing head. So add it to the
1162 # head list, and add its parents to the ancestor list.
1163 # head list, and add its parents to the ancestor list.
1163 #
1164 #
1164 # If it is not in the branch, ignore it.
1165 # If it is not in the branch, ignore it.
1165 #
1166 #
1166 # Once we have a list of heads, use nodesbetween to filter out all the
1167 # Once we have a list of heads, use nodesbetween to filter out all the
1167 # heads that cannot be reached from startrev. There may be a more
1168 # heads that cannot be reached from startrev. There may be a more
1168 # efficient way to do this as part of the previous algorithm.
1169 # efficient way to do this as part of the previous algorithm.
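A runnable toy trace of the walk just described, under assumed data (a hypothetical five-revision history, using the builtin set rather than util.set):

# parents[r] lists the parent revisions of r; branch[r] names its branch.
parents = {0: [], 1: [0], 2: [1], 3: [1], 4: [3]}
branch = {0: 'default', 1: 'default', 2: 'b', 3: 'default', 4: 'b'}

heads = [4]                    # the known tip of branch 'b'
ancestors = set(parents[4])    # {3}
for rev in xrange(heads[0] - 1, -1, -1):   # children before parents
    if rev in ancestors:
        # already reachable from a head: swap it for its parents
        ancestors.update(parents[rev])
        ancestors.remove(rev)
    elif branch[rev] == 'b':
        # on the branch and not an ancestor of a known head: a new head
        heads.append(rev)
        ancestors.update(parents[rev])
print heads                    # [4, 2]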
1169
1170
1170 set = util.set
1171 set = util.set
1171 heads = [self.changelog.rev(branches[branch])]
1172 heads = [self.changelog.rev(branches[branch])]
1172 # Don't care if ancestors contains nullrev or not.
1173 # Don't care if ancestors contains nullrev or not.
1173 ancestors = set(self.changelog.parentrevs(heads[0]))
1174 ancestors = set(self.changelog.parentrevs(heads[0]))
1174 for rev in xrange(heads[0] - 1, nullrev, -1):
1175 for rev in xrange(heads[0] - 1, nullrev, -1):
1175 if rev in ancestors:
1176 if rev in ancestors:
1176 ancestors.update(self.changelog.parentrevs(rev))
1177 ancestors.update(self.changelog.parentrevs(rev))
1177 ancestors.remove(rev)
1178 ancestors.remove(rev)
1178 elif self.changectx(rev).branch() == branch:
1179 elif self.changectx(rev).branch() == branch:
1179 heads.append(rev)
1180 heads.append(rev)
1180 ancestors.update(self.changelog.parentrevs(rev))
1181 ancestors.update(self.changelog.parentrevs(rev))
1181 heads = [self.changelog.node(rev) for rev in heads]
1182 heads = [self.changelog.node(rev) for rev in heads]
1182 if start is not None:
1183 if start is not None:
1183 heads = self.changelog.nodesbetween([start], heads)[2]
1184 heads = self.changelog.nodesbetween([start], heads)[2]
1184 return heads
1185 return heads
1185
1186
1186 def branches(self, nodes):
1187 def branches(self, nodes):
1187 if not nodes:
1188 if not nodes:
1188 nodes = [self.changelog.tip()]
1189 nodes = [self.changelog.tip()]
1189 b = []
1190 b = []
1190 for n in nodes:
1191 for n in nodes:
1191 t = n
1192 t = n
1192 while 1:
1193 while 1:
1193 p = self.changelog.parents(n)
1194 p = self.changelog.parents(n)
1194 if p[1] != nullid or p[0] == nullid:
1195 if p[1] != nullid or p[0] == nullid:
1195 b.append((t, n, p[0], p[1]))
1196 b.append((t, n, p[0], p[1]))
1196 break
1197 break
1197 n = p[0]
1198 n = p[0]
1198 return b
1199 return b
1199
1200
1200 def between(self, pairs):
1201 def between(self, pairs):
1201 r = []
1202 r = []
1202
1203
1203 for top, bottom in pairs:
1204 for top, bottom in pairs:
1204 n, l, i = top, [], 0
1205 n, l, i = top, [], 0
1205 f = 1
1206 f = 1
1206
1207
1207 while n != bottom:
1208 while n != bottom:
1208 p = self.changelog.parents(n)[0]
1209 p = self.changelog.parents(n)[0]
1209 if i == f:
1210 if i == f:
1210 l.append(n)
1211 l.append(n)
1211 f = f * 2
1212 f = f * 2
1212 n = p
1213 n = p
1213 i += 1
1214 i += 1
1214
1215
1215 r.append(l)
1216 r.append(l)
1216
1217
1217 return r
1218 return r
1218
1219
1219 def findincoming(self, remote, base=None, heads=None, force=False):
1220 def findincoming(self, remote, base=None, heads=None, force=False):
1220 """Return list of roots of the subsets of missing nodes from remote
1221 """Return list of roots of the subsets of missing nodes from remote
1221
1222
1222 If base dict is specified, assume that these nodes and their parents
1223 If base dict is specified, assume that these nodes and their parents
1223 exist on the remote side and that no child of a node of base exists
1224 exist on the remote side and that no child of a node of base exists
1224 in both remote and self.
1225 in both remote and self.
1225 Furthermore, base will be updated to include the nodes that exist
1226 Furthermore, base will be updated to include the nodes that exist
1226 in both self and remote but none of whose children exist in both.
1227 in both self and remote but none of whose children exist in both.
1227 If a list of heads is specified, return only nodes which are heads
1228 If a list of heads is specified, return only nodes which are heads
1228 or ancestors of these heads.
1229 or ancestors of these heads.
1229
1230
1230 All the ancestors of base are in self and in remote.
1231 All the ancestors of base are in self and in remote.
1231 All the descendants of the list returned are missing in self.
1232 All the descendants of the list returned are missing in self.
1232 (and so we know that the rest of the nodes are missing in remote, see
1233 (and so we know that the rest of the nodes are missing in remote, see
1233 outgoing)
1234 outgoing)
1234 """
1235 """
1235 m = self.changelog.nodemap
1236 m = self.changelog.nodemap
1236 search = []
1237 search = []
1237 fetch = {}
1238 fetch = {}
1238 seen = {}
1239 seen = {}
1239 seenbranch = {}
1240 seenbranch = {}
1240 if base == None:
1241 if base == None:
1241 base = {}
1242 base = {}
1242
1243
1243 if not heads:
1244 if not heads:
1244 heads = remote.heads()
1245 heads = remote.heads()
1245
1246
1246 if self.changelog.tip() == nullid:
1247 if self.changelog.tip() == nullid:
1247 base[nullid] = 1
1248 base[nullid] = 1
1248 if heads != [nullid]:
1249 if heads != [nullid]:
1249 return [nullid]
1250 return [nullid]
1250 return []
1251 return []
1251
1252
1252 # assume we're closer to the tip than the root
1253 # assume we're closer to the tip than the root
1253 # and start by examining the heads
1254 # and start by examining the heads
1254 self.ui.status(_("searching for changes\n"))
1255 self.ui.status(_("searching for changes\n"))
1255
1256
1256 unknown = []
1257 unknown = []
1257 for h in heads:
1258 for h in heads:
1258 if h not in m:
1259 if h not in m:
1259 unknown.append(h)
1260 unknown.append(h)
1260 else:
1261 else:
1261 base[h] = 1
1262 base[h] = 1
1262
1263
1263 if not unknown:
1264 if not unknown:
1264 return []
1265 return []
1265
1266
1266 req = dict.fromkeys(unknown)
1267 req = dict.fromkeys(unknown)
1267 reqcnt = 0
1268 reqcnt = 0
1268
1269
1269 # search through remote branches
1270 # search through remote branches
1270 # a 'branch' here is a linear segment of history, with four parts:
1271 # a 'branch' here is a linear segment of history, with four parts:
1271 # head, root, first parent, second parent
1272 # head, root, first parent, second parent
1272 # (a branch always has two parents (or none) by definition)
1273 # (a branch always has two parents (or none) by definition)
1273 unknown = remote.branches(unknown)
1274 unknown = remote.branches(unknown)
1274 while unknown:
1275 while unknown:
1275 r = []
1276 r = []
1276 while unknown:
1277 while unknown:
1277 n = unknown.pop(0)
1278 n = unknown.pop(0)
1278 if n[0] in seen:
1279 if n[0] in seen:
1279 continue
1280 continue
1280
1281
1281 self.ui.debug(_("examining %s:%s\n")
1282 self.ui.debug(_("examining %s:%s\n")
1282 % (short(n[0]), short(n[1])))
1283 % (short(n[0]), short(n[1])))
1283 if n[0] == nullid: # found the end of the branch
1284 if n[0] == nullid: # found the end of the branch
1284 pass
1285 pass
1285 elif n in seenbranch:
1286 elif n in seenbranch:
1286 self.ui.debug(_("branch already found\n"))
1287 self.ui.debug(_("branch already found\n"))
1287 continue
1288 continue
1288 elif n[1] and n[1] in m: # do we know the base?
1289 elif n[1] and n[1] in m: # do we know the base?
1289 self.ui.debug(_("found incomplete branch %s:%s\n")
1290 self.ui.debug(_("found incomplete branch %s:%s\n")
1290 % (short(n[0]), short(n[1])))
1291 % (short(n[0]), short(n[1])))
1291 search.append(n) # schedule branch range for scanning
1292 search.append(n) # schedule branch range for scanning
1292 seenbranch[n] = 1
1293 seenbranch[n] = 1
1293 else:
1294 else:
1294 if n[1] not in seen and n[1] not in fetch:
1295 if n[1] not in seen and n[1] not in fetch:
1295 if n[2] in m and n[3] in m:
1296 if n[2] in m and n[3] in m:
1296 self.ui.debug(_("found new changeset %s\n") %
1297 self.ui.debug(_("found new changeset %s\n") %
1297 short(n[1]))
1298 short(n[1]))
1298 fetch[n[1]] = 1 # earliest unknown
1299 fetch[n[1]] = 1 # earliest unknown
1299 for p in n[2:4]:
1300 for p in n[2:4]:
1300 if p in m:
1301 if p in m:
1301 base[p] = 1 # latest known
1302 base[p] = 1 # latest known
1302
1303
1303 for p in n[2:4]:
1304 for p in n[2:4]:
1304 if p not in req and p not in m:
1305 if p not in req and p not in m:
1305 r.append(p)
1306 r.append(p)
1306 req[p] = 1
1307 req[p] = 1
1307 seen[n[0]] = 1
1308 seen[n[0]] = 1
1308
1309
1309 if r:
1310 if r:
1310 reqcnt += 1
1311 reqcnt += 1
1311 self.ui.debug(_("request %d: %s\n") %
1312 self.ui.debug(_("request %d: %s\n") %
1312 (reqcnt, " ".join(map(short, r))))
1313 (reqcnt, " ".join(map(short, r))))
1313 for p in xrange(0, len(r), 10):
1314 for p in xrange(0, len(r), 10):
1314 for b in remote.branches(r[p:p+10]):
1315 for b in remote.branches(r[p:p+10]):
1315 self.ui.debug(_("received %s:%s\n") %
1316 self.ui.debug(_("received %s:%s\n") %
1316 (short(b[0]), short(b[1])))
1317 (short(b[0]), short(b[1])))
1317 unknown.append(b)
1318 unknown.append(b)
1318
1319
1319 # do binary search on the branches we found
1320 # do binary search on the branches we found
1320 while search:
1321 while search:
1321 n = search.pop(0)
1322 n = search.pop(0)
1322 reqcnt += 1
1323 reqcnt += 1
1323 l = remote.between([(n[0], n[1])])[0]
1324 l = remote.between([(n[0], n[1])])[0]
1324 l.append(n[1])
1325 l.append(n[1])
1325 p = n[0]
1326 p = n[0]
1326 f = 1
1327 f = 1
1327 for i in l:
1328 for i in l:
1328 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1329 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1329 if i in m:
1330 if i in m:
1330 if f <= 2:
1331 if f <= 2:
1331 self.ui.debug(_("found new branch changeset %s\n") %
1332 self.ui.debug(_("found new branch changeset %s\n") %
1332 short(p))
1333 short(p))
1333 fetch[p] = 1
1334 fetch[p] = 1
1334 base[i] = 1
1335 base[i] = 1
1335 else:
1336 else:
1336 self.ui.debug(_("narrowed branch search to %s:%s\n")
1337 self.ui.debug(_("narrowed branch search to %s:%s\n")
1337 % (short(p), short(i)))
1338 % (short(p), short(i)))
1338 search.append((p, i))
1339 search.append((p, i))
1339 break
1340 break
1340 p, f = i, f * 2
1341 p, f = i, f * 2
1341
1342
1342 # sanity check our fetch list
1343 # sanity check our fetch list
1343 for f in fetch.keys():
1344 for f in fetch.keys():
1344 if f in m:
1345 if f in m:
1345 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1346 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1346
1347
1347 if base.keys() == [nullid]:
1348 if base.keys() == [nullid]:
1348 if force:
1349 if force:
1349 self.ui.warn(_("warning: repository is unrelated\n"))
1350 self.ui.warn(_("warning: repository is unrelated\n"))
1350 else:
1351 else:
1351 raise util.Abort(_("repository is unrelated"))
1352 raise util.Abort(_("repository is unrelated"))
1352
1353
1353 self.ui.debug(_("found new changesets starting at ") +
1354 self.ui.debug(_("found new changesets starting at ") +
1354 " ".join([short(f) for f in fetch]) + "\n")
1355 " ".join([short(f) for f in fetch]) + "\n")
1355
1356
1356 self.ui.debug(_("%d total queries\n") % reqcnt)
1357 self.ui.debug(_("%d total queries\n") % reqcnt)
1357
1358
1358 return fetch.keys()
1359 return fetch.keys()
1359
1360
1360 def findoutgoing(self, remote, base=None, heads=None, force=False):
1361 def findoutgoing(self, remote, base=None, heads=None, force=False):
1361 """Return list of nodes that are roots of subsets not in remote
1362 """Return list of nodes that are roots of subsets not in remote
1362
1363
1363 If base dict is specified, assume that these nodes and their parents
1364 If base dict is specified, assume that these nodes and their parents
1364 exist on the remote side.
1365 exist on the remote side.
1365 If a list of heads is specified, return only nodes which are heads
1366 If a list of heads is specified, return only nodes which are heads
1366 or ancestors of these heads, and return a second element which
1367 or ancestors of these heads, and return a second element which
1367 contains all remote heads which get new children.
1368 contains all remote heads which get new children.
1368 """
1369 """
1369 if base == None:
1370 if base == None:
1370 base = {}
1371 base = {}
1371 self.findincoming(remote, base, heads, force=force)
1372 self.findincoming(remote, base, heads, force=force)
1372
1373
1373 self.ui.debug(_("common changesets up to ")
1374 self.ui.debug(_("common changesets up to ")
1374 + " ".join(map(short, base.keys())) + "\n")
1375 + " ".join(map(short, base.keys())) + "\n")
1375
1376
1376 remain = dict.fromkeys(self.changelog.nodemap)
1377 remain = dict.fromkeys(self.changelog.nodemap)
1377
1378
1378 # prune everything remote has from the tree
1379 # prune everything remote has from the tree
1379 del remain[nullid]
1380 del remain[nullid]
1380 remove = base.keys()
1381 remove = base.keys()
1381 while remove:
1382 while remove:
1382 n = remove.pop(0)
1383 n = remove.pop(0)
1383 if n in remain:
1384 if n in remain:
1384 del remain[n]
1385 del remain[n]
1385 for p in self.changelog.parents(n):
1386 for p in self.changelog.parents(n):
1386 remove.append(p)
1387 remove.append(p)
1387
1388
1388 # find every node whose parents have been pruned
1389 # find every node whose parents have been pruned
1389 subset = []
1390 subset = []
1390 # find every remote head that will get new children
1391 # find every remote head that will get new children
1391 updated_heads = {}
1392 updated_heads = {}
1392 for n in remain:
1393 for n in remain:
1393 p1, p2 = self.changelog.parents(n)
1394 p1, p2 = self.changelog.parents(n)
1394 if p1 not in remain and p2 not in remain:
1395 if p1 not in remain and p2 not in remain:
1395 subset.append(n)
1396 subset.append(n)
1396 if heads:
1397 if heads:
1397 if p1 in heads:
1398 if p1 in heads:
1398 updated_heads[p1] = True
1399 updated_heads[p1] = True
1399 if p2 in heads:
1400 if p2 in heads:
1400 updated_heads[p2] = True
1401 updated_heads[p2] = True
1401
1402
1402 # this is the set of all roots we have to push
1403 # this is the set of all roots we have to push
1403 if heads:
1404 if heads:
1404 return subset, updated_heads.keys()
1405 return subset, updated_heads.keys()
1405 else:
1406 else:
1406 return subset
1407 return subset
1407
1408
1408 def pull(self, remote, heads=None, force=False):
1409 def pull(self, remote, heads=None, force=False):
1409 lock = self.lock()
1410 lock = self.lock()
1410 try:
1411 try:
1411 fetch = self.findincoming(remote, heads=heads, force=force)
1412 fetch = self.findincoming(remote, heads=heads, force=force)
1412 if fetch == [nullid]:
1413 if fetch == [nullid]:
1413 self.ui.status(_("requesting all changes\n"))
1414 self.ui.status(_("requesting all changes\n"))
1414
1415
1415 if not fetch:
1416 if not fetch:
1416 self.ui.status(_("no changes found\n"))
1417 self.ui.status(_("no changes found\n"))
1417 return 0
1418 return 0
1418
1419
1419 if heads is None:
1420 if heads is None:
1420 cg = remote.changegroup(fetch, 'pull')
1421 cg = remote.changegroup(fetch, 'pull')
1421 else:
1422 else:
1422 if 'changegroupsubset' not in remote.capabilities:
1423 if 'changegroupsubset' not in remote.capabilities:
1423 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1424 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1424 cg = remote.changegroupsubset(fetch, heads, 'pull')
1425 cg = remote.changegroupsubset(fetch, heads, 'pull')
1425 return self.addchangegroup(cg, 'pull', remote.url())
1426 return self.addchangegroup(cg, 'pull', remote.url())
1426 finally:
1427 finally:
1427 del lock
1428 del lock
1428
1429
1429 def push(self, remote, force=False, revs=None):
1430 def push(self, remote, force=False, revs=None):
1430 # there are two ways to push to remote repo:
1431 # there are two ways to push to remote repo:
1431 #
1432 #
1432 # addchangegroup assumes local user can lock remote
1433 # addchangegroup assumes local user can lock remote
1433 # repo (local filesystem, old ssh servers).
1434 # repo (local filesystem, old ssh servers).
1434 #
1435 #
1435 # unbundle assumes local user cannot lock remote repo (new ssh
1436 # unbundle assumes local user cannot lock remote repo (new ssh
1436 # servers, http servers).
1437 # servers, http servers).
1437
1438
1438 if remote.capable('unbundle'):
1439 if remote.capable('unbundle'):
1439 return self.push_unbundle(remote, force, revs)
1440 return self.push_unbundle(remote, force, revs)
1440 return self.push_addchangegroup(remote, force, revs)
1441 return self.push_addchangegroup(remote, force, revs)
1441
1442
1442 def prepush(self, remote, force, revs):
1443 def prepush(self, remote, force, revs):
1443 base = {}
1444 base = {}
1444 remote_heads = remote.heads()
1445 remote_heads = remote.heads()
1445 inc = self.findincoming(remote, base, remote_heads, force=force)
1446 inc = self.findincoming(remote, base, remote_heads, force=force)
1446
1447
1447 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1448 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1448 if revs is not None:
1449 if revs is not None:
1449 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1450 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1450 else:
1451 else:
1451 bases, heads = update, self.changelog.heads()
1452 bases, heads = update, self.changelog.heads()
1452
1453
1453 if not bases:
1454 if not bases:
1454 self.ui.status(_("no changes found\n"))
1455 self.ui.status(_("no changes found\n"))
1455 return None, 1
1456 return None, 1
1456 elif not force:
1457 elif not force:
1457 # check if we're creating new remote heads
1458 # check if we're creating new remote heads
1458 # to be a remote head after push, node must be either
1459 # to be a remote head after push, node must be either
1459 # - unknown locally
1460 # - unknown locally
1460 # - a local outgoing head descended from update
1461 # - a local outgoing head descended from update
1461 # - a remote head that's known locally and not
1462 # - a remote head that's known locally and not
1462 # ancestral to an outgoing head
1463 # ancestral to an outgoing head
1463
1464
1464 warn = 0
1465 warn = 0
1465
1466
1466 if remote_heads == [nullid]:
1467 if remote_heads == [nullid]:
1467 warn = 0
1468 warn = 0
1468 elif not revs and len(heads) > len(remote_heads):
1469 elif not revs and len(heads) > len(remote_heads):
1469 warn = 1
1470 warn = 1
1470 else:
1471 else:
1471 newheads = list(heads)
1472 newheads = list(heads)
1472 for r in remote_heads:
1473 for r in remote_heads:
1473 if r in self.changelog.nodemap:
1474 if r in self.changelog.nodemap:
1474 desc = self.changelog.heads(r, heads)
1475 desc = self.changelog.heads(r, heads)
1475 l = [h for h in heads if h in desc]
1476 l = [h for h in heads if h in desc]
1476 if not l:
1477 if not l:
1477 newheads.append(r)
1478 newheads.append(r)
1478 else:
1479 else:
1479 newheads.append(r)
1480 newheads.append(r)
1480 if len(newheads) > len(remote_heads):
1481 if len(newheads) > len(remote_heads):
1481 warn = 1
1482 warn = 1
1482
1483
1483 if warn:
1484 if warn:
1484 self.ui.warn(_("abort: push creates new remote branches!\n"))
1485 self.ui.warn(_("abort: push creates new remote branches!\n"))
1485 self.ui.status(_("(did you forget to merge?"
1486 self.ui.status(_("(did you forget to merge?"
1486 " use push -f to force)\n"))
1487 " use push -f to force)\n"))
1487 return None, 1
1488 return None, 1
1488 elif inc:
1489 elif inc:
1489 self.ui.warn(_("note: unsynced remote changes!\n"))
1490 self.ui.warn(_("note: unsynced remote changes!\n"))
1490
1491
1491
1492
1492 if revs is None:
1493 if revs is None:
1493 cg = self.changegroup(update, 'push')
1494 cg = self.changegroup(update, 'push')
1494 else:
1495 else:
1495 cg = self.changegroupsubset(update, revs, 'push')
1496 cg = self.changegroupsubset(update, revs, 'push')
1496 return cg, remote_heads
1497 return cg, remote_heads
1497
1498
1498 def push_addchangegroup(self, remote, force, revs):
1499 def push_addchangegroup(self, remote, force, revs):
1499 lock = remote.lock()
1500 lock = remote.lock()
1500 try:
1501 try:
1501 ret = self.prepush(remote, force, revs)
1502 ret = self.prepush(remote, force, revs)
1502 if ret[0] is not None:
1503 if ret[0] is not None:
1503 cg, remote_heads = ret
1504 cg, remote_heads = ret
1504 return remote.addchangegroup(cg, 'push', self.url())
1505 return remote.addchangegroup(cg, 'push', self.url())
1505 return ret[1]
1506 return ret[1]
1506 finally:
1507 finally:
1507 del lock
1508 del lock
1508
1509
1509 def push_unbundle(self, remote, force, revs):
1510 def push_unbundle(self, remote, force, revs):
1510 # local repo finds heads on server, finds out what revs it
1511 # local repo finds heads on server, finds out what revs it
1511 # must push. once revs transferred, if server finds it has
1512 # must push. once revs transferred, if server finds it has
1512 # different heads (someone else won commit/push race), server
1513 # different heads (someone else won commit/push race), server
1513 # aborts.
1514 # aborts.
1514
1515
1515 ret = self.prepush(remote, force, revs)
1516 ret = self.prepush(remote, force, revs)
1516 if ret[0] is not None:
1517 if ret[0] is not None:
1517 cg, remote_heads = ret
1518 cg, remote_heads = ret
1518 if force: remote_heads = ['force']
1519 if force: remote_heads = ['force']
1519 return remote.unbundle(cg, remote_heads, 'push')
1520 return remote.unbundle(cg, remote_heads, 'push')
1520 return ret[1]
1521 return ret[1]
1521
1522
1522 def changegroupinfo(self, nodes, source):
1523 def changegroupinfo(self, nodes, source):
1523 if self.ui.verbose or source == 'bundle':
1524 if self.ui.verbose or source == 'bundle':
1524 self.ui.status(_("%d changesets found\n") % len(nodes))
1525 self.ui.status(_("%d changesets found\n") % len(nodes))
1525 if self.ui.debugflag:
1526 if self.ui.debugflag:
1526 self.ui.debug(_("List of changesets:\n"))
1527 self.ui.debug(_("List of changesets:\n"))
1527 for node in nodes:
1528 for node in nodes:
1528 self.ui.debug("%s\n" % hex(node))
1529 self.ui.debug("%s\n" % hex(node))
1529
1530
1530 def changegroupsubset(self, bases, heads, source, extranodes=None):
1531 def changegroupsubset(self, bases, heads, source, extranodes=None):
1531 """This function generates a changegroup consisting of all the nodes
1532 """This function generates a changegroup consisting of all the nodes
1532 that are descendants of any of the bases, and ancestors of any of
1533 that are descendants of any of the bases, and ancestors of any of
1533 the heads.
1534 the heads.
1534
1535
1535 It is fairly complex as determining which filenodes and which
1536 It is fairly complex as determining which filenodes and which
1536 manifest nodes need to be included for the changeset to be complete
1537 manifest nodes need to be included for the changeset to be complete
1537 is non-trivial.
1538 is non-trivial.
1538
1539
1539 Another wrinkle is doing the reverse, figuring out which changeset in
1540 Another wrinkle is doing the reverse, figuring out which changeset in
1540 the changegroup a particular filenode or manifestnode belongs to.
1541 the changegroup a particular filenode or manifestnode belongs to.
1541
1542
1542 The caller can specify some nodes that must be included in the
1543 The caller can specify some nodes that must be included in the
1543 changegroup using the extranodes argument. It should be a dict
1544 changegroup using the extranodes argument. It should be a dict
1544 where the keys are the filenames (or 1 for the manifest), and the
1545 where the keys are the filenames (or 1 for the manifest), and the
1545 values are lists of (node, linknode) tuples, where node is a wanted
1546 values are lists of (node, linknode) tuples, where node is a wanted
1546 node and linknode is the changelog node that should be transmitted as
1547 node and linknode is the changelog node that should be transmitted as
1547 the linkrev.
1548 the linkrev.
1548 """
1549 """
1549
1550
1550 self.hook('preoutgoing', throw=True, source=source)
1551 self.hook('preoutgoing', throw=True, source=source)
1551
1552
1552 # Set up some initial variables
1553 # Set up some initial variables
1553 # Make it easy to refer to self.changelog
1554 # Make it easy to refer to self.changelog
1554 cl = self.changelog
1555 cl = self.changelog
1555 # msng is short for missing - compute the list of changesets in this
1556 # msng is short for missing - compute the list of changesets in this
1556 # changegroup.
1557 # changegroup.
1557 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1558 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1558 self.changegroupinfo(msng_cl_lst, source)
1559 self.changegroupinfo(msng_cl_lst, source)
1559 # Some bases may turn out to be superfluous, and some heads may be
1560 # Some bases may turn out to be superfluous, and some heads may be
1560 # too. nodesbetween will return the minimal set of bases and heads
1561 # too. nodesbetween will return the minimal set of bases and heads
1561 # necessary to re-create the changegroup.
1562 # necessary to re-create the changegroup.
1562
1563
1563 # Known heads are the list of heads that it is assumed the recipient
1564 # Known heads are the list of heads that it is assumed the recipient
1564 # of this changegroup will know about.
1565 # of this changegroup will know about.
1565 knownheads = {}
1566 knownheads = {}
1566 # We assume that all parents of bases are known heads.
1567 # We assume that all parents of bases are known heads.
1567 for n in bases:
1568 for n in bases:
1568 for p in cl.parents(n):
1569 for p in cl.parents(n):
1569 if p != nullid:
1570 if p != nullid:
1570 knownheads[p] = 1
1571 knownheads[p] = 1
1571 knownheads = knownheads.keys()
1572 knownheads = knownheads.keys()
1572 if knownheads:
1573 if knownheads:
1573 # Now that we know what heads are known, we can compute which
1574 # Now that we know what heads are known, we can compute which
1574 # changesets are known. The recipient must know about all
1575 # changesets are known. The recipient must know about all
1575 # changesets required to reach the known heads from the null
1576 # changesets required to reach the known heads from the null
1576 # changeset.
1577 # changeset.
1577 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1578 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1578 junk = None
1579 junk = None
1579 # Transform the list into an ersatz set.
1580 # Transform the list into an ersatz set.
1580 has_cl_set = dict.fromkeys(has_cl_set)
1581 has_cl_set = dict.fromkeys(has_cl_set)
1581 else:
1582 else:
1582 # If there were no known heads, the recipient cannot be assumed to
1583 # If there were no known heads, the recipient cannot be assumed to
1583 # know about any changesets.
1584 # know about any changesets.
1584 has_cl_set = {}
1585 has_cl_set = {}
1585
1586
1586 # Make it easy to refer to self.manifest
1587 # Make it easy to refer to self.manifest
1587 mnfst = self.manifest
1588 mnfst = self.manifest
1588 # We don't know which manifests are missing yet
1589 # We don't know which manifests are missing yet
1589 msng_mnfst_set = {}
1590 msng_mnfst_set = {}
1590 # Nor do we know which filenodes are missing.
1591 # Nor do we know which filenodes are missing.
1591 msng_filenode_set = {}
1592 msng_filenode_set = {}
1592
1593
1593 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1594 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1594 junk = None
1595 junk = None
1595
1596
1596 # A changeset always belongs to itself, so the changenode lookup
1597 # A changeset always belongs to itself, so the changenode lookup
1597 # function for a changenode is identity.
1598 # function for a changenode is identity.
1598 def identity(x):
1599 def identity(x):
1599 return x
1600 return x
1600
1601
1601 # A function generating function. Sets up an environment for the
1602 # A function generating function. Sets up an environment for the
1602 # inner function.
1603 # inner function.
1603 def cmp_by_rev_func(revlog):
1604 def cmp_by_rev_func(revlog):
1604 # Compare two nodes by their revision number in the environment's
1605 # Compare two nodes by their revision number in the environment's
1605 # revision history. Since the revision number both represents the
1606 # revision history. Since the revision number both represents the
1606 # most efficient order to read the nodes in, and represents a
1607 # most efficient order to read the nodes in, and represents a
1607 # topological sorting of the nodes, this function is often useful.
1608 # topological sorting of the nodes, this function is often useful.
1608 def cmp_by_rev(a, b):
1609 def cmp_by_rev(a, b):
1609 return cmp(revlog.rev(a), revlog.rev(b))
1610 return cmp(revlog.rev(a), revlog.rev(b))
1610 return cmp_by_rev
1611 return cmp_by_rev
1611
1612
1612 # If we determine that a particular file or manifest node must be a
1613 # If we determine that a particular file or manifest node must be a
1613 # node that the recipient of the changegroup will already have, we can
1614 # node that the recipient of the changegroup will already have, we can
1614 # also assume the recipient will have all the parents. This function
1615 # also assume the recipient will have all the parents. This function
1615 # prunes them from the set of missing nodes.
1616 # prunes them from the set of missing nodes.
1616 def prune_parents(revlog, hasset, msngset):
1617 def prune_parents(revlog, hasset, msngset):
1617 haslst = hasset.keys()
1618 haslst = hasset.keys()
1618 haslst.sort(cmp_by_rev_func(revlog))
1619 haslst.sort(cmp_by_rev_func(revlog))
1619 for node in haslst:
1620 for node in haslst:
1620 parentlst = [p for p in revlog.parents(node) if p != nullid]
1621 parentlst = [p for p in revlog.parents(node) if p != nullid]
1621 while parentlst:
1622 while parentlst:
1622 n = parentlst.pop()
1623 n = parentlst.pop()
1623 if n not in hasset:
1624 if n not in hasset:
1624 hasset[n] = 1
1625 hasset[n] = 1
1625 p = [p for p in revlog.parents(n) if p != nullid]
1626 p = [p for p in revlog.parents(n) if p != nullid]
1626 parentlst.extend(p)
1627 parentlst.extend(p)
1627 for n in hasset:
1628 for n in hasset:
1628 msngset.pop(n, None)
1629 msngset.pop(n, None)
1629
1630
1630 # This is a function generating function used to set up an environment
1631 # This is a function generating function used to set up an environment
1631 # for the inner function to execute in.
1632 # for the inner function to execute in.
1632 def manifest_and_file_collector(changedfileset):
1633 def manifest_and_file_collector(changedfileset):
1633 # This is an information gathering function that gathers
1634 # This is an information gathering function that gathers
1634 # information from each changeset node that goes out as part of
1635 # information from each changeset node that goes out as part of
1635 # the changegroup. The information gathered is a list of which
1636 # the changegroup. The information gathered is a list of which
1636 # manifest nodes are potentially required (the recipient may
1637 # manifest nodes are potentially required (the recipient may
1637 # already have them) and total list of all files which were
1638 # already have them) and total list of all files which were
1638 # changed in any changeset in the changegroup.
1639 # changed in any changeset in the changegroup.
1639 #
1640 #
1640 # We also remember, for each manifest, the first changenode that
1641 # We also remember, for each manifest, the first changenode that
1641 # referenced it, so we can later determine which changenode 'owns'
1642 # referenced it, so we can later determine which changenode 'owns'
1642 # the manifest.
1643 # the manifest.
1643 def collect_manifests_and_files(clnode):
1644 def collect_manifests_and_files(clnode):
1644 c = cl.read(clnode)
1645 c = cl.read(clnode)
1645 for f in c[3]:
1646 for f in c[3]:
1646 # This is to make sure we only have one instance of each
1647 # This is to make sure we only have one instance of each
1647 # filename string for each filename.
1648 # filename string for each filename.
1648 changedfileset.setdefault(f, f)
1649 changedfileset.setdefault(f, f)
1649 msng_mnfst_set.setdefault(c[0], clnode)
1650 msng_mnfst_set.setdefault(c[0], clnode)
1650 return collect_manifests_and_files
1651 return collect_manifests_and_files
1651
1652
1652 # Figure out which manifest nodes (of the ones we think might be part
1653 # Figure out which manifest nodes (of the ones we think might be part
1653 # of the changegroup) the recipient must know about and remove them
1654 # of the changegroup) the recipient must know about and remove them
1654 # from the changegroup.
1655 # from the changegroup.
1655 def prune_manifests():
1656 def prune_manifests():
1656 has_mnfst_set = {}
1657 has_mnfst_set = {}
1657 for n in msng_mnfst_set:
1658 for n in msng_mnfst_set:
1658 # If a 'missing' manifest thinks it belongs to a changenode
1659 # If a 'missing' manifest thinks it belongs to a changenode
1659 # the recipient is assumed to have, obviously the recipient
1660 # the recipient is assumed to have, obviously the recipient
1660 # must have that manifest.
1661 # must have that manifest.
1661 linknode = cl.node(mnfst.linkrev(n))
1662 linknode = cl.node(mnfst.linkrev(n))
1662 if linknode in has_cl_set:
1663 if linknode in has_cl_set:
1663 has_mnfst_set[n] = 1
1664 has_mnfst_set[n] = 1
1664 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1665 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1665
1666
1666 # Use the information collected in collect_manifests_and_files to say
1667 # Use the information collected in collect_manifests_and_files to say
1667 # which changenode any manifestnode belongs to.
1668 # which changenode any manifestnode belongs to.
1668 def lookup_manifest_link(mnfstnode):
1669 def lookup_manifest_link(mnfstnode):
1669 return msng_mnfst_set[mnfstnode]
1670 return msng_mnfst_set[mnfstnode]
1670
1671
1671 # A function generating function that sets up the initial environment
1672 # A function generating function that sets up the initial environment
1672 # for the inner function.
1673 # for the inner function.
1673 def filenode_collector(changedfiles):
1674 def filenode_collector(changedfiles):
1674 next_rev = [0]
1675 next_rev = [0]
1675 # This gathers information from each manifestnode included in the
1676 # This gathers information from each manifestnode included in the
1676 # changegroup about which filenodes the manifest node references
1677 # changegroup about which filenodes the manifest node references
1677 # so we can include those in the changegroup too.
1678 # so we can include those in the changegroup too.
1678 #
1679 #
1679 # It also remembers which changenode each filenode belongs to. It
1680 # It also remembers which changenode each filenode belongs to. It
1680 # does this by assuming that a filenode belongs to the changenode
1681 # does this by assuming that a filenode belongs to the changenode
1681 # the first manifest that references it belongs to.
1682 # the first manifest that references it belongs to.
1682 def collect_msng_filenodes(mnfstnode):
1683 def collect_msng_filenodes(mnfstnode):
1683 r = mnfst.rev(mnfstnode)
1684 r = mnfst.rev(mnfstnode)
1684 if r == next_rev[0]:
1685 if r == next_rev[0]:
1685 # If the last rev we looked at was the one just previous,
1686 # If the last rev we looked at was the one just previous,
1686 # we only need to see a diff.
1687 # we only need to see a diff.
1687 deltamf = mnfst.readdelta(mnfstnode)
1688 deltamf = mnfst.readdelta(mnfstnode)
1688 # For each line in the delta
1689 # For each line in the delta
1689 for f, fnode in deltamf.items():
1690 for f, fnode in deltamf.items():
1690 f = changedfiles.get(f, None)
1691 f = changedfiles.get(f, None)
1691 # And if the file is in the list of files we care
1692 # And if the file is in the list of files we care
1692 # about.
1693 # about.
1693 if f is not None:
1694 if f is not None:
1694 # Get the changenode this manifest belongs to
1695 # Get the changenode this manifest belongs to
1695 clnode = msng_mnfst_set[mnfstnode]
1696 clnode = msng_mnfst_set[mnfstnode]
1696 # Create the set of filenodes for the file if
1697 # Create the set of filenodes for the file if
1697 # there isn't one already.
1698 # there isn't one already.
1698 ndset = msng_filenode_set.setdefault(f, {})
1699 ndset = msng_filenode_set.setdefault(f, {})
1699 # And set the filenode's changelog node to the
1700 # And set the filenode's changelog node to the
1700 # manifest's if it hasn't been set already.
1701 # manifest's if it hasn't been set already.
1701 ndset.setdefault(fnode, clnode)
1702 ndset.setdefault(fnode, clnode)
1702 else:
1703 else:
1703 # Otherwise we need a full manifest.
1704 # Otherwise we need a full manifest.
1704 m = mnfst.read(mnfstnode)
1705 m = mnfst.read(mnfstnode)
1705 # For every file we care about.
1706 # For every file we care about.
1706 for f in changedfiles:
1707 for f in changedfiles:
1707 fnode = m.get(f, None)
1708 fnode = m.get(f, None)
1708 # If it's in the manifest
1709 # If it's in the manifest
1709 if fnode is not None:
1710 if fnode is not None:
1710 # See comments above.
1711 # See comments above.
1711 clnode = msng_mnfst_set[mnfstnode]
1712 clnode = msng_mnfst_set[mnfstnode]
1712 ndset = msng_filenode_set.setdefault(f, {})
1713 ndset = msng_filenode_set.setdefault(f, {})
1713 ndset.setdefault(fnode, clnode)
1714 ndset.setdefault(fnode, clnode)
1714 # Remember the revision we hope to see next.
1715 # Remember the revision we hope to see next.
1715 next_rev[0] = r + 1
1716 next_rev[0] = r + 1
1716 return collect_msng_filenodes
1717 return collect_msng_filenodes
1717
1718
1718 # We have a list of filenodes we think we need for a file; let's remove
1719 # We have a list of filenodes we think we need for a file; let's remove
1719 # all those we know the recipient must have.
1720 # all those we know the recipient must have.
1720 def prune_filenodes(f, filerevlog):
1721 def prune_filenodes(f, filerevlog):
1721 msngset = msng_filenode_set[f]
1722 msngset = msng_filenode_set[f]
1722 hasset = {}
1723 hasset = {}
1723 # If a 'missing' filenode thinks it belongs to a changenode we
1724 # If a 'missing' filenode thinks it belongs to a changenode we
1724 # assume the recipient must have, then the recipient must have
1725 # assume the recipient must have, then the recipient must have
1725 # that filenode.
1726 # that filenode.
1726 for n in msngset:
1727 for n in msngset:
1727 clnode = cl.node(filerevlog.linkrev(n))
1728 clnode = cl.node(filerevlog.linkrev(n))
1728 if clnode in has_cl_set:
1729 if clnode in has_cl_set:
1729 hasset[n] = 1
1730 hasset[n] = 1
1730 prune_parents(filerevlog, hasset, msngset)
1731 prune_parents(filerevlog, hasset, msngset)
1731
1732
1732 # A function generating function that sets up a context for the
1733 # A function generating function that sets up a context for the
1733 # inner function.
1734 # inner function.
1734 def lookup_filenode_link_func(fname):
1735 def lookup_filenode_link_func(fname):
1735 msngset = msng_filenode_set[fname]
1736 msngset = msng_filenode_set[fname]
1736 # Lookup the changenode the filenode belongs to.
1737 # Lookup the changenode the filenode belongs to.
1737 def lookup_filenode_link(fnode):
1738 def lookup_filenode_link(fnode):
1738 return msngset[fnode]
1739 return msngset[fnode]
1739 return lookup_filenode_link
1740 return lookup_filenode_link
1740
1741
1741 # Add the nodes that were explicitly requested.
1742 # Add the nodes that were explicitly requested.
1742 def add_extra_nodes(name, nodes):
1743 def add_extra_nodes(name, nodes):
1743 if not extranodes or name not in extranodes:
1744 if not extranodes or name not in extranodes:
1744 return
1745 return
1745
1746
1746 for node, linknode in extranodes[name]:
1747 for node, linknode in extranodes[name]:
1747 if node not in nodes:
1748 if node not in nodes:
1748 nodes[node] = linknode
1749 nodes[node] = linknode
1749
1750
1750 # Now that we have all these utility functions to help out and
1751 # Now that we have all these utility functions to help out and
1751 # logically divide up the task, generate the group.
1752 # logically divide up the task, generate the group.
1752 def gengroup():
1753 def gengroup():
1753 # The set of changed files starts empty.
1754 # The set of changed files starts empty.
1754 changedfiles = {}
1755 changedfiles = {}
1755 # Create a changenode group generator that will call our functions
1756 # Create a changenode group generator that will call our functions
1756 # back to lookup the owning changenode and collect information.
1757 # back to lookup the owning changenode and collect information.
1757 group = cl.group(msng_cl_lst, identity,
1758 group = cl.group(msng_cl_lst, identity,
1758 manifest_and_file_collector(changedfiles))
1759 manifest_and_file_collector(changedfiles))
1759 for chnk in group:
1760 for chnk in group:
1760 yield chnk
1761 yield chnk
1761
1762
1762 # The list of manifests has been collected by the generator
1763 # The list of manifests has been collected by the generator
1763 # calling our functions back.
1764 # calling our functions back.
1764 prune_manifests()
1765 prune_manifests()
1765 add_extra_nodes(1, msng_mnfst_set)
1766 add_extra_nodes(1, msng_mnfst_set)
1766 msng_mnfst_lst = msng_mnfst_set.keys()
1767 msng_mnfst_lst = msng_mnfst_set.keys()
1767 # Sort the manifestnodes by revision number.
1768 # Sort the manifestnodes by revision number.
1768 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1769 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1769 # Create a generator for the manifestnodes that calls our lookup
1770 # Create a generator for the manifestnodes that calls our lookup
1770 # and data collection functions back.
1771 # and data collection functions back.
1771 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1772 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1772 filenode_collector(changedfiles))
1773 filenode_collector(changedfiles))
1773 for chnk in group:
1774 for chnk in group:
1774 yield chnk
1775 yield chnk
1775
1776
1776 # These are no longer needed, dereference and toss the memory for
1777 # These are no longer needed, dereference and toss the memory for
1777 # them.
1778 # them.
1778 msng_mnfst_lst = None
1779 msng_mnfst_lst = None
1779 msng_mnfst_set.clear()
1780 msng_mnfst_set.clear()
1780
1781
1781 if extranodes:
1782 if extranodes:
1782 for fname in extranodes:
1783 for fname in extranodes:
1783 if isinstance(fname, int):
1784 if isinstance(fname, int):
1784 continue
1785 continue
1785 add_extra_nodes(fname,
1786 add_extra_nodes(fname,
1786 msng_filenode_set.setdefault(fname, {}))
1787 msng_filenode_set.setdefault(fname, {}))
1787 changedfiles[fname] = 1
1788 changedfiles[fname] = 1
1788 changedfiles = changedfiles.keys()
1789 changedfiles = changedfiles.keys()
1789 changedfiles.sort()
1790 changedfiles.sort()
1790 # Go through all our files in order sorted by name.
1791 # Go through all our files in order sorted by name.
1791 for fname in changedfiles:
1792 for fname in changedfiles:
1792 filerevlog = self.file(fname)
1793 filerevlog = self.file(fname)
1793 if filerevlog.count() == 0:
1794 if filerevlog.count() == 0:
1794 raise util.Abort(_("empty or missing revlog for %s") % fname)
1795 raise util.Abort(_("empty or missing revlog for %s") % fname)
1795 # Toss out the filenodes that the recipient isn't really
1796 # Toss out the filenodes that the recipient isn't really
1796 # missing.
1797 # missing.
1797 if fname in msng_filenode_set:
1798 if fname in msng_filenode_set:
1798 prune_filenodes(fname, filerevlog)
1799 prune_filenodes(fname, filerevlog)
1799 msng_filenode_lst = msng_filenode_set[fname].keys()
1800 msng_filenode_lst = msng_filenode_set[fname].keys()
1800 else:
1801 else:
1801 msng_filenode_lst = []
1802 msng_filenode_lst = []
1802 # If any filenodes are left, generate the group for them,
1803 # If any filenodes are left, generate the group for them,
1803 # otherwise don't bother.
1804 # otherwise don't bother.
1804 if len(msng_filenode_lst) > 0:
1805 if len(msng_filenode_lst) > 0:
1805 yield changegroup.chunkheader(len(fname))
1806 yield changegroup.chunkheader(len(fname))
1806 yield fname
1807 yield fname
1807 # Sort the filenodes by their revision #
1808 # Sort the filenodes by their revision #
1808 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1809 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1809 # Create a group generator and only pass in a changenode
1810 # Create a group generator and only pass in a changenode
1810 # lookup function as we need to collect no information
1811 # lookup function as we need to collect no information
1811 # from filenodes.
1812 # from filenodes.
1812 group = filerevlog.group(msng_filenode_lst,
1813 group = filerevlog.group(msng_filenode_lst,
1813 lookup_filenode_link_func(fname))
1814 lookup_filenode_link_func(fname))
1814 for chnk in group:
1815 for chnk in group:
1815 yield chnk
1816 yield chnk
1816 if fname in msng_filenode_set:
1817 if fname in msng_filenode_set:
1817 # Don't need this anymore, toss it to free memory.
1818 # Don't need this anymore, toss it to free memory.
1818 del msng_filenode_set[fname]
1819 del msng_filenode_set[fname]
1819 # Signal that no more groups are left.
1820 # Signal that no more groups are left.
1820 yield changegroup.closechunk()
1821 yield changegroup.closechunk()
1821
1822
1822 if msng_cl_lst:
1823 if msng_cl_lst:
1823 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1824 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1824
1825
1825 return util.chunkbuffer(gengroup())
1826 return util.chunkbuffer(gengroup())
1826
1827
1827 def changegroup(self, basenodes, source):
1828 def changegroup(self, basenodes, source):
1828 """Generate a changegroup of all nodes that we have that a recipient
1829 """Generate a changegroup of all nodes that we have that a recipient
1829 doesn't.
1830 doesn't.
1830
1831
1831 This is much easier than the previous function as we can assume that
1832 This is much easier than the previous function as we can assume that
1832 the recipient has any changenode we aren't sending them."""
1833 the recipient has any changenode we aren't sending them."""
1833
1834
1834 self.hook('preoutgoing', throw=True, source=source)
1835 self.hook('preoutgoing', throw=True, source=source)
1835
1836
1836 cl = self.changelog
1837 cl = self.changelog
1837 nodes = cl.nodesbetween(basenodes, None)[0]
1838 nodes = cl.nodesbetween(basenodes, None)[0]
1838 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1839 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1839 self.changegroupinfo(nodes, source)
1840 self.changegroupinfo(nodes, source)
1840
1841
1841 def identity(x):
1842 def identity(x):
1842 return x
1843 return x
1843
1844
1844 def gennodelst(revlog):
1845 def gennodelst(revlog):
1845 for r in xrange(0, revlog.count()):
1846 for r in xrange(0, revlog.count()):
1846 n = revlog.node(r)
1847 n = revlog.node(r)
1847 if revlog.linkrev(n) in revset:
1848 if revlog.linkrev(n) in revset:
1848 yield n
1849 yield n
1849
1850
1850 def changed_file_collector(changedfileset):
1851 def changed_file_collector(changedfileset):
1851 def collect_changed_files(clnode):
1852 def collect_changed_files(clnode):
1852 c = cl.read(clnode)
1853 c = cl.read(clnode)
1853 for fname in c[3]:
1854 for fname in c[3]:
1854 changedfileset[fname] = 1
1855 changedfileset[fname] = 1
1855 return collect_changed_files
1856 return collect_changed_files
1856
1857
1857 def lookuprevlink_func(revlog):
1858 def lookuprevlink_func(revlog):
1858 def lookuprevlink(n):
1859 def lookuprevlink(n):
1859 return cl.node(revlog.linkrev(n))
1860 return cl.node(revlog.linkrev(n))
1860 return lookuprevlink
1861 return lookuprevlink
1861
1862
1862 def gengroup():
1863 def gengroup():
1863 # construct a list of all changed files
1864 # construct a list of all changed files
1864 changedfiles = {}
1865 changedfiles = {}
1865
1866
1866 for chnk in cl.group(nodes, identity,
1867 for chnk in cl.group(nodes, identity,
1867 changed_file_collector(changedfiles)):
1868 changed_file_collector(changedfiles)):
1868 yield chnk
1869 yield chnk
1869 changedfiles = changedfiles.keys()
1870 changedfiles = changedfiles.keys()
1870 changedfiles.sort()
1871 changedfiles.sort()
1871
1872
1872 mnfst = self.manifest
1873 mnfst = self.manifest
1873 nodeiter = gennodelst(mnfst)
1874 nodeiter = gennodelst(mnfst)
1874 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1875 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1875 yield chnk
1876 yield chnk
1876
1877
1877 for fname in changedfiles:
1878 for fname in changedfiles:
1878 filerevlog = self.file(fname)
1879 filerevlog = self.file(fname)
1879 if filerevlog.count() == 0:
1880 if filerevlog.count() == 0:
1880 raise util.Abort(_("empty or missing revlog for %s") % fname)
1881 raise util.Abort(_("empty or missing revlog for %s") % fname)
1881 nodeiter = gennodelst(filerevlog)
1882 nodeiter = gennodelst(filerevlog)
1882 nodeiter = list(nodeiter)
1883 nodeiter = list(nodeiter)
1883 if nodeiter:
1884 if nodeiter:
1884 yield changegroup.chunkheader(len(fname))
1885 yield changegroup.chunkheader(len(fname))
1885 yield fname
1886 yield fname
1886 lookup = lookuprevlink_func(filerevlog)
1887 lookup = lookuprevlink_func(filerevlog)
1887 for chnk in filerevlog.group(nodeiter, lookup):
1888 for chnk in filerevlog.group(nodeiter, lookup):
1888 yield chnk
1889 yield chnk
1889
1890
1890 yield changegroup.closechunk()
1891 yield changegroup.closechunk()
1891
1892
1892 if nodes:
1893 if nodes:
1893 self.hook('outgoing', node=hex(nodes[0]), source=source)
1894 self.hook('outgoing', node=hex(nodes[0]), source=source)
1894
1895
1895 return util.chunkbuffer(gengroup())
1896 return util.chunkbuffer(gengroup())
1896
1897
1897 def addchangegroup(self, source, srctype, url, emptyok=False):
1898 def addchangegroup(self, source, srctype, url, emptyok=False):
1898 """add changegroup to repo.
1899 """add changegroup to repo.
1899
1900
1900 return values:
1901 return values:
1901 - nothing changed or no source: 0
1902 - nothing changed or no source: 0
1902 - more heads than before: 1+added heads (2..n)
1903 - more heads than before: 1+added heads (2..n)
1903 - fewer heads than before: -1-removed heads (-2..-n)
1904 - fewer heads than before: -1-removed heads (-2..-n)
1904 - number of heads stays the same: 1
1905 - number of heads stays the same: 1
1905 """
1906 """
1906 def csmap(x):
1907 def csmap(x):
1907 self.ui.debug(_("add changeset %s\n") % short(x))
1908 self.ui.debug(_("add changeset %s\n") % short(x))
1908 return cl.count()
1909 return cl.count()
1909
1910
1910 def revmap(x):
1911 def revmap(x):
1911 return cl.rev(x)
1912 return cl.rev(x)
1912
1913
1913 if not source:
1914 if not source:
1914 return 0
1915 return 0
1915
1916
1916 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1917 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1917
1918
1918 changesets = files = revisions = 0
1919 changesets = files = revisions = 0
1919
1920
1920 # write changelog data to temp files so concurrent readers will not see
1921 # write changelog data to temp files so concurrent readers will not see
1921 # inconsistent view
1922 # inconsistent view
1922 cl = self.changelog
1923 cl = self.changelog
1923 cl.delayupdate()
1924 cl.delayupdate()
1924 oldheads = len(cl.heads())
1925 oldheads = len(cl.heads())
1925
1926
1926 tr = self.transaction()
1927 tr = self.transaction()
1927 try:
1928 try:
1928 trp = weakref.proxy(tr)
1929 trp = weakref.proxy(tr)
1929 # pull off the changeset group
1930 # pull off the changeset group
1930 self.ui.status(_("adding changesets\n"))
1931 self.ui.status(_("adding changesets\n"))
1931 cor = cl.count() - 1
1932 cor = cl.count() - 1
1932 chunkiter = changegroup.chunkiter(source)
1933 chunkiter = changegroup.chunkiter(source)
1933 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1934 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1934 raise util.Abort(_("received changelog group is empty"))
1935 raise util.Abort(_("received changelog group is empty"))
1935 cnr = cl.count() - 1
1936 cnr = cl.count() - 1
1936 changesets = cnr - cor
1937 changesets = cnr - cor
1937
1938
1938 # pull off the manifest group
1939 # pull off the manifest group
1939 self.ui.status(_("adding manifests\n"))
1940 self.ui.status(_("adding manifests\n"))
1940 chunkiter = changegroup.chunkiter(source)
1941 chunkiter = changegroup.chunkiter(source)
1941 # no need to check for empty manifest group here:
1942 # no need to check for empty manifest group here:
1942 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1943 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1943 # no new manifest will be created and the manifest group will
1944 # no new manifest will be created and the manifest group will
1944 # be empty during the pull
1945 # be empty during the pull
1945 self.manifest.addgroup(chunkiter, revmap, trp)
1946 self.manifest.addgroup(chunkiter, revmap, trp)
1946
1947
1947 # process the files
1948 # process the files
1948 self.ui.status(_("adding file changes\n"))
1949 self.ui.status(_("adding file changes\n"))
1949 while 1:
1950 while 1:
1950 f = changegroup.getchunk(source)
1951 f = changegroup.getchunk(source)
1951 if not f:
1952 if not f:
1952 break
1953 break
1953 self.ui.debug(_("adding %s revisions\n") % f)
1954 self.ui.debug(_("adding %s revisions\n") % f)
1954 fl = self.file(f)
1955 fl = self.file(f)
1955 o = fl.count()
1956 o = fl.count()
1956 chunkiter = changegroup.chunkiter(source)
1957 chunkiter = changegroup.chunkiter(source)
1957 if fl.addgroup(chunkiter, revmap, trp) is None:
1958 if fl.addgroup(chunkiter, revmap, trp) is None:
1958 raise util.Abort(_("received file revlog group is empty"))
1959 raise util.Abort(_("received file revlog group is empty"))
1959 revisions += fl.count() - o
1960 revisions += fl.count() - o
1960 files += 1
1961 files += 1
1961
1962
1962 # make changelog see real files again
1963 # make changelog see real files again
1963 cl.finalize(trp)
1964 cl.finalize(trp)
1964
1965
1965 newheads = len(self.changelog.heads())
1966 newheads = len(self.changelog.heads())
1966 heads = ""
1967 heads = ""
1967 if oldheads and newheads != oldheads:
1968 if oldheads and newheads != oldheads:
1968 heads = _(" (%+d heads)") % (newheads - oldheads)
1969 heads = _(" (%+d heads)") % (newheads - oldheads)
1969
1970
1970 self.ui.status(_("added %d changesets"
1971 self.ui.status(_("added %d changesets"
1971 " with %d changes to %d files%s\n")
1972 " with %d changes to %d files%s\n")
1972 % (changesets, revisions, files, heads))
1973 % (changesets, revisions, files, heads))
1973
1974
1974 if changesets > 0:
1975 if changesets > 0:
1975 self.hook('pretxnchangegroup', throw=True,
1976 self.hook('pretxnchangegroup', throw=True,
1976 node=hex(self.changelog.node(cor+1)), source=srctype,
1977 node=hex(self.changelog.node(cor+1)), source=srctype,
1977 url=url)
1978 url=url)
1978
1979
1979 tr.close()
1980 tr.close()
1980 finally:
1981 finally:
1981 del tr
1982 del tr
1982
1983
1983 if changesets > 0:
1984 if changesets > 0:
1985 # forcefully update the on-disk branch cache
1986 self.ui.debug(_("updating the branch cache\n"))
1987 self.branchcache = None
1988 self.branchtags()
1984 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1989 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1985 source=srctype, url=url)
1990 source=srctype, url=url)
1986
1991
1987 for i in xrange(cor + 1, cnr + 1):
1992 for i in xrange(cor + 1, cnr + 1):
1988 self.hook("incoming", node=hex(self.changelog.node(i)),
1993 self.hook("incoming", node=hex(self.changelog.node(i)),
1989 source=srctype, url=url)
1994 source=srctype, url=url)
1990
1995
1991 # never return 0 here:
1996 # never return 0 here:
1992 if newheads < oldheads:
1997 if newheads < oldheads:
1993 return newheads - oldheads - 1
1998 return newheads - oldheads - 1
1994 else:
1999 else:
1995 return newheads - oldheads + 1
2000 return newheads - oldheads + 1
1996
2001
1997
2002
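(The return-value convention in the docstring above can be mapped back to a change in head count; heads_delta below is an illustrative helper, not part of Mercurial.)

def heads_delta(ret):
    # 0: nothing changed or no source; 1: head count unchanged;
    # 1+n: n heads added; -1-n: n heads removed.
    if ret == 0:
        return None
    if ret > 0:
        return ret - 1
    return ret + 1

# e.g. heads_delta(1) == 0, heads_delta(3) == 2, heads_delta(-2) == -1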
1998 def stream_in(self, remote):
2003 def stream_in(self, remote):
1999 fp = remote.stream_out()
2004 fp = remote.stream_out()
2000 l = fp.readline()
2005 l = fp.readline()
2001 try:
2006 try:
2002 resp = int(l)
2007 resp = int(l)
2003 except ValueError:
2008 except ValueError:
2004 raise util.UnexpectedOutput(
2009 raise util.UnexpectedOutput(
2005 _('Unexpected response from remote server:'), l)
2010 _('Unexpected response from remote server:'), l)
2006 if resp == 1:
2011 if resp == 1:
2007 raise util.Abort(_('operation forbidden by server'))
2012 raise util.Abort(_('operation forbidden by server'))
2008 elif resp == 2:
2013 elif resp == 2:
2009 raise util.Abort(_('locking the remote repository failed'))
2014 raise util.Abort(_('locking the remote repository failed'))
2010 elif resp != 0:
2015 elif resp != 0:
2011 raise util.Abort(_('the server sent an unknown error code'))
2016 raise util.Abort(_('the server sent an unknown error code'))
2012 self.ui.status(_('streaming all changes\n'))
2017 self.ui.status(_('streaming all changes\n'))
2013 l = fp.readline()
2018 l = fp.readline()
2014 try:
2019 try:
2015 total_files, total_bytes = map(int, l.split(' ', 1))
2020 total_files, total_bytes = map(int, l.split(' ', 1))
2016 except (ValueError, TypeError):
2021 except (ValueError, TypeError):
2017 raise util.UnexpectedOutput(
2022 raise util.UnexpectedOutput(
2018 _('Unexpected response from remote server:'), l)
2023 _('Unexpected response from remote server:'), l)
2019 self.ui.status(_('%d files to transfer, %s of data\n') %
2024 self.ui.status(_('%d files to transfer, %s of data\n') %
2020 (total_files, util.bytecount(total_bytes)))
2025 (total_files, util.bytecount(total_bytes)))
2021 start = time.time()
2026 start = time.time()
2022 for i in xrange(total_files):
2027 for i in xrange(total_files):
2023 # XXX doesn't support '\n' or '\r' in filenames
2028 # XXX doesn't support '\n' or '\r' in filenames
2024 l = fp.readline()
2029 l = fp.readline()
2025 try:
2030 try:
2026 name, size = l.split('\0', 1)
2031 name, size = l.split('\0', 1)
2027 size = int(size)
2032 size = int(size)
2028 except (ValueError, TypeError):
2033 except (ValueError, TypeError):
2029 raise util.UnexpectedOutput(
2034 raise util.UnexpectedOutput(
2030 _('Unexpected response from remote server:'), l)
2035 _('Unexpected response from remote server:'), l)
2031 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2036 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2032 ofp = self.sopener(name, 'w')
2037 ofp = self.sopener(name, 'w')
2033 for chunk in util.filechunkiter(fp, limit=size):
2038 for chunk in util.filechunkiter(fp, limit=size):
2034 ofp.write(chunk)
2039 ofp.write(chunk)
2035 ofp.close()
2040 ofp.close()
2036 elapsed = time.time() - start
2041 elapsed = time.time() - start
2037 if elapsed <= 0:
2042 if elapsed <= 0:
2038 elapsed = 0.001
2043 elapsed = 0.001
2039 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2044 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2040 (util.bytecount(total_bytes), elapsed,
2045 (util.bytecount(total_bytes), elapsed,
2041 util.bytecount(total_bytes / elapsed)))
2046 util.bytecount(total_bytes / elapsed)))
2042 self.invalidate()
2047 self.invalidate()
2043 return len(self.heads()) + 1
2048 return len(self.heads()) + 1
2044
2049
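(stream_in() above parses a small line-oriented protocol from remote.stream_out(): a numeric status line, a "<files> <bytes>" line, then one "<name>\0<size>" header per file followed by exactly <size> bytes of data. The standalone sketch below restates that parsing under those assumptions; the function names are illustrative.)

def read_stream_header(fp):
    # status 0 means the server allows streaming; 1 and 2 are refusals
    status = int(fp.readline())
    if status != 0:
        raise ValueError("streaming clone refused, code %d" % status)
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    return total_files, total_bytes

def read_stream_entry(fp):
    # one entry: "<name>\0<size>\n" followed by <size> bytes of file data
    name, size = fp.readline().split('\0', 1)
    return name, int(size)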
2045 def clone(self, remote, heads=[], stream=False):
2050 def clone(self, remote, heads=[], stream=False):
2046 '''clone remote repository.
2051 '''clone remote repository.
2047
2052
2048 keyword arguments:
2053 keyword arguments:
2049 heads: list of revs to clone (forces use of pull)
2054 heads: list of revs to clone (forces use of pull)
2050 stream: use streaming clone if possible'''
2055 stream: use streaming clone if possible'''
2051
2056
2052 # now, all clients that can request uncompressed clones can
2057 # now, all clients that can request uncompressed clones can
2053 # read repo formats supported by all servers that can serve
2058 # read repo formats supported by all servers that can serve
2054 # them.
2059 # them.
2055
2060
2056 # if revlog format changes, client will have to check version
2061 # if revlog format changes, client will have to check version
2057 # and format flags on "stream" capability, and use
2062 # and format flags on "stream" capability, and use
2058 # uncompressed only if compatible.
2063 # uncompressed only if compatible.
2059
2064
2060 if stream and not heads and remote.capable('stream'):
2065 if stream and not heads and remote.capable('stream'):
2061 return self.stream_in(remote)
2066 return self.stream_in(remote)
2062 return self.pull(remote, heads)
2067 return self.pull(remote, heads)
2063
2068
2064 # used to avoid circular references so destructors work
2069 # used to avoid circular references so destructors work
2065 def aftertrans(files):
2070 def aftertrans(files):
2066 renamefiles = [tuple(t) for t in files]
2071 renamefiles = [tuple(t) for t in files]
2067 def a():
2072 def a():
2068 for src, dest in renamefiles:
2073 for src, dest in renamefiles:
2069 util.rename(src, dest)
2074 util.rename(src, dest)
2070 return a
2075 return a
2071
2076
2072 def instance(ui, path, create):
2077 def instance(ui, path, create):
2073 return localrepository(ui, util.drop_scheme('file', path), create)
2078 return localrepository(ui, util.drop_scheme('file', path), create)
2074
2079
2075 def islocal(path):
2080 def islocal(path):
2076 return True
2081 return True
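(aftertrans() above captures the pending (src, dest) renames in a closure so they only run once the transaction is finished. The sketch below shows the same pattern in isolation, with made-up file names and os.rename standing in for util.rename.)

import os

def deferred_renames(files):
    pending = [tuple(t) for t in files]
    def run():
        for src, dest in pending:
            os.rename(src, dest)
    return run

# Hypothetical usage: queue the rename now, run it when the transaction closes.
#   on_close = deferred_renames([("store/journal", "store/undo")])
#   ... transactional work ...
#   on_close()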
@@ -1,227 +1,238 @@
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from remoterepo import *
9 from remoterepo import *
10 from i18n import _
10 from i18n import _
11 import repo, os, re, stat, util
11 import repo, os, re, stat, util
12
12
13 class sshrepository(remoterepository):
13 class sshrepository(remoterepository):
14 def __init__(self, ui, path, create=0):
14 def __init__(self, ui, path, create=0):
15 self._url = path
15 self._url = path
16 self.ui = ui
16 self.ui = ui
17
17
18 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
18 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
19 if not m:
19 if not m:
20 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
20 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
21
21
22 self.user = m.group(2)
22 self.user = m.group(2)
23 self.host = m.group(3)
23 self.host = m.group(3)
24 self.port = m.group(5)
24 self.port = m.group(5)
25 self.path = m.group(7) or "."
25 self.path = m.group(7) or "."
26
26
27 sshcmd = self.ui.config("ui", "ssh", "ssh")
27 sshcmd = self.ui.config("ui", "ssh", "ssh")
28 remotecmd = self.ui.config("ui", "remotecmd", "hg")
28 remotecmd = self.ui.config("ui", "remotecmd", "hg")
29
29
30 args = util.sshargs(sshcmd, self.host, self.user, self.port)
30 args = util.sshargs(sshcmd, self.host, self.user, self.port)
31
31
32 if create:
32 if create:
33 cmd = '%s %s "%s init %s"'
33 cmd = '%s %s "%s init %s"'
34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
35
35
36 ui.note('running %s\n' % cmd)
36 ui.note('running %s\n' % cmd)
37 res = util.system(cmd)
37 res = util.system(cmd)
38 if res != 0:
38 if res != 0:
39 self.raise_(repo.RepoError(_("could not create remote repo")))
39 self.raise_(repo.RepoError(_("could not create remote repo")))
40
40
41 self.validate_repo(ui, sshcmd, args, remotecmd)
41 self.validate_repo(ui, sshcmd, args, remotecmd)
42
42
43 def url(self):
43 def url(self):
44 return self._url
44 return self._url
45
45
46 def validate_repo(self, ui, sshcmd, args, remotecmd):
46 def validate_repo(self, ui, sshcmd, args, remotecmd):
47 # clean up previous run
47 # clean up previous run
48 self.cleanup()
48 self.cleanup()
49
49
50 cmd = '%s %s "%s -R %s serve --stdio"'
50 cmd = '%s %s "%s -R %s serve --stdio"'
51 cmd = cmd % (sshcmd, args, remotecmd, self.path)
51 cmd = cmd % (sshcmd, args, remotecmd, self.path)
52
52
53 cmd = util.quotecommand(cmd)
53 cmd = util.quotecommand(cmd)
54 ui.note('running %s\n' % cmd)
54 ui.note('running %s\n' % cmd)
55 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
55 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
56
56
57 # skip any noise generated by remote shell
57 # skip any noise generated by remote shell
58 self.do_cmd("hello")
58 self.do_cmd("hello")
59 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
59 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
60 lines = ["", "dummy"]
60 lines = ["", "dummy"]
61 max_noise = 500
61 max_noise = 500
62 while lines[-1] and max_noise:
62 while lines[-1] and max_noise:
63 l = r.readline()
63 l = r.readline()
64 self.readerr()
64 self.readerr()
65 if lines[-1] == "1\n" and l == "\n":
65 if lines[-1] == "1\n" and l == "\n":
66 break
66 break
67 if l:
67 if l:
68 ui.debug(_("remote: "), l)
68 ui.debug(_("remote: "), l)
69 lines.append(l)
69 lines.append(l)
70 max_noise -= 1
70 max_noise -= 1
71 else:
71 else:
72 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
72 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
73
73
74 self.capabilities = util.set()
74 self.capabilities = util.set()
75 lines.reverse()
75 lines.reverse()
76 for l in lines:
76 for l in lines:
77 if l.startswith("capabilities:"):
77 if l.startswith("capabilities:"):
78 self.capabilities.update(l[:-1].split(":")[1].split())
78 self.capabilities.update(l[:-1].split(":")[1].split())
79 break
79 break
80
80
81 def readerr(self):
81 def readerr(self):
82 while 1:
82 while 1:
83 size = util.fstat(self.pipee).st_size
83 size = util.fstat(self.pipee).st_size
84 if size == 0: break
84 if size == 0: break
85 l = self.pipee.readline()
85 l = self.pipee.readline()
86 if not l: break
86 if not l: break
87 self.ui.status(_("remote: "), l)
87 self.ui.status(_("remote: "), l)
88
88
89 def raise_(self, exception):
89 def raise_(self, exception):
90 self.cleanup()
90 self.cleanup()
91 raise exception
91 raise exception
92
92
93 def cleanup(self):
93 def cleanup(self):
94 try:
94 try:
95 self.pipeo.close()
95 self.pipeo.close()
96 self.pipei.close()
96 self.pipei.close()
97 # read the error descriptor until EOF
97 # read the error descriptor until EOF
98 for l in self.pipee:
98 for l in self.pipee:
99 self.ui.status(_("remote: "), l)
99 self.ui.status(_("remote: "), l)
100 self.pipee.close()
100 self.pipee.close()
101 except:
101 except:
102 pass
102 pass
103
103
104 __del__ = cleanup
104 __del__ = cleanup
105
105
106 def do_cmd(self, cmd, **args):
106 def do_cmd(self, cmd, **args):
107 self.ui.debug(_("sending %s command\n") % cmd)
107 self.ui.debug(_("sending %s command\n") % cmd)
108 self.pipeo.write("%s\n" % cmd)
108 self.pipeo.write("%s\n" % cmd)
109 for k, v in args.items():
109 for k, v in args.items():
110 self.pipeo.write("%s %d\n" % (k, len(v)))
110 self.pipeo.write("%s %d\n" % (k, len(v)))
111 self.pipeo.write(v)
111 self.pipeo.write(v)
112 self.pipeo.flush()
112 self.pipeo.flush()
113
113
114 return self.pipei
114 return self.pipei
115
115
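(do_cmd() above frames a command for the remote "hg serve --stdio" process: the command name on its own line, then one "key length" header per argument followed by the raw value bytes, and the response is read back from pipei. frame_command below is an illustrative restatement of that framing, not part of the class.)

def frame_command(cmd, **args):
    # Mirror the byte sequence do_cmd() writes to the server's stdin.
    out = ["%s\n" % cmd]
    for k, v in args.items():
        out.append("%s %d\n" % (k, len(v)))   # header: argument name and value length
        out.append(v)                         # raw value bytes
    return ''.join(out)

# e.g. frame_command("between", pairs="0" * 40 + "-" + "0" * 40)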
116 def call(self, cmd, **args):
116 def call(self, cmd, **args):
117 r = self.do_cmd(cmd, **args)
117 self.do_cmd(cmd, **args)
118 l = r.readline()
118 return self._recv()
119
120 def _recv(self):
121 l = self.pipei.readline()
119 self.readerr()
122 self.readerr()
120 try:
123 try:
121 l = int(l)
124 l = int(l)
122 except:
125 except:
123 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
126 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
124 return r.read(l)
127 return self.pipei.read(l)
128
129 def _send(self, data, flush=False):
130 self.pipeo.write("%d\n" % len(data))
131 if data:
132 self.pipeo.write(data)
133 if flush:
134 self.pipeo.flush()
135 self.readerr()
125
136
126 def lock(self):
137 def lock(self):
127 self.call("lock")
138 self.call("lock")
128 return remotelock(self)
139 return remotelock(self)
129
140
130 def unlock(self):
141 def unlock(self):
131 self.call("unlock")
142 self.call("unlock")
132
143
133 def lookup(self, key):
144 def lookup(self, key):
134 self.requirecap('lookup', _('look up remote revision'))
145 self.requirecap('lookup', _('look up remote revision'))
135 d = self.call("lookup", key=key)
146 d = self.call("lookup", key=key)
136 success, data = d[:-1].split(" ", 1)
147 success, data = d[:-1].split(" ", 1)
137 if int(success):
148 if int(success):
138 return bin(data)
149 return bin(data)
139 else:
150 else:
140 self.raise_(repo.RepoError(data))
151 self.raise_(repo.RepoError(data))
141
152
142 def heads(self):
153 def heads(self):
143 d = self.call("heads")
154 d = self.call("heads")
144 try:
155 try:
145 return map(bin, d[:-1].split(" "))
156 return map(bin, d[:-1].split(" "))
146 except:
157 except:
147 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
158 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
148
159
149 def branches(self, nodes):
160 def branches(self, nodes):
150 n = " ".join(map(hex, nodes))
161 n = " ".join(map(hex, nodes))
151 d = self.call("branches", nodes=n)
162 d = self.call("branches", nodes=n)
152 try:
163 try:
153 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
164 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
154 return br
165 return br
155 except:
166 except:
156 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
167 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
157
168
158 def between(self, pairs):
169 def between(self, pairs):
159 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
170 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
160 d = self.call("between", pairs=n)
171 d = self.call("between", pairs=n)
161 try:
172 try:
162 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
173 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
163 return p
174 return p
164 except:
175 except:
165 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
176 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
166
177
167 def changegroup(self, nodes, kind):
178 def changegroup(self, nodes, kind):
168 n = " ".join(map(hex, nodes))
179 n = " ".join(map(hex, nodes))
169 return self.do_cmd("changegroup", roots=n)
180 return self.do_cmd("changegroup", roots=n)
170
181
171 def changegroupsubset(self, bases, heads, kind):
182 def changegroupsubset(self, bases, heads, kind):
172 self.requirecap('changegroupsubset', _('look up remote changes'))
183 self.requirecap('changegroupsubset', _('look up remote changes'))
173 bases = " ".join(map(hex, bases))
184 bases = " ".join(map(hex, bases))
174 heads = " ".join(map(hex, heads))
185 heads = " ".join(map(hex, heads))
175 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
186 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
176
187
177 def unbundle(self, cg, heads, source):
188 def unbundle(self, cg, heads, source):
178 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
189 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
179 if d:
190 if d:
180 # remote may send "unsynced changes"
191 # remote may send "unsynced changes"
181 self.raise_(repo.RepoError(_("push refused: %s") % d))
192 self.raise_(repo.RepoError(_("push refused: %s") % d))
182
193
183 while 1:
194 while 1:
184 d = cg.read(4096)
195 d = cg.read(4096)
185 if not d: break
196 if not d:
186 self.pipeo.write(str(len(d)) + '\n')
197 break
187 self.pipeo.write(d)
198 self._send(d)
188 self.readerr()
189
199
190 self.pipeo.write('0\n')
200 self._send("", flush=True)
191 self.pipeo.flush()
192
201
193 self.readerr()
202 r = self._recv()
194 l = int(self.pipei.readline())
195 r = self.pipei.read(l)
196 if r:
203 if r:
197 # remote may send "unsynced changes"
204 # remote may send "unsynced changes"
198 self.raise_(repo.RepoError(_("push failed: %s") % r))
205 self.raise_(repo.RepoError(_("push failed: %s") % r))
199
206
200 self.readerr()
207 r = self._recv()
201 l = int(self.pipei.readline())
208 try:
202 r = self.pipei.read(l)
209 return int(r)
203 return int(r)
210 except:
211 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
204
212
205 def addchangegroup(self, cg, source, url):
213 def addchangegroup(self, cg, source, url):
206 d = self.call("addchangegroup")
214 d = self.call("addchangegroup")
207 if d:
215 if d:
208 self.raise_(repo.RepoError(_("push refused: %s") % d))
216 self.raise_(repo.RepoError(_("push refused: %s") % d))
209 while 1:
217 while 1:
210 d = cg.read(4096)
218 d = cg.read(4096)
211 if not d: break
219 if not d:
220 break
212 self.pipeo.write(d)
221 self.pipeo.write(d)
213 self.readerr()
222 self.readerr()
214
223
215 self.pipeo.flush()
224 self.pipeo.flush()
216
225
217 self.readerr()
226 self.readerr()
218 l = int(self.pipei.readline())
227 r = self._recv()
219 r = self.pipei.read(l)
220 if not r:
228 if not r:
221 return 1
229 return 1
222 return int(r)
230 try:
231 return int(r)
232 except:
233 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
223
234
224 def stream_out(self):
235 def stream_out(self):
225 return self.do_cmd('stream_out')
236 return self.do_cmd('stream_out')
226
237
227 instance = sshrepository
238 instance = sshrepository
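(The main change in this file is the new _send/_recv pair, which makes the length-prefixed framing on the ssh pipes explicit: every payload is preceded by its decimal byte count and a newline, and an empty payload ("0\n") terminates a stream, which is how unbundle() now ends the changegroup. A standalone restatement of that framing, assuming a pair of file-like pipes:)

def send_frame(pipe, data, flush=False):
    # mirror of sshrepository._send(): length line, then the payload
    pipe.write("%d\n" % len(data))
    if data:
        pipe.write(data)
    if flush:
        pipe.flush()

def recv_frame(pipe):
    # mirror of sshrepository._recv(): read the length line, then that many bytes
    return pipe.read(int(pipe.readline()))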
@@ -1,6 +1,6 @@
1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
2 <html>
2 <html>
3 <head>
3 <head>
4 <link rel="icon" href="#staticurl#hgicon.png" type="image/png">
4 <link rel="icon" href="#staticurl#hgicon.png" type="image/png" />
5 <meta name="robots" content="index, nofollow" />
5 <meta name="robots" content="index, nofollow" />
6 <link rel="stylesheet" href="#staticurl#style.css" type="text/css" />
6 <link rel="stylesheet" href="#staticurl#style.css" type="text/css" />
@@ -1,6 +1,6 @@
1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
2 <html>
2 <html>
3 <head>
3 <head>
4 <link rel="icon" href="?static=hgicon.png" type="image/png">
4 <link rel="icon" href="?static=hgicon.png" type="image/png" />
5 <meta name="robots" content="index, nofollow" />
5 <meta name="robots" content="index, nofollow" />
6 <link rel="stylesheet" href="?static=style.css" type="text/css" />
6 <link rel="stylesheet" href="?static=style.css" type="text/css" />
@@ -1,583 +1,588 @@
1 3:911600dab2ae
1 3:911600dab2ae
2 requesting all changes
2 requesting all changes
3 adding changesets
3 adding changesets
4 adding manifests
4 adding manifests
5 adding file changes
5 adding file changes
6 added 1 changesets with 3 changes to 3 files
6 added 1 changesets with 3 changes to 3 files
7 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
8
8
9 Extension disabled for lack of a hook
9 Extension disabled for lack of a hook
10 Pushing as user fred
10 Pushing as user fred
11 hgrc = """
11 hgrc = """
12 """
12 """
13 pushing to ../b
13 pushing to ../b
14 searching for changes
14 searching for changes
15 common changesets up to 6675d58eff77
15 common changesets up to 6675d58eff77
16 3 changesets found
16 3 changesets found
17 List of changesets:
17 List of changesets:
18 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
18 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
19 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
19 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
20 911600dab2ae7a9baff75958b84fe606851ce955
20 911600dab2ae7a9baff75958b84fe606851ce955
21 adding changesets
21 adding changesets
22 add changeset ef1ea85a6374
22 add changeset ef1ea85a6374
23 add changeset f9cafe1212c8
23 add changeset f9cafe1212c8
24 add changeset 911600dab2ae
24 add changeset 911600dab2ae
25 adding manifests
25 adding manifests
26 adding file changes
26 adding file changes
27 adding foo/Bar/file.txt revisions
27 adding foo/Bar/file.txt revisions
28 adding foo/file.txt revisions
28 adding foo/file.txt revisions
29 adding quux/file.py revisions
29 adding quux/file.py revisions
30 added 3 changesets with 3 changes to 3 files
30 added 3 changesets with 3 changes to 3 files
31 updating the branch cache
31 rolling back last transaction
32 rolling back last transaction
32 0:6675d58eff77
33 0:6675d58eff77
33
34
34 Extension disabled for lack of acl.sources
35 Extension disabled for lack of acl.sources
35 Pushing as user fred
36 Pushing as user fred
36 hgrc = """
37 hgrc = """
37 [hooks]
38 [hooks]
38 pretxnchangegroup.acl = python:hgext.acl.hook
39 pretxnchangegroup.acl = python:hgext.acl.hook
39 """
40 """
40 pushing to ../b
41 pushing to ../b
41 searching for changes
42 searching for changes
42 common changesets up to 6675d58eff77
43 common changesets up to 6675d58eff77
43 3 changesets found
44 3 changesets found
44 List of changesets:
45 List of changesets:
45 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
46 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
46 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
47 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
47 911600dab2ae7a9baff75958b84fe606851ce955
48 911600dab2ae7a9baff75958b84fe606851ce955
48 adding changesets
49 adding changesets
49 add changeset ef1ea85a6374
50 add changeset ef1ea85a6374
50 add changeset f9cafe1212c8
51 add changeset f9cafe1212c8
51 add changeset 911600dab2ae
52 add changeset 911600dab2ae
52 adding manifests
53 adding manifests
53 adding file changes
54 adding file changes
54 adding foo/Bar/file.txt revisions
55 adding foo/Bar/file.txt revisions
55 adding foo/file.txt revisions
56 adding foo/file.txt revisions
56 adding quux/file.py revisions
57 adding quux/file.py revisions
57 added 3 changesets with 3 changes to 3 files
58 added 3 changesets with 3 changes to 3 files
58 calling hook pretxnchangegroup.acl: hgext.acl.hook
59 calling hook pretxnchangegroup.acl: hgext.acl.hook
59 acl: acl.allow not enabled
60 acl: acl.allow not enabled
60 acl: acl.deny not enabled
61 acl: acl.deny not enabled
61 acl: changes have source "push" - skipping
62 acl: changes have source "push" - skipping
63 updating the branch cache
62 rolling back last transaction
64 rolling back last transaction
63 0:6675d58eff77
65 0:6675d58eff77
64
66
65 No [acl.allow]/[acl.deny]
67 No [acl.allow]/[acl.deny]
66 Pushing as user fred
68 Pushing as user fred
67 hgrc = """
69 hgrc = """
68 [hooks]
70 [hooks]
69 pretxnchangegroup.acl = python:hgext.acl.hook
71 pretxnchangegroup.acl = python:hgext.acl.hook
70 [acl]
72 [acl]
71 sources = push
73 sources = push
72 """
74 """
73 pushing to ../b
75 pushing to ../b
74 searching for changes
76 searching for changes
75 common changesets up to 6675d58eff77
77 common changesets up to 6675d58eff77
76 3 changesets found
78 3 changesets found
77 List of changesets:
79 List of changesets:
78 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
80 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
79 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
81 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
80 911600dab2ae7a9baff75958b84fe606851ce955
82 911600dab2ae7a9baff75958b84fe606851ce955
81 adding changesets
83 adding changesets
82 add changeset ef1ea85a6374
84 add changeset ef1ea85a6374
83 add changeset f9cafe1212c8
85 add changeset f9cafe1212c8
84 add changeset 911600dab2ae
86 add changeset 911600dab2ae
85 adding manifests
87 adding manifests
86 adding file changes
88 adding file changes
87 adding foo/Bar/file.txt revisions
89 adding foo/Bar/file.txt revisions
88 adding foo/file.txt revisions
90 adding foo/file.txt revisions
89 adding quux/file.py revisions
91 adding quux/file.py revisions
90 added 3 changesets with 3 changes to 3 files
92 added 3 changesets with 3 changes to 3 files
91 calling hook pretxnchangegroup.acl: hgext.acl.hook
93 calling hook pretxnchangegroup.acl: hgext.acl.hook
92 acl: acl.allow not enabled
94 acl: acl.allow not enabled
93 acl: acl.deny not enabled
95 acl: acl.deny not enabled
94 acl: allowing changeset ef1ea85a6374
96 acl: allowing changeset ef1ea85a6374
95 acl: allowing changeset f9cafe1212c8
97 acl: allowing changeset f9cafe1212c8
96 acl: allowing changeset 911600dab2ae
98 acl: allowing changeset 911600dab2ae
99 updating the branch cache
97 rolling back last transaction
100 rolling back last transaction
98 0:6675d58eff77
101 0:6675d58eff77
99
102
100 Empty [acl.allow]
103 Empty [acl.allow]
101 Pushing as user fred
104 Pushing as user fred
102 hgrc = """
105 hgrc = """
103 [hooks]
106 [hooks]
104 pretxnchangegroup.acl = python:hgext.acl.hook
107 pretxnchangegroup.acl = python:hgext.acl.hook
105 [acl]
108 [acl]
106 sources = push
109 sources = push
107 [acl.allow]
110 [acl.allow]
108 """
111 """
109 pushing to ../b
112 pushing to ../b
110 searching for changes
113 searching for changes
111 common changesets up to 6675d58eff77
114 common changesets up to 6675d58eff77
112 3 changesets found
115 3 changesets found
113 List of changesets:
116 List of changesets:
114 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
117 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
115 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
118 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
116 911600dab2ae7a9baff75958b84fe606851ce955
119 911600dab2ae7a9baff75958b84fe606851ce955
117 adding changesets
120 adding changesets
118 add changeset ef1ea85a6374
121 add changeset ef1ea85a6374
119 add changeset f9cafe1212c8
122 add changeset f9cafe1212c8
120 add changeset 911600dab2ae
123 add changeset 911600dab2ae
121 adding manifests
124 adding manifests
122 adding file changes
125 adding file changes
123 adding foo/Bar/file.txt revisions
126 adding foo/Bar/file.txt revisions
124 adding foo/file.txt revisions
127 adding foo/file.txt revisions
125 adding quux/file.py revisions
128 adding quux/file.py revisions
126 added 3 changesets with 3 changes to 3 files
129 added 3 changesets with 3 changes to 3 files
127 calling hook pretxnchangegroup.acl: hgext.acl.hook
130 calling hook pretxnchangegroup.acl: hgext.acl.hook
128 acl: acl.allow enabled, 0 entries for user fred
131 acl: acl.allow enabled, 0 entries for user fred
129 acl: acl.deny not enabled
132 acl: acl.deny not enabled
130 acl: user fred not allowed on foo/file.txt
133 acl: user fred not allowed on foo/file.txt
131 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
134 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
132 transaction abort!
135 transaction abort!
133 rollback completed
136 rollback completed
134 abort: acl: access denied for changeset ef1ea85a6374
137 abort: acl: access denied for changeset ef1ea85a6374
135 no rollback information available
138 no rollback information available
136 0:6675d58eff77
139 0:6675d58eff77
137
140
138 fred is allowed inside foo/
141 fred is allowed inside foo/
139 Pushing as user fred
142 Pushing as user fred
140 hgrc = """
143 hgrc = """
141 [hooks]
144 [hooks]
142 pretxnchangegroup.acl = python:hgext.acl.hook
145 pretxnchangegroup.acl = python:hgext.acl.hook
143 [acl]
146 [acl]
144 sources = push
147 sources = push
145 [acl.allow]
148 [acl.allow]
146 foo/** = fred
149 foo/** = fred
147 """
150 """
148 pushing to ../b
151 pushing to ../b
149 searching for changes
152 searching for changes
150 common changesets up to 6675d58eff77
153 common changesets up to 6675d58eff77
151 3 changesets found
154 3 changesets found
152 List of changesets:
155 List of changesets:
153 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
156 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
154 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
157 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
155 911600dab2ae7a9baff75958b84fe606851ce955
158 911600dab2ae7a9baff75958b84fe606851ce955
156 adding changesets
159 adding changesets
157 add changeset ef1ea85a6374
160 add changeset ef1ea85a6374
158 add changeset f9cafe1212c8
161 add changeset f9cafe1212c8
159 add changeset 911600dab2ae
162 add changeset 911600dab2ae
160 adding manifests
163 adding manifests
161 adding file changes
164 adding file changes
162 adding foo/Bar/file.txt revisions
165 adding foo/Bar/file.txt revisions
163 adding foo/file.txt revisions
166 adding foo/file.txt revisions
164 adding quux/file.py revisions
167 adding quux/file.py revisions
165 added 3 changesets with 3 changes to 3 files
168 added 3 changesets with 3 changes to 3 files
166 calling hook pretxnchangegroup.acl: hgext.acl.hook
169 calling hook pretxnchangegroup.acl: hgext.acl.hook
167 acl: acl.allow enabled, 1 entries for user fred
170 acl: acl.allow enabled, 1 entries for user fred
168 acl: acl.deny not enabled
171 acl: acl.deny not enabled
169 acl: allowing changeset ef1ea85a6374
172 acl: allowing changeset ef1ea85a6374
170 acl: allowing changeset f9cafe1212c8
173 acl: allowing changeset f9cafe1212c8
171 acl: user fred not allowed on quux/file.py
174 acl: user fred not allowed on quux/file.py
172 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
175 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
173 transaction abort!
176 transaction abort!
174 rollback completed
177 rollback completed
175 abort: acl: access denied for changeset 911600dab2ae
178 abort: acl: access denied for changeset 911600dab2ae
176 no rollback information available
179 no rollback information available
177 0:6675d58eff77
180 0:6675d58eff77
178
181
179 Empty [acl.deny]
182 Empty [acl.deny]
180 Pushing as user barney
183 Pushing as user barney
181 hgrc = """
184 hgrc = """
182 [hooks]
185 [hooks]
183 pretxnchangegroup.acl = python:hgext.acl.hook
186 pretxnchangegroup.acl = python:hgext.acl.hook
184 [acl]
187 [acl]
185 sources = push
188 sources = push
186 [acl.allow]
189 [acl.allow]
187 foo/** = fred
190 foo/** = fred
188 [acl.deny]
191 [acl.deny]
189 """
192 """
190 pushing to ../b
193 pushing to ../b
191 searching for changes
194 searching for changes
192 common changesets up to 6675d58eff77
195 common changesets up to 6675d58eff77
193 3 changesets found
196 3 changesets found
194 List of changesets:
197 List of changesets:
195 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
198 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
196 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
199 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
197 911600dab2ae7a9baff75958b84fe606851ce955
200 911600dab2ae7a9baff75958b84fe606851ce955
198 adding changesets
201 adding changesets
199 add changeset ef1ea85a6374
202 add changeset ef1ea85a6374
200 add changeset f9cafe1212c8
203 add changeset f9cafe1212c8
201 add changeset 911600dab2ae
204 add changeset 911600dab2ae
202 adding manifests
205 adding manifests
203 adding file changes
206 adding file changes
204 adding foo/Bar/file.txt revisions
207 adding foo/Bar/file.txt revisions
205 adding foo/file.txt revisions
208 adding foo/file.txt revisions
206 adding quux/file.py revisions
209 adding quux/file.py revisions
207 added 3 changesets with 3 changes to 3 files
210 added 3 changesets with 3 changes to 3 files
208 calling hook pretxnchangegroup.acl: hgext.acl.hook
211 calling hook pretxnchangegroup.acl: hgext.acl.hook
209 acl: acl.allow enabled, 0 entries for user barney
212 acl: acl.allow enabled, 0 entries for user barney
210 acl: acl.deny enabled, 0 entries for user barney
213 acl: acl.deny enabled, 0 entries for user barney
211 acl: user barney not allowed on foo/file.txt
214 acl: user barney not allowed on foo/file.txt
212 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
215 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
213 transaction abort!
216 transaction abort!
214 rollback completed
217 rollback completed
215 abort: acl: access denied for changeset ef1ea85a6374
218 abort: acl: access denied for changeset ef1ea85a6374
216 no rollback information available
219 no rollback information available
217 0:6675d58eff77
220 0:6675d58eff77
218
221
219 fred is allowed inside foo/, but not foo/bar/ (case matters)
222 fred is allowed inside foo/, but not foo/bar/ (case matters)
220 Pushing as user fred
223 Pushing as user fred
221 hgrc = """
224 hgrc = """
222 [hooks]
225 [hooks]
223 pretxnchangegroup.acl = python:hgext.acl.hook
226 pretxnchangegroup.acl = python:hgext.acl.hook
224 [acl]
227 [acl]
225 sources = push
228 sources = push
226 [acl.allow]
229 [acl.allow]
227 foo/** = fred
230 foo/** = fred
228 [acl.deny]
231 [acl.deny]
229 foo/bar/** = fred
232 foo/bar/** = fred
230 """
233 """
231 pushing to ../b
234 pushing to ../b
232 searching for changes
235 searching for changes
233 common changesets up to 6675d58eff77
236 common changesets up to 6675d58eff77
234 3 changesets found
237 3 changesets found
235 List of changesets:
238 List of changesets:
236 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
239 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
237 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
240 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
238 911600dab2ae7a9baff75958b84fe606851ce955
241 911600dab2ae7a9baff75958b84fe606851ce955
239 adding changesets
242 adding changesets
240 add changeset ef1ea85a6374
243 add changeset ef1ea85a6374
241 add changeset f9cafe1212c8
244 add changeset f9cafe1212c8
242 add changeset 911600dab2ae
245 add changeset 911600dab2ae
243 adding manifests
246 adding manifests
244 adding file changes
247 adding file changes
245 adding foo/Bar/file.txt revisions
248 adding foo/Bar/file.txt revisions
246 adding foo/file.txt revisions
249 adding foo/file.txt revisions
247 adding quux/file.py revisions
250 adding quux/file.py revisions
248 added 3 changesets with 3 changes to 3 files
251 added 3 changesets with 3 changes to 3 files
249 calling hook pretxnchangegroup.acl: hgext.acl.hook
252 calling hook pretxnchangegroup.acl: hgext.acl.hook
250 acl: acl.allow enabled, 1 entries for user fred
253 acl: acl.allow enabled, 1 entries for user fred
251 acl: acl.deny enabled, 1 entries for user fred
254 acl: acl.deny enabled, 1 entries for user fred
252 acl: allowing changeset ef1ea85a6374
255 acl: allowing changeset ef1ea85a6374
253 acl: allowing changeset f9cafe1212c8
256 acl: allowing changeset f9cafe1212c8
254 acl: user fred not allowed on quux/file.py
257 acl: user fred not allowed on quux/file.py
255 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
258 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
256 transaction abort!
259 transaction abort!
257 rollback completed
260 rollback completed
258 abort: acl: access denied for changeset 911600dab2ae
261 abort: acl: access denied for changeset 911600dab2ae
259 no rollback information available
262 no rollback information available
260 0:6675d58eff77
263 0:6675d58eff77
261
264
262 fred is allowed inside foo/, but not foo/Bar/
265 fred is allowed inside foo/, but not foo/Bar/
263 Pushing as user fred
266 Pushing as user fred
264 hgrc = """
267 hgrc = """
265 [hooks]
268 [hooks]
266 pretxnchangegroup.acl = python:hgext.acl.hook
269 pretxnchangegroup.acl = python:hgext.acl.hook
267 [acl]
270 [acl]
268 sources = push
271 sources = push
269 [acl.allow]
272 [acl.allow]
270 foo/** = fred
273 foo/** = fred
271 [acl.deny]
274 [acl.deny]
272 foo/bar/** = fred
275 foo/bar/** = fred
273 foo/Bar/** = fred
276 foo/Bar/** = fred
274 """
277 """
275 pushing to ../b
278 pushing to ../b
276 searching for changes
279 searching for changes
277 common changesets up to 6675d58eff77
280 common changesets up to 6675d58eff77
278 3 changesets found
281 3 changesets found
279 List of changesets:
282 List of changesets:
280 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
283 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
281 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
284 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
282 911600dab2ae7a9baff75958b84fe606851ce955
285 911600dab2ae7a9baff75958b84fe606851ce955
283 adding changesets
286 adding changesets
284 add changeset ef1ea85a6374
287 add changeset ef1ea85a6374
285 add changeset f9cafe1212c8
288 add changeset f9cafe1212c8
286 add changeset 911600dab2ae
289 add changeset 911600dab2ae
287 adding manifests
290 adding manifests
288 adding file changes
291 adding file changes
289 adding foo/Bar/file.txt revisions
292 adding foo/Bar/file.txt revisions
290 adding foo/file.txt revisions
293 adding foo/file.txt revisions
291 adding quux/file.py revisions
294 adding quux/file.py revisions
292 added 3 changesets with 3 changes to 3 files
295 added 3 changesets with 3 changes to 3 files
293 calling hook pretxnchangegroup.acl: hgext.acl.hook
296 calling hook pretxnchangegroup.acl: hgext.acl.hook
294 acl: acl.allow enabled, 1 entries for user fred
297 acl: acl.allow enabled, 1 entries for user fred
295 acl: acl.deny enabled, 2 entries for user fred
298 acl: acl.deny enabled, 2 entries for user fred
296 acl: allowing changeset ef1ea85a6374
299 acl: allowing changeset ef1ea85a6374
297 acl: user fred denied on foo/Bar/file.txt
300 acl: user fred denied on foo/Bar/file.txt
298 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
301 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
299 transaction abort!
302 transaction abort!
300 rollback completed
303 rollback completed
301 abort: acl: access denied for changeset f9cafe1212c8
304 abort: acl: access denied for changeset f9cafe1212c8
302 no rollback information available
305 no rollback information available
303 0:6675d58eff77
306 0:6675d58eff77
304
307
305 barney is not mentioned => not allowed anywhere
308 barney is not mentioned => not allowed anywhere
306 Pushing as user barney
309 Pushing as user barney
307 hgrc = """
310 hgrc = """
308 [hooks]
311 [hooks]
309 pretxnchangegroup.acl = python:hgext.acl.hook
312 pretxnchangegroup.acl = python:hgext.acl.hook
310 [acl]
313 [acl]
311 sources = push
314 sources = push
312 [acl.allow]
315 [acl.allow]
313 foo/** = fred
316 foo/** = fred
314 [acl.deny]
317 [acl.deny]
315 foo/bar/** = fred
318 foo/bar/** = fred
316 foo/Bar/** = fred
319 foo/Bar/** = fred
317 """
320 """
318 pushing to ../b
321 pushing to ../b
319 searching for changes
322 searching for changes
320 common changesets up to 6675d58eff77
323 common changesets up to 6675d58eff77
321 3 changesets found
324 3 changesets found
322 List of changesets:
325 List of changesets:
323 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
326 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
324 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
327 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
325 911600dab2ae7a9baff75958b84fe606851ce955
328 911600dab2ae7a9baff75958b84fe606851ce955
326 adding changesets
329 adding changesets
327 add changeset ef1ea85a6374
330 add changeset ef1ea85a6374
328 add changeset f9cafe1212c8
331 add changeset f9cafe1212c8
329 add changeset 911600dab2ae
332 add changeset 911600dab2ae
330 adding manifests
333 adding manifests
331 adding file changes
334 adding file changes
332 adding foo/Bar/file.txt revisions
335 adding foo/Bar/file.txt revisions
333 adding foo/file.txt revisions
336 adding foo/file.txt revisions
334 adding quux/file.py revisions
337 adding quux/file.py revisions
335 added 3 changesets with 3 changes to 3 files
338 added 3 changesets with 3 changes to 3 files
336 calling hook pretxnchangegroup.acl: hgext.acl.hook
339 calling hook pretxnchangegroup.acl: hgext.acl.hook
337 acl: acl.allow enabled, 0 entries for user barney
340 acl: acl.allow enabled, 0 entries for user barney
338 acl: acl.deny enabled, 0 entries for user barney
341 acl: acl.deny enabled, 0 entries for user barney
339 acl: user barney not allowed on foo/file.txt
342 acl: user barney not allowed on foo/file.txt
340 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
343 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
341 transaction abort!
344 transaction abort!
342 rollback completed
345 rollback completed
343 abort: acl: access denied for changeset ef1ea85a6374
346 abort: acl: access denied for changeset ef1ea85a6374
344 no rollback information available
347 no rollback information available
345 0:6675d58eff77
348 0:6675d58eff77
346
349
347 barney is allowed everywhere
350 barney is allowed everywhere
348 Pushing as user barney
351 Pushing as user barney
349 hgrc = """
352 hgrc = """
350 [hooks]
353 [hooks]
351 pretxnchangegroup.acl = python:hgext.acl.hook
354 pretxnchangegroup.acl = python:hgext.acl.hook
352 [acl]
355 [acl]
353 sources = push
356 sources = push
354 [acl.allow]
357 [acl.allow]
355 foo/** = fred
358 foo/** = fred
356 [acl.deny]
359 [acl.deny]
357 foo/bar/** = fred
360 foo/bar/** = fred
358 foo/Bar/** = fred
361 foo/Bar/** = fred
359 [acl.allow]
362 [acl.allow]
360 ** = barney
363 ** = barney
361 """
364 """
362 pushing to ../b
365 pushing to ../b
363 searching for changes
366 searching for changes
364 common changesets up to 6675d58eff77
367 common changesets up to 6675d58eff77
365 3 changesets found
368 3 changesets found
366 List of changesets:
369 List of changesets:
367 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
370 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
368 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
371 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
369 911600dab2ae7a9baff75958b84fe606851ce955
372 911600dab2ae7a9baff75958b84fe606851ce955
370 adding changesets
373 adding changesets
371 add changeset ef1ea85a6374
374 add changeset ef1ea85a6374
372 add changeset f9cafe1212c8
375 add changeset f9cafe1212c8
373 add changeset 911600dab2ae
376 add changeset 911600dab2ae
374 adding manifests
377 adding manifests
375 adding file changes
378 adding file changes
376 adding foo/Bar/file.txt revisions
379 adding foo/Bar/file.txt revisions
377 adding foo/file.txt revisions
380 adding foo/file.txt revisions
378 adding quux/file.py revisions
381 adding quux/file.py revisions
379 added 3 changesets with 3 changes to 3 files
382 added 3 changesets with 3 changes to 3 files
380 calling hook pretxnchangegroup.acl: hgext.acl.hook
383 calling hook pretxnchangegroup.acl: hgext.acl.hook
381 acl: acl.allow enabled, 1 entries for user barney
384 acl: acl.allow enabled, 1 entries for user barney
382 acl: acl.deny enabled, 0 entries for user barney
385 acl: acl.deny enabled, 0 entries for user barney
383 acl: allowing changeset ef1ea85a6374
386 acl: allowing changeset ef1ea85a6374
384 acl: allowing changeset f9cafe1212c8
387 acl: allowing changeset f9cafe1212c8
385 acl: allowing changeset 911600dab2ae
388 acl: allowing changeset 911600dab2ae
389 updating the branch cache
386 rolling back last transaction
390 rolling back last transaction
387 0:6675d58eff77
391 0:6675d58eff77
388
392
389 wilma can change files with a .txt extension
393 wilma can change files with a .txt extension
390 Pushing as user wilma
394 Pushing as user wilma
391 hgrc = """
395 hgrc = """
392 [hooks]
396 [hooks]
393 pretxnchangegroup.acl = python:hgext.acl.hook
397 pretxnchangegroup.acl = python:hgext.acl.hook
394 [acl]
398 [acl]
395 sources = push
399 sources = push
396 [acl.allow]
400 [acl.allow]
397 foo/** = fred
401 foo/** = fred
398 [acl.deny]
402 [acl.deny]
399 foo/bar/** = fred
403 foo/bar/** = fred
400 foo/Bar/** = fred
404 foo/Bar/** = fred
401 [acl.allow]
405 [acl.allow]
402 ** = barney
406 ** = barney
403 **/*.txt = wilma
407 **/*.txt = wilma
404 """
408 """
405 pushing to ../b
409 pushing to ../b
406 searching for changes
410 searching for changes
407 common changesets up to 6675d58eff77
411 common changesets up to 6675d58eff77
408 3 changesets found
412 3 changesets found
409 List of changesets:
413 List of changesets:
410 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
414 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
411 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
415 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
412 911600dab2ae7a9baff75958b84fe606851ce955
416 911600dab2ae7a9baff75958b84fe606851ce955
413 adding changesets
417 adding changesets
414 add changeset ef1ea85a6374
418 add changeset ef1ea85a6374
415 add changeset f9cafe1212c8
419 add changeset f9cafe1212c8
416 add changeset 911600dab2ae
420 add changeset 911600dab2ae
417 adding manifests
421 adding manifests
418 adding file changes
422 adding file changes
419 adding foo/Bar/file.txt revisions
423 adding foo/Bar/file.txt revisions
420 adding foo/file.txt revisions
424 adding foo/file.txt revisions
421 adding quux/file.py revisions
425 adding quux/file.py revisions
422 added 3 changesets with 3 changes to 3 files
426 added 3 changesets with 3 changes to 3 files
423 calling hook pretxnchangegroup.acl: hgext.acl.hook
427 calling hook pretxnchangegroup.acl: hgext.acl.hook
424 acl: acl.allow enabled, 1 entries for user wilma
428 acl: acl.allow enabled, 1 entries for user wilma
425 acl: acl.deny enabled, 0 entries for user wilma
429 acl: acl.deny enabled, 0 entries for user wilma
426 acl: allowing changeset ef1ea85a6374
430 acl: allowing changeset ef1ea85a6374
427 acl: allowing changeset f9cafe1212c8
431 acl: allowing changeset f9cafe1212c8
428 acl: user wilma not allowed on quux/file.py
432 acl: user wilma not allowed on quux/file.py
429 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
433 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
430 transaction abort!
434 transaction abort!
431 rollback completed
435 rollback completed
432 abort: acl: access denied for changeset 911600dab2ae
436 abort: acl: access denied for changeset 911600dab2ae
433 no rollback information available
437 no rollback information available
434 0:6675d58eff77
438 0:6675d58eff77
435
439
436 file specified by acl.config does not exist
440 file specified by acl.config does not exist
437 Pushing as user barney
441 Pushing as user barney
438 hgrc = """
442 hgrc = """
439 [hooks]
443 [hooks]
440 pretxnchangegroup.acl = python:hgext.acl.hook
444 pretxnchangegroup.acl = python:hgext.acl.hook
441 [acl]
445 [acl]
442 sources = push
446 sources = push
443 [acl.allow]
447 [acl.allow]
444 foo/** = fred
448 foo/** = fred
445 [acl.deny]
449 [acl.deny]
446 foo/bar/** = fred
450 foo/bar/** = fred
447 foo/Bar/** = fred
451 foo/Bar/** = fred
448 [acl.allow]
452 [acl.allow]
449 ** = barney
453 ** = barney
450 **/*.txt = wilma
454 **/*.txt = wilma
451 [acl]
455 [acl]
452 config = ../acl.config
456 config = ../acl.config
453 """
457 """
454 pushing to ../b
458 pushing to ../b
455 searching for changes
459 searching for changes
456 common changesets up to 6675d58eff77
460 common changesets up to 6675d58eff77
457 3 changesets found
461 3 changesets found
458 List of changesets:
462 List of changesets:
459 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
463 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
460 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
464 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
461 911600dab2ae7a9baff75958b84fe606851ce955
465 911600dab2ae7a9baff75958b84fe606851ce955
462 adding changesets
466 adding changesets
463 add changeset ef1ea85a6374
467 add changeset ef1ea85a6374
464 add changeset f9cafe1212c8
468 add changeset f9cafe1212c8
465 add changeset 911600dab2ae
469 add changeset 911600dab2ae
466 adding manifests
470 adding manifests
467 adding file changes
471 adding file changes
468 adding foo/Bar/file.txt revisions
472 adding foo/Bar/file.txt revisions
469 adding foo/file.txt revisions
473 adding foo/file.txt revisions
470 adding quux/file.py revisions
474 adding quux/file.py revisions
471 added 3 changesets with 3 changes to 3 files
475 added 3 changesets with 3 changes to 3 files
472 calling hook pretxnchangegroup.acl: hgext.acl.hook
476 calling hook pretxnchangegroup.acl: hgext.acl.hook
473 error: pretxnchangegroup.acl hook failed: unable to open ../acl.config: No such file or directory
477 error: pretxnchangegroup.acl hook failed: unable to open ../acl.config: No such file or directory
474 transaction abort!
478 transaction abort!
475 rollback completed
479 rollback completed
476 abort: unable to open ../acl.config: No such file or directory
480 abort: unable to open ../acl.config: No such file or directory
477 no rollback information available
481 no rollback information available
478 0:6675d58eff77
482 0:6675d58eff77
479
483
480 betty is allowed inside foo/ by an acl.config file
484 betty is allowed inside foo/ by an acl.config file
481 Pushing as user betty
485 Pushing as user betty
482 hgrc = """
486 hgrc = """
483 [hooks]
487 [hooks]
484 pretxnchangegroup.acl = python:hgext.acl.hook
488 pretxnchangegroup.acl = python:hgext.acl.hook
485 [acl]
489 [acl]
486 sources = push
490 sources = push
487 [acl.allow]
491 [acl.allow]
488 foo/** = fred
492 foo/** = fred
489 [acl.deny]
493 [acl.deny]
490 foo/bar/** = fred
494 foo/bar/** = fred
491 foo/Bar/** = fred
495 foo/Bar/** = fred
492 [acl.allow]
496 [acl.allow]
493 ** = barney
497 ** = barney
494 **/*.txt = wilma
498 **/*.txt = wilma
495 [acl]
499 [acl]
496 config = ../acl.config
500 config = ../acl.config
497 """
501 """
498 acl.config = """
502 acl.config = """
499 [acl.allow]
503 [acl.allow]
500 foo/** = betty
504 foo/** = betty
501 """
505 """
502 pushing to ../b
506 pushing to ../b
503 searching for changes
507 searching for changes
504 common changesets up to 6675d58eff77
508 common changesets up to 6675d58eff77
505 3 changesets found
509 3 changesets found
506 List of changesets:
510 List of changesets:
507 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
511 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
508 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
512 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
509 911600dab2ae7a9baff75958b84fe606851ce955
513 911600dab2ae7a9baff75958b84fe606851ce955
510 adding changesets
514 adding changesets
511 add changeset ef1ea85a6374
515 add changeset ef1ea85a6374
512 add changeset f9cafe1212c8
516 add changeset f9cafe1212c8
513 add changeset 911600dab2ae
517 add changeset 911600dab2ae
514 adding manifests
518 adding manifests
515 adding file changes
519 adding file changes
516 adding foo/Bar/file.txt revisions
520 adding foo/Bar/file.txt revisions
517 adding foo/file.txt revisions
521 adding foo/file.txt revisions
518 adding quux/file.py revisions
522 adding quux/file.py revisions
519 added 3 changesets with 3 changes to 3 files
523 added 3 changesets with 3 changes to 3 files
520 calling hook pretxnchangegroup.acl: hgext.acl.hook
524 calling hook pretxnchangegroup.acl: hgext.acl.hook
521 acl: acl.allow enabled, 1 entries for user betty
525 acl: acl.allow enabled, 1 entries for user betty
522 acl: acl.deny enabled, 0 entries for user betty
526 acl: acl.deny enabled, 0 entries for user betty
523 acl: allowing changeset ef1ea85a6374
527 acl: allowing changeset ef1ea85a6374
524 acl: allowing changeset f9cafe1212c8
528 acl: allowing changeset f9cafe1212c8
525 acl: user betty not allowed on quux/file.py
529 acl: user betty not allowed on quux/file.py
526 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
530 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
527 transaction abort!
531 transaction abort!
528 rollback completed
532 rollback completed
529 abort: acl: access denied for changeset 911600dab2ae
533 abort: acl: access denied for changeset 911600dab2ae
530 no rollback information available
534 no rollback information available
531 0:6675d58eff77
535 0:6675d58eff77
532
536
533 acl.config can set only [acl.allow]/[acl.deny]
537 acl.config can set only [acl.allow]/[acl.deny]
534 Pushing as user barney
538 Pushing as user barney
535 hgrc = """
539 hgrc = """
536 [hooks]
540 [hooks]
537 pretxnchangegroup.acl = python:hgext.acl.hook
541 pretxnchangegroup.acl = python:hgext.acl.hook
538 [acl]
542 [acl]
539 sources = push
543 sources = push
540 [acl.allow]
544 [acl.allow]
541 foo/** = fred
545 foo/** = fred
542 [acl.deny]
546 [acl.deny]
543 foo/bar/** = fred
547 foo/bar/** = fred
544 foo/Bar/** = fred
548 foo/Bar/** = fred
545 [acl.allow]
549 [acl.allow]
546 ** = barney
550 ** = barney
547 **/*.txt = wilma
551 **/*.txt = wilma
548 [acl]
552 [acl]
549 config = ../acl.config
553 config = ../acl.config
550 """
554 """
551 acl.config = """
555 acl.config = """
552 [acl.allow]
556 [acl.allow]
553 foo/** = betty
557 foo/** = betty
554 [hooks]
558 [hooks]
555 changegroup.acl = false
559 changegroup.acl = false
556 """
560 """
557 pushing to ../b
561 pushing to ../b
558 searching for changes
562 searching for changes
559 common changesets up to 6675d58eff77
563 common changesets up to 6675d58eff77
560 3 changesets found
564 3 changesets found
561 List of changesets:
565 List of changesets:
562 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
566 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
563 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
567 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
564 911600dab2ae7a9baff75958b84fe606851ce955
568 911600dab2ae7a9baff75958b84fe606851ce955
565 adding changesets
569 adding changesets
566 add changeset ef1ea85a6374
570 add changeset ef1ea85a6374
567 add changeset f9cafe1212c8
571 add changeset f9cafe1212c8
568 add changeset 911600dab2ae
572 add changeset 911600dab2ae
569 adding manifests
573 adding manifests
570 adding file changes
574 adding file changes
571 adding foo/Bar/file.txt revisions
575 adding foo/Bar/file.txt revisions
572 adding foo/file.txt revisions
576 adding foo/file.txt revisions
573 adding quux/file.py revisions
577 adding quux/file.py revisions
574 added 3 changesets with 3 changes to 3 files
578 added 3 changesets with 3 changes to 3 files
575 calling hook pretxnchangegroup.acl: hgext.acl.hook
579 calling hook pretxnchangegroup.acl: hgext.acl.hook
576 acl: acl.allow enabled, 1 entries for user barney
580 acl: acl.allow enabled, 1 entries for user barney
577 acl: acl.deny enabled, 0 entries for user barney
581 acl: acl.deny enabled, 0 entries for user barney
578 acl: allowing changeset ef1ea85a6374
582 acl: allowing changeset ef1ea85a6374
579 acl: allowing changeset f9cafe1212c8
583 acl: allowing changeset f9cafe1212c8
580 acl: allowing changeset 911600dab2ae
584 acl: allowing changeset 911600dab2ae
585 updating the branch cache
581 rolling back last transaction
586 rolling back last transaction
582 0:6675d58eff77
587 0:6675d58eff77
583
588
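A minimal sketch of the hgrc wiring that the acl checks above rely on, with a hypothetical user and path patterns used purely for illustration (the real hgrc blocks are the ones echoed in the test output):

[hooks]
pretxnchangegroup.acl = python:hgext.acl.hook
[acl]
# only check changesets arriving via push
sources = push
[acl.allow]
# docwriter (hypothetical user) may only touch docs/
docs/** = docwriter
[acl.deny]
# ...and never the generated/ subtree
docs/generated/** = docwriter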
1 NO CONTENT: modified file, binary diff hidden
NO CONTENT: modified file, binary diff hidden
@@ -1,139 +1,139 b''
1 adding da/foo
1 adding da/foo
2 adding foo
2 adding foo
3 % manifest
3 % manifest
4 200 Script output follows
4 200 Script output follows
5
5
6
6
7 drwxr-xr-x da
7 drwxr-xr-x da
8 -rw-r--r-- 4 foo
8 -rw-r--r-- 4 foo
9
9
10
10
11 200 Script output follows
11 200 Script output follows
12
12
13
13
14 -rw-r--r-- 4 foo
14 -rw-r--r-- 4 foo
15
15
16
16
17 % plain file
17 % plain file
18 200 Script output follows
18 200 Script output follows
19
19
20 foo
20 foo
21 % should give a 404 - static file that does not exist
21 % should give a 404 - static file that does not exist
22 404 Not Found
22 404 Not Found
23
23
24 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
24 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
25 <html>
25 <html>
26 <head>
26 <head>
27 <link rel="icon" href="/static/hgicon.png" type="image/png">
27 <link rel="icon" href="/static/hgicon.png" type="image/png" />
28 <meta name="robots" content="index, nofollow" />
28 <meta name="robots" content="index, nofollow" />
29 <link rel="stylesheet" href="/static/style.css" type="text/css" />
29 <link rel="stylesheet" href="/static/style.css" type="text/css" />
30
30
31 <title>Mercurial Error</title>
31 <title>Mercurial Error</title>
32 </head>
32 </head>
33 <body>
33 <body>
34
34
35 <h2>Mercurial Error</h2>
35 <h2>Mercurial Error</h2>
36
36
37 <p>
37 <p>
38 An error occurred while processing your request:
38 An error occurred while processing your request:
39 </p>
39 </p>
40 <p>
40 <p>
41 Not Found
41 Not Found
42 </p>
42 </p>
43
43
44
44
45 <div class="logo">
45 <div class="logo">
46 powered by<br/>
46 powered by<br/>
47 <a href="http://www.selenic.com/mercurial/">mercurial</a>
47 <a href="http://www.selenic.com/mercurial/">mercurial</a>
48 </div>
48 </div>
49
49
50 </body>
50 </body>
51 </html>
51 </html>
52
52
53 % should give a 404 - bad revision
53 % should give a 404 - bad revision
54 404 Not Found
54 404 Not Found
55
55
56
56
57 error: revision not found: spam
57 error: revision not found: spam
58 % should give a 400 - bad command
58 % should give a 400 - bad command
59 400
59 400
60
60
61
61
62 error: No such method: spam
62 error: No such method: spam
63 % should give a 404 - file does not exist
63 % should give a 404 - file does not exist
64 404 Not Found
64 404 Not Found
65
65
66
66
67 error: Path not found: bork/
67 error: Path not found: bork/
68 % stop and restart
68 % stop and restart
69 7 log lines written
69 7 log lines written
70 % static file
70 % static file
71 200 Script output follows
71 200 Script output follows
72
72
73 body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; }
73 body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; }
74 a { color:#0000cc; }
74 a { color:#0000cc; }
75 a:hover, a:visited, a:active { color:#880000; }
75 a:hover, a:visited, a:active { color:#880000; }
76 div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
76 div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
77 div.page_header a:visited { color:#0000cc; }
77 div.page_header a:visited { color:#0000cc; }
78 div.page_header a:hover { color:#880000; }
78 div.page_header a:hover { color:#880000; }
79 div.page_nav { padding:8px; }
79 div.page_nav { padding:8px; }
80 div.page_nav a:visited { color:#0000cc; }
80 div.page_nav a:visited { color:#0000cc; }
81 div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
81 div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
82 div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
82 div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
83 div.page_footer_text { float:left; color:#555555; font-style:italic; }
83 div.page_footer_text { float:left; color:#555555; font-style:italic; }
84 div.page_body { padding:8px; }
84 div.page_body { padding:8px; }
85 div.title, a.title {
85 div.title, a.title {
86 display:block; padding:6px 8px;
86 display:block; padding:6px 8px;
87 font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000;
87 font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000;
88 }
88 }
89 a.title:hover { background-color: #d9d8d1; }
89 a.title:hover { background-color: #d9d8d1; }
90 div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; }
90 div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; }
91 div.log_body { padding:8px 8px 8px 150px; }
91 div.log_body { padding:8px 8px 8px 150px; }
92 .age { white-space:nowrap; }
92 .age { white-space:nowrap; }
93 span.age { position:relative; float:left; width:142px; font-style:italic; }
93 span.age { position:relative; float:left; width:142px; font-style:italic; }
94 div.log_link {
94 div.log_link {
95 padding:0px 8px;
95 padding:0px 8px;
96 font-size:10px; font-family:sans-serif; font-style:normal;
96 font-size:10px; font-family:sans-serif; font-style:normal;
97 position:relative; float:left; width:136px;
97 position:relative; float:left; width:136px;
98 }
98 }
99 div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; }
99 div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; }
100 a.list { text-decoration:none; color:#000000; }
100 a.list { text-decoration:none; color:#000000; }
101 a.list:hover { text-decoration:underline; color:#880000; }
101 a.list:hover { text-decoration:underline; color:#880000; }
102 table { padding:8px 4px; }
102 table { padding:8px 4px; }
103 th { padding:2px 5px; font-size:12px; text-align:left; }
103 th { padding:2px 5px; font-size:12px; text-align:left; }
104 tr.light:hover, .parity0:hover { background-color:#edece6; }
104 tr.light:hover, .parity0:hover { background-color:#edece6; }
105 tr.dark, .parity1 { background-color:#f6f6f0; }
105 tr.dark, .parity1 { background-color:#f6f6f0; }
106 tr.dark:hover, .parity1:hover { background-color:#edece6; }
106 tr.dark:hover, .parity1:hover { background-color:#edece6; }
107 td { padding:2px 5px; font-size:12px; vertical-align:top; }
107 td { padding:2px 5px; font-size:12px; vertical-align:top; }
108 td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
108 td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
109 div.pre { font-family:monospace; font-size:12px; white-space:pre; }
109 div.pre { font-family:monospace; font-size:12px; white-space:pre; }
110 div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
110 div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
111 div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
111 div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
112 div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
112 div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
113 .linenr { color:#999999; text-decoration:none }
113 .linenr { color:#999999; text-decoration:none }
114 a.rss_logo {
114 a.rss_logo {
115 float:right; padding:3px 6px; line-height:10px;
115 float:right; padding:3px 6px; line-height:10px;
116 border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
116 border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
117 color:#ffffff; background-color:#ff6600;
117 color:#ffffff; background-color:#ff6600;
118 font-weight:bold; font-family:sans-serif; font-size:10px;
118 font-weight:bold; font-family:sans-serif; font-size:10px;
119 text-align:center; text-decoration:none;
119 text-align:center; text-decoration:none;
120 }
120 }
121 a.rss_logo:hover { background-color:#ee5500; }
121 a.rss_logo:hover { background-color:#ee5500; }
122 pre { margin: 0; }
122 pre { margin: 0; }
123 span.logtags span {
123 span.logtags span {
124 padding: 0px 4px;
124 padding: 0px 4px;
125 font-size: 10px;
125 font-size: 10px;
126 font-weight: normal;
126 font-weight: normal;
127 border: 1px solid;
127 border: 1px solid;
128 background-color: #ffaaff;
128 background-color: #ffaaff;
129 border-color: #ffccff #ff00ee #ff00ee #ffccff;
129 border-color: #ffccff #ff00ee #ff00ee #ffccff;
130 }
130 }
131 span.logtags span.tagtag {
131 span.logtags span.tagtag {
132 background-color: #ffffaa;
132 background-color: #ffffaa;
133 border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
133 border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
134 }
134 }
135 span.logtags span.branchtag {
135 span.logtags span.branchtag {
136 background-color: #aaffaa;
136 background-color: #aaffaa;
137 border-color: #ccffcc #00cc33 #00cc33 #ccffcc;
137 border-color: #ccffcc #00cc33 #00cc33 #ccffcc;
138 }
138 }
139 % errors
139 % errors
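The hgweb output above records HTTP status codes and bodies returned by a local 'hg serve' instance. A rough way to reproduce such checks by hand, assuming port 20059 and using curl instead of the test suite's own request helper (which is not shown in this excerpt):

hg serve -p 20059 -d --pid-file=hg.pid -A access.log -E errors.log
# static file that exists -> 200
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:20059/static/style.css
# static file that does not exist -> 404
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:20059/static/bogus.css
kill `cat hg.pid`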
@@ -1,197 +1,209 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init a
3 hg init a
4 mkdir a/d1
4 mkdir a/d1
5 mkdir a/d1/d2
5 mkdir a/d1/d2
6 echo line 1 > a/a
6 echo line 1 > a/a
7 echo line 1 > a/d1/d2/a
7 echo line 1 > a/d1/d2/a
8 hg --cwd a ci -d '0 0' -Ama
8 hg --cwd a ci -d '0 0' -Ama
9
9
10 echo line 2 >> a/a
10 echo line 2 >> a/a
11 hg --cwd a ci -u someone -d '1 0' -m'second change'
11 hg --cwd a ci -u someone -d '1 0' -m'second change'
12
12
13 echo % import exported patch
13 echo % import exported patch
14 hg clone -r0 a b
14 hg clone -r0 a b
15 hg --cwd a export tip > tip.patch
15 hg --cwd a export tip > tip.patch
16 hg --cwd b import ../tip.patch
16 hg --cwd b import ../tip.patch
17 echo % message should be same
17 echo % message should be same
18 hg --cwd b tip | grep 'second change'
18 hg --cwd b tip | grep 'second change'
19 echo % committer should be same
19 echo % committer should be same
20 hg --cwd b tip | grep someone
20 hg --cwd b tip | grep someone
21 rm -r b
21 rm -r b
22
22
23 echo % import of plain diff should fail without message
23 echo % import of plain diff should fail without message
24 hg clone -r0 a b
24 hg clone -r0 a b
25 hg --cwd a diff -r0:1 > tip.patch
25 hg --cwd a diff -r0:1 > tip.patch
26 hg --cwd b import ../tip.patch
26 hg --cwd b import ../tip.patch
27 rm -r b
27 rm -r b
28
28
29 echo % import of plain diff should be ok with message
29 echo % import of plain diff should be ok with message
30 hg clone -r0 a b
30 hg clone -r0 a b
31 hg --cwd a diff -r0:1 > tip.patch
31 hg --cwd a diff -r0:1 > tip.patch
32 hg --cwd b import -mpatch ../tip.patch
32 hg --cwd b import -mpatch ../tip.patch
33 rm -r b
33 rm -r b
34
34
35 echo % import of plain diff with specific date and user
35 echo % import of plain diff with specific date and user
36 hg clone -r0 a b
36 hg clone -r0 a b
37 hg --cwd a diff -r0:1 > tip.patch
37 hg --cwd a diff -r0:1 > tip.patch
38 hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../tip.patch
38 hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../tip.patch
39 hg -R b tip -pv
39 hg -R b tip -pv
40 rm -r b
40 rm -r b
41
41
42 echo % import of plain diff should be ok with --no-commit
42 echo % import of plain diff should be ok with --no-commit
43 hg clone -r0 a b
43 hg clone -r0 a b
44 hg --cwd a diff -r0:1 > tip.patch
44 hg --cwd a diff -r0:1 > tip.patch
45 hg --cwd b import --no-commit ../tip.patch
45 hg --cwd b import --no-commit ../tip.patch
46 hg --cwd b diff --nodates
46 hg --cwd b diff --nodates
47 rm -r b
47 rm -r b
48
48
49 echo % hg -R repo import
49 echo % hg -R repo import
50 # put the clone in a subdir - having a directory named "a"
50 # put the clone in a subdir - having a directory named "a"
51 # used to hide a bug.
51 # used to hide a bug.
52 mkdir dir
52 mkdir dir
53 hg clone -r0 a dir/b
53 hg clone -r0 a dir/b
54 hg --cwd a export tip > dir/tip.patch
54 hg --cwd a export tip > dir/tip.patch
55 cd dir
55 cd dir
56 hg -R b import tip.patch
56 hg -R b import tip.patch
57 cd ..
57 cd ..
58 rm -r dir
58 rm -r dir
59
59
60 echo % import from stdin
60 echo % import from stdin
61 hg clone -r0 a b
61 hg clone -r0 a b
62 hg --cwd a export tip | hg --cwd b import -
62 hg --cwd a export tip | hg --cwd b import -
63 rm -r b
63 rm -r b
64
64
65 echo % override commit message
65 echo % override commit message
66 hg clone -r0 a b
66 hg clone -r0 a b
67 hg --cwd a export tip | hg --cwd b import -m 'override' -
67 hg --cwd a export tip | hg --cwd b import -m 'override' -
68 hg --cwd b tip | grep override
68 hg --cwd b tip | grep override
69 rm -r b
69 rm -r b
70
70
71 cat > mkmsg.py <<EOF
71 cat > mkmsg.py <<EOF
72 import email.Message, sys
72 import email.Message, sys
73 msg = email.Message.Message()
73 msg = email.Message.Message()
74 msg.set_payload('email commit message\n' + open('tip.patch', 'rb').read())
74 msg.set_payload('email commit message\n' + open('tip.patch', 'rb').read())
75 msg['Subject'] = 'email patch'
75 msg['Subject'] = 'email patch'
76 msg['From'] = 'email patcher'
76 msg['From'] = 'email patcher'
77 sys.stdout.write(msg.as_string())
77 sys.stdout.write(msg.as_string())
78 EOF
78 EOF
79
79
80 echo % plain diff in email, subject, message body
80 echo % plain diff in email, subject, message body
81 hg clone -r0 a b
81 hg clone -r0 a b
82 hg --cwd a diff -r0:1 > tip.patch
82 hg --cwd a diff -r0:1 > tip.patch
83 python mkmsg.py > msg.patch
83 python mkmsg.py > msg.patch
84 hg --cwd b import ../msg.patch
84 hg --cwd b import ../msg.patch
85 hg --cwd b tip | grep email
85 hg --cwd b tip | grep email
86 rm -r b
86 rm -r b
87
87
88 echo % plain diff in email, no subject, message body
88 echo % plain diff in email, no subject, message body
89 hg clone -r0 a b
89 hg clone -r0 a b
90 grep -v '^Subject:' msg.patch | hg --cwd b import -
90 grep -v '^Subject:' msg.patch | hg --cwd b import -
91 rm -r b
91 rm -r b
92
92
93 echo % plain diff in email, subject, no message body
93 echo % plain diff in email, subject, no message body
94 hg clone -r0 a b
94 hg clone -r0 a b
95 grep -v '^email ' msg.patch | hg --cwd b import -
95 grep -v '^email ' msg.patch | hg --cwd b import -
96 rm -r b
96 rm -r b
97
97
98 echo % plain diff in email, no subject, no message body, should fail
98 echo % plain diff in email, no subject, no message body, should fail
99 hg clone -r0 a b
99 hg clone -r0 a b
100 egrep -v '^(Subject|email)' msg.patch | hg --cwd b import -
100 egrep -v '^(Subject|email)' msg.patch | hg --cwd b import -
101 rm -r b
101 rm -r b
102
102
103 echo % hg export in email, should use patch header
103 echo % hg export in email, should use patch header
104 hg clone -r0 a b
104 hg clone -r0 a b
105 hg --cwd a export tip > tip.patch
105 hg --cwd a export tip > tip.patch
106 python mkmsg.py | hg --cwd b import -
106 python mkmsg.py | hg --cwd b import -
107 hg --cwd b tip | grep second
107 hg --cwd b tip | grep second
108 rm -r b
108 rm -r b
109
109
110 # subject: duplicate detection, removal of [PATCH]
110 # subject: duplicate detection, removal of [PATCH]
111 # The '---' tests git-send-email style handling without proper mail headers
111 # The '---' tests git-send-email style handling without proper mail headers
112 cat > mkmsg2.py <<EOF
112 cat > mkmsg2.py <<EOF
113 import email.Message, sys
113 import email.Message, sys
114 msg = email.Message.Message()
114 msg = email.Message.Message()
115 msg.set_payload('email patch\n\nnext line\n---\n' + open('tip.patch').read())
115 msg.set_payload('email patch\n\nnext line\n---\n' + open('tip.patch').read())
116 msg['Subject'] = '[PATCH] email patch'
116 msg['Subject'] = '[PATCH] email patch'
117 msg['From'] = 'email patcher'
117 msg['From'] = 'email patcher'
118 sys.stdout.write(msg.as_string())
118 sys.stdout.write(msg.as_string())
119 EOF
119 EOF
120
120
121 echo '% plain diff in email, [PATCH] subject, message body with subject'
121 echo '% plain diff in email, [PATCH] subject, message body with subject'
122 hg clone -r0 a b
122 hg clone -r0 a b
123 hg --cwd a diff -r0:1 > tip.patch
123 hg --cwd a diff -r0:1 > tip.patch
124 python mkmsg2.py | hg --cwd b import -
124 python mkmsg2.py | hg --cwd b import -
125 hg --cwd b tip --template '{desc}\n'
125 hg --cwd b tip --template '{desc}\n'
126 rm -r b
126 rm -r b
127
127
128 # We weren't backing up the correct dirstate file when importing many patches
129 # (issue963)
130 echo '% import patch1 patch2; rollback'
131 echo line 3 >> a/a
132 hg --cwd a ci -m'third change'
133 hg --cwd a export -o '../patch%R' 1 2
134 hg clone -qr0 a b
135 hg --cwd b parents --template 'parent: #rev#\n'
136 hg --cwd b import ../patch1 ../patch2
137 hg --cwd b rollback
138 hg --cwd b parents --template 'parent: #rev#\n'
139 rm -r b
128
140
129 # regression test:
141 # regression test:
130 # importing a patch in a subdirectory failed at the commit stage
142 # importing a patch in a subdirectory failed at the commit stage
131 echo line 2 >> a/d1/d2/a
143 echo line 2 >> a/d1/d2/a
132 hg --cwd a ci -u someoneelse -d '1 0' -m'subdir change'
144 hg --cwd a ci -u someoneelse -d '1 0' -m'subdir change'
133 echo % hg import in a subdirectory
145 echo % hg import in a subdirectory
134 hg clone -r0 a b
146 hg clone -r0 a b
135 hg --cwd a export tip | sed -e 's/d1\/d2\///' > tip.patch
147 hg --cwd a export tip | sed -e 's/d1\/d2\///' > tip.patch
136 dir=`pwd`
148 dir=`pwd`
137 cd b/d1/d2 2>&1 > /dev/null
149 cd b/d1/d2 2>&1 > /dev/null
138 hg import ../../../tip.patch
150 hg import ../../../tip.patch
139 cd $dir
151 cd $dir
140 echo "% message should be 'subdir change'"
152 echo "% message should be 'subdir change'"
141 hg --cwd b tip | grep 'subdir change'
153 hg --cwd b tip | grep 'subdir change'
142 echo "% committer should be 'someoneelse'"
154 echo "% committer should be 'someoneelse'"
143 hg --cwd b tip | grep someoneelse
155 hg --cwd b tip | grep someoneelse
144 echo "% should be empty"
156 echo "% should be empty"
145 hg --cwd b status
157 hg --cwd b status
146
158
147
159
148 # Test fuzziness (ambiguous patch location, fuzz=2)
160 # Test fuzziness (ambiguous patch location, fuzz=2)
149 echo % test fuzziness
161 echo % test fuzziness
150 hg init fuzzy
162 hg init fuzzy
151 cd fuzzy
163 cd fuzzy
152 echo line1 > a
164 echo line1 > a
153 echo line0 >> a
165 echo line0 >> a
154 echo line3 >> a
166 echo line3 >> a
155 hg ci -Am adda
167 hg ci -Am adda
156 echo line1 > a
168 echo line1 > a
157 echo line2 >> a
169 echo line2 >> a
158 echo line0 >> a
170 echo line0 >> a
159 echo line3 >> a
171 echo line3 >> a
160 hg ci -m change a
172 hg ci -m change a
161 hg export tip > tip.patch
173 hg export tip > tip.patch
162 hg up -C 0
174 hg up -C 0
163 echo line1 > a
175 echo line1 > a
164 echo line0 >> a
176 echo line0 >> a
165 echo line1 >> a
177 echo line1 >> a
166 echo line0 >> a
178 echo line0 >> a
167 hg ci -m brancha
179 hg ci -m brancha
168 hg import -v tip.patch
180 hg import -v tip.patch
169 cd ..
181 cd ..
170
182
171 # Test hunk touching empty files (issue906)
183 # Test hunk touching empty files (issue906)
172 hg init empty
184 hg init empty
173 cd empty
185 cd empty
174 touch a
186 touch a
175 touch b1
187 touch b1
176 touch c1
188 touch c1
177 echo d > d
189 echo d > d
178 hg ci -Am init
190 hg ci -Am init
179 echo a > a
191 echo a > a
180 echo b > b1
192 echo b > b1
181 hg mv b1 b2
193 hg mv b1 b2
182 echo c > c1
194 echo c > c1
183 hg copy c1 c2
195 hg copy c1 c2
184 rm d
196 rm d
185 touch d
197 touch d
186 hg diff --git
198 hg diff --git
187 hg ci -m empty
199 hg ci -m empty
188 hg export --git tip > empty.diff
200 hg export --git tip > empty.diff
189 hg up -C 0
201 hg up -C 0
190 hg import empty.diff
202 hg import empty.diff
191 for name in a b1 b2 c1 c2 d;
203 for name in a b1 b2 c1 c2 d;
192 do
204 do
193 echo % $name file
205 echo % $name file
194 test -f $name && cat $name
206 test -f $name && cat $name
195 done
207 done
196 cd ..
208 cd ..
197
209
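The core round trip that the script above keeps repeating, as a minimal standalone sketch (reusing the same 'a'/'b' repositories created at the top of the script):

hg --cwd a export tip > tip.patch   # changeset header + diff
hg clone -r0 a b                    # clone holding only revision 0
hg --cwd b import ../tip.patch      # reapply, preserving user, date and message
hg --cwd b tip                      # the imported changeset is now tip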
@@ -1,219 +1,225 b''
1 adding a
1 adding a
2 adding d1/d2/a
2 adding d1/d2/a
3 % import exported patch
3 % import exported patch
4 requesting all changes
4 requesting all changes
5 adding changesets
5 adding changesets
6 adding manifests
6 adding manifests
7 adding file changes
7 adding file changes
8 added 1 changesets with 2 changes to 2 files
8 added 1 changesets with 2 changes to 2 files
9 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 applying ../tip.patch
10 applying ../tip.patch
11 % message should be same
11 % message should be same
12 summary: second change
12 summary: second change
13 % committer should be same
13 % committer should be same
14 user: someone
14 user: someone
15 % import of plain diff should fail without message
15 % import of plain diff should fail without message
16 requesting all changes
16 requesting all changes
17 adding changesets
17 adding changesets
18 adding manifests
18 adding manifests
19 adding file changes
19 adding file changes
20 added 1 changesets with 2 changes to 2 files
20 added 1 changesets with 2 changes to 2 files
21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 applying ../tip.patch
22 applying ../tip.patch
23 transaction abort!
23 transaction abort!
24 rollback completed
24 rollback completed
25 abort: empty commit message
25 abort: empty commit message
26 % import of plain diff should be ok with message
26 % import of plain diff should be ok with message
27 requesting all changes
27 requesting all changes
28 adding changesets
28 adding changesets
29 adding manifests
29 adding manifests
30 adding file changes
30 adding file changes
31 added 1 changesets with 2 changes to 2 files
31 added 1 changesets with 2 changes to 2 files
32 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
32 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
33 applying ../tip.patch
33 applying ../tip.patch
34 % import of plain diff with specific date and user
34 % import of plain diff with specific date and user
35 requesting all changes
35 requesting all changes
36 adding changesets
36 adding changesets
37 adding manifests
37 adding manifests
38 adding file changes
38 adding file changes
39 added 1 changesets with 2 changes to 2 files
39 added 1 changesets with 2 changes to 2 files
40 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 applying ../tip.patch
41 applying ../tip.patch
42 changeset: 1:ca68f19f3a40
42 changeset: 1:ca68f19f3a40
43 tag: tip
43 tag: tip
44 user: user@nowhere.net
44 user: user@nowhere.net
45 date: Thu Jan 01 00:00:01 1970 +0000
45 date: Thu Jan 01 00:00:01 1970 +0000
46 files: a
46 files: a
47 description:
47 description:
48 patch
48 patch
49
49
50
50
51 diff -r 80971e65b431 -r ca68f19f3a40 a
51 diff -r 80971e65b431 -r ca68f19f3a40 a
52 --- a/a Thu Jan 01 00:00:00 1970 +0000
52 --- a/a Thu Jan 01 00:00:00 1970 +0000
53 +++ b/a Thu Jan 01 00:00:01 1970 +0000
53 +++ b/a Thu Jan 01 00:00:01 1970 +0000
54 @@ -1,1 +1,2 @@
54 @@ -1,1 +1,2 @@
55 line 1
55 line 1
56 +line 2
56 +line 2
57
57
58 % import of plain diff should be ok with --no-commit
58 % import of plain diff should be ok with --no-commit
59 requesting all changes
59 requesting all changes
60 adding changesets
60 adding changesets
61 adding manifests
61 adding manifests
62 adding file changes
62 adding file changes
63 added 1 changesets with 2 changes to 2 files
63 added 1 changesets with 2 changes to 2 files
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 applying ../tip.patch
65 applying ../tip.patch
66 diff -r 80971e65b431 a
66 diff -r 80971e65b431 a
67 --- a/a
67 --- a/a
68 +++ b/a
68 +++ b/a
69 @@ -1,1 +1,2 @@
69 @@ -1,1 +1,2 @@
70 line 1
70 line 1
71 +line 2
71 +line 2
72 % hg -R repo import
72 % hg -R repo import
73 requesting all changes
73 requesting all changes
74 adding changesets
74 adding changesets
75 adding manifests
75 adding manifests
76 adding file changes
76 adding file changes
77 added 1 changesets with 2 changes to 2 files
77 added 1 changesets with 2 changes to 2 files
78 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
79 applying tip.patch
79 applying tip.patch
80 % import from stdin
80 % import from stdin
81 requesting all changes
81 requesting all changes
82 adding changesets
82 adding changesets
83 adding manifests
83 adding manifests
84 adding file changes
84 adding file changes
85 added 1 changesets with 2 changes to 2 files
85 added 1 changesets with 2 changes to 2 files
86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 applying patch from stdin
87 applying patch from stdin
88 % override commit message
88 % override commit message
89 requesting all changes
89 requesting all changes
90 adding changesets
90 adding changesets
91 adding manifests
91 adding manifests
92 adding file changes
92 adding file changes
93 added 1 changesets with 2 changes to 2 files
93 added 1 changesets with 2 changes to 2 files
94 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
94 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
95 applying patch from stdin
95 applying patch from stdin
96 summary: override
96 summary: override
97 % plain diff in email, subject, message body
97 % plain diff in email, subject, message body
98 requesting all changes
98 requesting all changes
99 adding changesets
99 adding changesets
100 adding manifests
100 adding manifests
101 adding file changes
101 adding file changes
102 added 1 changesets with 2 changes to 2 files
102 added 1 changesets with 2 changes to 2 files
103 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
103 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
104 applying ../msg.patch
104 applying ../msg.patch
105 user: email patcher
105 user: email patcher
106 summary: email patch
106 summary: email patch
107 % plain diff in email, no subject, message body
107 % plain diff in email, no subject, message body
108 requesting all changes
108 requesting all changes
109 adding changesets
109 adding changesets
110 adding manifests
110 adding manifests
111 adding file changes
111 adding file changes
112 added 1 changesets with 2 changes to 2 files
112 added 1 changesets with 2 changes to 2 files
113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
114 applying patch from stdin
114 applying patch from stdin
115 % plain diff in email, subject, no message body
115 % plain diff in email, subject, no message body
116 requesting all changes
116 requesting all changes
117 adding changesets
117 adding changesets
118 adding manifests
118 adding manifests
119 adding file changes
119 adding file changes
120 added 1 changesets with 2 changes to 2 files
120 added 1 changesets with 2 changes to 2 files
121 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
121 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
122 applying patch from stdin
122 applying patch from stdin
123 % plain diff in email, no subject, no message body, should fail
123 % plain diff in email, no subject, no message body, should fail
124 requesting all changes
124 requesting all changes
125 adding changesets
125 adding changesets
126 adding manifests
126 adding manifests
127 adding file changes
127 adding file changes
128 added 1 changesets with 2 changes to 2 files
128 added 1 changesets with 2 changes to 2 files
129 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
129 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 applying patch from stdin
130 applying patch from stdin
131 transaction abort!
131 transaction abort!
132 rollback completed
132 rollback completed
133 abort: empty commit message
133 abort: empty commit message
134 % hg export in email, should use patch header
134 % hg export in email, should use patch header
135 requesting all changes
135 requesting all changes
136 adding changesets
136 adding changesets
137 adding manifests
137 adding manifests
138 adding file changes
138 adding file changes
139 added 1 changesets with 2 changes to 2 files
139 added 1 changesets with 2 changes to 2 files
140 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
140 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
141 applying patch from stdin
141 applying patch from stdin
142 summary: second change
142 summary: second change
143 % plain diff in email, [PATCH] subject, message body with subject
143 % plain diff in email, [PATCH] subject, message body with subject
144 requesting all changes
144 requesting all changes
145 adding changesets
145 adding changesets
146 adding manifests
146 adding manifests
147 adding file changes
147 adding file changes
148 added 1 changesets with 2 changes to 2 files
148 added 1 changesets with 2 changes to 2 files
149 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
149 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
150 applying patch from stdin
150 applying patch from stdin
151 email patch
151 email patch
152
152
153 next line
153 next line
154 ---
154 ---
155 % import patch1 patch2; rollback
156 parent: 0
157 applying ../patch1
158 applying ../patch2
159 rolling back last transaction
160 parent: 1
155 % hg import in a subdirectory
161 % hg import in a subdirectory
156 requesting all changes
162 requesting all changes
157 adding changesets
163 adding changesets
158 adding manifests
164 adding manifests
159 adding file changes
165 adding file changes
160 added 1 changesets with 2 changes to 2 files
166 added 1 changesets with 2 changes to 2 files
161 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
162 applying ../../../tip.patch
168 applying ../../../tip.patch
163 % message should be 'subdir change'
169 % message should be 'subdir change'
164 summary: subdir change
170 summary: subdir change
165 % committer should be 'someoneelse'
171 % committer should be 'someoneelse'
166 user: someoneelse
172 user: someoneelse
167 % should be empty
173 % should be empty
168 % test fuzziness
174 % test fuzziness
169 adding a
175 adding a
170 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
176 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 applying tip.patch
177 applying tip.patch
172 patching file a
178 patching file a
173 Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
179 Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
174 a
180 a
175 adding a
181 adding a
176 adding b1
182 adding b1
177 adding c1
183 adding c1
178 adding d
184 adding d
179 diff --git a/a b/a
185 diff --git a/a b/a
180 --- a/a
186 --- a/a
181 +++ b/a
187 +++ b/a
182 @@ -0,0 +1,1 @@
188 @@ -0,0 +1,1 @@
183 +a
189 +a
184 diff --git a/b1 b/b2
190 diff --git a/b1 b/b2
185 rename from b1
191 rename from b1
186 rename to b2
192 rename to b2
187 --- a/b1
193 --- a/b1
188 +++ b/b2
194 +++ b/b2
189 @@ -0,0 +1,1 @@
195 @@ -0,0 +1,1 @@
190 +b
196 +b
191 diff --git a/c1 b/c1
197 diff --git a/c1 b/c1
192 --- a/c1
198 --- a/c1
193 +++ b/c1
199 +++ b/c1
194 @@ -0,0 +1,1 @@
200 @@ -0,0 +1,1 @@
195 +c
201 +c
196 diff --git a/c1 b/c2
202 diff --git a/c1 b/c2
197 copy from c1
203 copy from c1
198 copy to c2
204 copy to c2
199 --- a/c1
205 --- a/c1
200 +++ b/c2
206 +++ b/c2
201 @@ -0,0 +1,1 @@
207 @@ -0,0 +1,1 @@
202 +c
208 +c
203 diff --git a/d b/d
209 diff --git a/d b/d
204 --- a/d
210 --- a/d
205 +++ b/d
211 +++ b/d
206 @@ -1,1 +0,0 @@
212 @@ -1,1 +0,0 @@
207 -d
213 -d
208 4 files updated, 0 files merged, 2 files removed, 0 files unresolved
214 4 files updated, 0 files merged, 2 files removed, 0 files unresolved
209 applying empty.diff
215 applying empty.diff
210 % a file
216 % a file
211 a
217 a
212 % b1 file
218 % b1 file
213 % b2 file
219 % b2 file
214 b
220 b
215 % c1 file
221 % c1 file
216 c
222 c
217 % c2 file
223 % c2 file
218 c
224 c
219 % d file
225 % d file
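The empty-file and rename checks above depend on git-style patches, which record renames, copies and mode bits that plain diffs drop. A minimal sketch of that difference, reusing the commands from the test:

hg mv b1 b2                      # rename is tracked by Mercurial
hg export --git tip > ren.diff   # --git emits 'rename from'/'rename to' headers
hg up -C 0
hg import ren.diff               # the rename is replayed instead of a delete + add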
@@ -1,476 +1,489 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 checkundo()
3 checkundo()
4 {
4 {
5 if [ -f .hg/store/undo ]; then
5 if [ -f .hg/store/undo ]; then
6 echo ".hg/store/undo still exists after $1"
6 echo ".hg/store/undo still exists after $1"
7 fi
7 fi
8 }
8 }
9
9
10 echo "[extensions]" >> $HGRCPATH
10 echo "[extensions]" >> $HGRCPATH
11 echo "mq=" >> $HGRCPATH
11 echo "mq=" >> $HGRCPATH
12
12
13 echo % help
13 echo % help
14 hg help mq
14 hg help mq
15
15
16 hg init a
16 hg init a
17 cd a
17 cd a
18 echo a > a
18 echo a > a
19 hg ci -Ama
19 hg ci -Ama
20
20
21 hg clone . ../k
21 hg clone . ../k
22
22
23 mkdir b
23 mkdir b
24 echo z > b/z
24 echo z > b/z
25 hg ci -Ama
25 hg ci -Ama
26
26
27 echo % qinit
27 echo % qinit
28
28
29 hg qinit
29 hg qinit
30
30
31 cd ..
31 cd ..
32 hg init b
32 hg init b
33
33
34 echo % -R qinit
34 echo % -R qinit
35
35
36 hg -R b qinit
36 hg -R b qinit
37
37
38 hg init c
38 hg init c
39
39
40 echo % qinit -c
40 echo % qinit -c
41
41
42 hg --cwd c qinit -c
42 hg --cwd c qinit -c
43 hg -R c/.hg/patches st
43 hg -R c/.hg/patches st
44
44
45 echo % qnew should refuse bad patch names
46 hg -R c qnew series
47 hg -R c qnew status
48 hg -R c qnew guards
49 hg -R c qnew .hgignore
50
45 echo % qnew implies add
51 echo % qnew implies add
46
52
47 hg -R c qnew test.patch
53 hg -R c qnew test.patch
48 hg -R c/.hg/patches st
54 hg -R c/.hg/patches st
49
55
50 echo '% qinit; qinit -c'
56 echo '% qinit; qinit -c'
51 hg init d
57 hg init d
52 cd d
58 cd d
53 hg qinit
59 hg qinit
54 hg qinit -c
60 hg qinit -c
55 # qinit -c should create both files if they don't exist
61 # qinit -c should create both files if they don't exist
56 echo ' .hgignore:'
62 echo ' .hgignore:'
57 cat .hg/patches/.hgignore
63 cat .hg/patches/.hgignore
58 echo ' series:'
64 echo ' series:'
59 cat .hg/patches/series
65 cat .hg/patches/series
60 hg qinit -c 2>&1 | sed -e 's/repository.*already/repository already/'
66 hg qinit -c 2>&1 | sed -e 's/repository.*already/repository already/'
61 cd ..
67 cd ..
62
68
63 echo '% qinit; <stuff>; qinit -c'
69 echo '% qinit; <stuff>; qinit -c'
64 hg init e
70 hg init e
65 cd e
71 cd e
66 hg qnew A
72 hg qnew A
67 checkundo qnew
73 checkundo qnew
68 echo foo > foo
74 echo foo > foo
69 hg add foo
75 hg add foo
70 hg qrefresh
76 hg qrefresh
71 hg qnew B
77 hg qnew B
72 echo >> foo
78 echo >> foo
73 hg qrefresh
79 hg qrefresh
74 echo status >> .hg/patches/.hgignore
80 echo status >> .hg/patches/.hgignore
75 echo bleh >> .hg/patches/.hgignore
81 echo bleh >> .hg/patches/.hgignore
76 hg qinit -c
82 hg qinit -c
77 hg -R .hg/patches status
83 hg -R .hg/patches status
78 # qinit -c shouldn't touch these files if they already exist
84 # qinit -c shouldn't touch these files if they already exist
79 echo ' .hgignore:'
85 echo ' .hgignore:'
80 cat .hg/patches/.hgignore
86 cat .hg/patches/.hgignore
81 echo ' series:'
87 echo ' series:'
82 cat .hg/patches/series
88 cat .hg/patches/series
83 cd ..
89 cd ..
84
90
85 cd a
91 cd a
86
92
87 echo a > somefile
93 echo a > somefile
88 hg add somefile
94 hg add somefile
89
95
90 echo % qnew with uncommitted changes
96 echo % qnew with uncommitted changes
91
97
92 hg qnew uncommitted.patch
98 hg qnew uncommitted.patch
93 hg st
99 hg st
94 hg qseries
100 hg qseries
95
101
96 echo '% qnew with uncommitted changes and missing file (issue 803)'
102 echo '% qnew with uncommitted changes and missing file (issue 803)'
97
103
98 hg qnew issue803.patch someotherfile 2>&1 | \
104 hg qnew issue803.patch someotherfile 2>&1 | \
99 sed -e 's/someotherfile:.*/someotherfile: No such file or directory/'
105 sed -e 's/someotherfile:.*/someotherfile: No such file or directory/'
100 hg st
106 hg st
101 hg qseries
107 hg qseries
102 hg qpop -f
108 hg qpop -f
103 hg qdel issue803.patch
109 hg qdel issue803.patch
104
110
105 hg revert --no-backup somefile
111 hg revert --no-backup somefile
106 rm somefile
112 rm somefile
107
113
108 echo % qnew -m
114 echo % qnew -m
109
115
110 hg qnew -m 'foo bar' test.patch
116 hg qnew -m 'foo bar' test.patch
111 cat .hg/patches/test.patch
117 cat .hg/patches/test.patch
112
118
113 echo % qrefresh
119 echo % qrefresh
114
120
115 echo a >> a
121 echo a >> a
116 hg qrefresh
122 hg qrefresh
117 sed -e "s/^\(diff -r \)\([a-f0-9]* \)/\1 x/" \
123 sed -e "s/^\(diff -r \)\([a-f0-9]* \)/\1 x/" \
118 -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
124 -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
119 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/test.patch
125 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/test.patch
120
126
121 echo % empty qrefresh
127 echo % empty qrefresh
122
128
123 hg qrefresh -X a
129 hg qrefresh -X a
124 echo 'revision:'
130 echo 'revision:'
125 hg diff -r -2 -r -1
131 hg diff -r -2 -r -1
126 echo 'patch:'
132 echo 'patch:'
127 cat .hg/patches/test.patch
133 cat .hg/patches/test.patch
128 echo 'working dir diff:'
134 echo 'working dir diff:'
129 hg diff --nodates -q
135 hg diff --nodates -q
130 # restore things
136 # restore things
131 hg qrefresh
137 hg qrefresh
132 checkundo qrefresh
138 checkundo qrefresh
133
139
134 echo % qpop
140 echo % qpop
135
141
136 hg qpop
142 hg qpop
137 checkundo qpop
143 checkundo qpop
138
144
139 echo % qpush
145 echo % qpush
140
146
141 hg qpush
147 hg qpush
142 checkundo qpush
148 checkundo qpush
143
149
144 cd ..
150 cd ..
145
151
146 echo % pop/push outside repo
152 echo % pop/push outside repo
147
153
148 hg -R a qpop
154 hg -R a qpop
149 hg -R a qpush
155 hg -R a qpush
150
156
151 cd a
157 cd a
152 hg qnew test2.patch
158 hg qnew test2.patch
153
159
154 echo % qrefresh in subdir
160 echo % qrefresh in subdir
155
161
156 cd b
162 cd b
157 echo a > a
163 echo a > a
158 hg add a
164 hg add a
159 hg qrefresh
165 hg qrefresh
160
166
161 echo % pop/push -a in subdir
167 echo % pop/push -a in subdir
162
168
163 hg qpop -a
169 hg qpop -a
164 hg --traceback qpush -a
170 hg --traceback qpush -a
165
171
166 echo % qseries
172 echo % qseries
167 hg qseries
173 hg qseries
168 hg qpop
174 hg qpop
169 hg qseries -vs
175 hg qseries -vs
170 hg qpush
176 hg qpush
171
177
172 echo % qapplied
178 echo % qapplied
173 hg qapplied
179 hg qapplied
174
180
175 echo % qtop
181 echo % qtop
176 hg qtop
182 hg qtop
177
183
178 echo % qprev
184 echo % qprev
179 hg qprev
185 hg qprev
180
186
181 echo % qnext
187 echo % qnext
182 hg qnext
188 hg qnext
183
189
184 echo % pop, qnext, qprev, qapplied
190 echo % pop, qnext, qprev, qapplied
185 hg qpop
191 hg qpop
186 hg qnext
192 hg qnext
187 hg qprev
193 hg qprev
188 hg qapplied
194 hg qapplied
189
195
190 echo % commit should fail
196 echo % commit should fail
191 hg commit
197 hg commit
192
198
193 echo % push should fail
199 echo % push should fail
194 hg push ../../k
200 hg push ../../k
195
201
196 echo % qunapplied
202 echo % qunapplied
197 hg qunapplied
203 hg qunapplied
198
204
199 echo % qpush/qpop with index
205 echo % qpush/qpop with index
200 hg qnew test1b.patch
206 hg qnew test1b.patch
201 echo 1b > 1b
207 echo 1b > 1b
202 hg add 1b
208 hg add 1b
203 hg qrefresh
209 hg qrefresh
204 hg qpush 2
210 hg qpush 2
205 hg qpop 0
211 hg qpop 0
206 hg qpush test.patch+1
212 hg qpush test.patch+1
207 hg qpush test.patch+2
213 hg qpush test.patch+2
208 hg qpop test2.patch-1
214 hg qpop test2.patch-1
209 hg qpop test2.patch-2
215 hg qpop test2.patch-2
210 hg qpush test1b.patch+1
216 hg qpush test1b.patch+1
211
217
212 echo % push should succeed
218 echo % push should succeed
213 hg qpop -a
219 hg qpop -a
214 hg push ../../k
220 hg push ../../k
215
221
216 echo % qpush/qpop error codes
222 echo % qpush/qpop error codes
217 errorcode()
223 errorcode()
218 {
224 {
219 hg "$@" && echo " $@ succeeds" || echo " $@ fails"
225 hg "$@" && echo " $@ succeeds" || echo " $@ fails"
220 }
226 }
221
227
222 # we want to start with some patches applied
228 # we want to start with some patches applied
223 hg qpush -a
229 hg qpush -a
224 echo " % pops all patches and succeeds"
230 echo " % pops all patches and succeeds"
225 errorcode qpop -a
231 errorcode qpop -a
226 echo " % does nothing and succeeds"
232 echo " % does nothing and succeeds"
227 errorcode qpop -a
233 errorcode qpop -a
228 echo " % fails - nothing else to pop"
234 echo " % fails - nothing else to pop"
229 errorcode qpop
235 errorcode qpop
230 echo " % pushes a patch and succeeds"
236 echo " % pushes a patch and succeeds"
231 errorcode qpush
237 errorcode qpush
232 echo " % pops a patch and succeeds"
238 echo " % pops a patch and succeeds"
233 errorcode qpop
239 errorcode qpop
234 echo " % pushes up to test1b.patch and succeeds"
240 echo " % pushes up to test1b.patch and succeeds"
235 errorcode qpush test1b.patch
241 errorcode qpush test1b.patch
236 echo " % does nothing and succeeds"
242 echo " % does nothing and succeeds"
237 errorcode qpush test1b.patch
243 errorcode qpush test1b.patch
238 echo " % does nothing and succeeds"
244 echo " % does nothing and succeeds"
239 errorcode qpop test1b.patch
245 errorcode qpop test1b.patch
240 echo " % fails - can't push to this patch"
246 echo " % fails - can't push to this patch"
241 errorcode qpush test.patch
247 errorcode qpush test.patch
242 echo " % fails - can't pop to this patch"
248 echo " % fails - can't pop to this patch"
243 errorcode qpop test2.patch
249 errorcode qpop test2.patch
244 echo " % pops up to test.patch and succeeds"
250 echo " % pops up to test.patch and succeeds"
245 errorcode qpop test.patch
251 errorcode qpop test.patch
246 echo " % pushes all patches and succeeds"
252 echo " % pushes all patches and succeeds"
247 errorcode qpush -a
253 errorcode qpush -a
248 echo " % does nothing and succeeds"
254 echo " % does nothing and succeeds"
249 errorcode qpush -a
255 errorcode qpush -a
250 echo " % fails - nothing else to push"
256 echo " % fails - nothing else to push"
251 errorcode qpush
257 errorcode qpush
252 echo " % does nothing and succeeds"
258 echo " % does nothing and succeeds"
253 errorcode qpush test2.patch
259 errorcode qpush test2.patch
254
260
255
261
256 echo % strip
262 echo % strip
257 cd ../../b
263 cd ../../b
258 echo x>x
264 echo x>x
259 hg ci -Ama
265 hg ci -Ama
260 hg strip tip 2>&1 | sed 's/\(saving bundle to \).*/\1/'
266 hg strip tip 2>&1 | sed 's/\(saving bundle to \).*/\1/'
261 hg unbundle .hg/strip-backup/*
267 hg unbundle .hg/strip-backup/*
262
268
263 echo '% cd b; hg qrefresh'
269 echo '% cd b; hg qrefresh'
264 hg init refresh
270 hg init refresh
265 cd refresh
271 cd refresh
266 echo a > a
272 echo a > a
267 hg ci -Ama -d'0 0'
273 hg ci -Ama -d'0 0'
268 hg qnew -mfoo foo
274 hg qnew -mfoo foo
269 echo a >> a
275 echo a >> a
270 hg qrefresh
276 hg qrefresh
271 mkdir b
277 mkdir b
272 cd b
278 cd b
273 echo f > f
279 echo f > f
274 hg add f
280 hg add f
275 hg qrefresh
281 hg qrefresh
276 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
282 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
277 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
283 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
278 echo % hg qrefresh .
284 echo % hg qrefresh .
279 hg qrefresh .
285 hg qrefresh .
280 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
286 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
281 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
287 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
282 hg status
288 hg status
283
289
284 echo % qpush failure
290 echo % qpush failure
285 cd ..
291 cd ..
286 hg qrefresh
292 hg qrefresh
287 hg qnew -mbar bar
293 hg qnew -mbar bar
288 echo foo > foo
294 echo foo > foo
289 echo bar > bar
295 echo bar > bar
290 hg add foo bar
296 hg add foo bar
291 hg qrefresh
297 hg qrefresh
292 hg qpop -a
298 hg qpop -a
293 echo bar > foo
299 echo bar > foo
294 hg qpush -a
300 hg qpush -a
295 hg st
301 hg st
296
302
297 echo % mq tags
303 echo % mq tags
298 hg log --template '{rev} {tags}\n' -r qparent:qtip
304 hg log --template '{rev} {tags}\n' -r qparent:qtip
299
305
306 echo % bad node in status
307 hg qpop
308 hg strip -qn tip
309 hg tip 2>&1 | sed -e 's/unknown node .*/unknown node/'
310 hg branches 2>&1 | sed -e 's/unknown node .*/unknown node/'
311 hg qpop
312
300 cat >>$HGRCPATH <<EOF
313 cat >>$HGRCPATH <<EOF
301 [diff]
314 [diff]
302 git = True
315 git = True
303 EOF
316 EOF
304 cd ..
317 cd ..
305 hg init git
318 hg init git
306 cd git
319 cd git
307 hg qinit
320 hg qinit
308
321
309 hg qnew -m'new file' new
322 hg qnew -m'new file' new
310 echo foo > new
323 echo foo > new
311 chmod +x new
324 chmod +x new
312 hg add new
325 hg add new
313 hg qrefresh
326 hg qrefresh
314 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
327 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
315 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/new
328 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/new
316
329
317 hg qnew -m'copy file' copy
330 hg qnew -m'copy file' copy
318 hg cp new copy
331 hg cp new copy
319 hg qrefresh
332 hg qrefresh
320 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
333 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
321 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/copy
334 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/copy
322
335
323 hg qpop
336 hg qpop
324 hg qpush
337 hg qpush
325 hg qdiff
338 hg qdiff
326 cat >>$HGRCPATH <<EOF
339 cat >>$HGRCPATH <<EOF
327 [diff]
340 [diff]
328 git = False
341 git = False
329 EOF
342 EOF
330 hg qdiff --git
343 hg qdiff --git
331
344
332 cd ..
345 cd ..
333 hg init slow
346 hg init slow
334 cd slow
347 cd slow
335 hg qinit
348 hg qinit
336 echo foo > foo
349 echo foo > foo
337 hg add foo
350 hg add foo
338 hg ci -m 'add foo'
351 hg ci -m 'add foo'
339 hg qnew bar
352 hg qnew bar
340 echo bar > bar
353 echo bar > bar
341 hg add bar
354 hg add bar
342 hg mv foo baz
355 hg mv foo baz
343 hg qrefresh --git
356 hg qrefresh --git
344 hg up -C 0
357 hg up -C 0
345 echo >> foo
358 echo >> foo
346 hg ci -m 'change foo'
359 hg ci -m 'change foo'
347 hg up -C 1
360 hg up -C 1
348 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
361 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
349 cat .hg/patches/bar
362 cat .hg/patches/bar
350 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
363 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
351 hg qrefresh --git
364 hg qrefresh --git
352 cat .hg/patches/bar
365 cat .hg/patches/bar
353 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
366 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
354 hg qrefresh
367 hg qrefresh
355 grep 'diff --git' .hg/patches/bar
368 grep 'diff --git' .hg/patches/bar
356
369
357 echo
370 echo
358 hg up -C 1
371 hg up -C 1
359 echo >> foo
372 echo >> foo
360 hg ci -m 'change foo again'
373 hg ci -m 'change foo again'
361 hg up -C 2
374 hg up -C 2
362 hg mv bar quux
375 hg mv bar quux
363 hg mv baz bleh
376 hg mv baz bleh
364 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
377 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
365 cat .hg/patches/bar
378 cat .hg/patches/bar
366 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
379 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
367 hg mv quux fred
380 hg mv quux fred
368 hg mv bleh barney
381 hg mv bleh barney
369 hg qrefresh --git
382 hg qrefresh --git
370 cat .hg/patches/bar
383 cat .hg/patches/bar
371 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
384 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
372
385
373 echo % refresh omitting an added file
386 echo % refresh omitting an added file
374 hg qnew baz
387 hg qnew baz
375 echo newfile > newfile
388 echo newfile > newfile
376 hg add newfile
389 hg add newfile
377 hg qrefresh
390 hg qrefresh
378 hg st -A newfile
391 hg st -A newfile
379 hg qrefresh -X newfile
392 hg qrefresh -X newfile
380 hg st -A newfile
393 hg st -A newfile
381 hg revert newfile
394 hg revert newfile
382 rm newfile
395 rm newfile
383 hg qpop
396 hg qpop
384 hg qdel baz
397 hg qdel baz
385
398
386 echo % create a git patch
399 echo % create a git patch
387 echo a > alexander
400 echo a > alexander
388 hg add alexander
401 hg add alexander
389 hg qnew -f --git addalexander
402 hg qnew -f --git addalexander
390 grep diff .hg/patches/addalexander
403 grep diff .hg/patches/addalexander
391
404
392 echo % create a git binary patch
405 echo % create a git binary patch
393 cat > writebin.py <<EOF
406 cat > writebin.py <<EOF
394 import sys
407 import sys
395 path = sys.argv[1]
408 path = sys.argv[1]
396 open(path, 'wb').write('BIN\x00ARY')
409 open(path, 'wb').write('BIN\x00ARY')
397 EOF
410 EOF
398 python writebin.py bucephalus
411 python writebin.py bucephalus
399
412
400 python "$TESTDIR/md5sum.py" bucephalus
413 python "$TESTDIR/md5sum.py" bucephalus
401 hg add bucephalus
414 hg add bucephalus
402 hg qnew -f --git addbucephalus
415 hg qnew -f --git addbucephalus
403 grep diff .hg/patches/addbucephalus
416 grep diff .hg/patches/addbucephalus
404
417
405 echo % check binary patches can be popped and pushed
418 echo % check binary patches can be popped and pushed
406 hg qpop
419 hg qpop
407 test -f bucephalus && echo % bucephalus should not be there
420 test -f bucephalus && echo % bucephalus should not be there
408 hg qpush
421 hg qpush
409 test -f bucephalus || echo % bucephalus should be there
422 test -f bucephalus || echo % bucephalus should be there
410 python "$TESTDIR/md5sum.py" bucephalus
423 python "$TESTDIR/md5sum.py" bucephalus
411
424
412
425
413 echo '% strip again'
426 echo '% strip again'
414 cd ..
427 cd ..
415 hg init strip
428 hg init strip
416 cd strip
429 cd strip
417 touch foo
430 touch foo
418 hg add foo
431 hg add foo
419 hg ci -m 'add foo' -d '0 0'
432 hg ci -m 'add foo' -d '0 0'
420 echo >> foo
433 echo >> foo
421 hg ci -m 'change foo 1' -d '0 0'
434 hg ci -m 'change foo 1' -d '0 0'
422 hg up -C 0
435 hg up -C 0
423 echo 1 >> foo
436 echo 1 >> foo
424 hg ci -m 'change foo 2' -d '0 0'
437 hg ci -m 'change foo 2' -d '0 0'
425 HGMERGE=true hg merge
438 HGMERGE=true hg merge
426 hg ci -m merge -d '0 0'
439 hg ci -m merge -d '0 0'
427 hg log
440 hg log
428 hg strip 1 2>&1 | sed 's/\(saving bundle to \).*/\1/'
441 hg strip 1 2>&1 | sed 's/\(saving bundle to \).*/\1/'
429 checkundo strip
442 checkundo strip
430 hg log
443 hg log
431 cd ..
444 cd ..
432
445
433 echo '% qclone'
446 echo '% qclone'
434 qlog()
447 qlog()
435 {
448 {
436 echo 'main repo:'
449 echo 'main repo:'
437 hg log --template ' rev {rev}: {desc}\n'
450 hg log --template ' rev {rev}: {desc}\n'
438 echo 'patch repo:'
451 echo 'patch repo:'
439 hg -R .hg/patches log --template ' rev {rev}: {desc}\n'
452 hg -R .hg/patches log --template ' rev {rev}: {desc}\n'
440 }
453 }
441 hg init qclonesource
454 hg init qclonesource
442 cd qclonesource
455 cd qclonesource
443 echo foo > foo
456 echo foo > foo
444 hg add foo
457 hg add foo
445 hg ci -m 'add foo'
458 hg ci -m 'add foo'
446 hg qinit
459 hg qinit
447 hg qnew patch1
460 hg qnew patch1
448 echo bar >> foo
461 echo bar >> foo
449 hg qrefresh -m 'change foo'
462 hg qrefresh -m 'change foo'
450 cd ..
463 cd ..
451
464
452 # repo with unversioned patch dir
465 # repo with unversioned patch dir
453 hg qclone qclonesource failure
466 hg qclone qclonesource failure
454
467
455 cd qclonesource
468 cd qclonesource
456 hg qinit -c
469 hg qinit -c
457 hg qci -m checkpoint
470 hg qci -m checkpoint
458 qlog
471 qlog
459 cd ..
472 cd ..
460
473
461 # repo with patches applied
474 # repo with patches applied
462 hg qclone qclonesource qclonedest
475 hg qclone qclonesource qclonedest
463 cd qclonedest
476 cd qclonedest
464 qlog
477 qlog
465 cd ..
478 cd ..
466
479
467 # repo with patches unapplied
480 # repo with patches unapplied
468 cd qclonesource
481 cd qclonesource
469 hg qpop -a
482 hg qpop -a
470 qlog
483 qlog
471 cd ..
484 cd ..
472 hg qclone qclonesource qclonedest2
485 hg qclone qclonesource qclonedest2
473 cd qclonedest2
486 cd qclonedest2
474 qlog
487 qlog
475 cd ..
488 cd ..
476
489
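
The script above drives mq through copy/rename-aware refreshes, git-format binary patches, strip, and qclone. For orientation, a minimal sketch of the plain qinit/qnew/qrefresh/qpop/qpush cycle that the expected output below assumes (repository, file and patch names here are illustrative, not part of the test):

#!/bin/sh
hg init demo
cd demo
echo '[extensions]' >> .hg/hgrc
echo 'hgext.mq =' >> .hg/hgrc
echo base > file.txt
hg add file.txt
hg ci -m 'base commit'
hg qinit                          # create the .hg/patches queue
hg qnew example.patch             # start a new, empty patch
echo change >> file.txt
hg qrefresh -m 'example change'   # capture working-dir changes into the patch
hg qpop                           # "Patch queue now empty"
hg qpush                          # reapply example.patch
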
@@ -1,455 +1,472 b''
1 % help
1 % help
2 mq extension - patch management and development
2 mq extension - patch management and development
3
3
4 This extension lets you work with a stack of patches in a Mercurial
4 This extension lets you work with a stack of patches in a Mercurial
5 repository. It manages two stacks of patches - all known patches, and
5 repository. It manages two stacks of patches - all known patches, and
6 applied patches (subset of known patches).
6 applied patches (subset of known patches).
7
7
8 Known patches are represented as patch files in the .hg/patches
8 Known patches are represented as patch files in the .hg/patches
9 directory. Applied patches are both patch files and changesets.
9 directory. Applied patches are both patch files and changesets.
10
10
11 Common tasks (use "hg help command" for more details):
11 Common tasks (use "hg help command" for more details):
12
12
13 prepare repository to work with patches qinit
13 prepare repository to work with patches qinit
14 create new patch qnew
14 create new patch qnew
15 import existing patch qimport
15 import existing patch qimport
16
16
17 print patch series qseries
17 print patch series qseries
18 print applied patches qapplied
18 print applied patches qapplied
19 print name of top applied patch qtop
19 print name of top applied patch qtop
20
20
21 add known patch to applied stack qpush
21 add known patch to applied stack qpush
22 remove patch from applied stack qpop
22 remove patch from applied stack qpop
23 refresh contents of top applied patch qrefresh
23 refresh contents of top applied patch qrefresh
24
24
25 list of commands:
25 list of commands:
26
26
27 qapplied print the patches already applied
27 qapplied print the patches already applied
28 qclone clone main and patch repository at same time
28 qclone clone main and patch repository at same time
29 qcommit commit changes in the queue repository
29 qcommit commit changes in the queue repository
30 qdelete remove patches from queue
30 qdelete remove patches from queue
31 qdiff diff of the current patch
31 qdiff diff of the current patch
32 qfold fold the named patches into the current patch
32 qfold fold the named patches into the current patch
33 qgoto push or pop patches until named patch is at top of stack
33 qgoto push or pop patches until named patch is at top of stack
34 qguard set or print guards for a patch
34 qguard set or print guards for a patch
35 qheader Print the header of the topmost or specified patch
35 qheader Print the header of the topmost or specified patch
36 qimport import a patch
36 qimport import a patch
37 qinit init a new queue repository
37 qinit init a new queue repository
38 qnew create a new patch
38 qnew create a new patch
39 qnext print the name of the next patch
39 qnext print the name of the next patch
40 qpop pop the current patch off the stack
40 qpop pop the current patch off the stack
41 qprev print the name of the previous patch
41 qprev print the name of the previous patch
42 qpush push the next patch onto the stack
42 qpush push the next patch onto the stack
43 qrefresh update the current patch
43 qrefresh update the current patch
44 qrename rename a patch
44 qrename rename a patch
45 qrestore restore the queue state saved by a rev
45 qrestore restore the queue state saved by a rev
46 qsave save current queue state
46 qsave save current queue state
47 qselect set or print guarded patches to push
47 qselect set or print guarded patches to push
48 qseries print the entire series file
48 qseries print the entire series file
49 qtop print the name of the current patch
49 qtop print the name of the current patch
50 qunapplied print the patches not yet applied
50 qunapplied print the patches not yet applied
51 strip strip a revision and all later revs on the same branch
51 strip strip a revision and all later revs on the same branch
52
52
53 use "hg -v help mq" to show aliases and global options
53 use "hg -v help mq" to show aliases and global options
54 adding a
54 adding a
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 adding b/z
56 adding b/z
57 % qinit
57 % qinit
58 % -R qinit
58 % -R qinit
59 % qinit -c
59 % qinit -c
60 A .hgignore
60 A .hgignore
61 A series
61 A series
62 % qnew should refuse bad patch names
63 abort: "series" cannot be used as the name of a patch
64 abort: "status" cannot be used as the name of a patch
65 abort: "guards" cannot be used as the name of a patch
66 abort: ".hgignore" cannot be used as the name of a patch
62 % qnew implies add
67 % qnew implies add
63 A .hgignore
68 A .hgignore
64 A series
69 A series
65 A test.patch
70 A test.patch
66 % qinit; qinit -c
71 % qinit; qinit -c
67 .hgignore:
72 .hgignore:
68 syntax: glob
73 syntax: glob
69 status
74 status
70 guards
75 guards
71 series:
76 series:
72 abort: repository already exists!
77 abort: repository already exists!
73 % qinit; <stuff>; qinit -c
78 % qinit; <stuff>; qinit -c
74 adding .hg/patches/A
79 adding .hg/patches/A
75 adding .hg/patches/B
80 adding .hg/patches/B
76 A .hgignore
81 A .hgignore
77 A A
82 A A
78 A B
83 A B
79 A series
84 A series
80 .hgignore:
85 .hgignore:
81 status
86 status
82 bleh
87 bleh
83 series:
88 series:
84 A
89 A
85 B
90 B
86 % qnew with uncommitted changes
91 % qnew with uncommitted changes
87 abort: local changes found, refresh first
92 abort: local changes found, refresh first
88 A somefile
93 A somefile
89 % qnew with uncommitted changes and missing file (issue 803)
94 % qnew with uncommitted changes and missing file (issue 803)
90 someotherfile: No such file or directory
95 someotherfile: No such file or directory
91 A somefile
96 A somefile
92 issue803.patch
97 issue803.patch
93 Patch queue now empty
98 Patch queue now empty
94 % qnew -m
99 % qnew -m
95 foo bar
100 foo bar
96 % qrefresh
101 % qrefresh
97 foo bar
102 foo bar
98
103
99 diff -r xa
104 diff -r xa
100 --- a/a
105 --- a/a
101 +++ b/a
106 +++ b/a
102 @@ -1,1 +1,2 @@
107 @@ -1,1 +1,2 @@
103 a
108 a
104 +a
109 +a
105 % empty qrefresh
110 % empty qrefresh
106 revision:
111 revision:
107 patch:
112 patch:
108 foo bar
113 foo bar
109
114
110 working dir diff:
115 working dir diff:
111 --- a/a
116 --- a/a
112 +++ b/a
117 +++ b/a
113 @@ -1,1 +1,2 @@
118 @@ -1,1 +1,2 @@
114 a
119 a
115 +a
120 +a
116 % qpop
121 % qpop
117 Patch queue now empty
122 Patch queue now empty
118 % qpush
123 % qpush
119 applying test.patch
124 applying test.patch
120 Now at: test.patch
125 Now at: test.patch
121 % pop/push outside repo
126 % pop/push outside repo
122 Patch queue now empty
127 Patch queue now empty
123 applying test.patch
128 applying test.patch
124 Now at: test.patch
129 Now at: test.patch
125 % qrefresh in subdir
130 % qrefresh in subdir
126 % pop/push -a in subdir
131 % pop/push -a in subdir
127 Patch queue now empty
132 Patch queue now empty
128 applying test.patch
133 applying test.patch
129 applying test2.patch
134 applying test2.patch
130 Now at: test2.patch
135 Now at: test2.patch
131 % qseries
136 % qseries
132 test.patch
137 test.patch
133 test2.patch
138 test2.patch
134 Now at: test.patch
139 Now at: test.patch
135 0 A test.patch: foo bar
140 0 A test.patch: foo bar
136 1 U test2.patch:
141 1 U test2.patch:
137 applying test2.patch
142 applying test2.patch
138 Now at: test2.patch
143 Now at: test2.patch
139 % qapplied
144 % qapplied
140 test.patch
145 test.patch
141 test2.patch
146 test2.patch
142 % qtop
147 % qtop
143 test2.patch
148 test2.patch
144 % qprev
149 % qprev
145 test.patch
150 test.patch
146 % qnext
151 % qnext
147 All patches applied
152 All patches applied
148 % pop, qnext, qprev, qapplied
153 % pop, qnext, qprev, qapplied
149 Now at: test.patch
154 Now at: test.patch
150 test2.patch
155 test2.patch
151 Only one patch applied
156 Only one patch applied
152 test.patch
157 test.patch
153 % commit should fail
158 % commit should fail
154 abort: cannot commit over an applied mq patch
159 abort: cannot commit over an applied mq patch
155 % push should fail
160 % push should fail
156 pushing to ../../k
161 pushing to ../../k
157 abort: source has mq patches applied
162 abort: source has mq patches applied
158 % qunapplied
163 % qunapplied
159 test2.patch
164 test2.patch
160 % qpush/qpop with index
165 % qpush/qpop with index
161 applying test2.patch
166 applying test2.patch
162 Now at: test2.patch
167 Now at: test2.patch
163 Now at: test.patch
168 Now at: test.patch
164 applying test1b.patch
169 applying test1b.patch
165 Now at: test1b.patch
170 Now at: test1b.patch
166 applying test2.patch
171 applying test2.patch
167 Now at: test2.patch
172 Now at: test2.patch
168 Now at: test1b.patch
173 Now at: test1b.patch
169 Now at: test.patch
174 Now at: test.patch
170 applying test1b.patch
175 applying test1b.patch
171 applying test2.patch
176 applying test2.patch
172 Now at: test2.patch
177 Now at: test2.patch
173 % push should succeed
178 % push should succeed
174 Patch queue now empty
179 Patch queue now empty
175 pushing to ../../k
180 pushing to ../../k
176 searching for changes
181 searching for changes
177 adding changesets
182 adding changesets
178 adding manifests
183 adding manifests
179 adding file changes
184 adding file changes
180 added 1 changesets with 1 changes to 1 files
185 added 1 changesets with 1 changes to 1 files
181 % qpush/qpop error codes
186 % qpush/qpop error codes
182 applying test.patch
187 applying test.patch
183 applying test1b.patch
188 applying test1b.patch
184 applying test2.patch
189 applying test2.patch
185 Now at: test2.patch
190 Now at: test2.patch
186 % pops all patches and succeeds
191 % pops all patches and succeeds
187 Patch queue now empty
192 Patch queue now empty
188 qpop -a succeeds
193 qpop -a succeeds
189 % does nothing and succeeds
194 % does nothing and succeeds
190 no patches applied
195 no patches applied
191 qpop -a succeeds
196 qpop -a succeeds
192 % fails - nothing else to pop
197 % fails - nothing else to pop
193 no patches applied
198 no patches applied
194 qpop fails
199 qpop fails
195 % pushes a patch and succeeds
200 % pushes a patch and succeeds
196 applying test.patch
201 applying test.patch
197 Now at: test.patch
202 Now at: test.patch
198 qpush succeeds
203 qpush succeeds
199 % pops a patch and succeeds
204 % pops a patch and succeeds
200 Patch queue now empty
205 Patch queue now empty
201 qpop succeeds
206 qpop succeeds
202 % pushes up to test1b.patch and succeeds
207 % pushes up to test1b.patch and succeeds
203 applying test.patch
208 applying test.patch
204 applying test1b.patch
209 applying test1b.patch
205 Now at: test1b.patch
210 Now at: test1b.patch
206 qpush test1b.patch succeeds
211 qpush test1b.patch succeeds
207 % does nothing and succeeds
212 % does nothing and succeeds
208 qpush: test1b.patch is already at the top
213 qpush: test1b.patch is already at the top
209 qpush test1b.patch succeeds
214 qpush test1b.patch succeeds
210 % does nothing and succeeds
215 % does nothing and succeeds
211 qpop: test1b.patch is already at the top
216 qpop: test1b.patch is already at the top
212 qpop test1b.patch succeeds
217 qpop test1b.patch succeeds
213 % fails - can't push to this patch
218 % fails - can't push to this patch
214 abort: cannot push to a previous patch: test.patch
219 abort: cannot push to a previous patch: test.patch
215 qpush test.patch fails
220 qpush test.patch fails
216 % fails - can't pop to this patch
221 % fails - can't pop to this patch
217 abort: patch test2.patch is not applied
222 abort: patch test2.patch is not applied
218 qpop test2.patch fails
223 qpop test2.patch fails
219 % pops up to test.patch and succeeds
224 % pops up to test.patch and succeeds
220 Now at: test.patch
225 Now at: test.patch
221 qpop test.patch succeeds
226 qpop test.patch succeeds
222 % pushes all patches and succeeds
227 % pushes all patches and succeeds
223 applying test1b.patch
228 applying test1b.patch
224 applying test2.patch
229 applying test2.patch
225 Now at: test2.patch
230 Now at: test2.patch
226 qpush -a succeeds
231 qpush -a succeeds
227 % does nothing and succeeds
232 % does nothing and succeeds
228 all patches are currently applied
233 all patches are currently applied
229 qpush -a succeeds
234 qpush -a succeeds
230 % fails - nothing else to push
235 % fails - nothing else to push
231 patch series already fully applied
236 patch series already fully applied
232 qpush fails
237 qpush fails
233 % does nothing and succeeds
238 % does nothing and succeeds
234 all patches are currently applied
239 all patches are currently applied
235 qpush test2.patch succeeds
240 qpush test2.patch succeeds
236 % strip
241 % strip
237 adding x
242 adding x
238 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
243 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
239 saving bundle to
244 saving bundle to
240 adding changesets
245 adding changesets
241 adding manifests
246 adding manifests
242 adding file changes
247 adding file changes
243 added 1 changesets with 1 changes to 1 files
248 added 1 changesets with 1 changes to 1 files
244 (run 'hg update' to get a working copy)
249 (run 'hg update' to get a working copy)
245 % cd b; hg qrefresh
250 % cd b; hg qrefresh
246 adding a
251 adding a
247 foo
252 foo
248
253
249 diff -r cb9a9f314b8b a
254 diff -r cb9a9f314b8b a
250 --- a/a
255 --- a/a
251 +++ b/a
256 +++ b/a
252 @@ -1,1 +1,2 @@
257 @@ -1,1 +1,2 @@
253 a
258 a
254 +a
259 +a
255 diff -r cb9a9f314b8b b/f
260 diff -r cb9a9f314b8b b/f
256 --- /dev/null
261 --- /dev/null
257 +++ b/b/f
262 +++ b/b/f
258 @@ -0,0 +1,1 @@
263 @@ -0,0 +1,1 @@
259 +f
264 +f
260 % hg qrefresh .
265 % hg qrefresh .
261 foo
266 foo
262
267
263 diff -r cb9a9f314b8b b/f
268 diff -r cb9a9f314b8b b/f
264 --- /dev/null
269 --- /dev/null
265 +++ b/b/f
270 +++ b/b/f
266 @@ -0,0 +1,1 @@
271 @@ -0,0 +1,1 @@
267 +f
272 +f
268 M a
273 M a
269 % qpush failure
274 % qpush failure
270 Patch queue now empty
275 Patch queue now empty
271 applying foo
276 applying foo
272 applying bar
277 applying bar
273 file foo already exists
278 file foo already exists
274 1 out of 1 hunk FAILED -- saving rejects to file foo.rej
279 1 out of 1 hunk FAILED -- saving rejects to file foo.rej
275 patch failed, unable to continue (try -v)
280 patch failed, unable to continue (try -v)
276 patch failed, rejects left in working dir
281 patch failed, rejects left in working dir
277 Errors during apply, please fix and refresh bar
282 Errors during apply, please fix and refresh bar
278 ? foo
283 ? foo
279 ? foo.rej
284 ? foo.rej
280 % mq tags
285 % mq tags
281 0 qparent
286 0 qparent
282 1 qbase foo
287 1 qbase foo
283 2 qtip bar tip
288 2 qtip bar tip
289 % bad node in status
290 Now at: foo
291 changeset: 0:cb9a9f314b8b
292 mq status file refers to unknown node
293 tag: tip
294 user: test
295 date: Thu Jan 01 00:00:00 1970 +0000
296 summary: a
297
298 mq status file refers to unknown node
299 default 0:cb9a9f314b8b
300 abort: working directory revision is not qtip
284 new file
301 new file
285
302
286 diff --git a/new b/new
303 diff --git a/new b/new
287 new file mode 100755
304 new file mode 100755
288 --- /dev/null
305 --- /dev/null
289 +++ b/new
306 +++ b/new
290 @@ -0,0 +1,1 @@
307 @@ -0,0 +1,1 @@
291 +foo
308 +foo
292 copy file
309 copy file
293
310
294 diff --git a/new b/copy
311 diff --git a/new b/copy
295 copy from new
312 copy from new
296 copy to copy
313 copy to copy
297 Now at: new
314 Now at: new
298 applying copy
315 applying copy
299 Now at: copy
316 Now at: copy
300 diff --git a/new b/copy
317 diff --git a/new b/copy
301 copy from new
318 copy from new
302 copy to copy
319 copy to copy
303 diff --git a/new b/copy
320 diff --git a/new b/copy
304 copy from new
321 copy from new
305 copy to copy
322 copy to copy
306 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
323 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
307 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
324 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
308 adding branch
325 adding branch
309 adding changesets
326 adding changesets
310 adding manifests
327 adding manifests
311 adding file changes
328 adding file changes
312 added 1 changesets with 1 changes to 1 files
329 added 1 changesets with 1 changes to 1 files
313 Patch queue now empty
330 Patch queue now empty
314 applying bar
331 applying bar
315 Now at: bar
332 Now at: bar
316 diff --git a/bar b/bar
333 diff --git a/bar b/bar
317 new file mode 100644
334 new file mode 100644
318 --- /dev/null
335 --- /dev/null
319 +++ b/bar
336 +++ b/bar
320 @@ -0,0 +1,1 @@
337 @@ -0,0 +1,1 @@
321 +bar
338 +bar
322 diff --git a/foo b/baz
339 diff --git a/foo b/baz
323 rename from foo
340 rename from foo
324 rename to baz
341 rename to baz
325 2 baz (foo)
342 2 baz (foo)
326 diff --git a/bar b/bar
343 diff --git a/bar b/bar
327 new file mode 100644
344 new file mode 100644
328 --- /dev/null
345 --- /dev/null
329 +++ b/bar
346 +++ b/bar
330 @@ -0,0 +1,1 @@
347 @@ -0,0 +1,1 @@
331 +bar
348 +bar
332 diff --git a/foo b/baz
349 diff --git a/foo b/baz
333 rename from foo
350 rename from foo
334 rename to baz
351 rename to baz
335 2 baz (foo)
352 2 baz (foo)
336 diff --git a/bar b/bar
353 diff --git a/bar b/bar
337 diff --git a/foo b/baz
354 diff --git a/foo b/baz
338
355
339 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
356 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
340 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
357 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
341 adding branch
358 adding branch
342 adding changesets
359 adding changesets
343 adding manifests
360 adding manifests
344 adding file changes
361 adding file changes
345 added 1 changesets with 1 changes to 1 files
362 added 1 changesets with 1 changes to 1 files
346 Patch queue now empty
363 Patch queue now empty
347 applying bar
364 applying bar
348 Now at: bar
365 Now at: bar
349 diff --git a/foo b/bleh
366 diff --git a/foo b/bleh
350 rename from foo
367 rename from foo
351 rename to bleh
368 rename to bleh
352 diff --git a/quux b/quux
369 diff --git a/quux b/quux
353 new file mode 100644
370 new file mode 100644
354 --- /dev/null
371 --- /dev/null
355 +++ b/quux
372 +++ b/quux
356 @@ -0,0 +1,1 @@
373 @@ -0,0 +1,1 @@
357 +bar
374 +bar
358 3 bleh (foo)
375 3 bleh (foo)
359 diff --git a/foo b/barney
376 diff --git a/foo b/barney
360 rename from foo
377 rename from foo
361 rename to barney
378 rename to barney
362 diff --git a/fred b/fred
379 diff --git a/fred b/fred
363 new file mode 100644
380 new file mode 100644
364 --- /dev/null
381 --- /dev/null
365 +++ b/fred
382 +++ b/fred
366 @@ -0,0 +1,1 @@
383 @@ -0,0 +1,1 @@
367 +bar
384 +bar
368 3 barney (foo)
385 3 barney (foo)
369 % refresh omitting an added file
386 % refresh omitting an added file
370 C newfile
387 C newfile
371 A newfile
388 A newfile
372 Now at: bar
389 Now at: bar
373 % create a git patch
390 % create a git patch
374 diff --git a/alexander b/alexander
391 diff --git a/alexander b/alexander
375 % create a git binary patch
392 % create a git binary patch
376 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
393 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
377 diff --git a/bucephalus b/bucephalus
394 diff --git a/bucephalus b/bucephalus
378 % check binary patches can be popped and pushed
395 % check binary patches can be popped and pushed
379 Now at: addalexander
396 Now at: addalexander
380 applying addbucephalus
397 applying addbucephalus
381 Now at: addbucephalus
398 Now at: addbucephalus
382 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
399 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
383 % strip again
400 % strip again
384 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
401 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
385 merging foo
402 merging foo
386 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
403 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
387 (branch merge, don't forget to commit)
404 (branch merge, don't forget to commit)
388 changeset: 3:99615015637b
405 changeset: 3:99615015637b
389 tag: tip
406 tag: tip
390 parent: 2:20cbbe65cff7
407 parent: 2:20cbbe65cff7
391 parent: 1:d2871fc282d4
408 parent: 1:d2871fc282d4
392 user: test
409 user: test
393 date: Thu Jan 01 00:00:00 1970 +0000
410 date: Thu Jan 01 00:00:00 1970 +0000
394 summary: merge
411 summary: merge
395
412
396 changeset: 2:20cbbe65cff7
413 changeset: 2:20cbbe65cff7
397 parent: 0:53245c60e682
414 parent: 0:53245c60e682
398 user: test
415 user: test
399 date: Thu Jan 01 00:00:00 1970 +0000
416 date: Thu Jan 01 00:00:00 1970 +0000
400 summary: change foo 2
417 summary: change foo 2
401
418
402 changeset: 1:d2871fc282d4
419 changeset: 1:d2871fc282d4
403 user: test
420 user: test
404 date: Thu Jan 01 00:00:00 1970 +0000
421 date: Thu Jan 01 00:00:00 1970 +0000
405 summary: change foo 1
422 summary: change foo 1
406
423
407 changeset: 0:53245c60e682
424 changeset: 0:53245c60e682
408 user: test
425 user: test
409 date: Thu Jan 01 00:00:00 1970 +0000
426 date: Thu Jan 01 00:00:00 1970 +0000
410 summary: add foo
427 summary: add foo
411
428
412 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
429 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
413 saving bundle to
430 saving bundle to
414 saving bundle to
431 saving bundle to
415 adding branch
432 adding branch
416 adding changesets
433 adding changesets
417 adding manifests
434 adding manifests
418 adding file changes
435 adding file changes
419 added 1 changesets with 1 changes to 1 files
436 added 1 changesets with 1 changes to 1 files
420 changeset: 1:20cbbe65cff7
437 changeset: 1:20cbbe65cff7
421 tag: tip
438 tag: tip
422 user: test
439 user: test
423 date: Thu Jan 01 00:00:00 1970 +0000
440 date: Thu Jan 01 00:00:00 1970 +0000
424 summary: change foo 2
441 summary: change foo 2
425
442
426 changeset: 0:53245c60e682
443 changeset: 0:53245c60e682
427 user: test
444 user: test
428 date: Thu Jan 01 00:00:00 1970 +0000
445 date: Thu Jan 01 00:00:00 1970 +0000
429 summary: add foo
446 summary: add foo
430
447
431 % qclone
448 % qclone
432 abort: versioned patch repository not found (see qinit -c)
449 abort: versioned patch repository not found (see qinit -c)
433 adding .hg/patches/patch1
450 adding .hg/patches/patch1
434 main repo:
451 main repo:
435 rev 1: change foo
452 rev 1: change foo
436 rev 0: add foo
453 rev 0: add foo
437 patch repo:
454 patch repo:
438 rev 0: checkpoint
455 rev 0: checkpoint
439 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
456 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
440 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
457 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
441 main repo:
458 main repo:
442 rev 0: add foo
459 rev 0: add foo
443 patch repo:
460 patch repo:
444 rev 0: checkpoint
461 rev 0: checkpoint
445 Patch queue now empty
462 Patch queue now empty
446 main repo:
463 main repo:
447 rev 0: add foo
464 rev 0: add foo
448 patch repo:
465 patch repo:
449 rev 0: checkpoint
466 rev 0: checkpoint
450 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
467 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
468 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
452 main repo:
469 main repo:
453 rev 0: add foo
470 rev 0: add foo
454 patch repo:
471 patch repo:
455 rev 0: checkpoint
472 rev 0: checkpoint
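
The qclone portion of the output above hinges on one precondition: the patch directory must itself be a versioned repository, otherwise qclone aborts with "versioned patch repository not found (see qinit -c)". A small sketch of that setup, assuming an existing mq-managed repository (the names myrepo and myclone are illustrative):

cd myrepo
hg qinit -c                    # turn .hg/patches into a nested repository
hg qcommit -m 'track patches'  # commit series, status and the patch files
cd ..
hg qclone myrepo myclone       # clones the main repo and .hg/patches together
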
@@ -1,65 +1,74 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init t
3 hg init t
4 cd t
4 cd t
5 hg branches
5 hg branches
6
6
7 echo foo > a
7 echo foo > a
8 hg add a
8 hg add a
9 hg ci -m "initial" -d "1000000 0"
9 hg ci -m "initial" -d "1000000 0"
10 hg branch foo
10 hg branch foo
11 hg branch
11 hg branch
12 hg ci -m "add branch name" -d "1000000 0"
12 hg ci -m "add branch name" -d "1000000 0"
13 hg branch bar
13 hg branch bar
14 hg ci -m "change branch name" -d "1000000 0"
14 hg ci -m "change branch name" -d "1000000 0"
15 echo % branch shadowing
15 echo % branch shadowing
16 hg branch default
16 hg branch default
17 hg branch -f default
17 hg branch -f default
18 hg ci -m "clear branch name" -d "1000000 0"
18 hg ci -m "clear branch name" -d "1000000 0"
19
19
20 hg co foo
20 hg co foo
21 hg branch
21 hg branch
22 echo bleah > a
22 echo bleah > a
23 hg ci -m "modify a branch" -d "1000000 0"
23 hg ci -m "modify a branch" -d "1000000 0"
24
24
25 hg merge
25 hg merge
26 hg branch
26 hg branch
27 hg ci -m "merge" -d "1000000 0"
27 hg ci -m "merge" -d "1000000 0"
28 hg log
28 hg log
29
29
30 hg branches
30 hg branches
31 hg branches -q
31 hg branches -q
32
32
33 echo % test for invalid branch cache
33 echo % test for invalid branch cache
34 hg rollback
34 hg rollback
35 cp .hg/branch.cache .hg/bc-invalid
35 cp .hg/branch.cache .hg/bc-invalid
36 hg log -r foo
36 hg log -r foo
37 cp .hg/bc-invalid .hg/branch.cache
37 cp .hg/bc-invalid .hg/branch.cache
38 hg --debug log -r foo
38 hg --debug log -r foo
39 rm .hg/branch.cache
39 rm .hg/branch.cache
40 echo corrupted > .hg/branch.cache
40 echo corrupted > .hg/branch.cache
41 hg log -qr foo
41 hg log -qr foo
42 cat .hg/branch.cache
42 cat .hg/branch.cache
43
43
44 echo % push should update the branch cache
45 hg init ../target
46 echo % pushing just rev 0
47 hg push -qr 0 ../target
48 cat ../target/.hg/branch.cache
49 echo % pushing everything
50 hg push -qf ../target
51 cat ../target/.hg/branch.cache
52
44 echo % update with no arguments: tipmost revision of the current branch
53 echo % update with no arguments: tipmost revision of the current branch
45 hg up -q -C 0
54 hg up -q -C 0
46 hg up -q
55 hg up -q
47 hg id
56 hg id
48 hg up -q 1
57 hg up -q 1
49 hg up -q
58 hg up -q
50 hg id
59 hg id
51 hg branch foobar
60 hg branch foobar
52 hg up
61 hg up
53
62
54 echo % fastforward merge
63 echo % fastforward merge
55 hg branch ff
64 hg branch ff
56 echo ff > ff
65 echo ff > ff
57 hg ci -Am'fast forward' -d '1000000 0'
66 hg ci -Am'fast forward' -d '1000000 0'
58 hg up foo
67 hg up foo
59 hg merge ff
68 hg merge ff
60 hg branch
69 hg branch
61 hg commit -m'Merge ff into foo' -d '1000000 0'
70 hg commit -m'Merge ff into foo' -d '1000000 0'
62 hg parents
71 hg parents
63 hg manifest
72 hg manifest
64
73
65 exit 0
74 exit 0
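
The branch test above mixes branch creation, shadowing, merging and cache validation. As a minimal sketch of the named-branch workflow it builds on (names are illustrative, not taken from the test):

hg init branchdemo
cd branchdemo
echo a > a
hg ci -Am 'initial'              # rev 0 on the "default" branch
hg branch feature                # mark the working directory as branch "feature"
echo b > b
hg add b
hg ci -m 'add b on feature'      # rev 1, head of "feature"
hg update default                # back to the default branch head
echo c > c
hg add c
hg ci -m 'add c on default'      # rev 2, the branches now diverge
hg merge feature                 # merge the named branch into default
hg ci -m 'merge feature into default'
hg branches                      # lists the heads of both branches
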
@@ -1,108 +1,117 b''
1 marked working directory as branch foo
1 marked working directory as branch foo
2 foo
2 foo
3 marked working directory as branch bar
3 marked working directory as branch bar
4 % branch shadowing
4 % branch shadowing
5 abort: a branch of the same name already exists (use --force to override)
5 abort: a branch of the same name already exists (use --force to override)
6 marked working directory as branch default
6 marked working directory as branch default
7 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 foo
8 foo
9 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 (branch merge, don't forget to commit)
10 (branch merge, don't forget to commit)
11 foo
11 foo
12 changeset: 5:5f8fb06e083e
12 changeset: 5:5f8fb06e083e
13 branch: foo
13 branch: foo
14 tag: tip
14 tag: tip
15 parent: 4:4909a3732169
15 parent: 4:4909a3732169
16 parent: 3:bf1bc2f45e83
16 parent: 3:bf1bc2f45e83
17 user: test
17 user: test
18 date: Mon Jan 12 13:46:40 1970 +0000
18 date: Mon Jan 12 13:46:40 1970 +0000
19 summary: merge
19 summary: merge
20
20
21 changeset: 4:4909a3732169
21 changeset: 4:4909a3732169
22 branch: foo
22 branch: foo
23 parent: 1:b699b1cec9c2
23 parent: 1:b699b1cec9c2
24 user: test
24 user: test
25 date: Mon Jan 12 13:46:40 1970 +0000
25 date: Mon Jan 12 13:46:40 1970 +0000
26 summary: modify a branch
26 summary: modify a branch
27
27
28 changeset: 3:bf1bc2f45e83
28 changeset: 3:bf1bc2f45e83
29 user: test
29 user: test
30 date: Mon Jan 12 13:46:40 1970 +0000
30 date: Mon Jan 12 13:46:40 1970 +0000
31 summary: clear branch name
31 summary: clear branch name
32
32
33 changeset: 2:67ec16bde7f1
33 changeset: 2:67ec16bde7f1
34 branch: bar
34 branch: bar
35 user: test
35 user: test
36 date: Mon Jan 12 13:46:40 1970 +0000
36 date: Mon Jan 12 13:46:40 1970 +0000
37 summary: change branch name
37 summary: change branch name
38
38
39 changeset: 1:b699b1cec9c2
39 changeset: 1:b699b1cec9c2
40 branch: foo
40 branch: foo
41 user: test
41 user: test
42 date: Mon Jan 12 13:46:40 1970 +0000
42 date: Mon Jan 12 13:46:40 1970 +0000
43 summary: add branch name
43 summary: add branch name
44
44
45 changeset: 0:be8523e69bf8
45 changeset: 0:be8523e69bf8
46 user: test
46 user: test
47 date: Mon Jan 12 13:46:40 1970 +0000
47 date: Mon Jan 12 13:46:40 1970 +0000
48 summary: initial
48 summary: initial
49
49
50 foo 5:5f8fb06e083e
50 foo 5:5f8fb06e083e
51 default 3:bf1bc2f45e83 (inactive)
51 default 3:bf1bc2f45e83 (inactive)
52 bar 2:67ec16bde7f1 (inactive)
52 bar 2:67ec16bde7f1 (inactive)
53 foo
53 foo
54 default
54 default
55 bar
55 bar
56 % test for invalid branch cache
56 % test for invalid branch cache
57 rolling back last transaction
57 rolling back last transaction
58 changeset: 4:4909a3732169
58 changeset: 4:4909a3732169
59 branch: foo
59 branch: foo
60 tag: tip
60 tag: tip
61 parent: 1:b699b1cec9c2
61 parent: 1:b699b1cec9c2
62 user: test
62 user: test
63 date: Mon Jan 12 13:46:40 1970 +0000
63 date: Mon Jan 12 13:46:40 1970 +0000
64 summary: modify a branch
64 summary: modify a branch
65
65
66 Invalid branch cache: unknown tip
66 Invalid branch cache: unknown tip
67 changeset: 4:4909a3732169c0c20011c4f4b8fdff4e3d89b23f
67 changeset: 4:4909a3732169c0c20011c4f4b8fdff4e3d89b23f
68 branch: foo
68 branch: foo
69 tag: tip
69 tag: tip
70 parent: 1:b699b1cec9c2966b3700de4fef0dc123cd754c31
70 parent: 1:b699b1cec9c2966b3700de4fef0dc123cd754c31
71 parent: -1:0000000000000000000000000000000000000000
71 parent: -1:0000000000000000000000000000000000000000
72 manifest: 4:d01b250baaa05909152f7ae07d7a649deea0df9a
72 manifest: 4:d01b250baaa05909152f7ae07d7a649deea0df9a
73 user: test
73 user: test
74 date: Mon Jan 12 13:46:40 1970 +0000
74 date: Mon Jan 12 13:46:40 1970 +0000
75 files: a
75 files: a
76 extra: branch=foo
76 extra: branch=foo
77 description:
77 description:
78 modify a branch
78 modify a branch
79
79
80
80
81 4:4909a3732169
81 4:4909a3732169
82 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
82 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
83 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
83 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
84 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
84 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
85 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
85 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
86 % push should update the branch cache
87 % pushing just rev 0
88 be8523e69bf892e25817fc97187516b3c0804ae4 0
89 be8523e69bf892e25817fc97187516b3c0804ae4 default
90 % pushing everything
91 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
92 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
93 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
94 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
86 % update with no arguments: tipmost revision of the current branch
95 % update with no arguments: tipmost revision of the current branch
87 bf1bc2f45e83
96 bf1bc2f45e83
88 4909a3732169 (foo) tip
97 4909a3732169 (foo) tip
89 marked working directory as branch foobar
98 marked working directory as branch foobar
90 abort: branch foobar not found
99 abort: branch foobar not found
91 % fastforward merge
100 % fastforward merge
92 marked working directory as branch ff
101 marked working directory as branch ff
93 adding ff
102 adding ff
94 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
103 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
95 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
104 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 (branch merge, don't forget to commit)
105 (branch merge, don't forget to commit)
97 foo
106 foo
98 changeset: 6:f0c74f92a385
107 changeset: 6:f0c74f92a385
99 branch: foo
108 branch: foo
100 tag: tip
109 tag: tip
101 parent: 4:4909a3732169
110 parent: 4:4909a3732169
102 parent: 5:c420d2121b71
111 parent: 5:c420d2121b71
103 user: test
112 user: test
104 date: Mon Jan 12 13:46:40 1970 +0000
113 date: Mon Jan 12 13:46:40 1970 +0000
105 summary: Merge ff into foo
114 summary: Merge ff into foo
106
115
107 a
116 a
108 ff
117 ff
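
The cache dumps in the output above follow the .hg/branch.cache layout of this era: the first line is "<tip node> <tip rev>", and each following line maps a branch head node to its branch name. The cache is disposable; a sketch of forcing a rebuild, assuming any repository with named branches:

rm -f .hg/branch.cache
hg branches            # recomputes branch heads and rewrites the cache
cat .hg/branch.cache   # first line: tip; then one "<node> <branch>" per head
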
@@ -1,103 +1,111 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 cp "$TESTDIR"/printenv.py .
3 cp "$TESTDIR"/printenv.py .
4
4
5 # This test tries to exercise the ssh functionality with a dummy script
5 # This test tries to exercise the ssh functionality with a dummy script
6
6
7 cat <<EOF > dummyssh
7 cat <<EOF > dummyssh
8 import sys
8 import sys
9 import os
9 import os
10
10
11 os.chdir(os.path.dirname(sys.argv[0]))
11 os.chdir(os.path.dirname(sys.argv[0]))
12 if sys.argv[1] != "user@dummy":
12 if sys.argv[1] != "user@dummy":
13 sys.exit(-1)
13 sys.exit(-1)
14
14
15 if not os.path.exists("dummyssh"):
15 if not os.path.exists("dummyssh"):
16 sys.exit(-1)
16 sys.exit(-1)
17
17
18 os.environ["SSH_CLIENT"] = "127.0.0.1 1 2"
18 os.environ["SSH_CLIENT"] = "127.0.0.1 1 2"
19
19
20 log = open("dummylog", "ab")
20 log = open("dummylog", "ab")
21 log.write("Got arguments")
21 log.write("Got arguments")
22 for i, arg in enumerate(sys.argv[1:]):
22 for i, arg in enumerate(sys.argv[1:]):
23 log.write(" %d:%s" % (i+1, arg))
23 log.write(" %d:%s" % (i+1, arg))
24 log.write("\n")
24 log.write("\n")
25 log.close()
25 log.close()
26 r = os.system(sys.argv[2])
26 r = os.system(sys.argv[2])
27 sys.exit(bool(r))
27 sys.exit(bool(r))
28 EOF
28 EOF
29
29
30 cat <<EOF > badhook
31 import sys
32 sys.stdout.write("KABOOM")
33 EOF
34
30 echo "# creating 'remote'"
35 echo "# creating 'remote'"
31 hg init remote
36 hg init remote
32 cd remote
37 cd remote
33 echo this > foo
38 echo this > foo
34 echo this > fooO
39 echo this > fooO
35 hg ci -A -m "init" -d "1000000 0" foo fooO
40 hg ci -A -m "init" -d "1000000 0" foo fooO
36 echo '[server]' > .hg/hgrc
41 echo '[server]' > .hg/hgrc
37 echo 'uncompressed = True' >> .hg/hgrc
42 echo 'uncompressed = True' >> .hg/hgrc
38 echo '[hooks]' >> .hg/hgrc
43 echo '[hooks]' >> .hg/hgrc
39 echo 'changegroup = python ../printenv.py changegroup-in-remote 0 ../dummylog' >> .hg/hgrc
44 echo 'changegroup = python ../printenv.py changegroup-in-remote 0 ../dummylog' >> .hg/hgrc
40
45
41 cd ..
46 cd ..
42
47
43 echo "# repo not found error"
48 echo "# repo not found error"
44 hg clone -e "python ./dummyssh" ssh://user@dummy/nonexistent local
49 hg clone -e "python ./dummyssh" ssh://user@dummy/nonexistent local
45
50
46 echo "# clone remote via stream"
51 echo "# clone remote via stream"
47 hg clone -e "python ./dummyssh" --uncompressed ssh://user@dummy/remote local-stream 2>&1 | \
52 hg clone -e "python ./dummyssh" --uncompressed ssh://user@dummy/remote local-stream 2>&1 | \
48 sed -e 's/[0-9][0-9.]*/XXX/g' -e 's/[KM]\(B\/sec\)/X\1/'
53 sed -e 's/[0-9][0-9.]*/XXX/g' -e 's/[KM]\(B\/sec\)/X\1/'
49 cd local-stream
54 cd local-stream
50 hg verify
55 hg verify
51 cd ..
56 cd ..
52
57
53 echo "# clone remote via pull"
58 echo "# clone remote via pull"
54 hg clone -e "python ./dummyssh" ssh://user@dummy/remote local
59 hg clone -e "python ./dummyssh" ssh://user@dummy/remote local
55
60
56 echo "# verify"
61 echo "# verify"
57 cd local
62 cd local
58 hg verify
63 hg verify
59
64
60 echo '[hooks]' >> .hg/hgrc
65 echo '[hooks]' >> .hg/hgrc
61 echo 'changegroup = python ../printenv.py changegroup-in-local 0 ../dummylog' >> .hg/hgrc
66 echo 'changegroup = python ../printenv.py changegroup-in-local 0 ../dummylog' >> .hg/hgrc
62
67
63 echo "# empty default pull"
68 echo "# empty default pull"
64 hg paths
69 hg paths
65 hg pull -e "python ../dummyssh"
70 hg pull -e "python ../dummyssh"
66
71
67 echo "# local change"
72 echo "# local change"
68 echo bleah > foo
73 echo bleah > foo
69 hg ci -m "add" -d "1000000 0"
74 hg ci -m "add" -d "1000000 0"
70
75
71 echo "# updating rc"
76 echo "# updating rc"
72 echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
77 echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
73 echo "[ui]" >> .hg/hgrc
78 echo "[ui]" >> .hg/hgrc
74 echo "ssh = python ../dummyssh" >> .hg/hgrc
79 echo "ssh = python ../dummyssh" >> .hg/hgrc
75
80
76 echo "# find outgoing"
81 echo "# find outgoing"
77 hg out ssh://user@dummy/remote
82 hg out ssh://user@dummy/remote
78
83
79 echo "# find incoming on the remote side"
84 echo "# find incoming on the remote side"
80 hg incoming -R ../remote -e "python ../dummyssh" ssh://user@dummy/local
85 hg incoming -R ../remote -e "python ../dummyssh" ssh://user@dummy/local
81
86
82 echo "# push"
87 echo "# push"
83 hg push
88 hg push
84
89
85 cd ../remote
90 cd ../remote
86
91
87 echo "# check remote tip"
92 echo "# check remote tip"
88 hg tip
93 hg tip
89 hg verify
94 hg verify
90 hg cat -r tip foo
95 hg cat -r tip foo
91
96
92 echo z > z
97 echo z > z
93 hg ci -A -m z -d '1000001 0' z
98 hg ci -A -m z -d '1000001 0' z
99 # a bad, evil hook that prints to stdout
100 echo 'changegroup.stdout = python ../badhook' >> .hg/hgrc
94
101
95 cd ../local
102 cd ../local
96 echo r > r
103 echo r > r
97 hg ci -A -m z -d '1000002 0' r
104 hg ci -A -m z -d '1000002 0' r
98
105
99 echo "# push should succeed"
106 echo "# push should succeed even though it has an unexpected response"
100 hg push
107 hg push
108 hg -R ../remote heads
101
109
102 cd ..
110 cd ..
103 cat dummylog
111 cat dummylog
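
The ssh test above never talks to a real ssh server: the -e option (and the [ui] ssh setting) substitute the dummyssh wrapper, which simply re-executes the requested "hg ... serve --stdio" command locally. A sketch of the same plumbing, reusing the wrapper and URL defined above (the clone name local-copy is illustrative):

hg clone -e "python ./dummyssh" ssh://user@dummy/remote local-copy
cat >> local-copy/.hg/hgrc <<'EOF'
[ui]
ssh = python ../dummyssh
EOF
cd local-copy
hg pull       # now uses [ui] ssh and the default path recorded by the clone
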
@@ -1,90 +1,105 b''
1 # creating 'remote'
1 # creating 'remote'
2 # repo not found error
2 # repo not found error
3 remote: abort: There is no Mercurial repository here (.hg not found)!
3 remote: abort: There is no Mercurial repository here (.hg not found)!
4 abort: no suitable response from remote hg!
4 abort: no suitable response from remote hg!
5 # clone remote via stream
5 # clone remote via stream
6 streaming all changes
6 streaming all changes
7 XXX files to transfer, XXX bytes of data
7 XXX files to transfer, XXX bytes of data
8 transferred XXX bytes in XXX seconds (XXX XB/sec)
8 transferred XXX bytes in XXX seconds (XXX XB/sec)
9 XXX files updated, XXX files merged, XXX files removed, XXX files unresolved
9 XXX files updated, XXX files merged, XXX files removed, XXX files unresolved
10 checking changesets
10 checking changesets
11 checking manifests
11 checking manifests
12 crosschecking files in changesets and manifests
12 crosschecking files in changesets and manifests
13 checking files
13 checking files
14 2 files, 1 changesets, 2 total revisions
14 2 files, 1 changesets, 2 total revisions
15 # clone remote via pull
15 # clone remote via pull
16 requesting all changes
16 requesting all changes
17 adding changesets
17 adding changesets
18 adding manifests
18 adding manifests
19 adding file changes
19 adding file changes
20 added 1 changesets with 2 changes to 2 files
20 added 1 changesets with 2 changes to 2 files
21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 # verify
22 # verify
23 checking changesets
23 checking changesets
24 checking manifests
24 checking manifests
25 crosschecking files in changesets and manifests
25 crosschecking files in changesets and manifests
26 checking files
26 checking files
27 2 files, 1 changesets, 2 total revisions
27 2 files, 1 changesets, 2 total revisions
28 # empty default pull
28 # empty default pull
29 default = ssh://user@dummy/remote
29 default = ssh://user@dummy/remote
30 pulling from ssh://user@dummy/remote
30 pulling from ssh://user@dummy/remote
31 searching for changes
31 searching for changes
32 no changes found
32 no changes found
33 # local change
33 # local change
34 # updating rc
34 # updating rc
35 # find outgoing
35 # find outgoing
36 comparing with ssh://user@dummy/remote
36 comparing with ssh://user@dummy/remote
37 searching for changes
37 searching for changes
38 changeset: 1:572896fe480d
38 changeset: 1:572896fe480d
39 tag: tip
39 tag: tip
40 user: test
40 user: test
41 date: Mon Jan 12 13:46:40 1970 +0000
41 date: Mon Jan 12 13:46:40 1970 +0000
42 summary: add
42 summary: add
43
43
44 # find incoming on the remote side
44 # find incoming on the remote side
45 comparing with ssh://user@dummy/local
45 comparing with ssh://user@dummy/local
46 searching for changes
46 searching for changes
47 changeset: 1:572896fe480d
47 changeset: 1:572896fe480d
48 tag: tip
48 tag: tip
49 user: test
49 user: test
50 date: Mon Jan 12 13:46:40 1970 +0000
50 date: Mon Jan 12 13:46:40 1970 +0000
51 summary: add
51 summary: add
52
52
53 # push
53 # push
54 pushing to ssh://user@dummy/remote
54 pushing to ssh://user@dummy/remote
55 searching for changes
55 searching for changes
56 remote: adding changesets
56 remote: adding changesets
57 remote: adding manifests
57 remote: adding manifests
58 remote: adding file changes
58 remote: adding file changes
59 remote: added 1 changesets with 1 changes to 1 files
59 remote: added 1 changesets with 1 changes to 1 files
60 # check remote tip
60 # check remote tip
61 changeset: 1:572896fe480d
61 changeset: 1:572896fe480d
62 tag: tip
62 tag: tip
63 user: test
63 user: test
64 date: Mon Jan 12 13:46:40 1970 +0000
64 date: Mon Jan 12 13:46:40 1970 +0000
65 summary: add
65 summary: add
66
66
67 checking changesets
67 checking changesets
68 checking manifests
68 checking manifests
69 crosschecking files in changesets and manifests
69 crosschecking files in changesets and manifests
70 checking files
70 checking files
71 2 files, 2 changesets, 3 total revisions
71 2 files, 2 changesets, 3 total revisions
72 bleah
72 bleah
73 # push should succeed
73 # push should succeed even though it has an unexpected response
74 pushing to ssh://user@dummy/remote
74 pushing to ssh://user@dummy/remote
75 searching for changes
75 searching for changes
76 note: unsynced remote changes!
76 note: unsynced remote changes!
77 remote: adding changesets
77 remote: adding changesets
78 remote: adding manifests
78 remote: adding manifests
79 remote: adding file changes
79 remote: adding file changes
80 remote: added 1 changesets with 1 changes to 1 files
80 remote: added 1 changesets with 1 changes to 1 files
81 abort: unexpected response:
82 'KABOOM1\n'
83 changeset: 3:ac7448082955
84 tag: tip
85 parent: 1:572896fe480d
86 user: test
87 date: Mon Jan 12 13:46:42 1970 +0000
88 summary: z
89
90 changeset: 2:187c6caa0d1e
91 parent: 0:e34318c26897
92 user: test
93 date: Mon Jan 12 13:46:41 1970 +0000
94 summary: z
95
81 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
96 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
82 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
97 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
83 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
98 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
84 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
99 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
85 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
100 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
86 Got arguments 1:user@dummy 2:hg -R local serve --stdio
101 Got arguments 1:user@dummy 2:hg -R local serve --stdio
87 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
102 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
88 changegroup-in-remote hook: HG_NODE=572896fe480d7581849806ee402175c49cb20037 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
103 changegroup-in-remote hook: HG_NODE=572896fe480d7581849806ee402175c49cb20037 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
89 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
104 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
90 changegroup-in-remote hook: HG_NODE=ac7448082955a0b2ff5cb4512c1e061c779bbc79 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
105 changegroup-in-remote hook: HG_NODE=ac7448082955a0b2ff5cb4512c1e061c779bbc79 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
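
The tail of the output above documents the point of the "badhook" change: on an ssh-served repository, stdout carries the wire protocol, so a hook that prints to stdout makes the client report "abort: unexpected response" even though the push itself lands on the remote. A hedged sketch of the safer pattern, sending hook output to stderr instead (the hook name suffix and message are illustrative):

cat >> remote/.hg/hgrc <<'EOF'
[hooks]
changegroup.notify = echo "changegroup received" 1>&2
EOF
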