##// END OF EJS Templates
merge with crew-stable
Alexis S. L. Carvalho -
r6001:30d2feca merge default
parent child Browse files
Show More
@@ -0,0 +1,44 b''
1 #!/bin/sh
2
3 echo '[extensions]' >> $HGRCPATH
4 echo 'hgext.mq =' >> $HGRCPATH
5
6 hg init repo
7 cd repo
8
9 echo foo > foo
10 hg ci -qAm 'add a file'
11
12 hg qinit
13
14 hg qnew foo
15 echo foo >> foo
16 hg qrefresh -m 'append foo'
17
18 hg qnew bar
19 echo bar >> foo
20 hg qrefresh -m 'append bar'
21
22 echo '% try to commit on top of a patch'
23 echo quux >> foo
24 hg ci -m 'append quux'
25
26 # cheat a bit...
27 mv .hg/patches .hg/patches2
28 hg ci -m 'append quux'
29 mv .hg/patches2 .hg/patches
30
31 echo '% qpop/qrefresh on the wrong revision'
32 hg qpop
33 hg qpop -n patches 2>&1 | sed -e 's/\(using patch queue:\).*/\1/'
34 hg qrefresh
35
36 hg up -C qtip
37 echo '% qpop'
38 hg qpop
39
40 echo '% qrefresh'
41 hg qrefresh
42
43 echo '% tip:'
44 hg tip --template '#rev# #desc#\n'
@@ -0,0 +1,14 b''
1 % try to commit on top of a patch
2 abort: cannot commit over an applied mq patch
3 % qpop/qrefresh on the wrong revision
4 abort: working directory revision is not qtip
5 using patch queue:
6 abort: popping would remove a revision not managed by this patch queue
7 abort: working directory revision is not qtip
8 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 % qpop
10 abort: popping would remove a revision not managed by this patch queue
11 % qrefresh
12 abort: cannot refresh a revision with children
13 % tip:
14 3 append quux
@@ -1,343 +1,343 b''
1 1 # common code for the convert extension
2 2 import base64, errno
3 3 import os
4 4 import cPickle as pickle
5 5 from mercurial import util
6 6 from mercurial.i18n import _
7 7
8 8 def encodeargs(args):
9 9 def encodearg(s):
10 10 lines = base64.encodestring(s)
11 11 lines = [l.splitlines()[0] for l in lines]
12 12 return ''.join(lines)
13 13
14 14 s = pickle.dumps(args)
15 15 return encodearg(s)
16 16
17 17 def decodeargs(s):
18 18 s = base64.decodestring(s)
19 19 return pickle.loads(s)
20 20
21 21 def checktool(exe, name=None):
22 22 name = name or exe
23 23 if not util.find_exe(exe):
24 24 raise util.Abort('cannot find required "%s" tool' % name)
25 25
26 26 class NoRepo(Exception): pass
27 27
28 28 SKIPREV = 'SKIP'
29 29
30 30 class commit(object):
31 31 def __init__(self, author, date, desc, parents, branch=None, rev=None,
32 32 extra={}):
33 self.author = author
34 self.date = date
33 self.author = author or 'unknown'
34 self.date = date or '0 0'
35 35 self.desc = desc
36 36 self.parents = parents
37 37 self.branch = branch
38 38 self.rev = rev
39 39 self.extra = extra
40 40
41 41 class converter_source(object):
42 42 """Conversion source interface"""
43 43
44 44 def __init__(self, ui, path=None, rev=None):
45 45 """Initialize conversion source (or raise NoRepo("message")
46 46 exception if path is not a valid repository)"""
47 47 self.ui = ui
48 48 self.path = path
49 49 self.rev = rev
50 50
51 51 self.encoding = 'utf-8'
52 52
53 53 def before(self):
54 54 pass
55 55
56 56 def after(self):
57 57 pass
58 58
59 59 def setrevmap(self, revmap):
60 60 """set the map of already-converted revisions"""
61 61 pass
62 62
63 63 def getheads(self):
64 64 """Return a list of this repository's heads"""
65 65 raise NotImplementedError()
66 66
67 67 def getfile(self, name, rev):
68 68 """Return file contents as a string"""
69 69 raise NotImplementedError()
70 70
71 71 def getmode(self, name, rev):
72 72 """Return file mode, eg. '', 'x', or 'l'"""
73 73 raise NotImplementedError()
74 74
75 75 def getchanges(self, version):
76 76 """Returns a tuple of (files, copies)
77 77 Files is a sorted list of (filename, id) tuples for all files changed
78 78 in version, where id is the source revision id of the file.
79 79
80 80 copies is a dictionary of dest: source
81 81 """
82 82 raise NotImplementedError()
83 83
84 84 def getcommit(self, version):
85 85 """Return the commit object for version"""
86 86 raise NotImplementedError()
87 87
88 88 def gettags(self):
89 89 """Return the tags as a dictionary of name: revision"""
90 90 raise NotImplementedError()
91 91
92 92 def recode(self, s, encoding=None):
93 93 if not encoding:
94 94 encoding = self.encoding or 'utf-8'
95 95
96 96 if isinstance(s, unicode):
97 97 return s.encode("utf-8")
98 98 try:
99 99 return s.decode(encoding).encode("utf-8")
100 100 except:
101 101 try:
102 102 return s.decode("latin-1").encode("utf-8")
103 103 except:
104 104 return s.decode(encoding, "replace").encode("utf-8")
105 105
106 106 def getchangedfiles(self, rev, i):
107 107 """Return the files changed by rev compared to parent[i].
108 108
109 109 i is an index selecting one of the parents of rev. The return
110 110 value should be the list of files that are different in rev and
111 111 this parent.
112 112
113 113 If rev has no parents, i is None.
114 114
115 115 This function is only needed to support --filemap
116 116 """
117 117 raise NotImplementedError()
118 118
119 119 def converted(self, rev, sinkrev):
120 120 '''Notify the source that a revision has been converted.'''
121 121 pass
122 122
123 123
124 124 class converter_sink(object):
125 125 """Conversion sink (target) interface"""
126 126
127 127 def __init__(self, ui, path):
128 128 """Initialize conversion sink (or raise NoRepo("message")
129 129 exception if path is not a valid repository)
130 130
131 131 created is a list of paths to remove if a fatal error occurs
132 132 later"""
133 133 self.ui = ui
134 134 self.path = path
135 135 self.created = []
136 136
137 137 def getheads(self):
138 138 """Return a list of this repository's heads"""
139 139 raise NotImplementedError()
140 140
141 141 def revmapfile(self):
142 142 """Path to a file that will contain lines
143 143 source_rev_id sink_rev_id
144 144 mapping equivalent revision identifiers for each system."""
145 145 raise NotImplementedError()
146 146
147 147 def authorfile(self):
148 148 """Path to a file that will contain lines
149 149 srcauthor=dstauthor
150 150 mapping equivalent authors identifiers for each system."""
151 151 return None
152 152
153 153 def putfile(self, f, e, data):
154 154 """Put file for next putcommit().
155 155 f: path to file
156 156 e: '', 'x', or 'l' (regular file, executable, or symlink)
157 157 data: file contents"""
158 158 raise NotImplementedError()
159 159
160 160 def delfile(self, f):
161 161 """Delete file for next putcommit().
162 162 f: path to file"""
163 163 raise NotImplementedError()
164 164
165 165 def putcommit(self, files, parents, commit):
166 166 """Create a revision with all changed files listed in 'files'
167 167 and having listed parents. 'commit' is a commit object containing
168 168 at a minimum the author, date, and message for this changeset.
169 169 Called after putfile() and delfile() calls. Note that the sink
170 170 repository is not told to update itself to a particular revision
171 171 (or even what that revision would be) before it receives the
172 172 file data."""
173 173 raise NotImplementedError()
174 174
175 175 def puttags(self, tags):
176 176 """Put tags into sink.
177 177 tags: {tagname: sink_rev_id, ...}"""
178 178 raise NotImplementedError()
179 179
180 180 def setbranch(self, branch, pbranches):
181 181 """Set the current branch name. Called before the first putfile
182 182 on the branch.
183 183 branch: branch name for subsequent commits
184 184 pbranches: (converted parent revision, parent branch) tuples"""
185 185 pass
186 186
187 187 def setfilemapmode(self, active):
188 188 """Tell the destination that we're using a filemap
189 189
190 190 Some converter_sources (svn in particular) can claim that a file
191 191 was changed in a revision, even if there was no change. This method
192 192 tells the destination that we're using a filemap and that it should
193 193 filter empty revisions.
194 194 """
195 195 pass
196 196
197 197 def before(self):
198 198 pass
199 199
200 200 def after(self):
201 201 pass
202 202
203 203
204 204 class commandline(object):
205 205 def __init__(self, ui, command):
206 206 self.ui = ui
207 207 self.command = command
208 208
209 209 def prerun(self):
210 210 pass
211 211
212 212 def postrun(self):
213 213 pass
214 214
215 215 def _cmdline(self, cmd, *args, **kwargs):
216 216 cmdline = [self.command, cmd] + list(args)
217 217 for k, v in kwargs.iteritems():
218 218 if len(k) == 1:
219 219 cmdline.append('-' + k)
220 220 else:
221 221 cmdline.append('--' + k.replace('_', '-'))
222 222 try:
223 223 if len(k) == 1:
224 224 cmdline.append('' + v)
225 225 else:
226 226 cmdline[-1] += '=' + v
227 227 except TypeError:
228 228 pass
229 229 cmdline = [util.shellquote(arg) for arg in cmdline]
230 230 cmdline += ['<', util.nulldev]
231 231 cmdline = ' '.join(cmdline)
232 232 self.ui.debug(cmdline, '\n')
233 233 return cmdline
234 234
235 235 def _run(self, cmd, *args, **kwargs):
236 236 cmdline = self._cmdline(cmd, *args, **kwargs)
237 237 self.prerun()
238 238 try:
239 239 return util.popen(cmdline)
240 240 finally:
241 241 self.postrun()
242 242
243 243 def run(self, cmd, *args, **kwargs):
244 244 fp = self._run(cmd, *args, **kwargs)
245 245 output = fp.read()
246 246 self.ui.debug(output)
247 247 return output, fp.close()
248 248
249 249 def checkexit(self, status, output=''):
250 250 if status:
251 251 if output:
252 252 self.ui.warn(_('%s error:\n') % self.command)
253 253 self.ui.warn(output)
254 254 msg = util.explain_exit(status)[0]
255 255 raise util.Abort(_('%s %s') % (self.command, msg))
256 256
257 257 def run0(self, cmd, *args, **kwargs):
258 258 output, status = self.run(cmd, *args, **kwargs)
259 259 self.checkexit(status, output)
260 260 return output
261 261
262 262 def getargmax(self):
263 263 if '_argmax' in self.__dict__:
264 264 return self._argmax
265 265
266 266 # POSIX requires at least 4096 bytes for ARG_MAX
267 267 self._argmax = 4096
268 268 try:
269 269 self._argmax = os.sysconf("SC_ARG_MAX")
270 270 except:
271 271 pass
272 272
273 273 # Windows shells impose their own limits on command line length,
274 274 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
275 275 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
276 276 # details about cmd.exe limitations.
277 277
278 278 # Since ARG_MAX is for command line _and_ environment, lower our limit
279 279 # (and make happy Windows shells while doing this).
280 280
281 281 self._argmax = self._argmax/2 - 1
282 282 return self._argmax
283 283
284 284 def limit_arglist(self, arglist, cmd, *args, **kwargs):
285 285 limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
286 286 bytes = 0
287 287 fl = []
288 288 for fn in arglist:
289 289 b = len(fn) + 3
290 290 if bytes + b < limit or len(fl) == 0:
291 291 fl.append(fn)
292 292 bytes += b
293 293 else:
294 294 yield fl
295 295 fl = [fn]
296 296 bytes = b
297 297 if fl:
298 298 yield fl
299 299
300 300 def xargs(self, arglist, cmd, *args, **kwargs):
301 301 for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
302 302 self.run0(cmd, *(list(args) + l), **kwargs)
303 303
304 304 class mapfile(dict):
305 305 def __init__(self, ui, path):
306 306 super(mapfile, self).__init__()
307 307 self.ui = ui
308 308 self.path = path
309 309 self.fp = None
310 310 self.order = []
311 311 self._read()
312 312
313 313 def _read(self):
314 314 if self.path is None:
315 315 return
316 316 try:
317 317 fp = open(self.path, 'r')
318 318 except IOError, err:
319 319 if err.errno != errno.ENOENT:
320 320 raise
321 321 return
322 322 for line in fp:
323 323 key, value = line[:-1].split(' ', 1)
324 324 if key not in self:
325 325 self.order.append(key)
326 326 super(mapfile, self).__setitem__(key, value)
327 327 fp.close()
328 328
329 329 def __setitem__(self, key, value):
330 330 if self.fp is None:
331 331 try:
332 332 self.fp = open(self.path, 'a')
333 333 except IOError, err:
334 334 raise util.Abort(_('could not open map file %r: %s') %
335 335 (self.path, err.strerror))
336 336 self.fp.write('%s %s\n' % (key, value))
337 337 self.fp.flush()
338 338 super(mapfile, self).__setitem__(key, value)
339 339
340 340 def close(self):
341 341 if self.fp:
342 342 self.fp.close()
343 343 self.fp = None
@@ -1,143 +1,142 b''
1 1 # git support for the convert extension
2 2
3 3 import os
4 4 from mercurial import util
5 5
6 6 from common import NoRepo, commit, converter_source, checktool
7 7
8 8 class convert_git(converter_source):
9 9 # Windows does not support GIT_DIR= construct while other systems
10 10 # cannot remove environment variable. Just assume none have
11 11 # both issues.
12 12 if hasattr(os, 'unsetenv'):
13 13 def gitcmd(self, s):
14 14 prevgitdir = os.environ.get('GIT_DIR')
15 15 os.environ['GIT_DIR'] = self.path
16 16 try:
17 17 return util.popen(s)
18 18 finally:
19 19 if prevgitdir is None:
20 20 del os.environ['GIT_DIR']
21 21 else:
22 22 os.environ['GIT_DIR'] = prevgitdir
23 23 else:
24 24 def gitcmd(self, s):
25 25 return util.popen('GIT_DIR=%s %s' % (self.path, s))
26 26
27 27 def __init__(self, ui, path, rev=None):
28 28 super(convert_git, self).__init__(ui, path, rev=rev)
29 29
30 30 if os.path.isdir(path + "/.git"):
31 31 path += "/.git"
32 32 if not os.path.exists(path + "/objects"):
33 33 raise NoRepo("%s does not look like a Git repo" % path)
34 34
35 35 checktool('git-rev-parse', 'git')
36 36
37 37 self.path = path
38 38
39 39 def getheads(self):
40 40 if not self.rev:
41 41 return self.gitcmd('git-rev-parse --branches').read().splitlines()
42 42 else:
43 43 fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
44 44 return [fh.read()[:-1]]
45 45
46 46 def catfile(self, rev, type):
47 47 if rev == "0" * 40: raise IOError()
48 48 fh = self.gitcmd("git-cat-file %s %s" % (type, rev))
49 49 return fh.read()
50 50
51 51 def getfile(self, name, rev):
52 52 return self.catfile(rev, "blob")
53 53
54 54 def getmode(self, name, rev):
55 55 return self.modecache[(name, rev)]
56 56
57 57 def getchanges(self, version):
58 58 self.modecache = {}
59 59 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
60 60 changes = []
61 61 seen = {}
62 62 for l in fh:
63 63 if "\t" not in l:
64 64 continue
65 65 m, f = l[:-1].split("\t")
66 66 if f in seen:
67 67 continue
68 68 seen[f] = 1
69 69 m = m.split()
70 70 h = m[3]
71 71 p = (m[1] == "100755")
72 72 s = (m[1] == "120000")
73 73 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
74 74 changes.append((f, h))
75 75 return (changes, {})
76 76
77 77 def getcommit(self, version):
78 78 c = self.catfile(version, "commit") # read the commit hash
79 79 end = c.find("\n\n")
80 80 message = c[end+2:]
81 81 message = self.recode(message)
82 82 l = c[:end].splitlines()
83 83 manifest = l[0].split()[1]
84 84 parents = []
85 85 for e in l[1:]:
86 86 n, v = e.split(" ", 1)
87 87 if n == "author":
88 88 p = v.split()
89 89 tm, tz = p[-2:]
90 90 author = " ".join(p[:-2])
91 91 if author[0] == "<": author = author[1:-1]
92 92 author = self.recode(author)
93 93 if n == "committer":
94 94 p = v.split()
95 95 tm, tz = p[-2:]
96 96 committer = " ".join(p[:-2])
97 97 if committer[0] == "<": committer = committer[1:-1]
98 98 committer = self.recode(committer)
99 99 message += "\ncommitter: %s\n" % committer
100 100 if n == "parent": parents.append(v)
101 101
102 102 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
103 103 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
104 104 date = tm + " " + str(tz)
105 author = author or "unknown"
106 105
107 106 c = commit(parents=parents, date=date, author=author, desc=message,
108 107 rev=version)
109 108 return c
110 109
111 110 def gettags(self):
112 111 tags = {}
113 112 fh = self.gitcmd('git-ls-remote --tags "%s"' % self.path)
114 113 prefix = 'refs/tags/'
115 114 for line in fh:
116 115 line = line.strip()
117 116 if not line.endswith("^{}"):
118 117 continue
119 118 node, tag = line.split(None, 1)
120 119 if not tag.startswith(prefix):
121 120 continue
122 121 tag = tag[len(prefix):-3]
123 122 tags[tag] = node
124 123
125 124 return tags
126 125
127 126 def getchangedfiles(self, version, i):
128 127 changes = []
129 128 if i is None:
130 129 fh = self.gitcmd("git-diff-tree --root -m -r %s" % version)
131 130 for l in fh:
132 131 if "\t" not in l:
133 132 continue
134 133 m, f = l[:-1].split("\t")
135 134 changes.append(f)
136 135 fh.close()
137 136 else:
138 137 fh = self.gitcmd('git-diff-tree --name-only --root -r %s "%s^%s" --'
139 138 % (version, version, i+1))
140 139 changes = [f.rstrip('\n') for f in fh]
141 140 fh.close()
142 141
143 142 return changes
@@ -1,2315 +1,2347 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone"
38 38
39 39 # Patch names looks like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 fp.write("%s\n" % i)
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head)
341 341 self.strip(repo, n, update=False, backup='strip')
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 348 if n == None:
349 349 raise util.Abort(_("repo commit failed"))
350 350 try:
351 351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 352 except:
353 353 raise util.Abort(_("unable to read %s") % patch)
354 354
355 355 patchf = self.opener(patch, "w")
356 356 if comments:
357 357 comments = "\n".join(comments) + '\n\n'
358 358 patchf.write(comments)
359 359 self.printdiff(repo, head, n, fp=patchf)
360 360 patchf.close()
361 361 self.removeundo(repo)
362 362 return (0, n)
363 363
364 364 def qparents(self, repo, rev=None):
365 365 if rev is None:
366 366 (p1, p2) = repo.dirstate.parents()
367 367 if p2 == revlog.nullid:
368 368 return p1
369 369 if len(self.applied) == 0:
370 370 return None
371 371 return revlog.bin(self.applied[-1].rev)
372 372 pp = repo.changelog.parents(rev)
373 373 if pp[1] != revlog.nullid:
374 374 arevs = [ x.rev for x in self.applied ]
375 375 p0 = revlog.hex(pp[0])
376 376 p1 = revlog.hex(pp[1])
377 377 if p0 in arevs:
378 378 return pp[0]
379 379 if p1 in arevs:
380 380 return pp[1]
381 381 return pp[0]
382 382
383 383 def mergepatch(self, repo, mergeq, series):
384 384 if len(self.applied) == 0:
385 385 # each of the patches merged in will have two parents. This
386 386 # can confuse the qrefresh, qdiff, and strip code because it
387 387 # needs to know which parent is actually in the patch queue.
388 388 # so, we insert a merge marker with only one parent. This way
389 389 # the first patch in the queue is never a merge patch
390 390 #
391 391 pname = ".hg.patches.merge.marker"
392 392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 393 self.removeundo(repo)
394 394 self.applied.append(statusentry(revlog.hex(n), pname))
395 395 self.applied_dirty = 1
396 396
397 397 head = self.qparents(repo)
398 398
399 399 for patch in series:
400 400 patch = mergeq.lookup(patch, strict=True)
401 401 if not patch:
402 402 self.ui.warn("patch %s does not exist\n" % patch)
403 403 return (1, None)
404 404 pushable, reason = self.pushable(patch)
405 405 if not pushable:
406 406 self.explain_pushable(patch, all_patches=True)
407 407 continue
408 408 info = mergeq.isapplied(patch)
409 409 if not info:
410 410 self.ui.warn("patch %s is not applied\n" % patch)
411 411 return (1, None)
412 412 rev = revlog.bin(info[1])
413 413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 414 if head:
415 415 self.applied.append(statusentry(revlog.hex(head), patch))
416 416 self.applied_dirty = 1
417 417 if err:
418 418 return (err, head)
419 419 self.save_dirty()
420 420 return (0, head)
421 421
422 422 def patch(self, repo, patchfile):
423 423 '''Apply patchfile to the working directory.
424 424 patchfile: file name of patch'''
425 425 files = {}
426 426 try:
427 427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 428 files=files)
429 429 except Exception, inst:
430 430 self.ui.note(str(inst) + '\n')
431 431 if not self.ui.verbose:
432 432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 433 return (False, files, False)
434 434
435 435 return (True, files, fuzz)
436 436
437 437 def apply(self, repo, series, list=False, update_status=True,
438 438 strict=False, patchdir=None, merge=None, all_files={}):
439 439 wlock = lock = tr = None
440 440 try:
441 441 wlock = repo.wlock()
442 442 lock = repo.lock()
443 443 tr = repo.transaction()
444 444 try:
445 445 ret = self._apply(repo, series, list, update_status,
446 446 strict, patchdir, merge, all_files=all_files)
447 447 tr.close()
448 448 self.save_dirty()
449 449 return ret
450 450 except:
451 451 try:
452 452 tr.abort()
453 453 finally:
454 454 repo.invalidate()
455 455 repo.dirstate.invalidate()
456 456 raise
457 457 finally:
458 458 del tr, lock, wlock
459 459 self.removeundo(repo)
460 460
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files={}):
        """Apply each pushable patch in series and commit it.

        Returns (err, node) where err is nonzero when a patch could not
        be read or applied cleanly (or fuzzed under strict), and node is
        the last commit created (None if none was).  all_files collects
        every path touched, for the caller's cleanup on failure.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.warn("applying %s\n" % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                message, comments, user, date, patchfound = self.readheaders(patchname)
            except:
                self.ui.warn("Unable to read %s\n" % patchname)
                err = 1
                break

            if not message:
                # no description in the patch header: synthesize one
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            (patcherr, files, fuzz) = self.patch(repo, pf)
            all_files.update(files)
            # patch() returns True on success; invert to an error flag
            patcherr = not patcherr

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.exists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)
            files = patch.updatedir(self.ui, repo, files)
            n = repo.commit(files, message, user, date, force=1)

            if n == None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(revlog.hex(n), patchname))

            if patcherr:
                # an empty patch "fails" to apply but is not an error
                if not patchfound:
                    self.ui.warn("patch %s is empty\n" % patchname)
                    err = 0
                else:
                    self.ui.warn("patch failed, rejects left in working dir\n")
                    err = 1
                break

            if fuzz and strict:
                self.ui.warn("fuzz found when applying patch, stopping\n")
                err = 1
                break
        return (err, n)
532 532
    def delete(self, repo, patches, opts):
        """Remove patches (by name and/or by revision) from the queue.

        Named patches must be unapplied and present in the series file.
        With opts['rev'], revisions must match applied patches from the
        bottom of the queue up; those entries are removed from applied.
        Patch files are deleted (or removed from the queue repo) unless
        opts['keep'] is set.
        """
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        appliedbase = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            for rev in revs:
                if appliedbase >= len(self.applied):
                    raise util.Abort(_("revision %d is not managed") % rev)

                # each rev must match the next applied entry, bottom-up
                base = revlog.bin(self.applied[appliedbase].rev)
                node = repo.changelog.node(rev)
                if node != base:
                    raise util.Abort(_("cannot delete revision %d above "
                                       "applied patches") % rev)
                realpatches.append(self.applied[appliedbase].name)
                appliedbase += 1

        if not opts.get('keep'):
            r = self.qrepo()
            if r:
                r.remove(realpatches, True)
            else:
                for p in realpatches:
                    os.unlink(self.join(p))

        if appliedbase:
            del self.applied[:appliedbase]
            self.applied_dirty = 1
        # delete series entries from the end so earlier indices stay valid
        indices = [self.find_series(p) for p in realpatches]
        indices.sort()
        for i in indices[-1::-1]:
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1
584 584
585 585 def check_toppatch(self, repo):
586 586 if len(self.applied) > 0:
587 587 top = revlog.bin(self.applied[-1].rev)
588 588 pp = repo.dirstate.parents()
589 589 if top not in pp:
590 590 raise util.Abort(_("working directory revision is not qtip"))
591 591 return top
592 592 return None
593 593 def check_localchanges(self, repo, force=False, refresh=True):
594 594 m, a, r, d = repo.status()[:4]
595 595 if m or a or r or d:
596 596 if not force:
597 597 if refresh:
598 598 raise util.Abort(_("local changes found, refresh first"))
599 599 else:
600 600 raise util.Abort(_("local changes found"))
601 601 return m, a, r, d
602 602
603 _reserved = ('series', 'status', 'guards')
604 def check_reserved_name(self, name):
605 if (name in self._reserved or name.startswith('.hg')
606 or name.startswith('.mq')):
607 raise util.Abort(_('"%s" cannot be used as the name of a patch')
608 % name)
609
    def new(self, repo, patch, *pats, **opts):
        """Create a new patch on top of the queue (qnew).

        Commits the selected local changes (or an empty commit) as the
        new top patch, records it in series/applied, and writes the
        patch file with any user/date/message header requested.
        """
        msg = opts.get('msg')
        force = opts.get('force')
        user = opts.get('user')
        date = opts.get('date')
        # refuse names that collide with mq control files ('series', ...)
        self.check_reserved_name(patch)
        if os.path.exists(self.join(patch)):
            raise util.Abort(_('patch "%s" already exists') % patch)
        if opts.get('include') or opts.get('exclude') or pats:
            fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
            m, a, r, d = repo.status(files=fns, match=match)[:4]
        else:
            # without a file selection, any local change must be taken
            # (or force given)
            m, a, r, d = self.check_localchanges(repo, force)
            fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        wlock = repo.wlock()
        try:
            insert = self.full_series_end()
            commitmsg = msg and msg or ("[mq]: %s" % patch)
            n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
            if n == None:
                raise util.Abort(_("repo commit failed"))
            self.full_series[insert:insert] = [patch]
            self.applied.append(statusentry(revlog.hex(n), patch))
            self.parse_series()
            self.series_dirty = 1
            self.applied_dirty = 1
            # write the patch header: HG-style when a date is given,
            # email style when only a user is given
            p = self.opener(patch, "w")
            if date:
                p.write("# HG changeset patch\n")
                if user:
                    p.write("# User " + user + "\n")
                p.write("# Date " + date + "\n")
                p.write("\n")
            elif user:
                p.write("From: " + user + "\n")
                p.write("\n")
            if msg:
                msg = msg + "\n"
                p.write(msg)
            p.close()
            wlock = None
            r = self.qrepo()
            if r: r.add([patch])
            if commitfiles:
                self.refresh(repo, short=True, git=opts.get('git'))
            self.removeundo(repo)
        finally:
            del wlock
652 660
    def strip(self, repo, rev, update=True, backup="all"):
        """Remove rev and its descendants from the repository.

        With update=True, first moves the clean working directory to
        rev's queue parent.  backup is passed through to repair.strip.
        """
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.check_localchanges(repo, refresh=False)
                urev = self.qparents(repo, rev)
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            del lock, wlock
672 680
673 681 def isapplied(self, patch):
674 682 """returns (index, rev, patch)"""
675 683 for i in xrange(len(self.applied)):
676 684 a = self.applied[i]
677 685 if a.name == patch:
678 686 return (i, a.rev, a.name)
679 687 return None
680 688
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch identifier to a series name.

        Returns None when patch is None; otherwise returns the resolved
        series entry or aborts when nothing matches.
        """
        patch = patch and str(patch)

        def partial_name(s):
            # exact name, then unique substring, then qtip/qbase aliases
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None
        if patch == None:
            return None

        # we don't want to return a partial match until we make
        # sure the file name passed in does not exist (checked below)
        res = partial_name(patch)
        if res and res == patch:
            return res

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                # a plain number is an index into the series
                if sno < len(self.series):
                    return self.series[sno]
            if not strict:
                # return any partial match made above
                if res:
                    return res
                # name-num: count backwards from the named patch
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partial_name(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                # name+num: count forwards from the named patch
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partial_name(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
753 761
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None):
        """Apply the next patch(es) up to and including `patch` (qpush).

        Returns the error status from apply/mergepatch, 1 when the
        series is already fully applied, or None for the no-op cases.
        On failure, reverts the working directory and removes files the
        failed patches created.
        """
        wlock = repo.wlock()
        try:
            patch = self.lookup(patch)
            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                info = self.isapplied(patch)
                if info:
                    if info[0] < len(self.applied) - 1:
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                    if info[0] < len(self.series) - 1:
                        self.ui.warn(
                            _('qpush: %s is already at the top\n') % patch)
                    else:
                        self.ui.warn(_('all patches are currently applied\n'))
                    return

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            if self.series_end() == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            start = self.series_end()
            if start > 0:
                self.check_toppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1
            s = self.series[start:end]
            all_files = {}
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files)
            except:
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.parents()[0]
                hg.revert(repo, node, None)
                unknown = repo.status()[4]
                # only remove unknown files that we know we touched or
                # created while patching
                for f in unknown:
                    if f in all_files:
                        util.unlink(repo.wjoin(f))
                self.ui.warn(_('done\n'))
                raise
            top = self.applied[-1].name
            if ret[0]:
                self.ui.write(
                    "Errors during apply, please fix and refresh %s\n" % top)
            else:
                self.ui.write("Now at: %s\n" % top)
            return ret[0]
        finally:
            del wlock
824 832
    def pop(self, repo, patch=None, force=False, update=True, all=False):
        """Unapply patches down to (but keeping) `patch` (qpop).

        With all=True pops everything; without a patch pops just the
        top.  Refuses to pop when the applied revisions are no longer
        the repository head (someone committed on top of the queue).
        """
        def getfile(f, rev, flags):
            # restore f to its content at rev in the working directory
            t = repo.file(f).read(rev)
            repo.wwrite(f, t, flags)

        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                if not info:
                    raise util.Abort(_("patch %s is not applied") % patch)

            if len(self.applied) == 0:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if not update:
                # force a dirstate update if the working dir parent is a
                # patch revision we are about to strip
                parents = repo.dirstate.parents()
                rr = [ revlog.bin(x.rev) for x in self.applied ]
                for p in parents:
                    if p in rr:
                        self.ui.warn("qpop: forcing dirstate update\n")
                        update = True

            if not force and update:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            end = len(self.applied)
            if not patch:
                if all:
                    popi = 0
                else:
                    popi = len(self.applied) - 1
            else:
                popi = info[0] + 1
                if popi >= end:
                    self.ui.warn("qpop: %s is already at the top\n" % patch)
                    return
                info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]

            start = info[0]
            rev = revlog.bin(info[1])

            if update:
                top = self.check_toppatch(repo)

            # refuse to strip anything that is not reachable only through
            # the applied patches (e.g. a commit made on top of the queue)
            if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
                raise util.Abort("popping would remove a revision not "
                                 "managed by this patch queue")

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                changes = repo.changelog.read(qp)
                mmap = repo.manifest.read(changes[0])
                m, a, r, d, u = repo.status(qp, top)[:5]
                if d:
                    raise util.Abort("deletions found between repo revs")
                for f in m:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in r:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in m + r:
                    repo.dirstate.normal(f)
                for f in a:
                    try:
                        os.unlink(repo.wjoin(f))
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                    except: pass
                    repo.dirstate.forget(f)
                repo.dirstate.setparents(qp, revlog.nullid)
            # drop the queue entries before stripping the revisions
            del self.applied[start:end]
            self.strip(repo, rev, update=False, backup='strip')
            if len(self.applied):
                self.ui.write("Now at: %s\n" % self.applied[-1].name)
            else:
                self.ui.write("Patch queue now empty\n")
        finally:
            del wlock
909 923
910 924 def diff(self, repo, pats, opts):
911 925 top = self.check_toppatch(repo)
912 926 if not top:
913 927 self.ui.write("No patches applied\n")
914 928 return
915 929 qp = self.qparents(repo, top)
916 930 if opts.get('git'):
917 931 self.diffopts().git = True
918 932 self.printdiff(repo, qp, files=pats, opts=opts)
919 933
    def refresh(self, repo, pats=None, **opts):
        """Fold the current working-directory changes into the top patch.

        Rewrites the patch file (header and diff), strips the old top
        revision and commits a replacement.  When the top patch is not
        the repository tip, falls back to rewriting the patch file and
        doing a pop/push cycle instead.
        """
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        wlock = repo.wlock()
        try:
            self.check_toppatch(repo)
            (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
            top = revlog.bin(top)
            # refreshing would strip/recommit top; refuse if that would
            # orphan descendants
            if repo.changelog.heads(top) != [top]:
                raise util.Abort("cannot refresh a revision with children")
            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)
            message, comments, user, date, patchfound = self.readheaders(patchfn)

            patchf = self.opener(patchfn, 'r+')

            # if the patch was a git patch, refresh it as a git patch
            for line in patchf:
                if line.startswith('diff --git'):
                    self.diffopts().git = True
                    break

            msg = opts.get('msg', '').rstrip()
            if msg and comments:
                # Remove existing message, keeping the rest of the comments
                # fields.
                # If comments contains 'subject: ', message will prepend
                # the field and a blank line.
                if message:
                    subj = 'subject: ' + message[0].lower()
                    for i in xrange(len(comments)):
                        if subj == comments[i].lower():
                            del comments[i]
                            message = message[2:]
                            break
                ci = 0
                for mi in xrange(len(message)):
                    while message[mi] != comments[ci]:
                        ci += 1
                    del comments[ci]

            def setheaderfield(comments, prefixes, new):
                # Update all references to a field in the patch header.
                # If none found, add it email style.
                res = False
                for prefix in prefixes:
                    for i in xrange(len(comments)):
                        if comments[i].startswith(prefix):
                            comments[i] = prefix + new
                            res = True
                            break
                return res

            newuser = opts.get('user')
            if newuser:
                if not setheaderfield(comments, ['From: ', '# User '], newuser):
                    try:
                        patchheaderat = comments.index('# HG changeset patch')
                        comments.insert(patchheaderat + 1,'# User ' + newuser)
                    except ValueError:
                        comments = ['From: ' + newuser, ''] + comments
                user = newuser

            newdate = opts.get('date')
            if newdate:
                if setheaderfield(comments, ['# Date '], newdate):
                    date = newdate

            if msg:
                comments.append(msg)

            patchf.seek(0)
            patchf.truncate()

            if comments:
                comments = "\n".join(comments) + '\n\n'
                patchf.write(comments)

            if opts.get('git'):
                self.diffopts().git = True
            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
            tip = repo.changelog.tip()
            if top == tip:
                # if the top of our patch queue is also the tip, there is an
                # optimization here. We update the dirstate in place and strip
                # off the tip commit. Then just commit the current directory
                # tree. We can also send repo.commit the list of files
                # changed to speed up the diff
                #
                # in short mode, we only diff the files included in the
                # patch already
                #
                # this should really read:
                # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
                # but we do it backwards to take advantage of manifest/chlog
                # caching against the next repo.status call
                #
                mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
                changes = repo.changelog.read(tip)
                man = repo.manifest.read(changes[0])
                aaa = aa[:]
                if opts.get('short'):
                    filelist = mm + aa + dd
                    match = dict.fromkeys(filelist).__contains__
                else:
                    filelist = None
                    match = util.always
                m, a, r, d, u = repo.status(files=filelist, match=match)[:5]

                # we might end up with files that were added between
                # tip and the dirstate parent, but then changed in the
                # local dirstate. in this case, we want them to only
                # show up in the added section
                for x in m:
                    if x not in aa:
                        mm.append(x)
                # we might end up with files added by the local dirstate that
                # were deleted by the patch. In this case, they should only
                # show up in the changed section.
                for x in a:
                    if x in dd:
                        del dd[dd.index(x)]
                        mm.append(x)
                    else:
                        aa.append(x)
                # make sure any files deleted in the local dirstate
                # are not in the add or change column of the patch
                forget = []
                for x in d + r:
                    if x in aa:
                        del aa[aa.index(x)]
                        forget.append(x)
                        continue
                    elif x in mm:
                        del mm[mm.index(x)]
                    dd.append(x)

                m = util.unique(mm)
                r = util.unique(dd)
                a = util.unique(aa)
                c = [filter(matchfn, l) for l in (m, a, r, [], u)]
                filelist = util.unique(c[0] + c[1] + c[2])
                patch.diff(repo, patchparent, files=filelist, match=matchfn,
                           fp=patchf, changes=c, opts=self.diffopts())
                patchf.close()

                repo.dirstate.setparents(*cparents)
                copies = {}
                for dst in a:
                    src = repo.dirstate.copied(dst)
                    if src is not None:
                        copies.setdefault(src, []).append(dst)
                    repo.dirstate.add(dst)
                # remember the copies between patchparent and tip
                # this may be slow, so don't do it if we're not tracking copies
                if self.diffopts().git:
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies[src[0]] = copies.get(dst, [])
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                for src, dsts in copies.iteritems():
                    for dst in dsts:
                        repo.dirstate.copy(src, dst)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m)-1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.forget(f)

                if not msg:
                    if not message:
                        message = "[mq]: %s\n" % patchfn
                    else:
                        message = "\n".join(message)
                else:
                    message = msg

                if not user:
                    user = changes[1]

                # drop the old queue entry before stripping, then record
                # the freshly committed replacement
                self.applied.pop()
                self.applied_dirty = 1
                self.strip(repo, top, update=False,
                           backup='strip')
                n = repo.commit(filelist, message, user, date, match=matchfn,
                                force=1)
                self.applied.append(statusentry(revlog.hex(n), patchfn))
                self.removeundo(repo)
            else:
                self.printdiff(repo, patchparent, fp=patchf)
                patchf.close()
                added = repo.status()[1]
                for a in added:
                    f = repo.wjoin(a)
                    try:
                        os.unlink(f)
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(f))
                    except: pass
                    # forget the file copies in the dirstate
                    # push should readd the files later on
                    repo.dirstate.forget(a)
                self.pop(repo, force=True)
                self.push(repo, force=True)
        finally:
            del wlock
1142 1159
1143 1160 def init(self, repo, create=False):
1144 1161 if not create and os.path.isdir(self.path):
1145 1162 raise util.Abort(_("patch queue directory already exists"))
1146 1163 try:
1147 1164 os.mkdir(self.path)
1148 1165 except OSError, inst:
1149 1166 if inst.errno != errno.EEXIST or not create:
1150 1167 raise
1151 1168 if create:
1152 1169 return self.qrepo(create=True)
1153 1170
1154 1171 def unapplied(self, repo, patch=None):
1155 1172 if patch and patch not in self.series:
1156 1173 raise util.Abort(_("patch %s is not in series file") % patch)
1157 1174 if not patch:
1158 1175 start = self.series_end()
1159 1176 else:
1160 1177 start = self.series.index(patch) + 1
1161 1178 unapplied = []
1162 1179 for i in xrange(start, len(self.series)):
1163 1180 pushable, reason = self.pushable(i)
1164 1181 if pushable:
1165 1182 unapplied.append((i, self.series[i]))
1166 1183 self.explain_pushable(i)
1167 1184 return unapplied
1168 1185
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print the patch series (qseries).

        Without missing: prints each series entry, flagged A(pplied),
        U(napplied) or G(uarded) in verbose mode, optionally filtered by
        status.  With missing: prints files in the patch directory that
        are not tracked by the series.  summary appends the first line
        of each patch's description.
        """
        def displayname(patchname):
            if summary:
                msg = self.readheaders(patchname)[0]
                msg = msg and ': ' + msg[0] or ': '
            else:
                msg = ''
            return '%s%s' % (patchname, msg)

        applied = dict.fromkeys([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            for i in xrange(start, start+length):
                patch = self.series[i]
                if patch in applied:
                    stat = 'A'
                elif self.pushable(i)[0]:
                    stat = 'U'
                else:
                    stat = 'G'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%d %s ' % (i, stat)
                elif status and status != stat:
                    continue
                self.ui.write('%s%s\n' % (pfx, displayname(patch)))
        else:
            # scan the patch directory for files not in the series,
            # skipping mq's own control files and dotfiles
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path,
                                   self.guards_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                pfx = self.ui.verbose and ('D ') or ''
                self.ui.write("%s%s\n" % (pfx, displayname(x)))
1212 1229
1213 1230 def issaveline(self, l):
1214 1231 if l.name == '.hg.patches.save.line':
1215 1232 return True
1216 1233
1217 1234 def qrepo(self, create=False):
1218 1235 if create or os.path.isdir(self.join(".hg")):
1219 1236 return hg.repository(self.ui, path=self.path, create=create)
1220 1237
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Rebuild queue state from a qsave revision's description.

        Parses the 'Patch Data:' and 'Dirstate:' sections of rev's
        commit message back into full_series/applied.  With delete,
        strips the save revision itself (if it is still a head); with
        qupdate, updates the queue repository to the saved parent.
        """
        c = repo.changelog.read(rev)
        desc = c[4].strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i in xrange(0, len(lines)):
            if lines[i] == 'Patch Data:':
                datastart = i + 1
            elif lines[i].startswith('Dirstate:'):
                l = lines[i].rstrip()
                l = l[10:].split(' ')
                qpp = [ hg.bin(x) for x in l ]
            elif datastart != None:
                # lines with a rev are applied entries, the rest are
                # unapplied series names
                l = lines[i].rstrip()
                se = statusentry(l)
                file_ = se.name
                if se.rev:
                    applied.append(se)
                else:
                    series.append(file_)
        if datastart == None:
            self.ui.warn("No saved patch data found\n")
            return 1
        self.ui.warn("restoring status: %s\n" % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn("save entry has children, leaving it alone\n")
            else:
                self.ui.warn("removing save entry %s\n" % hg.short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn("saved queue repository parents: %s %s\n" %
                         (hg.short(qpp[0]), hg.short(qpp[1])))
            if qupdate:
                self.ui.status(_("queue directory updating\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn("Unable to load queue repository\n")
                    return 1
                hg.clean(r, qpp[0])
1276 1293
    def save(self, repo, msg=None):
        """Record the current queue state as a commit (qsave).

        Serializes applied entries and the remaining series into the
        commit description (the format restore() parses) and appends a
        '.hg.patches.save.line' marker to applied.  Returns 1 on the
        error paths, None on success.
        """
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        # unapplied series entries are marked with a leading ':'
        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)
1304 1321
1305 1322 def full_series_end(self):
1306 1323 if len(self.applied) > 0:
1307 1324 p = self.applied[-1].name
1308 1325 end = self.find_series(p)
1309 1326 if end == None:
1310 1327 return len(self.full_series)
1311 1328 return end + 1
1312 1329 return 0
1313 1330
    def series_end(self, all_patches=False):
        """If all_patches is False, return the index of the next pushable patch
        in the series, or the series length. If all_patches is True, return the
        index of the first patch past the last applied one.
        """
        end = 0
        # NOTE: shadows the builtin next(); local helper that skips
        # guarded (unpushable) patches unless all_patches is set
        def next(start):
            if all_patches:
                return start
            i = start
            while i < len(self.series):
                p, reason = self.pushable(i)
                if p:
                    break
                self.explain_pushable(i)
                i += 1
            return i
        if len(self.applied) > 0:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                # applied patch no longer in the series file
                return 0
            return next(end + 1)
        return next(end)
1339 1356
1340 1357 def appliedname(self, index):
1341 1358 pname = self.applied[index].name
1342 1359 if not self.ui.verbose:
1343 1360 p = pname
1344 1361 else:
1345 1362 p = str(self.series.index(pname)) + " " + pname
1346 1363 return p
1347 1364
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patches into the queue.

    Patches come either from patch *files* on disk ('-' reads stdin)
    or, with *rev*, from existing changesets that are placed under mq
    control.  *patchname* forces the name of the imported patch (only
    valid when importing a single patch or revision), *existing*
    registers a file already present in the patch directory, *force*
    allows overwriting an existing patch of the same name, and *git*
    selects the git extended diff format for patches generated from
    revisions.  Raises util.Abort on any conflict or invalid option
    combination.
    """
    def checkseries(patchname):
        if patchname in self.series:
            raise util.Abort(_('patch %s is already in the series file')
                             % patchname)
    def checkfile(patchname):
        if not force and os.path.exists(self.join(patchname)):
            raise util.Abort(_('patch "%s" already exists')
                             % patchname)

    if rev:
        if files:
            raise util.Abort(_('option "-r" not valid when importing '
                               'files'))
        rev = cmdutil.revrange(repo, rev)
        # sort in descending order so rev[-1] is the lowest revision
        rev.sort(lambda x, y: cmp(y, x))
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise util.Abort(_('option "-n" not valid when importing multiple '
                           'patches'))
    i = 0
    added = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
        if len(heads) > 1:
            raise util.Abort(_('revision %d is the root of more than one '
                               'branch') % rev[-1])
        if self.applied:
            base = revlog.hex(repo.changelog.node(rev[0]))
            if base in [n.rev for n in self.applied]:
                raise util.Abort(_('revision %d is already managed')
                                 % rev[0])
            if heads != [revlog.bin(self.applied[-1].rev)]:
                raise util.Abort(_('revision %d is not the parent of '
                                   'the queue') % rev[0])
            base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev[0])]:
                raise util.Abort(_('revision %d has unmanaged children')
                                 % rev[0])
            lastparent = None

        if git:
            self.diffopts().git = True

        for r in rev:
            p1, p2 = repo.changelog.parentrevs(r)
            n = repo.changelog.node(r)
            if p2 != revlog.nullrev:
                raise util.Abort(_('cannot import merge revision %d') % r)
            if lastparent and lastparent != r:
                raise util.Abort(_('revision %d is not the parent of %d')
                                 % (r, lastparent))
            lastparent = p1

            if not patchname:
                patchname = normname('%d.diff' % r)
            self.check_reserved_name(patchname)
            checkseries(patchname)
            checkfile(patchname)
            self.full_series.insert(0, patchname)

            patchf = self.opener(patchname, "w")
            patch.export(repo, [n], fp=patchf, opts=self.diffopts())
            patchf.close()

            se = statusentry(revlog.hex(n), patchname)
            self.applied.insert(0, se)

            added.append(patchname)
            patchname = None
        self.parse_series()
        self.applied_dirty = 1

    for filename in files:
        if existing:
            if filename == '-':
                raise util.Abort(_('-e is incompatible with import from -'))
            if not patchname:
                patchname = normname(filename)
            self.check_reserved_name(patchname)
            if not os.path.isfile(self.join(patchname)):
                raise util.Abort(_("patch %s does not exist") % patchname)
        else:
            try:
                if filename == '-':
                    if not patchname:
                        raise util.Abort(_('need --name to import a patch from -'))
                    text = sys.stdin.read()
                else:
                    text = file(filename).read()
            except IOError:
                # fix: report the file we actually failed to read;
                # patchname may still be None at this point
                raise util.Abort(_("unable to read %s") % filename)
            if not patchname:
                patchname = normname(os.path.basename(filename))
            self.check_reserved_name(patchname)
            checkfile(patchname)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
        checkseries(patchname)
        index = self.full_series_end() + i
        self.full_series[index:index] = [patchname]
        self.parse_series()
        self.ui.warn("adding %s to series file\n" % patchname)
        i += 1
        added.append(patchname)
        patchname = None
    self.series_dirty = 1
    qrepo = self.qrepo()
    if qrepo:
        qrepo.add(added)
1460 1480
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The patches must be applied
    and at the base of the stack. This option is useful when the patches
    have been applied upstream.

    With --keep, the patch files are preserved in the patch directory."""
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1477 1497
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    q = repo.mq
    if not patch:
        # no patch named: list everything up to the last applied patch
        end = q.series_end(True)
    else:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1488 1508
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if not patch:
        # no patch named: start right after the last applied patch
        start = q.series_end(True)
    else:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1499 1519
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    mq = repo.mq
    mq.qimport(repo, filename,
               patchname=opts['name'],
               existing=opts['existing'],
               force=opts['force'],
               rev=opts['rev'],
               git=opts['git'])
    mq.save_dirty()
    return 0
1522 1542
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    q = repo.mq
    qrepo = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if not qrepo:
        return 0
    # seed a freshly versioned patch repo: ignore mq's own state files
    # and make sure an (empty) series file is tracked
    if not os.path.exists(qrepo.wjoin('.hgignore')):
        fp = qrepo.wopener('.hgignore', 'w')
        fp.write('syntax: glob\nstatus\nguards\n')
        fp.close()
    if not os.path.exists(qrepo.wjoin('series')):
        qrepo.wopener('series', 'w').close()
    qrepo.add(['.hgignore', 'series'])
    commands.add(ui, qrepo)
    return 0
1546 1566
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    def patchdir(repo):
        # default location of a repository's patch repo
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchespath = opts['patches'] or patchdir(sr)
    try:
        pr = hg.repository(ui, patchespath)
    except hg.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                # a remote destination cannot be stripped afterwards, so
                # only transfer the heads that do not descend from qbase
                # (plus qbase's parent so the stack base is present)
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
                        pull=opts['pull'], update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # a local destination can simply strip the applied patches
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1604 1624
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    r = repo.mq.qrepo()
    if not r:
        raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)
1611 1631
def series(ui, repo, **opts):
    """print the entire series file"""
    q = repo.mq
    q.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1616 1636
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    if q.applied:
        t = q.series_end(True)
    else:
        t = 0
    if not t:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=t-1, length=1, status='A',
                     summary=opts.get('summary'))
1627 1647
def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    pos = q.series_end()
    if pos == len(q.series):
        ui.write("All patches applied\n")
        return 1
    return q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
1636 1656
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    napplied = len(q.applied)
    if napplied == 0:
        ui.write("No patches applied\n")
        return 1
    if napplied == 1:
        ui.write("Only one patch applied\n")
        return 1
    return q.qseries(repo, start=napplied - 2, length=1, status='A',
                     summary=opts.get('summary'))
1649 1669
def setupheaderopts(ui, opts):
    """Fill in 'user' and 'date' in *opts* from --currentuser/--currentdate.

    The replacement values are computed lazily, so that e.g. a missing
    username configuration only aborts when -U/--currentuser was
    actually requested (the previous version called ui.username()
    unconditionally).
    """
    def setdefault(opt, getval):
        if not opts[opt] and opts['current' + opt]:
            opts[opt] = getval()
    setdefault('user', ui.username)
    setdefault('date', lambda: "%d %d" % util.makedate())
1656 1676
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    q = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        # let the user edit the commit message interactively
        msg = ui.edit(msg, ui.username())
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0
1679 1699
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        if not q.applied:
            ui.write(_("No patches applied\n"))
            return 1
        if msg:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # seed the editor with the current header of the top patch
        curpatch = q.applied[-1].name
        message, comment, user, date, hasdiff = q.readheaders(curpatch)
        msg = ui.edit('\n'.join(message), user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=msg, **opts)
    q.save_dirty()
    return ret
1705 1725
def diff(ui, repo, *pats, **opts):
    """diff of the current patch"""
    q = repo.mq
    q.diff(repo, pats, opts)
    return 0
1710 1730
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # fix: terminate the warning with a newline so it does not
            # run into subsequent output
            ui.warn(_('Skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect headers so they can be concatenated below
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # join the current header with each folded patch's header,
        # separated by '* * *' lines
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1769 1789
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    patch = q.lookup(patch)
    force = opts['force']
    if q.isapplied(patch):
        # target is already applied: pop back down to it
        ret = q.pop(repo, patch, force=force)
    else:
        ret = q.push(repo, patch, force=force)
    q.save_dirty()
    return ret
1780 1800
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
        hg qguard -- -foo

    To set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print the guards of the idx'th patch of the series
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        # first argument is a guard (starts with '-' or '+') or absent:
        # default to the topmost applied patch
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # remaining arguments are guards to set (--none drops them all)
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        # no guard arguments: just print the patch's current guards
        status(q.series.index(q.lookup(patch)))
1828 1848
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq
    if not patch:
        if not q.applied:
            ui.write('No patches applied\n')
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)
    message = q.readheaders(patch)[0]
    ui.write('\n'.join(message) + '\n')
1843 1863
def lastsavename(path):
    """Return (filename, index) of the highest-numbered save of *path*.

    Saved queues live next to *path* and are named "<base>.<N>" for a
    decimal N.  Returns (None, None) when no save exists.

    Fixes over the previous version: the base name is escaped with
    re.escape before being embedded in the pattern and the separating
    dot is matched literally, so a base containing regex
    metacharacters can no longer break or over-match the search; the
    None comparison uses 'is' per Python convention.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile(r"%s\.([0-9]+)" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1860 1880
def savename(path):
    """Return the file name for the next save of *path*.

    The name is "<path>.<N+1>" where N is the index of the most
    recent existing save (0 when there is none yet).
    """
    last, index = lastsavename(path)
    if last is None:
        index = 0
    return "%s.%d" % (path, index + 1)
1867 1887
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    if opts['all']:
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        # --all: push everything up to the last patch in the series
        patch = q.series[-1]
    if opts['merge']:
        # merge against a saved queue: named with -n, or the most
        # recently saved one
        newpath = opts['name']
        if not newpath:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    return q.push(repo, patch, force=opts['force'], list=opts['list'],
                  mergeq=mergeq)
1891 1911
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    name = opts['name']
    if name:
        # pop from a named (saved) queue; never touch the working dir
        q = queue(ui, repo.join(""), repo.join(name))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
1905 1925
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    if not name:
        # single argument: rename the topmost patch to that name
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # renaming into a directory keeps the original file name
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    i = q.find_series(patch)
    # preserve any guards attached to the old series entry
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    info = q.isapplied(patch)
    if info:
        # keep the status file in sync when renaming an applied patch
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # mirror the rename in the versioned patch repository, if any
        wlock = r.wlock()
        try:
            if r.dirstate[name] == 'r':
                r.undelete([name])
            r.copy(patch, name)
            r.remove([patch], False)
        finally:
            del wlock

    q.save_dirty()
1961 1981
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, node, delete=opts['delete'], qupdate=opts['update'])
    q.save_dirty()
    return 0
1970 1990
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        # copy the queue directory aside: to a named location (-n,
        # refusing to clobber without -f) or to the next save name
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            # best-effort removal: the status file may already be gone
            # (was a bare "except:", which also hid real errors)
            pass
    return 0
2000 2020
def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    node = repo.lookup(rev)
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'
    # only update the working dir if it is not already on the null rev
    update = repo.dirstate.parents()[0] != revlog.nullid
    repo.mq.strip(repo, node, backup=backup, update=update)
    return 0
2012 2032
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

    qguard foo.patch -stable (negative guard)
    qguard bar.patch +stable (positive guard)
    qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # changing the selection: remember how many patches were blocked
        # before so we can report the difference afterwards
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # print a histogram of the guards used in the series file
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top patch so --reapply can push back to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just below the first patch that is no longer
        # pushable under the newly selected guards
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.save_dirty()
2114 2134
def reposetup(ui, repo):
    # Wrap the repository class so that core commands become mq-aware:
    # commits and pushes on top of applied patches are refused, and the
    # applied patches are exposed as virtual tags (qbase/qtip/qparent).
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # abort when the working directory parent is an applied
            # mq patch (unless force is given)
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (argument index 5) or as
            # a keyword, depending on the caller
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # pushing applied patches would publish them; refuse unless
            # forced or an explicit rev set is given
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]

            # a stale status file may reference stripped revisions; warn
            # and fall back to the plain tags rather than crash
            if mqtags[-1][0] not in self.changelog.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(mqtags[-1][0]))
                return tagscache

            # add the virtual tags qtip, qbase and qparent
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags()

            cl = self.changelog
            qbasenode = revlog.bin(q.applied[0].rev)
            # same stale-status-file protection as in tags() above
            if qbasenode not in cl.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(qbasenode))
                return super(mqrepo, self)._branchtags()

            self.branchcache = {} # avoid recursion in changectx
            partial, last, lrev = self._readbranchcache()

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2189 2221
# option shared by the q* listing commands (qapplied, qunapplied, ...)
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# patch-header options shared by qnew and qrefresh (see setupheaderopts)
headeropts = [
    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
    ('u', 'user', '', _('add "From: <given user>" to patch')),
    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
    ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2197 2229
2198 2230 cmdtable = {
2199 2231 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2200 2232 "qclone":
2201 2233 (clone,
2202 2234 [('', 'pull', None, _('use pull protocol to copy metadata')),
2203 2235 ('U', 'noupdate', None, _('do not update the new working directories')),
2204 2236 ('', 'uncompressed', None,
2205 2237 _('use uncompressed transfer (fast over LAN)')),
2206 2238 ('p', 'patches', '', _('location of source patch repo')),
2207 2239 ] + commands.remoteopts,
2208 2240 _('hg qclone [OPTION]... SOURCE [DEST]')),
2209 2241 "qcommit|qci":
2210 2242 (commit,
2211 2243 commands.table["^commit|ci"][1],
2212 2244 _('hg qcommit [OPTION]... [FILE]...')),
2213 2245 "^qdiff":
2214 2246 (diff,
2215 2247 [('g', 'git', None, _('use git extended diff format')),
2216 2248 ] + commands.walkopts,
2217 2249 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2218 2250 "qdelete|qremove|qrm":
2219 2251 (delete,
2220 2252 [('k', 'keep', None, _('keep patch file')),
2221 2253 ('r', 'rev', [], _('stop managing a revision'))],
2222 2254 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2223 2255 'qfold':
2224 2256 (fold,
2225 2257 [('e', 'edit', None, _('edit patch header')),
2226 2258 ('k', 'keep', None, _('keep folded patch files')),
2227 2259 ] + commands.commitopts,
2228 2260 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2229 2261 'qgoto':
2230 2262 (goto,
2231 2263 [('f', 'force', None, _('overwrite any local changes'))],
2232 2264 _('hg qgoto [OPTION]... PATCH')),
2233 2265 'qguard':
2234 2266 (guard,
2235 2267 [('l', 'list', None, _('list all patches and guards')),
2236 2268 ('n', 'none', None, _('drop all guards'))],
2237 2269 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2238 2270 'qheader': (header, [], _('hg qheader [PATCH]')),
2239 2271 "^qimport":
2240 2272 (qimport,
2241 2273 [('e', 'existing', None, 'import file in patch dir'),
2242 2274 ('n', 'name', '', 'patch file name'),
2243 2275 ('f', 'force', None, 'overwrite existing files'),
2244 2276 ('r', 'rev', [], 'place existing revisions under mq control'),
2245 2277 ('g', 'git', None, _('use git extended diff format'))],
2246 2278 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2247 2279 "^qinit":
2248 2280 (init,
2249 2281 [('c', 'create-repo', None, 'create queue repository')],
2250 2282 _('hg qinit [-c]')),
2251 2283 "qnew":
2252 2284 (new,
2253 2285 [('e', 'edit', None, _('edit commit message')),
2254 2286 ('f', 'force', None, _('import uncommitted changes into patch')),
2255 2287 ('g', 'git', None, _('use git extended diff format')),
2256 2288 ] + commands.walkopts + commands.commitopts + headeropts,
2257 2289 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2258 2290 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2259 2291 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2260 2292 "^qpop":
2261 2293 (pop,
2262 2294 [('a', 'all', None, _('pop all patches')),
2263 2295 ('n', 'name', '', _('queue name to pop')),
2264 2296 ('f', 'force', None, _('forget any local changes'))],
2265 2297 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2266 2298 "^qpush":
2267 2299 (push,
2268 2300 [('f', 'force', None, _('apply if the patch has rejects')),
2269 2301 ('l', 'list', None, _('list patch name in commit text')),
2270 2302 ('a', 'all', None, _('apply all patches')),
2271 2303 ('m', 'merge', None, _('merge from another queue')),
2272 2304 ('n', 'name', '', _('merge queue name'))],
2273 2305 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2274 2306 "^qrefresh":
2275 2307 (refresh,
2276 2308 [('e', 'edit', None, _('edit commit message')),
2277 2309 ('g', 'git', None, _('use git extended diff format')),
2278 2310 ('s', 'short', None, _('refresh only files already in the patch')),
2279 2311 ] + commands.walkopts + commands.commitopts + headeropts,
2280 2312 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2281 2313 'qrename|qmv':
2282 2314 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2283 2315 "qrestore":
2284 2316 (restore,
2285 2317 [('d', 'delete', None, _('delete save entry')),
2286 2318 ('u', 'update', None, _('update queue working dir'))],
2287 2319 _('hg qrestore [-d] [-u] REV')),
2288 2320 "qsave":
2289 2321 (save,
2290 2322 [('c', 'copy', None, _('copy patch directory')),
2291 2323 ('n', 'name', '', _('copy directory name')),
2292 2324 ('e', 'empty', None, _('clear queue status file')),
2293 2325 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2294 2326 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2295 2327 "qselect":
2296 2328 (select,
2297 2329 [('n', 'none', None, _('disable all guards')),
2298 2330 ('s', 'series', None, _('list all guards in series file')),
2299 2331 ('', 'pop', None, _('pop to before first guarded applied patch')),
2300 2332 ('', 'reapply', None, _('pop, then reapply patches'))],
2301 2333 _('hg qselect [OPTION]... [GUARD]...')),
2302 2334 "qseries":
2303 2335 (series,
2304 2336 [('m', 'missing', None, _('print patches not in series')),
2305 2337 ] + seriesopts,
2306 2338 _('hg qseries [-ms]')),
2307 2339 "^strip":
2308 2340 (strip,
2309 2341 [('f', 'force', None, _('force multi-head removal')),
2310 2342 ('b', 'backup', None, _('bundle unrelated changesets')),
2311 2343 ('n', 'nobackup', None, _('no backups'))],
2312 2344 _('hg strip [-f] [-b] [-n] REV')),
2313 2345 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2314 2346 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2315 2347 }
@@ -1,3111 +1,3114 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 13 import errno, version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15
16 16 # Commands start here, listed alphabetically
17 17
18 18 def add(ui, repo, *pats, **opts):
19 19 """add the specified files on the next commit
20 20
21 21 Schedule files to be version controlled and added to the repository.
22 22
23 23 The files will be added to the repository at the next commit. To
24 24 undo an add before that, see hg revert.
25 25
26 26 If no names are given, add all files in the repository.
27 27 """
28 28
29 29 rejected = None
30 30 exacts = {}
31 31 names = []
32 32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
33 33 badmatch=util.always):
34 34 if exact:
35 35 if ui.verbose:
36 36 ui.status(_('adding %s\n') % rel)
37 37 names.append(abs)
38 38 exacts[abs] = 1
39 39 elif abs not in repo.dirstate:
40 40 ui.status(_('adding %s\n') % rel)
41 41 names.append(abs)
42 42 if not opts.get('dry_run'):
43 43 rejected = repo.add(names)
44 44 rejected = [p for p in rejected if p in exacts]
45 45 return rejected and 1 or 0
46 46
47 47 def addremove(ui, repo, *pats, **opts):
48 48 """add all new files, delete all missing files
49 49
50 50 Add all new files and remove all missing files from the repository.
51 51
52 52 New files are ignored if they match any of the patterns in .hgignore. As
53 53 with add, these changes take effect at the next commit.
54 54
55 55 Use the -s option to detect renamed files. With a parameter > 0,
56 56 this compares every removed file with every added file and records
57 57 those similar enough as renames. This option takes a percentage
58 58 between 0 (disabled) and 100 (files must be identical) as its
59 59 parameter. Detecting renamed files this way can be expensive.
60 60 """
61 61 try:
62 62 sim = float(opts.get('similarity') or 0)
63 63 except ValueError:
64 64 raise util.Abort(_('similarity must be a number'))
65 65 if sim < 0 or sim > 100:
66 66 raise util.Abort(_('similarity must be between 0 and 100'))
67 67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68 68
69 69 def annotate(ui, repo, *pats, **opts):
70 70 """show changeset information per file line
71 71
72 72 List changes in files, showing the revision id responsible for each line
73 73
74 74 This command is useful to discover who did a change or when a change took
75 75 place.
76 76
77 77 Without the -a option, annotate will avoid processing files it
78 78 detects as binary. With -a, annotate will generate an annotation
79 79 anyway, probably with undesirable results.
80 80 """
81 81 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
82 82
83 83 if not pats:
84 84 raise util.Abort(_('at least one file name or pattern required'))
85 85
86 86 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
87 87 ('number', lambda x: str(x[0].rev())),
88 88 ('changeset', lambda x: short(x[0].node())),
89 89 ('date', getdate),
90 90 ('follow', lambda x: x[0].path()),
91 91 ]
92 92
93 93 if (not opts['user'] and not opts['changeset'] and not opts['date']
94 94 and not opts['follow']):
95 95 opts['number'] = 1
96 96
97 97 linenumber = opts.get('line_number') is not None
98 98 if (linenumber and (not opts['changeset']) and (not opts['number'])):
99 99 raise util.Abort(_('at least one of -n/-c is required for -l'))
100 100
101 101 funcmap = [func for op, func in opmap if opts.get(op)]
102 102 if linenumber:
103 103 lastfunc = funcmap[-1]
104 104 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
105 105
106 106 ctx = repo.changectx(opts['rev'])
107 107
108 108 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
109 109 node=ctx.node()):
110 110 fctx = ctx.filectx(abs)
111 111 if not opts['text'] and util.binary(fctx.data()):
112 112 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
113 113 continue
114 114
115 115 lines = fctx.annotate(follow=opts.get('follow'),
116 116 linenumber=linenumber)
117 117 pieces = []
118 118
119 119 for f in funcmap:
120 120 l = [f(n) for n, dummy in lines]
121 121 if l:
122 122 m = max(map(len, l))
123 123 pieces.append(["%*s" % (m, x) for x in l])
124 124
125 125 if pieces:
126 126 for p, l in zip(zip(*pieces), lines):
127 127 ui.write("%s: %s" % (" ".join(p), l[1]))
128 128
129 129 def archive(ui, repo, dest, **opts):
130 130 '''create unversioned archive of a repository revision
131 131
132 132 By default, the revision used is the parent of the working
133 133 directory; use "-r" to specify a different revision.
134 134
135 135 To specify the type of archive to create, use "-t". Valid
136 136 types are:
137 137
138 138 "files" (default): a directory full of files
139 139 "tar": tar archive, uncompressed
140 140 "tbz2": tar archive, compressed using bzip2
141 141 "tgz": tar archive, compressed using gzip
142 142 "uzip": zip archive, uncompressed
143 143 "zip": zip archive, compressed using deflate
144 144
145 145 The exact name of the destination archive or directory is given
146 146 using a format string; see "hg help export" for details.
147 147
148 148 Each member added to an archive file has a directory prefix
149 149 prepended. Use "-p" to specify a format string for the prefix.
150 150 The default is the basename of the archive, with suffixes removed.
151 151 '''
152 152
153 153 ctx = repo.changectx(opts['rev'])
154 154 if not ctx:
155 155 raise util.Abort(_('repository has no revisions'))
156 156 node = ctx.node()
157 157 dest = cmdutil.make_filename(repo, dest, node)
158 158 if os.path.realpath(dest) == repo.root:
159 159 raise util.Abort(_('repository root cannot be destination'))
160 160 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
161 161 kind = opts.get('type') or 'files'
162 162 prefix = opts['prefix']
163 163 if dest == '-':
164 164 if kind == 'files':
165 165 raise util.Abort(_('cannot archive plain files to stdout'))
166 166 dest = sys.stdout
167 167 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
168 168 prefix = cmdutil.make_filename(repo, prefix, node)
169 169 archival.archive(repo, dest, node, kind, not opts['no_decode'],
170 170 matchfn, prefix)
171 171
172 172 def backout(ui, repo, node=None, rev=None, **opts):
173 173 '''reverse effect of earlier changeset
174 174
175 175 Commit the backed out changes as a new changeset. The new
176 176 changeset is a child of the backed out changeset.
177 177
178 178 If you back out a changeset other than the tip, a new head is
179 179 created. This head is the parent of the working directory. If
180 180 you back out an old changeset, your working directory will appear
181 181 old after the backout. You should merge the backout changeset
182 182 with another head.
183 183
184 184 The --merge option remembers the parent of the working directory
185 185 before starting the backout, then merges the new head with that
186 186 changeset afterwards. This saves you from doing the merge by
187 187 hand. The result of this merge is not committed, as for a normal
188 188 merge.'''
189 189 if rev and node:
190 190 raise util.Abort(_("please specify just one revision"))
191 191
192 192 if not rev:
193 193 rev = node
194 194
195 195 if not rev:
196 196 raise util.Abort(_("please specify a revision to backout"))
197 197
198 198 cmdutil.bail_if_changed(repo)
199 199 node = repo.lookup(rev)
200 200
201 201 op1, op2 = repo.dirstate.parents()
202 202 a = repo.changelog.ancestor(op1, node)
203 203 if a != node:
204 204 raise util.Abort(_('cannot back out change on a different branch'))
205 205
206 206 p1, p2 = repo.changelog.parents(node)
207 207 if p1 == nullid:
208 208 raise util.Abort(_('cannot back out a change with no parents'))
209 209 if p2 != nullid:
210 210 if not opts['parent']:
211 211 raise util.Abort(_('cannot back out a merge changeset without '
212 212 '--parent'))
213 213 p = repo.lookup(opts['parent'])
214 214 if p not in (p1, p2):
215 215 raise util.Abort(_('%s is not a parent of %s') %
216 216 (short(p), short(node)))
217 217 parent = p
218 218 else:
219 219 if opts['parent']:
220 220 raise util.Abort(_('cannot use --parent on non-merge changeset'))
221 221 parent = p1
222 222
223 223 hg.clean(repo, node, show_stats=False)
224 224 revert_opts = opts.copy()
225 225 revert_opts['date'] = None
226 226 revert_opts['all'] = True
227 227 revert_opts['rev'] = hex(parent)
228 228 revert(ui, repo, **revert_opts)
229 229 commit_opts = opts.copy()
230 230 commit_opts['addremove'] = False
231 231 if not commit_opts['message'] and not commit_opts['logfile']:
232 232 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
233 233 commit_opts['force_editor'] = True
234 234 commit(ui, repo, **commit_opts)
235 235 def nice(node):
236 236 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 237 ui.status(_('changeset %s backs out changeset %s\n') %
238 238 (nice(repo.changelog.tip()), nice(node)))
239 239 if op1 != node:
240 240 if opts['merge']:
241 241 ui.status(_('merging with changeset %s\n') % nice(op1))
242 242 hg.merge(repo, hex(op1))
243 243 else:
244 244 ui.status(_('the backout changeset is a new head - '
245 245 'do not forget to merge\n'))
246 246 ui.status(_('(use "backout --merge" '
247 247 'if you want to auto-merge)\n'))
248 248
249 249 def bisect(ui, repo, rev=None, extra=None,
250 250 reset=None, good=None, bad=None, skip=None, noupdate=None):
251 251 """subdivision search of changesets
252 252
253 253 This command helps to find changesets which introduce problems.
254 254 To use, mark the earliest changeset you know exhibits the problem
255 255 as bad, then mark the latest changeset which is free from the
256 256 problem as good. Bisect will update your working directory to a
257 257 revision for testing. Once you have performed tests, mark the
258 258 working directory as bad or good and bisect will either update to
259 259 another candidate changeset or announce that it has found the bad
260 260 revision.
261 261 """
262 262 # backward compatibility
263 263 if rev in "good bad reset init".split():
264 264 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
265 265 cmd, rev, extra = rev, extra, None
266 266 if cmd == "good":
267 267 good = True
268 268 elif cmd == "bad":
269 269 bad = True
270 270 else:
271 271 reset = True
272 272 elif extra or good + bad + skip + reset > 1:
273 273 raise util.Abort("Incompatible arguments")
274 274
275 275 if reset:
276 276 p = repo.join("bisect.state")
277 277 if os.path.exists(p):
278 278 os.unlink(p)
279 279 return
280 280
281 281 # load state
282 282 state = {'good': [], 'bad': [], 'skip': []}
283 283 if os.path.exists(repo.join("bisect.state")):
284 284 for l in repo.opener("bisect.state"):
285 285 kind, node = l[:-1].split()
286 286 node = repo.lookup(node)
287 287 if kind not in state:
288 288 raise util.Abort(_("unknown bisect kind %s") % kind)
289 289 state[kind].append(node)
290 290
291 291 # update state
292 292 node = repo.lookup(rev or '.')
293 293 if good:
294 294 state['good'].append(node)
295 295 elif bad:
296 296 state['bad'].append(node)
297 297 elif skip:
298 298 state['skip'].append(node)
299 299
300 300 # save state
301 301 f = repo.opener("bisect.state", "w", atomictemp=True)
302 302 wlock = repo.wlock()
303 303 try:
304 304 for kind in state:
305 305 for node in state[kind]:
306 306 f.write("%s %s\n" % (kind, hg.hex(node)))
307 307 f.rename()
308 308 finally:
309 309 del wlock
310 310
311 311 if not state['good'] or not state['bad']:
312 312 return
313 313
314 314 # actually bisect
315 315 node, changesets, good = hbisect.bisect(repo.changelog, state)
316 316 if changesets == 0:
317 317 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
318 318 displayer = cmdutil.show_changeset(ui, repo, {})
319 319 displayer.show(changenode=node)
320 320 elif node is not None:
321 321 # compute the approximate number of remaining tests
322 322 tests, size = 0, 2
323 323 while size <= changesets:
324 324 tests, size = tests + 1, size * 2
325 325 rev = repo.changelog.rev(node)
326 326 ui.write(_("Testing changeset %s:%s "
327 327 "(%s changesets remaining, ~%s tests)\n")
328 328 % (rev, hg.short(node), changesets, tests))
329 329 if not noupdate:
330 330 cmdutil.bail_if_changed(repo)
331 331 return hg.clean(repo, node)
332 332
333 333 def branch(ui, repo, label=None, **opts):
334 334 """set or show the current branch name
335 335
336 336 With no argument, show the current branch name. With one argument,
337 337 set the working directory branch name (the branch does not exist in
338 338 the repository until the next commit).
339 339
340 340 Unless --force is specified, branch will not let you set a
341 341 branch name that shadows an existing branch.
342 342
343 343 Use the command 'hg update' to switch to an existing branch.
344 344 """
345 345
346 346 if label:
347 347 if not opts.get('force') and label in repo.branchtags():
348 348 if label not in [p.branch() for p in repo.workingctx().parents()]:
349 349 raise util.Abort(_('a branch of the same name already exists'
350 350 ' (use --force to override)'))
351 351 repo.dirstate.setbranch(util.fromlocal(label))
352 352 ui.status(_('marked working directory as branch %s\n') % label)
353 353 else:
354 354 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
355 355
356 356 def branches(ui, repo, active=False):
357 357 """list repository named branches
358 358
359 359 List the repository's named branches, indicating which ones are
360 360 inactive. If active is specified, only show active branches.
361 361
362 362 A branch is considered active if it contains unmerged heads.
363 363
364 364 Use the command 'hg update' to switch to an existing branch.
365 365 """
366 366 b = repo.branchtags()
367 367 heads = dict.fromkeys(repo.heads(), 1)
368 368 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
369 369 l.sort()
370 370 l.reverse()
371 371 for ishead, r, n, t in l:
372 372 if active and not ishead:
373 373 # If we're only displaying active branches, abort the loop on
374 374 # encountering the first inactive head
375 375 break
376 376 else:
377 377 hexfunc = ui.debugflag and hex or short
378 378 if ui.quiet:
379 379 ui.write("%s\n" % t)
380 380 else:
381 381 spaces = " " * (30 - util.locallen(t))
382 382 # The code only gets here if inactive branches are being
383 383 # displayed or the branch is active.
384 384 isinactive = ((not ishead) and " (inactive)") or ''
385 385 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
386 386
387 387 def bundle(ui, repo, fname, dest=None, **opts):
388 388 """create a changegroup file
389 389
390 390 Generate a compressed changegroup file collecting changesets not
391 391 found in the other repository.
392 392
393 393 If no destination repository is specified the destination is assumed
394 394 to have all the nodes specified by one or more --base parameters.
395 395 To create a bundle containing all changesets, use --base null.
396 396
397 397 The bundle file can then be transferred using conventional means and
398 398 applied to another repository with the unbundle or pull command.
399 399 This is useful when direct push and pull are not available or when
400 400 exporting an entire repository is undesirable.
401 401
402 402 Applying bundles preserves all changeset contents including
403 403 permissions, copy/rename information, and revision history.
404 404 """
405 405 revs = opts.get('rev') or None
406 406 if revs:
407 407 revs = [repo.lookup(rev) for rev in revs]
408 408 base = opts.get('base')
409 409 if base:
410 410 if dest:
411 411 raise util.Abort(_("--base is incompatible with specifiying "
412 412 "a destination"))
413 413 base = [repo.lookup(rev) for rev in base]
414 414 # create the right base
415 415 # XXX: nodesbetween / changegroup* should be "fixed" instead
416 416 o = []
417 417 has = {nullid: None}
418 418 for n in base:
419 419 has.update(repo.changelog.reachable(n))
420 420 if revs:
421 421 visit = list(revs)
422 422 else:
423 423 visit = repo.changelog.heads()
424 424 seen = {}
425 425 while visit:
426 426 n = visit.pop(0)
427 427 parents = [p for p in repo.changelog.parents(n) if p not in has]
428 428 if len(parents) == 0:
429 429 o.insert(0, n)
430 430 else:
431 431 for p in parents:
432 432 if p not in seen:
433 433 seen[p] = 1
434 434 visit.append(p)
435 435 else:
436 436 cmdutil.setremoteconfig(ui, opts)
437 437 dest, revs, checkout = hg.parseurl(
438 438 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
439 439 other = hg.repository(ui, dest)
440 440 o = repo.findoutgoing(other, force=opts['force'])
441 441
442 442 if revs:
443 443 cg = repo.changegroupsubset(o, revs, 'bundle')
444 444 else:
445 445 cg = repo.changegroup(o, 'bundle')
446 446 changegroup.writebundle(cg, fname, "HG10BZ")
447 447
448 448 def cat(ui, repo, file1, *pats, **opts):
449 449 """output the current or given revision of files
450 450
451 451 Print the specified files as they were at the given revision.
452 452 If no revision is given, the parent of the working directory is used,
453 453 or tip if no revision is checked out.
454 454
455 455 Output may be to a file, in which case the name of the file is
456 456 given using a format string. The formatting rules are the same as
457 457 for the export command, with the following additions:
458 458
459 459 %s basename of file being printed
460 460 %d dirname of file being printed, or '.' if in repo root
461 461 %p root-relative path name of file being printed
462 462 """
463 463 ctx = repo.changectx(opts['rev'])
464 464 err = 1
465 465 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
466 466 ctx.node()):
467 467 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
468 468 fp.write(ctx.filectx(abs).data())
469 469 err = 0
470 470 return err
471 471
472 472 def clone(ui, source, dest=None, **opts):
473 473 """make a copy of an existing repository
474 474
475 475 Create a copy of an existing repository in a new directory.
476 476
477 477 If no destination directory name is specified, it defaults to the
478 478 basename of the source.
479 479
480 480 The location of the source is added to the new repository's
481 481 .hg/hgrc file, as the default to be used for future pulls.
482 482
483 483 For efficiency, hardlinks are used for cloning whenever the source
484 484 and destination are on the same filesystem (note this applies only
485 485 to the repository data, not to the checked out files). Some
486 486 filesystems, such as AFS, implement hardlinking incorrectly, but
487 487 do not report errors. In these cases, use the --pull option to
488 488 avoid hardlinking.
489 489
490 490 You can safely clone repositories and checked out files using full
491 491 hardlinks with
492 492
493 493 $ cp -al REPO REPOCLONE
494 494
495 495 which is the fastest way to clone. However, the operation is not
496 496 atomic (making sure REPO is not modified during the operation is
497 497 up to you) and you have to make sure your editor breaks hardlinks
498 498 (Emacs and most Linux Kernel tools do so).
499 499
500 500 If you use the -r option to clone up to a specific revision, no
501 501 subsequent revisions will be present in the cloned repository.
502 502 This option implies --pull, even on local repositories.
503 503
504 504 See pull for valid source format details.
505 505
506 506 It is possible to specify an ssh:// URL as the destination, but no
507 507 .hg/hgrc and working directory will be created on the remote side.
508 508 Look at the help text for the pull command for important details
509 509 about ssh:// URLs.
510 510 """
511 511 cmdutil.setremoteconfig(ui, opts)
512 512 hg.clone(ui, source, dest,
513 513 pull=opts['pull'],
514 514 stream=opts['uncompressed'],
515 515 rev=opts['rev'],
516 516 update=not opts['noupdate'])
517 517
518 518 def commit(ui, repo, *pats, **opts):
519 519 """commit the specified files or all outstanding changes
520 520
521 521 Commit changes to the given files into the repository.
522 522
523 523 If a list of files is omitted, all changes reported by "hg status"
524 524 will be committed.
525 525
526 526 If no commit message is specified, the configured editor is started to
527 527 enter a message.
528 528 """
529 529 def commitfunc(ui, repo, files, message, match, opts):
530 530 return repo.commit(files, message, opts['user'], opts['date'], match,
531 531 force_editor=opts.get('force_editor'))
532 532 cmdutil.commit(ui, repo, commitfunc, pats, opts)
533 533
534 534 def copy(ui, repo, *pats, **opts):
535 535 """mark files as copied for the next commit
536 536
537 537 Mark dest as having copies of source files. If dest is a
538 538 directory, copies are put in that directory. If dest is a file,
539 539 there can only be one source.
540 540
541 541 By default, this command copies the contents of files as they
542 542 stand in the working directory. If invoked with --after, the
543 543 operation is recorded, but no copying is performed.
544 544
545 545 This command takes effect in the next commit. To undo a copy
546 546 before that, see hg revert.
547 547 """
548 548 wlock = repo.wlock(False)
549 549 try:
550 550 return cmdutil.copy(ui, repo, pats, opts)
551 551 finally:
552 552 del wlock
553 553
554 554 def debugancestor(ui, index, rev1, rev2):
555 555 """find the ancestor revision of two revisions in a given index"""
556 556 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
557 557 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
558 558 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
559 559
560 560 def debugcomplete(ui, cmd='', **opts):
561 561 """returns the completion list associated with the given command"""
562 562
563 563 if opts['options']:
564 564 options = []
565 565 otables = [globalopts]
566 566 if cmd:
567 567 aliases, entry = cmdutil.findcmd(ui, cmd, table)
568 568 otables.append(entry[1])
569 569 for t in otables:
570 570 for o in t:
571 571 if o[0]:
572 572 options.append('-%s' % o[0])
573 573 options.append('--%s' % o[1])
574 574 ui.write("%s\n" % "\n".join(options))
575 575 return
576 576
577 577 clist = cmdutil.findpossible(ui, cmd, table).keys()
578 578 clist.sort()
579 579 ui.write("%s\n" % "\n".join(clist))
580 580
581 581 def debugfsinfo(ui, path = "."):
582 582 file('.debugfsinfo', 'w').write('')
583 583 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
584 584 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
585 585 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
586 586 and 'yes' or 'no'))
587 587 os.unlink('.debugfsinfo')
588 588
589 589 def debugrebuildstate(ui, repo, rev=""):
590 590 """rebuild the dirstate as it would look like for the given revision"""
591 591 if rev == "":
592 592 rev = repo.changelog.tip()
593 593 ctx = repo.changectx(rev)
594 594 files = ctx.manifest()
595 595 wlock = repo.wlock()
596 596 try:
597 597 repo.dirstate.rebuild(rev, files)
598 598 finally:
599 599 del wlock
600 600
601 601 def debugcheckstate(ui, repo):
602 602 """validate the correctness of the current dirstate"""
603 603 parent1, parent2 = repo.dirstate.parents()
604 604 m1 = repo.changectx(parent1).manifest()
605 605 m2 = repo.changectx(parent2).manifest()
606 606 errors = 0
607 607 for f in repo.dirstate:
608 608 state = repo.dirstate[f]
609 609 if state in "nr" and f not in m1:
610 610 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
611 611 errors += 1
612 612 if state in "a" and f in m1:
613 613 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
614 614 errors += 1
615 615 if state in "m" and f not in m1 and f not in m2:
616 616 ui.warn(_("%s in state %s, but not in either manifest\n") %
617 617 (f, state))
618 618 errors += 1
619 619 for f in m1:
620 620 state = repo.dirstate[f]
621 621 if state not in "nrm":
622 622 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
623 623 errors += 1
624 624 if errors:
625 625 error = _(".hg/dirstate inconsistent with current parent's manifest")
626 626 raise util.Abort(error)
627 627
628 628 def showconfig(ui, repo, *values, **opts):
629 629 """show combined config settings from all hgrc files
630 630
631 631 With no args, print names and values of all config items.
632 632
633 633 With one arg of the form section.name, print just the value of
634 634 that config item.
635 635
636 636 With multiple args, print names and values of all config items
637 637 with matching section names."""
638 638
639 639 untrusted = bool(opts.get('untrusted'))
640 640 if values:
641 641 if len([v for v in values if '.' in v]) > 1:
642 642 raise util.Abort(_('only one config item permitted'))
643 643 for section, name, value in ui.walkconfig(untrusted=untrusted):
644 644 sectname = section + '.' + name
645 645 if values:
646 646 for v in values:
647 647 if v == section:
648 648 ui.write('%s=%s\n' % (sectname, value))
649 649 elif v == sectname:
650 650 ui.write(value, '\n')
651 651 else:
652 652 ui.write('%s=%s\n' % (sectname, value))
653 653
654 654 def debugsetparents(ui, repo, rev1, rev2=None):
655 655 """manually set the parents of the current working directory
656 656
657 657 This is useful for writing repository conversion tools, but should
658 658 be used with care.
659 659 """
660 660
661 661 if not rev2:
662 662 rev2 = hex(nullid)
663 663
664 664 wlock = repo.wlock()
665 665 try:
666 666 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
667 667 finally:
668 668 del wlock
669 669
670 670 def debugstate(ui, repo):
671 671 """show the contents of the current dirstate"""
672 672 k = repo.dirstate._map.items()
673 673 k.sort()
674 674 for file_, ent in k:
675 675 if ent[3] == -1:
676 676 # Pad or slice to locale representation
677 677 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
678 678 timestr = 'unset'
679 679 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
680 680 else:
681 681 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
682 682 if ent[1] & 020000:
683 683 mode = 'lnk'
684 684 else:
685 685 mode = '%3o' % (ent[1] & 0777)
686 686 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
687 687 for f in repo.dirstate.copies():
688 688 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
689 689
690 690 def debugdata(ui, file_, rev):
691 691 """dump the contents of a data file revision"""
692 692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
693 693 try:
694 694 ui.write(r.revision(r.lookup(rev)))
695 695 except KeyError:
696 696 raise util.Abort(_('invalid revision identifier %s') % rev)
697 697
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the looser, extended set of accepted formats.
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    # d is a 2-tuple (presumably timestamp, tz offset — per util.parsedate).
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    # With a range argument, also report whether the date matches it.
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))
709 709
def debugindex(ui, file_):
    """dump an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        try:
            pp = r.parents(node)
        except:
            # deliberately broad: keep dumping even if one entry is damaged
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
            short(node), short(pp[0]), short(pp[1])))
724 724
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(r.count()):
        p1, p2 = r.parents(r.node(rev))
        # Every revision gets an edge from its first parent; a second
        # edge is emitted only for merges (non-null second parent).
        ui.write("\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write("}\n")
736 736
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # Write contents to a fresh temp file and return its path;
        # callers are responsible for unlinking it.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # Count of failed checks; doubles as the return value.
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    # Exercise the configured patch tool on a trivial one-hunk diff and
    # verify it produces exactly the expected result.
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # merge helper
    # Resolve the merge command like merge.py does: HGMERGE env var,
    # then ui.merge config, then the default "hgmerge" script.
    ui.status(_("Checking merge helper...\n"))
    cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
           or "hgmerge")
    cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
    if not cmdpath:
        if cmd == 'hgmerge':
            ui.write(_(" No merge helper set and can't find default"
                       " hgmerge script in PATH\n"))
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
            problems += 1
    else:
        # actually attempt a patch here
        # Three-way merge of independent edits to a common base; the
        # helper is expected to merge both changes into fl cleanly.
        fa = writetemp("1\n2\n3\n4\n")
        fl = writetemp("1\n2\n3\ninsert\n4\n")
        fr = writetemp("begin\n1\n2\n3\n4\n")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
        if r:
            ui.write(_(" Got unexpected merge error %d!\n") % r)
            problems += 1
        m = file(fl).read()
        if m != "begin\n1\n2\n3\ninsert\n4\n":
            ui.write(_(" Got unexpected merge results!\n"))
            ui.write(_(" (your merge helper may have the"
                       " wrong argument order)\n"))
            ui.write(_(" Result: %r\n") % m)
            problems += 1
        os.unlink(fa)
        os.unlink(fl)
        os.unlink(fr)

    # editor
    # Only check that the configured editor exists; it is not invoked.
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
            problems += 1

    # check username
    # Same lookup order as ui.username: HGUSER, ui.username, EMAIL.
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        # ui.username() aborts with a useful message when unset.
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
882 882
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # Default to tip when no --rev was given.
    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fctx = ctx.filectx(abs)
        # renamed() returns a (source path, source node) pair, or a
        # false value when the file revision is not a rename.
        m = fctx.filelog().renamed(fctx.filenode())
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
895 895
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matched = list(cmdutil.walk(repo, pats, opts))
    if not matched:
        return
    # Size the two middle columns to the longest absolute and relative
    # file names so the output lines up.
    abswidth = max([len(a) for (s, a, r, e) in matched])
    relwidth = max([len(r) for (s, a, r, e) in matched])
    fmt = '%%s  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in matched:
        flag = exact and 'exact' or ''
        line = fmt % (src, abs, rel, flag)
        ui.write("%s\n" % line.rstrip())
907 907
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Turn zero, one, or two --rev arguments into a node pair; a None
    # node stands for the working directory.
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    # Resolve file patterns into a file list and a match function.
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))
935 935
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    # Expand revision ranges/sets into a concrete list of revisions.
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    # patch.export handles the per-revision output (and the %-template
    # expansion of --output).
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
976 976
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep.  It only accepts
    Python/Perl regexps.  It searches repository history, not the
    working directory.  It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match.  To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
        return None
    # --print0 switches both the field separator and the record
    # terminator to NUL, for xargs -0 style consumers.
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # Cache filelog objects so each file's log is opened only once.
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # Yield (linenum, colstart, colend, line) for every regexp
        # match in body, scanning forward from the previous match.
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # One matched line; equality compares only the line text so
        # difflinestates can diff two revisions' match lists.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> {fn -> [linestate, ...]} for the current window.
    # copies: rev -> {fn -> copy source} when following renames.
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # Yield ('+'/'-', linestate) pairs describing how the set of
        # matching lines changed between two revisions.
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: fn -> last revision already displayed for that file.
    prev = {}
    def display(fn, rev, states, prevstates):
        # Print the matches for one file at one revision; returns True
        # when anything was printed.
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # Only one output line per (file, revision) pair.
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs yields 'window'/'add'/'iter' events: collect match
    # data on 'add', emit output on 'iter'.
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    fstate.setdefault(fn, [])
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not in this revision's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first match
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # Flush any remaining per-file state that was never displayed.
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # Unix-grep-style exit code: 0 when something matched, 1 otherwise.
    return (not found and 1) or 0
1140 1140
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # --rev limits the search to heads reachable from that revision.
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # Each argument maps to a branch; skip branches already handled.
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1188 1188
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # Accumulated (title, options) sections, printed at the very end.
    option_lists = []

    def addglobalopts(aliases):
        # Append either the full global-option table (verbose) or a
        # one-line hint on how to see more.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Help for a single command; raises cmdutil.UnknownCommand (via
        # findcmd) when the name does not resolve.
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name, table)
        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # Print a one-line summary per command; select() filters which
        # commands are shown (used by helpext for extension commands).
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            # "^" marks basic commands in the table; strip it for display.
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Help for a free-standing topic from help.helptable.
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Help for an extension: its docstring plus its command list.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # Align the descriptions on the widest option column.
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1369 1369
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    # No explicit field flags means "print the default summary".
    default = not (num or id or branch or tags)
    output = []

    if source:
        # Remote lookup: only the changeset id can be queried.
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # Working directory: show parent id(s), "+" marks local changes.
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # Non-default branch names are shown in parentheses.
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1439 1439
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message.  Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used).  From and Subject headers of email
    message are used as default committer and commit message.  All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body.  Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch.  This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        # Hold both the working-dir and store locks across all patches.
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            # Patch source: stdin, a local file, or a URL.
            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            # extract() writes the diff body to a temp file and parses
            # any hg-export metadata out of the message.
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # Commit message precedence: -m/-l, then the patch's own
                # message, then the editor (message=None).
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    # --exact: update to the patch's recorded parent
                    # before applying, so the resulting id can match.
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # Best-effort: preserve a recorded second parent when
                    # the first parent matches the working directory.
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # Record adds/removes even when patching failed partway.
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1547 1550
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Nothing to show; remove a stale --bundle file if one exists.
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # Close the (possibly bundle-backed) repo and delete any
        # temporary bundle we created.
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1608 1611
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd before touching a (possibly remote) path
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=True)
1623 1626
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 separates entries with NUL for safe xargs consumption
    sep = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    found = False
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always,
                          default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            continue
        # without an explicit revision only report tracked files
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), sep)
        else:
            ui.write(((pats and rel) or abs), sep)
        found = True

    # exit status 0 if anything matched, 1 otherwise
    return int(not found)
1664 1667
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoize changeset reads: the same rev may be inspected several
    # times below (date filter, keyword filter, copies)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # only scan filelogs up to the highest requested rev when both
    # --copies and --rev are given; otherwise scan the whole changelog
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # rcache: filename -> {linkrev -> rename info}
    # ncache: filename -> {filenode -> rename info}
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first sighting of fn: walk its filelog once and cache
            # every rename keyed by linkrev (and by node for reuse)
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            # --date: changes[2][0] is the commit timestamp
            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            # -k/--keyword: match (case-insensitively) against user,
            # description and changed-file list
            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # 'iter' marks a fully-processed rev; count it toward --limit
            # only if the displayer actually emitted it
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1785 1788
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    m = repo.changectx(node).manifest()
    names = m.keys()
    names.sort()

    for f in names:
        if ui.debugflag:
            # --debug: prefix each name with its file revision hash
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            # -v: mode plus a marker ('*' executable, '@' symlink)
            flag = m.execf(f) and "*" or m.linkf(f) and "@" or " "
            mode = m.execf(f) and "755" or "644"
            ui.write("%3s %1s " % (mode, flag))
        ui.write("%s\n" % f)
1818 1821
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no revision given: pick the one other head, if unambiguous
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        parent = repo.dirstate.parents()[0]
        if len(heads) == 1:
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.workingctx().branch()):
                # parent is not the branch tip: suggest update instead
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever head is not the working dir parent
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1856 1859
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    remote = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    bases = repo.findoutgoing(remote, force=opts['force'])
    if not bases:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(bases, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if opts['no_merges']:
            # a merge has two non-null parents
            parents = [p for p in repo.changelog.parents(n) if p != nullid]
            if len(parents) == 2:
                continue
        displayer.show(changenode=n)
1887 1890
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    if not file_:
        # no file argument: just the context's parent changesets
        pnodes = [cp.node() for cp in ctx.parents()]
    else:
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = files[0]
        # collect the file's node in each parent that contains it
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        # map each file node back to the changeset that introduced it
        pnodes = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in pnodes:
        if n != nullid:
            displayer.show(changenode=n)
1927 1930
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: list every configured path
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return

    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    else:
        # fell through the loop: no such symbolic name
        ui.warn(_("not found!\n"))
        return 1
1947 1950
def postincoming(ui, repo, modheads, optupdate, checkout):
    # shared epilogue for pull/unbundle: optionally update the working
    # dir, or hint at the next command to run
    if modheads == 0:
        return
    if optupdate and (modheads <= 1 or checkout):
        return hg.update(repo, checkout)
    if optupdate:
        # requested an update but new heads make it ambiguous
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1960 1963
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            # resolve the requested revisions on the remote side
            revs = [remote.lookup(r) for r in revs]
        except repo.NoCapability:
            raise util.Abort(_("Other repository doesn't support revision lookup, "
                               "so a rev cannot be specified."))

    modheads = repo.pull(remote, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2017 2020
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # wrap in _() for i18n, consistent with pull/outgoing/incoming
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # repo.push returns 0 on error; invert so the command exits non-zero
    return r == 0
2058 2061
2059 2062 def rawcommit(ui, repo, *pats, **opts):
2060 2063 """raw commit interface (DEPRECATED)
2061 2064
2062 2065 (DEPRECATED)
2063 2066 Lowlevel commit, for use in helper scripts.
2064 2067
2065 2068 This command is not intended to be used by normal users, as it is
2066 2069 primarily useful for importing from other SCMs.
2067 2070
2068 2071 This command is now deprecated and will be removed in a future
2069 2072 release, please use debugsetparents and commit instead.
2070 2073 """
2071 2074
2072 2075 ui.warn(_("(the rawcommit command is deprecated)\n"))
2073 2076
2074 2077 message = cmdutil.logmessage(opts)
2075 2078
2076 2079 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2077 2080 if opts['files']:
2078 2081 files += open(opts['files']).read().splitlines()
2079 2082
2080 2083 parents = [repo.lookup(p) for p in opts['parent']]
2081 2084
2082 2085 try:
2083 2086 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2084 2087 except ValueError, inst:
2085 2088 raise util.Abort(str(inst))
2086 2089
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # nothing to recover (or recovery failed): report failure
    if not repo.recover():
        return 1
    # verify repository integrity after a successful recovery
    return hg.verify(repo)
2098 2101
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    # first five status lists: modified, added, removed, deleted, unknown
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    # NOTE: the loop variable 'exact' shadows the dict built above;
    # inside the loop it is the per-file "named explicitly" flag
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            # -f on an added file: forget it instead of removing
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
            exact = 1 # force the message
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without explicit names: only record files already gone
            continue
        elif abs in removed:
            continue
        if reason:
            # only complain about explicitly-named files
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # unlink from disk unless --after alone was given
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2149 2152
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # take the working-dir lock for the duration of the copy+remove;
    # 'del wlock' in the finally clause releases it deterministically
    wlock = repo.wlock(False)
    try:
        # delegate to the shared copy implementation in rename mode
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        del wlock
2169 2172
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        # translate --date into the matching revision
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    # target context: the revision to restore files to
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # parent manifest loaded lazily below, only if needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names: abs path -> (relative path, named-explicitly flag)
    # target_only: files present in the target rev but not in dirstate
    names = {}
    target_only = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src == 'b':
                target_only[abs] = True

        # walk target manifest.

        def badmatch(path):
            # suppress "not found" noise for names (or directories of
            # names) already collected from the dirstate walk
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)
            target_only[abs] = True

        # modified, added, removed, deleted, unknown relative to parent
        changes = repo.status(match=names.has_key)[:5]
        modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        # action buckets: ([files], status message template)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], _('removing %s\n'))
        forget = ([], _('forgetting %s\n'))
        undelete = ([], _('undeleting %s\n'))
        update = {}

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, forget, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            (unknown, add, None, True, False),
            (target_only, add, None, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # queue abs into the chosen action bucket, optionally
                # saving the current file as <rel>.orig first
                xlist[0].append(abs)
                update[abs] = 1
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    ui.status(xlist[1] % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                else:
                    if exact: ui.warn(_('file not managed: %s\n') % rel)
                break
            else:
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if pmf[abs] != mfentry:
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # order matters: forget before hg.revert rewrites files,
            # then fix up dirstate for added/undeleted/removed entries
            for f in forget[0]:
                repo.dirstate.forget(f)
            r = hg.revert(repo, node, update.has_key)
            for f in add[0]:
                repo.dirstate.add(f)
            for f in undelete[0]:
                repo.dirstate.normal(f)
            for f in remove[0]:
                repo.dirstate.remove(f)
        # NOTE(review): with --dry-run, 'r' is never bound here, so this
        # return would raise UnboundLocalError — confirm against callers
        return r
    finally:
        del wlock
2350 2353
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (and safety checks) happens in localrepo.rollback
    repo.rollback()
2378 2381
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2385 2388
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: serve the ssh wire protocol on stdin/stdout instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # push web options into the top-level ui config so hgweb sees them
    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                # keep the repo's own ui in sync with the parent config
                repo.ui.setconfig("web", o, str(opts[o]))

    # without a repo, only a webdir_conf multi-repo setup makes sense
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # adapter consumed by cmdutil.service (handles daemonizing, etc.)
    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # omit the default HTTP port from the announced URL
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/%s\n') %
                          (self.httpd.addr, self.httpd.port, prefix))
            else:
                ui.status(_('listening at http://%s/%s\n') %
                          (self.httpd.addr, prefix))

        def run(self):
            self.httpd.serve_forever()

    # shadow the class with its single instance
    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2443 2446
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    all = opts['all']
    # Resolve --rev into a (base, target) node pair; (wdir parent, None)
    # when no revision was given.
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # Paths are printed relative to cwd only when patterns were given.
    cwd = (pats and repo.getcwd()) or ''
    # Ignored/clean lists are expensive, so only request them when needed.
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    # (option name, status letter, file list) triples, in display order.
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    # Clean files are only shown when explicitly requested (-c or -A).
    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    # Show the explicitly selected categories, or default to -mardu?I.
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % repo.pathto(f, cwd))
            # With -C/-A, append the copy source on the following line.
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2508 2511
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # Reserved names would shadow built-in revision identifiers.
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    # Positional REV argument is the deprecated spelling of -r/--rev.
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # --remove: the tag must exist, and its kind (local/global) must
        # match the requested scope.
        tagtype = repo.tagtype(name)

        if not tagtype:
            raise util.Abort(_('tag %s does not exist') % name)
        if opts['local'] and tagtype == 'global':
            raise util.Abort(_('%s tag is global') % name)
        if not opts['local'] and tagtype == 'local':
            raise util.Abort(_('%s tag is local') % name)

        # Removal is recorded as tagging the null revision.
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    # Tagging the working directory during an uncommitted merge would be
    # ambiguous; require an explicit revision instead.
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2564 2567
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            # hexfunc (hex/short) does not raise LookupError, so hn is
            # always bound before changelog.rev() can fail below.
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # The tag points at a node the changelog does not know about:
            # show '?' for the revision number instead of dropping the
            # tag from the listing (the write previously lived in an
            # else: clause, so this line was computed but never printed).
            r = " ?:%s" % hn
        spaces = " " * (30 - util.locallen(t))
        if ui.verbose:
            if repo.tagtype(t) == 'local':
                tagtype = " local"
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2597 2600
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # The tip is the last revision: count() - 1, spelled via nullrev (-1).
    tiprev = nullrev + repo.changelog.count()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tiprev)
2604 2607
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    # At least one bundle is guaranteed by the mandatory fname1 argument.
    for fname in (fname1,) + fnames:
        # A local file is opened directly; anything else is treated as a URL.
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    # Report on the last changegroup applied, optionally updating (-u).
    return postincoming(ui, repo, modheads, opts['update'], None)
2621 2624
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # The target may arrive positionally (node) or via -r (rev), not both.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # Resolve --date to the tipmost matching revision.
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        # -C discards local modifications.
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2653 2656
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # All the checking is implemented in hg.verify(); propagate its result.
    outcome = hg.verify(repo)
    return outcome
2665 2668
def version_(ui):
    """output version and copyright information"""
    # Version line is always shown; the copyright notice goes through
    # ui.status so -q suppresses it.
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    notice = _(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
2676 2679
# Command options and aliases are listed here, alphabetically

# Each option entry is a (short flag, long name, default value, help text)
# tuple; an empty short flag means the option has no single-letter form.

# Options accepted by every command.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# Reusable option groups shared by several commands in the table below.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]
2722 2725
# Command dispatch table: maps a command name to a (function, options,
# synopsis) triple.  A leading "^" marks commands shown in the short help
# listing; "|" separates a command name from its aliases.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('hg debugstate')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
         ] + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove without deleting')),
          ('f', 'force', None, _('remove file even if modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3108 3111
# Space-separated names of commands that must work without a repository.
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
# Commands for which a repository is optional rather than required.
optionalrepo = ("identify paths serve showconfig")
@@ -1,462 +1,458 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from remoterepo import *
11 11 from i18n import _
12 12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 13 import errno, keepalive, tempfile, socket, changegroup
14 14
15 15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 16 def __init__(self, ui):
17 17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 18 self.ui = ui
19 19
20 20 def find_user_password(self, realm, authuri):
21 21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 22 self, realm, authuri)
23 23 user, passwd = authinfo
24 24 if user and passwd:
25 25 return (user, passwd)
26 26
27 27 if not self.ui.interactive:
28 28 raise util.Abort(_('http authorization required'))
29 29
30 30 self.ui.write(_("http authorization required\n"))
31 31 self.ui.status(_("realm: %s\n") % realm)
32 32 if user:
33 33 self.ui.status(_("user: %s\n") % user)
34 34 else:
35 35 user = self.ui.prompt(_("user:"), default=None)
36 36
37 37 if not passwd:
38 38 passwd = self.ui.getpass()
39 39
40 40 self.add_password(realm, authuri, user, passwd)
41 41 return (user, passwd)
42 42
43 43 def netlocsplit(netloc):
44 44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45 45
46 46 a = netloc.find('@')
47 47 if a == -1:
48 48 user, passwd = None, None
49 49 else:
50 50 userpass, netloc = netloc[:a], netloc[a+1:]
51 51 c = userpass.find(':')
52 52 if c == -1:
53 53 user, passwd = urllib.unquote(userpass), None
54 54 else:
55 55 user = urllib.unquote(userpass[:c])
56 56 passwd = urllib.unquote(userpass[c+1:])
57 57 c = netloc.find(':')
58 58 if c == -1:
59 59 host, port = netloc, None
60 60 else:
61 61 host, port = netloc[:c], netloc[c+1:]
62 62 return host, port, user, passwd
63 63
64 64 def netlocunsplit(host, port, user=None, passwd=None):
65 65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 66 if port:
67 67 hostport = host + ':' + port
68 68 else:
69 69 hostport = host
70 70 if user:
71 71 if passwd:
72 72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 73 else:
74 74 userpass = urllib.quote(user)
75 75 return userpass + '@' + hostport
76 76 return hostport
77 77
78 78 # work around a bug in Python < 2.4.2
79 79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 80 class request(urllib2.Request):
81 81 def add_header(self, key, val):
82 82 if key.lower() == 'proxy-authorization':
83 83 val = val.strip()
84 84 return urllib2.Request.add_header(self, key, val)
85 85
86 86 class httpsendfile(file):
87 87 def __len__(self):
88 88 return os.fstat(self.fileno()).st_size
89 89
90 90 def _gen_sendfile(connection):
91 91 def _sendfile(self, data):
92 92 # send a file
93 93 if isinstance(data, httpsendfile):
94 94 # if auth required, some data sent twice, so rewind here
95 95 data.seek(0)
96 96 for chunk in util.filechunkiter(data):
97 97 connection.send(self, chunk)
98 98 else:
99 99 connection.send(self, data)
100 100 return _sendfile
101 101
102 102 class httpconnection(keepalive.HTTPConnection):
103 103 # must be able to send big bundle as stream.
104 104 send = _gen_sendfile(keepalive.HTTPConnection)
105 105
106 class basehttphandler(keepalive.HTTPHandler):
106 class httphandler(keepalive.HTTPHandler):
107 107 def http_open(self, req):
108 108 return self.do_open(httpconnection, req)
109 109
110 def __del__(self):
111 self.close_all()
112
110 113 has_https = hasattr(urllib2, 'HTTPSHandler')
111 114 if has_https:
112 115 class httpsconnection(httplib.HTTPSConnection):
113 116 response_class = keepalive.HTTPResponse
114 117 # must be able to send big bundle as stream.
115 118 send = _gen_sendfile(httplib.HTTPSConnection)
116 119
117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
118 121 def https_open(self, req):
119 122 return self.do_open(httpsconnection, req)
120 else:
121 class httphandler(basehttphandler):
122 pass
123 123
124 124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 125 # it doesn't know about the auth type requested. This can happen if
126 126 # somebody is using BasicAuth and types a bad password.
127 127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 129 try:
130 130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 131 self, auth_header, host, req, headers)
132 132 except ValueError, inst:
133 133 arg = inst.args[0]
134 134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 135 return
136 136 raise
137 137
138 138 def zgenerator(f):
139 139 zd = zlib.decompressobj()
140 140 try:
141 141 for chunk in util.filechunkiter(f):
142 142 yield zd.decompress(chunk)
143 143 except httplib.HTTPException, inst:
144 144 raise IOError(None, _('connection ended unexpectedly'))
145 145 yield zd.flush()
146 146
147 147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 149 '0123456789' '_.-/')
150 150 _safeset = None
151 151 _hex = None
152 152 def quotepath(path):
153 153 '''quote the path part of a URL
154 154
155 155 This is similar to urllib.quote, but it also tries to avoid
156 156 quoting things twice (inspired by wget):
157 157
158 158 >>> quotepath('abc def')
159 159 'abc%20def'
160 160 >>> quotepath('abc%20def')
161 161 'abc%20def'
162 162 >>> quotepath('abc%20 def')
163 163 'abc%20%20def'
164 164 >>> quotepath('abc def%20')
165 165 'abc%20def%20'
166 166 >>> quotepath('abc def%2')
167 167 'abc%20def%252'
168 168 >>> quotepath('abc def%')
169 169 'abc%20def%25'
170 170 '''
171 171 global _safeset, _hex
172 172 if _safeset is None:
173 173 _safeset = util.set(_safe)
174 174 _hex = util.set('abcdefABCDEF0123456789')
175 175 l = list(path)
176 176 for i in xrange(len(l)):
177 177 c = l[i]
178 178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 179 pass
180 180 elif c not in _safeset:
181 181 l[i] = '%%%02X' % ord(c)
182 182 return ''.join(l)
183 183
184 184 class httprepository(remoterepository):
185 185 def __init__(self, ui, path):
186 186 self.path = path
187 187 self.caps = None
188 188 self.handler = None
189 189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
190 190 if query or frag:
191 191 raise util.Abort(_('unsupported URL component: "%s"') %
192 192 (query or frag))
193 193 if not urlpath:
194 194 urlpath = '/'
195 195 urlpath = quotepath(urlpath)
196 196 host, port, user, passwd = netlocsplit(netloc)
197 197
198 198 # urllib cannot handle URLs with embedded user or passwd
199 199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
200 200 urlpath, '', ''))
201 201 self.ui = ui
202 202 self.ui.debug(_('using %s\n') % self._url)
203 203
204 204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
205 205 # XXX proxyauthinfo = None
206 self.handler = httphandler()
207 handlers = [self.handler]
206 handlers = [httphandler()]
207 if has_https:
208 handlers.append(httpshandler())
208 209
209 210 if proxyurl:
210 211 # proxy can be proper url or host[:port]
211 212 if not (proxyurl.startswith('http:') or
212 213 proxyurl.startswith('https:')):
213 214 proxyurl = 'http://' + proxyurl + '/'
214 215 snpqf = urlparse.urlsplit(proxyurl)
215 216 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
216 217 hpup = netlocsplit(proxynetloc)
217 218
218 219 proxyhost, proxyport, proxyuser, proxypasswd = hpup
219 220 if not proxyuser:
220 221 proxyuser = ui.config("http_proxy", "user")
221 222 proxypasswd = ui.config("http_proxy", "passwd")
222 223
223 224 # see if we should use a proxy for this url
224 225 no_list = [ "localhost", "127.0.0.1" ]
225 226 no_list.extend([p.lower() for
226 227 p in ui.configlist("http_proxy", "no")])
227 228 no_list.extend([p.strip().lower() for
228 229 p in os.getenv("no_proxy", '').split(',')
229 230 if p.strip()])
230 231 # "http_proxy.always" config is for running tests on localhost
231 232 if (not ui.configbool("http_proxy", "always") and
232 233 host.lower() in no_list):
233 234 # avoid auto-detection of proxy settings by appending
234 235 # a ProxyHandler with no proxies defined.
235 236 handlers.append(urllib2.ProxyHandler({}))
236 237 ui.debug(_('disabling proxy for %s\n') % host)
237 238 else:
238 239 proxyurl = urlparse.urlunsplit((
239 240 proxyscheme, netlocunsplit(proxyhost, proxyport,
240 241 proxyuser, proxypasswd or ''),
241 242 proxypath, proxyquery, proxyfrag))
242 243 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
243 244 ui.debug(_('proxying through http://%s:%s\n') %
244 245 (proxyhost, proxyport))
245 246
246 247 # urllib2 takes proxy values from the environment and those
247 248 # will take precedence if found, so drop them
248 249 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
249 250 try:
250 251 if env in os.environ:
251 252 del os.environ[env]
252 253 except OSError:
253 254 pass
254 255
255 256 passmgr = passwordmgr(ui)
256 257 if user:
257 258 ui.debug(_('http auth: user %s, password %s\n') %
258 259 (user, passwd and '*' * len(passwd) or 'not set'))
259 260 netloc = host
260 261 if port:
261 262 netloc += ':' + port
262 263 # Python < 2.4.3 uses only the netloc to search for a password
263 264 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
264 265
265 266 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
266 267 httpdigestauthhandler(passmgr)))
267 268 opener = urllib2.build_opener(*handlers)
268 269
269 270 # 1.0 here is the _protocol_ version
270 271 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
271 272 urllib2.install_opener(opener)
272 273
273 def __del__(self):
274 if self.handler:
275 self.handler.close_all()
276 self.handler = None
277
278 274 def url(self):
279 275 return self.path
280 276
281 277 # look up capabilities only when needed
282 278
283 279 def get_caps(self):
284 280 if self.caps is None:
285 281 try:
286 282 self.caps = util.set(self.do_read('capabilities').split())
287 283 except repo.RepoError:
288 284 self.caps = util.set()
289 285 self.ui.debug(_('capabilities: %s\n') %
290 286 (' '.join(self.caps or ['none'])))
291 287 return self.caps
292 288
293 289 capabilities = property(get_caps)
294 290
295 291 def lock(self):
296 292 raise util.Abort(_('operation not supported over http'))
297 293
298 294 def do_cmd(self, cmd, **args):
299 295 data = args.pop('data', None)
300 296 headers = args.pop('headers', {})
301 297 self.ui.debug(_("sending %s command\n") % cmd)
302 298 q = {"cmd": cmd}
303 299 q.update(args)
304 300 qs = '?%s' % urllib.urlencode(q)
305 301 cu = "%s%s" % (self._url, qs)
306 302 try:
307 303 if data:
308 304 self.ui.debug(_("sending %s bytes\n") % len(data))
309 305 resp = urllib2.urlopen(request(cu, data, headers))
310 306 except urllib2.HTTPError, inst:
311 307 if inst.code == 401:
312 308 raise util.Abort(_('authorization failed'))
313 309 raise
314 310 except httplib.HTTPException, inst:
315 311 self.ui.debug(_('http error while sending %s command\n') % cmd)
316 312 self.ui.print_exc()
317 313 raise IOError(None, inst)
318 314 except IndexError:
319 315 # this only happens with Python 2.3, later versions raise URLError
320 316 raise util.Abort(_('http error, possibly caused by proxy setting'))
321 317 # record the url we got redirected to
322 318 resp_url = resp.geturl()
323 319 if resp_url.endswith(qs):
324 320 resp_url = resp_url[:-len(qs)]
325 321 if self._url != resp_url:
326 322 self.ui.status(_('real URL is %s\n') % resp_url)
327 323 self._url = resp_url
328 324 try:
329 325 proto = resp.getheader('content-type')
330 326 except AttributeError:
331 327 proto = resp.headers['content-type']
332 328
333 329 # accept old "text/plain" and "application/hg-changegroup" for now
334 330 if not (proto.startswith('application/mercurial-') or
335 331 proto.startswith('text/plain') or
336 332 proto.startswith('application/hg-changegroup')):
337 333 self.ui.debug(_("Requested URL: '%s'\n") % cu)
338 334 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
339 335 % self._url)
340 336
341 337 if proto.startswith('application/mercurial-'):
342 338 try:
343 339 version = proto.split('-', 1)[1]
344 340 version_info = tuple([int(n) for n in version.split('.')])
345 341 except ValueError:
346 342 raise repo.RepoError(_("'%s' sent a broken Content-Type "
347 343 "header (%s)") % (self._url, proto))
348 344 if version_info > (0, 1):
349 345 raise repo.RepoError(_("'%s' uses newer protocol %s") %
350 346 (self._url, version))
351 347
352 348 return resp
353 349
354 350 def do_read(self, cmd, **args):
355 351 fp = self.do_cmd(cmd, **args)
356 352 try:
357 353 return fp.read()
358 354 finally:
359 355 # if using keepalive, allow connection to be reused
360 356 fp.close()
361 357
362 358 def lookup(self, key):
363 359 self.requirecap('lookup', _('look up remote revision'))
364 360 d = self.do_cmd("lookup", key = key).read()
365 361 success, data = d[:-1].split(' ', 1)
366 362 if int(success):
367 363 return bin(data)
368 364 raise repo.RepoError(data)
369 365
370 366 def heads(self):
371 367 d = self.do_read("heads")
372 368 try:
373 369 return map(bin, d[:-1].split(" "))
374 370 except:
375 371 raise util.UnexpectedOutput(_("unexpected response:"), d)
376 372
377 373 def branches(self, nodes):
378 374 n = " ".join(map(hex, nodes))
379 375 d = self.do_read("branches", nodes=n)
380 376 try:
381 377 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
382 378 return br
383 379 except:
384 380 raise util.UnexpectedOutput(_("unexpected response:"), d)
385 381
386 382 def between(self, pairs):
387 383 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
388 384 d = self.do_read("between", pairs=n)
389 385 try:
390 386 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
391 387 return p
392 388 except:
393 389 raise util.UnexpectedOutput(_("unexpected response:"), d)
394 390
395 391 def changegroup(self, nodes, kind):
396 392 n = " ".join(map(hex, nodes))
397 393 f = self.do_cmd("changegroup", roots=n)
398 394 return util.chunkbuffer(zgenerator(f))
399 395
400 396 def changegroupsubset(self, bases, heads, source):
401 397 self.requirecap('changegroupsubset', _('look up remote changes'))
402 398 baselst = " ".join([hex(n) for n in bases])
403 399 headlst = " ".join([hex(n) for n in heads])
404 400 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
405 401 return util.chunkbuffer(zgenerator(f))
406 402
407 403 def unbundle(self, cg, heads, source):
408 404 # have to stream bundle to a temp file because we do not have
409 405 # http 1.1 chunked transfer.
410 406
411 407 type = ""
412 408 types = self.capable('unbundle')
413 409 # servers older than d1b16a746db6 will send 'unbundle' as a
414 410 # boolean capability
415 411 try:
416 412 types = types.split(',')
417 413 except AttributeError:
418 414 types = [""]
419 415 if types:
420 416 for x in types:
421 417 if x in changegroup.bundletypes:
422 418 type = x
423 419 break
424 420
425 421 tempname = changegroup.writebundle(cg, None, type)
426 422 fp = httpsendfile(tempname, "rb")
427 423 try:
428 424 try:
429 425 rfp = self.do_cmd(
430 426 'unbundle', data=fp,
431 427 headers={'Content-Type': 'application/octet-stream'},
432 428 heads=' '.join(map(hex, heads)))
433 429 try:
434 430 ret = int(rfp.readline())
435 431 self.ui.write(rfp.read())
436 432 return ret
437 433 finally:
438 434 rfp.close()
439 435 except socket.error, err:
440 436 if err[0] in (errno.ECONNRESET, errno.EPIPE):
441 437 raise util.Abort(_('push failed: %s') % err[1])
442 438 raise util.Abort(err[1])
443 439 finally:
444 440 fp.close()
445 441 os.unlink(tempname)
446 442
447 443 def stream_out(self):
448 444 return self.do_cmd('stream_out')
449 445
450 446 class httpsrepository(httprepository):
451 447 def __init__(self, ui, path):
452 448 if not has_https:
453 449 raise util.Abort(_('Python support for SSL and HTTPS '
454 450 'is not installed'))
455 451 httprepository.__init__(self, ui, path)
456 452
457 453 def instance(ui, path, create):
458 454 if create:
459 455 raise util.Abort(_('cannot create new http repository'))
460 456 if path.startswith('https:'):
461 457 return httpsrepository(ui, path)
462 458 return httprepository(ui, path)
@@ -1,579 +1,582 b''
1 1 # This library is free software; you can redistribute it and/or
2 2 # modify it under the terms of the GNU Lesser General Public
3 3 # License as published by the Free Software Foundation; either
4 4 # version 2.1 of the License, or (at your option) any later version.
5 5 #
6 6 # This library is distributed in the hope that it will be useful,
7 7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 9 # Lesser General Public License for more details.
10 10 #
11 11 # You should have received a copy of the GNU Lesser General Public
12 12 # License along with this library; if not, write to the
13 13 # Free Software Foundation, Inc.,
14 14 # 59 Temple Place, Suite 330,
15 15 # Boston, MA 02111-1307 USA
16 16
17 17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
18 18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
19 19
20 20 # Modified by Benoit Boissinot:
21 21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
22 22
23 23 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
24 24
25 25 >>> import urllib2
26 26 >>> from keepalive import HTTPHandler
27 27 >>> keepalive_handler = HTTPHandler()
28 28 >>> opener = urllib2.build_opener(keepalive_handler)
29 29 >>> urllib2.install_opener(opener)
30 30 >>>
31 31 >>> fo = urllib2.urlopen('http://www.python.org')
32 32
33 33 If a connection to a given host is requested, and all of the existing
34 34 connections are still in use, another connection will be opened. If
35 35 the handler tries to use an existing connection but it fails in some
36 36 way, it will be closed and removed from the pool.
37 37
38 38 To remove the handler, simply re-run build_opener with no arguments, and
39 39 install that opener.
40 40
41 41 You can explicitly close connections by using the close_connection()
42 42 method of the returned file-like object (described below) or you can
43 43 use the handler methods:
44 44
45 45 close_connection(host)
46 46 close_all()
47 47 open_connections()
48 48
49 49 NOTE: using the close_connection and close_all methods of the handler
50 50 should be done with care when using multiple threads.
51 51 * there is nothing that prevents another thread from creating new
52 52 connections immediately after connections are closed
53 53 * no checks are done to prevent in-use connections from being closed
54 54
55 55 >>> keepalive_handler.close_all()
56 56
57 57 EXTRA ATTRIBUTES AND METHODS
58 58
59 59 Upon a status of 200, the object returned has a few additional
60 60 attributes and methods, which should not be used if you want to
61 61 remain consistent with the normal urllib2-returned objects:
62 62
63 63 close_connection() - close the connection to the host
64 64 readlines() - you know, readlines()
65 65 status - the return status (ie 404)
66 66 reason - english translation of status (ie 'File not found')
67 67
68 68 If you want the best of both worlds, use this inside an
69 69 AttributeError-catching try:
70 70
71 71 >>> try: status = fo.status
72 72 >>> except AttributeError: status = None
73 73
74 74 Unfortunately, these are ONLY there if status == 200, so it's not
75 75 easy to distinguish between non-200 responses. The reason is that
76 76 urllib2 tries to do clever things with error codes 301, 302, 401,
77 77 and 407, and it wraps the object upon return.
78 78
79 79 For python versions earlier than 2.4, you can avoid this fancy error
80 80 handling by setting the module-level global HANDLE_ERRORS to zero.
81 81 You see, prior to 2.4, it's the HTTP Handler's job to determine what
82 82 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
83 83 means "pass everything up". In python 2.4, however, this job no
84 84 longer belongs to the HTTP Handler and is now done by a NEW handler,
85 85 HTTPErrorProcessor. Here's the bottom line:
86 86
87 87 python version < 2.4
88 88 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
89 89 errors
90 90 HANDLE_ERRORS == 0 pass everything up, error processing is
91 91 left to the calling code
92 92 python version >= 2.4
93 93 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
94 94 HANDLE_ERRORS == 0 (default) pass everything up, let the
95 95 other handlers (specifically,
96 96 HTTPErrorProcessor) decide what to do
97 97
98 98 In practice, setting the variable either way makes little difference
99 99 in python 2.4, so for the most consistent behavior across versions,
100 100 you probably just want to use the defaults, which will give you
101 101 exceptions on errors.
102 102
103 103 """
104 104
105 105 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
106 106
107 107 import urllib2
108 108 import httplib
109 109 import socket
110 110 import thread
111 111
112 112 DEBUG = None
113 113
114 114 import sys
115 115 if sys.version_info < (2, 4): HANDLE_ERRORS = 1
116 116 else: HANDLE_ERRORS = 0
117 117
118 118 class ConnectionManager:
119 119 """
120 120 The connection manager must be able to:
121 121 * keep track of all existing
122 122 """
123 123 def __init__(self):
124 124 self._lock = thread.allocate_lock()
125 125 self._hostmap = {} # map hosts to a list of connections
126 126 self._connmap = {} # map connections to host
127 127 self._readymap = {} # map connection to ready state
128 128
129 129 def add(self, host, connection, ready):
130 130 self._lock.acquire()
131 131 try:
132 132 if not host in self._hostmap: self._hostmap[host] = []
133 133 self._hostmap[host].append(connection)
134 134 self._connmap[connection] = host
135 135 self._readymap[connection] = ready
136 136 finally:
137 137 self._lock.release()
138 138
139 139 def remove(self, connection):
140 140 self._lock.acquire()
141 141 try:
142 142 try:
143 143 host = self._connmap[connection]
144 144 except KeyError:
145 145 pass
146 146 else:
147 147 del self._connmap[connection]
148 148 del self._readymap[connection]
149 149 self._hostmap[host].remove(connection)
150 150 if not self._hostmap[host]: del self._hostmap[host]
151 151 finally:
152 152 self._lock.release()
153 153
154 154 def set_ready(self, connection, ready):
155 155 try: self._readymap[connection] = ready
156 156 except KeyError: pass
157 157
158 158 def get_ready_conn(self, host):
159 159 conn = None
160 160 self._lock.acquire()
161 161 try:
162 162 if host in self._hostmap:
163 163 for c in self._hostmap[host]:
164 164 if self._readymap[c]:
165 165 self._readymap[c] = 0
166 166 conn = c
167 167 break
168 168 finally:
169 169 self._lock.release()
170 170 return conn
171 171
172 172 def get_all(self, host=None):
173 173 if host:
174 174 return list(self._hostmap.get(host, []))
175 175 else:
176 176 return dict(self._hostmap)
177 177
178 class HTTPHandler(urllib2.HTTPHandler):
178 class KeepAliveHandler:
179 179 def __init__(self):
180 180 self._cm = ConnectionManager()
181 181
182 182 #### Connection Management
183 183 def open_connections(self):
184 184 """return a list of connected hosts and the number of connections
185 185 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
186 186 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
187 187
188 188 def close_connection(self, host):
189 189 """close connection(s) to <host>
190 190 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
191 191 no error occurs if there is no connection to that host."""
192 192 for h in self._cm.get_all(host):
193 193 self._cm.remove(h)
194 194 h.close()
195 195
196 196 def close_all(self):
197 197 """close all open connections"""
198 198 for host, conns in self._cm.get_all().items():
199 199 for h in conns:
200 200 self._cm.remove(h)
201 201 h.close()
202 202
203 203 def _request_closed(self, request, host, connection):
204 204 """tells us that this request is now closed and the the
205 205 connection is ready for another request"""
206 206 self._cm.set_ready(connection, 1)
207 207
208 208 def _remove_connection(self, host, connection, close=0):
209 209 if close: connection.close()
210 210 self._cm.remove(connection)
211 211
212 212 #### Transaction Execution
213 213 def http_open(self, req):
214 214 return self.do_open(HTTPConnection, req)
215 215
216 216 def do_open(self, http_class, req):
217 217 host = req.get_host()
218 218 if not host:
219 219 raise urllib2.URLError('no host given')
220 220
221 221 try:
222 222 h = self._cm.get_ready_conn(host)
223 223 while h:
224 224 r = self._reuse_connection(h, req, host)
225 225
226 226 # if this response is non-None, then it worked and we're
227 227 # done. Break out, skipping the else block.
228 228 if r: break
229 229
230 230 # connection is bad - possibly closed by server
231 231 # discard it and ask for the next free connection
232 232 h.close()
233 233 self._cm.remove(h)
234 234 h = self._cm.get_ready_conn(host)
235 235 else:
236 236 # no (working) free connections were found. Create a new one.
237 237 h = http_class(host)
238 238 if DEBUG: DEBUG.info("creating new connection to %s (%d)",
239 239 host, id(h))
240 240 self._cm.add(host, h, 0)
241 241 self._start_transaction(h, req)
242 242 r = h.getresponse()
243 243 except (socket.error, httplib.HTTPException), err:
244 244 raise urllib2.URLError(err)
245 245
246 246 # if not a persistent connection, don't try to reuse it
247 247 if r.will_close: self._cm.remove(h)
248 248
249 249 if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
250 250 r._handler = self
251 251 r._host = host
252 252 r._url = req.get_full_url()
253 253 r._connection = h
254 254 r.code = r.status
255 255 r.headers = r.msg
256 256 r.msg = r.reason
257 257
258 258 if r.status == 200 or not HANDLE_ERRORS:
259 259 return r
260 260 else:
261 261 return self.parent.error('http', req, r,
262 262 r.status, r.msg, r.headers)
263 263
264 264 def _reuse_connection(self, h, req, host):
265 265 """start the transaction with a re-used connection
266 266 return a response object (r) upon success or None on failure.
267 267 This DOES not close or remove bad connections in cases where
268 268 it returns. However, if an unexpected exception occurs, it
269 269 will close and remove the connection before re-raising.
270 270 """
271 271 try:
272 272 self._start_transaction(h, req)
273 273 r = h.getresponse()
274 274 # note: just because we got something back doesn't mean it
275 275 # worked. We'll check the version below, too.
276 276 except (socket.error, httplib.HTTPException):
277 277 r = None
278 278 except:
279 279 # adding this block just in case we've missed
280 280 # something we will still raise the exception, but
281 281 # lets try and close the connection and remove it
282 282 # first. We previously got into a nasty loop
283 283 # where an exception was uncaught, and so the
284 284 # connection stayed open. On the next try, the
285 285 # same exception was raised, etc. The tradeoff is
286 286 # that it's now possible this call will raise
287 287 # a DIFFERENT exception
288 288 if DEBUG: DEBUG.error("unexpected exception - closing " + \
289 289 "connection to %s (%d)", host, id(h))
290 290 self._cm.remove(h)
291 291 h.close()
292 292 raise
293 293
294 294 if r is None or r.version == 9:
295 295 # httplib falls back to assuming HTTP 0.9 if it gets a
296 296 # bad header back. This is most likely to happen if
297 297 # the socket has been closed by the server since we
298 298 # last used the connection.
299 299 if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
300 300 host, id(h))
301 301 r = None
302 302 else:
303 303 if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
304 304
305 305 return r
306 306
307 307 def _start_transaction(self, h, req):
308 308 headers = req.headers.copy()
309 309 body = req.data
310 310 if sys.version_info >= (2, 4):
311 311 headers.update(req.unredirected_hdrs)
312 312 try:
313 313 h.request(req.get_method(), req.get_selector(), body, headers)
314 314 except socket.error, err: # XXX what error?
315 315 raise urllib2.URLError(err)
316 316
317 class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
318 pass
319
317 320 class HTTPResponse(httplib.HTTPResponse):
318 321 # we need to subclass HTTPResponse in order to
319 322 # 1) add readline() and readlines() methods
320 323 # 2) add close_connection() methods
321 324 # 3) add info() and geturl() methods
322 325
323 326 # in order to add readline(), read must be modified to deal with a
324 327 # buffer. example: readline must read a buffer and then spit back
325 328 # one line at a time. The only real alternative is to read one
326 329 # BYTE at a time (ick). Once something has been read, it can't be
327 330 # put back (ok, maybe it can, but that's even uglier than this),
328 331 # so if you THEN do a normal read, you must first take stuff from
329 332 # the buffer.
330 333
331 334 # the read method wraps the original to accomodate buffering,
332 335 # although read() never adds to the buffer.
333 336 # Both readline and readlines have been stolen with almost no
334 337 # modification from socket.py
335 338
336 339
337 340 def __init__(self, sock, debuglevel=0, strict=0, method=None):
338 341 if method: # the httplib in python 2.3 uses the method arg
339 342 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
340 343 else: # 2.2 doesn't
341 344 httplib.HTTPResponse.__init__(self, sock, debuglevel)
342 345 self.fileno = sock.fileno
343 346 self.code = None
344 347 self._rbuf = ''
345 348 self._rbufsize = 8096
346 349 self._handler = None # inserted by the handler later
347 350 self._host = None # (same)
348 351 self._url = None # (same)
349 352 self._connection = None # (same)
350 353
351 354 _raw_read = httplib.HTTPResponse.read
352 355
353 356 def close(self):
354 357 if self.fp:
355 358 self.fp.close()
356 359 self.fp = None
357 360 if self._handler:
358 361 self._handler._request_closed(self, self._host,
359 362 self._connection)
360 363
361 364 def close_connection(self):
362 365 self._handler._remove_connection(self._host, self._connection, close=1)
363 366 self.close()
364 367
365 368 def info(self):
366 369 return self.headers
367 370
368 371 def geturl(self):
369 372 return self._url
370 373
371 374 def read(self, amt=None):
372 375 # the _rbuf test is only in this first if for speed. It's not
373 376 # logically necessary
374 377 if self._rbuf and not amt is None:
375 378 L = len(self._rbuf)
376 379 if amt > L:
377 380 amt -= L
378 381 else:
379 382 s = self._rbuf[:amt]
380 383 self._rbuf = self._rbuf[amt:]
381 384 return s
382 385
383 386 s = self._rbuf + self._raw_read(amt)
384 387 self._rbuf = ''
385 388 return s
386 389
387 390 def readline(self, limit=-1):
388 391 data = ""
389 392 i = self._rbuf.find('\n')
390 393 while i < 0 and not (0 < limit <= len(self._rbuf)):
391 394 new = self._raw_read(self._rbufsize)
392 395 if not new: break
393 396 i = new.find('\n')
394 397 if i >= 0: i = i + len(self._rbuf)
395 398 self._rbuf = self._rbuf + new
396 399 if i < 0: i = len(self._rbuf)
397 400 else: i = i+1
398 401 if 0 <= limit < len(self._rbuf): i = limit
399 402 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
400 403 return data
401 404
402 405 def readlines(self, sizehint = 0):
403 406 total = 0
404 407 list = []
405 408 while 1:
406 409 line = self.readline()
407 410 if not line: break
408 411 list.append(line)
409 412 total += len(line)
410 413 if sizehint and total >= sizehint:
411 414 break
412 415 return list
413 416
414 417
class HTTPConnection(httplib.HTTPConnection):
    """httplib.HTTPConnection that produces our buffered HTTPResponse."""
    # use the modified response class
    response_class = HTTPResponse
418 421
419 422 #########################################################################
420 423 ##### TEST FUNCTIONS
421 424 #########################################################################
422 425
def error_handler(url):
    """Fetch url with HANDLE_ERRORS toggled off then on, printing the
    status/reason and open connections; restores HANDLE_ERRORS after."""
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            # status/reason attributes only exist with fancy handling on
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print " EXCEPTION: %s" % e
            raise
        else:
            print " status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
448 451
def continuity(url):
    """Fetch url via the normal handler and via the keepalive handler
    (whole-body read and a readline loop) and print an md5 digest of
    each body so corruption is visible as differing digests."""
    # NOTE(review): the py2 'md5' module is deprecated in favor of hashlib
    import md5
    format = '%25s: %s'

    # first fetch the file with the normal http handler
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5.new(foo)
    print format % ('normal urllib', m.hexdigest())

    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)

    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5.new(foo)
    print format % ('keepalive read', m.hexdigest())

    # same again, but line by line, to exercise readline()
    fo = urllib2.urlopen(url)
    foo = ''
    while 1:
        f = fo.readline()
        if f: foo = foo + f
        else: break
    fo.close()
    m = md5.new(foo)
    print format % ('keepalive readline', m.hexdigest())
481 484
def comp(N, url):
    """Time N fetches of url with the normal urllib handlers and with
    the keepalive handler, printing the speed improvement factor."""
    print ' making %i connections to:\n %s' % (N, url)

    sys.stdout.write(' first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print ' TIME: %.3f s' % t1

    sys.stdout.write(' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print ' TIME: %.3f s' % t2
    print ' improvement factor: %.2f' % (t1/t2, )
499 502
500 503 def fetch(N, url, delay=0):
501 504 import time
502 505 lens = []
503 506 starttime = time.time()
504 507 for i in range(N):
505 508 if delay and i > 0: time.sleep(delay)
506 509 fo = urllib2.urlopen(url)
507 510 foo = fo.read()
508 511 fo.close()
509 512 lens.append(len(foo))
510 513 diff = time.time() - starttime
511 514
512 515 j = 0
513 516 for i in lens[1:]:
514 517 j = j + 1
515 518 if not i == lens[0]:
516 519 print "WARNING: inconsistent length on read %i: %i" % (j, i)
517 520
518 521 return diff
519 522
def test_timeout(url):
    """Fetch url, wait 20s so the server can close the idle connection,
    fetch again and verify both bodies match (keepalive must recover
    from a server-side close); debug logging is forced on meanwhile."""
    global DEBUG
    dbbackup = DEBUG
    class FakeLogger:
        # print-everything stand-in for the module's logger
        def debug(self, msg, *args): print msg % args
        info = warning = error = debug
    DEBUG = FakeLogger()
    print "  fetching the file to establish a connection"
    fo = urllib2.urlopen(url)
    data1 = fo.read()
    fo.close()

    i = 20
    print "  waiting %i seconds for the server to close the connection" % i
    while i > 0:
        # countdown display on one line
        sys.stdout.write('\r %2i' % i)
        sys.stdout.flush()
        time.sleep(1)
        i -= 1
    sys.stderr.write('\r')

    print "  fetching the file a second time"
    fo = urllib2.urlopen(url)
    data2 = fo.read()
    fo.close()

    if data1 == data2:
        print ' data are identical'
    else:
        print ' ERROR: DATA DIFFER'

    DEBUG = dbbackup
552 555
553 556
554 557 def test(url, N=10):
555 558 print "checking error hander (do this on a non-200)"
556 559 try: error_handler(url)
557 560 except IOError, e:
558 561 print "exiting - exception will prevent further tests"
559 562 sys.exit()
560 563 print
561 564 print "performing continuity test (making sure stuff isn't corrupted)"
562 565 continuity(url)
563 566 print
564 567 print "performing speed comparison"
565 568 comp(N, url)
566 569 print
567 570 print "performing dropped-connection check"
568 571 test_timeout(url)
569 572
570 573 if __name__ == '__main__':
571 574 import time
572 575 import sys
573 576 try:
574 577 N = int(sys.argv[1])
575 578 url = sys.argv[2]
576 579 except:
577 580 print "%s <integer> <url>" % sys.argv[0]
578 581 else:
579 582 test(url, N)
@@ -1,2076 +1,2081 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14
class localrepository(repo.repository):
    """A repository accessed directly through the local filesystem."""
    # wire capabilities advertised by this repository
    capabilities = util.set(('lookup', 'changegroupsubset'))
    # on-disk format requirements this code understands
    supported = ('revlogv1', 'store')
18 18
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at path.

        parentui: ui object the repo-local hgrc is layered onto.
        Raises repo.RepoError when the repo is missing, already exists,
        or declares a requirement we do not support.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # old repos have no requires file
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: encoded filenames under .hg/store, or the legacy
        # flat layout directly in .hg
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath),
                                          self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        # lazily-populated caches
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        # weakrefs to the active transaction and locks
        self._transref = self._lockref = self._wlockref = None
88 88
    def __getattr__(self, name):
        """Instantiate changelog, manifest and dirstate lazily on first
        access and cache them as regular attributes."""
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch the changelog first so sopener.defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError, name
103 103
    def url(self):
        """Return this repository's URL ('file:' + root path)."""
        return 'file:' + self.root
106 106
    def hook(self, name, throw=False, **args):
        """Run the named hook through the hook module for this repo."""
        return hook.hook(self.ui, self, name, throw, **args)
109 109
    # characters that may not appear in a tag name
    tag_disallowed = ':\r\n'
111 111
    def _tag(self, name, node, message, local, user, date, parent=None,
             extra={}):
        """Shared implementation behind tag().

        local=True appends to .hg/localtags and returns None; otherwise
        .hgtags is updated and (when use_dirstate) committed, returning
        the tagging changeset's node.
        """
        use_dirstate = parent is None

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        def writetag(fp, name, munge, prevtags):
            # append at end of file; make sure the existing content
            # ends with a newline first
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError, err:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetag(fp, name, None, prevtags)
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError, err:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            # tagging an explicit parent: start from that revision's
            # .hgtags content
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except revlog.LookupError:
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetag(fp, name, util.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
170 171
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse to tag while .hgtags itself has uncommitted changes
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))


        self._tag(name, node, message, local, user, date)
196 197
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}
        tagtypes = {}

        def readtags(lines, fn, tagtype):
            # parse one tags file and merge its entries into globaltags
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h accumulates the nodes this tag pointed at earlier
                # in the same file (its history / rank)
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            pass

        # flatten into the public cache; a tag pointing at nullid is a
        # deletion and is omitted
        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
281 282
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local' : a local tag
        'global' : a global tag
        None : tag does not exist
        '''

        # tags() fills _tagstypecache as a side effect
        self.tags()

        return self._tagstypecache.get(tagname)
294 295
    def _hgtagsnodes(self):
        """Return (rev, node, .hgtags-filenode) for each head that has a
        .hgtags file, keeping only the last head seen per filenode."""
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same .hgtags content seen before: drop the earlier entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
312 313
313 314 def tagslist(self):
314 315 '''return a list of tags ordered by revision'''
315 316 l = []
316 317 for t, n in self.tags().items():
317 318 try:
318 319 r = self.changelog.rev(n)
319 320 except:
320 321 r = -2 # sort to the beginning of the list if unknown
321 322 l.append((r, t, n))
322 323 l.sort()
323 324 return [(t, n) for r, t, n in l]
324 325
325 326 def nodetags(self, node):
326 327 '''return the tags associated with a node'''
327 328 if not self.nodetagscache:
328 329 self.nodetagscache = {}
329 330 for t, n in self.tags().items():
330 331 self.nodetagscache.setdefault(n, []).append(t)
331 332 return self.nodetagscache.get(node, [])
332 333
    def _branchtags(self):
        """Return the branch name -> node map, bringing the on-disk
        branch cache up to date with the changelog first."""
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            # cache is stale: scan the missing revisions and rewrite it
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
342 343
    def branchtags(self):
        """Return the branch map with names in the local charset,
        memoized in self.branchcache."""
        if self.branchcache is not None:
            return self.branchcache

        self.branchcache = {} # avoid recursion in changectx
        partial = self._branchtags()

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        return self.branchcache
355 356
    def _readbranchcache(self):
        """Read .hg/branch.cache; return (branchmap, tipnode, tiprev),
        falling back to ({}, nullid, nullrev) when missing or invalid."""
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            # first line records the tip the cache was valid for:
            # "<hex node> <rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # any parse error just invalidates the cache
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
383 384
    def _writebranchcache(self, branches, tip, tiprev):
        """Atomically rewrite branch.cache; write failures are ignored
        since this is only a cache."""
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
393 394
394 395 def _updatebranchcache(self, partial, start, end):
395 396 for r in xrange(start, end):
396 397 c = self.changectx(r)
397 398 b = c.branch()
398 399 partial[b] = c.node()
399 400
400 401 def lookup(self, key):
401 402 if key == '.':
402 403 key, second = self.dirstate.parents()
403 404 if key == nullid:
404 405 raise repo.RepoError(_("no revision checked out"))
405 406 if second != nullid:
406 407 self.ui.warn(_("warning: working directory has two parents, "
407 408 "tag '.' uses the first\n"))
408 409 elif key == 'null':
409 410 return nullid
410 411 n = self.changelog._match(key)
411 412 if n:
412 413 return n
413 414 if key in self.tags():
414 415 return self.tags()[key]
415 416 if key in self.branchtags():
416 417 return self.branchtags()[key]
417 418 n = self.changelog._partialmatch(key)
418 419 if n:
419 420 return n
420 421 try:
421 422 if len(key) == 20:
422 423 key = hex(key)
423 424 except:
424 425 pass
425 426 raise repo.RepoError(_("unknown revision '%s'") % key)
426 427
    def dev(self):
        """Return the device number of the .hg directory (via lstat)."""
        return os.lstat(self.path).st_dev
429 430
    def local(self):
        """True: this repository is accessed through the filesystem."""
        return True
432 433
    def join(self, f):
        """Return the path of f inside the .hg directory."""
        return os.path.join(self.path, f)
435 436
436 437 def sjoin(self, f):
437 438 f = self.encodefn(f)
438 439 return os.path.join(self.spath, f)
439 440
    def wjoin(self, f):
        """Return the path of f inside the working directory."""
        return os.path.join(self.root, f)
442 443
    def file(self, f):
        """Return the filelog for tracked file f (one leading '/' is
        stripped)."""
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)
447 448
    def changectx(self, changeid=None):
        """Return a changectx for changeid (rev, node or tag)."""
        return context.changectx(self, changeid)
450 451
    def workingctx(self):
        """Return a context for the working directory."""
        return context.workingctx(self)
453 454
454 455 def parents(self, changeid=None):
455 456 '''
456 457 get list of changectxs for parents of changeid or working directory
457 458 '''
458 459 if changeid is None:
459 460 pl = self.dirstate.parents()
460 461 else:
461 462 n = self.changelog.lookup(changeid)
462 463 pl = self.changelog.parents(n)
463 464 if pl[1] == nullid:
464 465 return [self.changectx(pl[0])]
465 466 return [self.changectx(pl[0]), self.changectx(pl[1])]
466 467
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
471 472
    def getcwd(self):
        """Return the current directory relative to the repo root."""
        return self.dirstate.getcwd()
474 475
    def pathto(self, f, cwd=None):
        """Return repo-file f as a path relative to cwd."""
        return self.dirstate.pathto(f, cwd)
477 478
    def wfile(self, f, mode='r'):
        """Open file f from the working directory."""
        return self.wopener(f, mode)
480 481
    def _link(self, f):
        """True if f is a symlink in the working directory."""
        return os.path.islink(self.wjoin(f))
483 484
    def _filter(self, filter, filename, data):
        """Pass data through the first configured filter (config section
        named by 'filter', e.g. encode/decode) whose pattern matches
        filename, and return the result."""
        if filter not in self.filterpats:
            # compile the (matcher, function, command) list once per filter
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                # registered in-process data filters take precedence
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        break
                if not fn:
                    # fall back to piping through the external command
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, cmd))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
510 511
    def adddatafilter(self, name, filter):
        """Register an in-process data filter under the given name."""
        self._datafilters[name] = filter
513 514
    def wread(self, filename):
        """Read filename from the working directory (the link target for
        symlinks) and run it through the 'encode' filters."""
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
520 521
    def wwrite(self, filename, data, flags):
        """Run data through the 'decode' filters, write it to filename in
        the working directory, and apply flags."""
        data = self._filter("decode", filename, data)
        try:
            # best-effort removal of any existing file first
            os.unlink(self.wjoin(filename))
        except OSError:
            pass
        self.wopener(filename, 'w').write(data)
        util.set_flags(self.wjoin(filename), flags)
529 530
    def wwritedata(self, filename, data):
        """Return data as it would be written by wwrite ('decode'
        filters applied), without touching the filesystem."""
        return self._filter("decode", filename, data)
532 533
    def transaction(self):
        """Return a new transaction, nested in the active one if any.

        Saves dirstate and branch for later rollback; aborts when a
        journal from an interrupted transaction is still present.
        """
        if self._transref and self._transref():
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # journal.* files are renamed to undo.* via aftertrans
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        # weakref: an open transaction must not keep the repo alive
        self._transref = weakref.ref(tr)
        return tr
557 558
    def recover(self):
        """Roll back an interrupted transaction using the journal file.
        Return True if recovery happened, False if there was nothing to
        recover."""
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            del l
571 572
    def rollback(self):
        """Undo the last transaction using the undo.* files, restoring
        dirstate and branch as well."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                branch = self.opener("undo.branch").read()
                self.dirstate.setbranch(branch)
                # drop all in-memory caches now invalidated by the rollback
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            del lock, wlock
589 590
590 591 def invalidate(self):
591 592 for a in "changelog manifest".split():
592 593 if hasattr(self, a):
593 594 self.__delattr__(a)
594 595 self.tagscache = None
595 596 self._tagstypecache = None
596 597 self.nodetagscache = None
597 598
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire lockname; when already held, either raise (wait=False)
        or retry with a ui-configurable timeout. Runs acquirefn once the
        lock is held and returns the lock object."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
612 613
    def lock(self, wait=True):
        """Return the store lock, reusing the live one if still held."""
        if self._lockref and self._lockref():
            return self._lockref()

        # invalidate caches on acquire: the store may have changed
        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
621 622
    def wlock(self, wait=True):
        """Return the working-directory lock, reusing the live one if
        still held; dirstate is written on release and invalidated on
        acquire."""
        if self._wlockref and self._wlockref():
            return self._wlockref()

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
631 632
    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new filenode, or the parent filenode when the file
        is unchanged; appends fn to changelist only when a new file
        revision is actually created.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            elif cp in manifest2: # directory rename on local side
                meta["copyrev"] = hex(manifest2[cp])
            else: # directory rename on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
693 694
    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
        """Commit with explicit parents, bypassing dirstate-based change
        detection; empty commit messages are allowed."""
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, extra=extra, empty_ok=True)
699 700
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Create a new changeset and return its node, or None when
        nothing changed.

        With p1 set this is a rawcommit-style commit that bypasses the
        dirstate; otherwise files/match select what to commit from the
        working directory.  Runs the precommit/pretxncommit/commit hooks.
        """
        wlock = lock = tr = None
        valid = 0 # don't save the dirstate if this isn't set
        if files:
            files = util.unique(files)
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            # decide which files to commit and which to remove
            if use_dirstate:
                if files:
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                # only move dirstate parents if we commit on top of them
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            wlock = self.wlock()
            lock = self.lock()
            tr = self.transaction()
            # weak proxy so the filelogs cannot keep the transaction alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # rawcommit: a missing file is treated as removed
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if (not empty_ok and not text) or force_editor:
                # build the commit-message template and run the editor
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            if branchname:
                extra["branch"] = branchname

            if use_dirstate:
                # normalize the message: strip trailing space and leading
                # blank lines, refuse a fully empty message
                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache and "branch" in extra:
                self.branchcache[util.tolocal(extra["branch"])] = n

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in removed:
                        self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr, lock, wlock
878 879 del tr, lock, wlock
879 880
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''
        # NOTE(review): files=[] is a mutable default; it looks read-only
        # here, but confirm no caller depends on it being mutated.

        if node:
            # walking a specific changeset: names come from its manifest
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        # delete-then-break keeps the dict iteration safe
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was asked for but is absent from the
            # manifest: report via badmatch, or warn
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            # no node given: walk the working directory via the dirstate
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
921 922
    def status(self, node1=None, node2=None, files=[], match=util.always,
               list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted lists:
        (modified, added, removed, deleted, unknown, ignored, clean).
        """

        def fcmp(fn, getnode):
            # compare the working copy of fn against the stored revision
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of 'node', restricted to files accepted by 'match'
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    for f in lookup:
                        if f not in ctx or ctx[f].cmp(self.wread(f)):
                            modified.append(f)
                        else:
                            # same content after all: only the stat data was
                            # stale; remember it so we can refresh the dirstate
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        wlock = None
                        try:
                            try:
                                # best effort: skip the fixup if someone else
                                # holds the wlock rather than block
                                wlock = self.wlock(False)
                            except lock.LockException:
                                pass
                            if wlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del wlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    # empty node means "compare by content" below
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if fn in mf1:
                    # modified if flags differ, or the nodes differ and the
                    # content really changed ("" node forces a content check)
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever survives in mf1 was not in mf2: removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1033 1034
1034 1035 def add(self, list):
1035 1036 wlock = self.wlock()
1036 1037 try:
1037 1038 rejected = []
1038 1039 for f in list:
1039 1040 p = self.wjoin(f)
1040 1041 try:
1041 1042 st = os.lstat(p)
1042 1043 except:
1043 1044 self.ui.warn(_("%s does not exist!\n") % f)
1044 1045 rejected.append(f)
1045 1046 continue
1046 1047 if st.st_size > 10000000:
1047 1048 self.ui.warn(_("%s: files over 10MB may cause memory and"
1048 1049 " performance problems\n"
1049 1050 "(use 'hg revert %s' to unadd the file)\n")
1050 1051 % (f, f))
1051 1052 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1052 1053 self.ui.warn(_("%s not added: only files and symlinks "
1053 1054 "supported currently\n") % f)
1054 1055 rejected.append(p)
1055 1056 elif self.dirstate[f] in 'amn':
1056 1057 self.ui.warn(_("%s already tracked!\n") % f)
1057 1058 elif self.dirstate[f] == 'r':
1058 1059 self.dirstate.normallookup(f)
1059 1060 else:
1060 1061 self.dirstate.add(f)
1061 1062 return rejected
1062 1063 finally:
1063 1064 del wlock
1064 1065
1065 1066 def forget(self, list):
1066 1067 wlock = self.wlock()
1067 1068 try:
1068 1069 for f in list:
1069 1070 if self.dirstate[f] != 'a':
1070 1071 self.ui.warn(_("%s not added!\n") % f)
1071 1072 else:
1072 1073 self.dirstate.forget(f)
1073 1074 finally:
1074 1075 del wlock
1075 1076
1076 1077 def remove(self, list, unlink=False):
1077 1078 wlock = None
1078 1079 try:
1079 1080 if unlink:
1080 1081 for f in list:
1081 1082 try:
1082 1083 util.unlink(self.wjoin(f))
1083 1084 except OSError, inst:
1084 1085 if inst.errno != errno.ENOENT:
1085 1086 raise
1086 1087 wlock = self.wlock()
1087 1088 for f in list:
1088 1089 if unlink and os.path.exists(self.wjoin(f)):
1089 1090 self.ui.warn(_("%s still exists!\n") % f)
1090 1091 elif self.dirstate[f] == 'a':
1091 1092 self.dirstate.forget(f)
1092 1093 elif f not in self.dirstate:
1093 1094 self.ui.warn(_("%s not tracked!\n") % f)
1094 1095 else:
1095 1096 self.dirstate.remove(f)
1096 1097 finally:
1097 1098 del wlock
1098 1099
1099 1100 def undelete(self, list):
1100 1101 wlock = None
1101 1102 try:
1102 1103 manifests = [self.manifest.read(self.changelog.read(p)[0])
1103 1104 for p in self.dirstate.parents() if p != nullid]
1104 1105 wlock = self.wlock()
1105 1106 for f in list:
1106 1107 if self.dirstate[f] != 'r':
1107 1108 self.ui.warn("%s not removed!\n" % f)
1108 1109 else:
1109 1110 m = f in manifests[0] and manifests[0] or manifests[1]
1110 1111 t = self.file(f).read(m[f])
1111 1112 self.wwrite(f, t, m.flags(f))
1112 1113 self.dirstate.normal(f)
1113 1114 finally:
1114 1115 del wlock
1115 1116
1116 1117 def copy(self, source, dest):
1117 1118 wlock = None
1118 1119 try:
1119 1120 p = self.wjoin(dest)
1120 1121 if not (os.path.exists(p) or os.path.islink(p)):
1121 1122 self.ui.warn(_("%s does not exist!\n") % dest)
1122 1123 elif not (os.path.isfile(p) or os.path.islink(p)):
1123 1124 self.ui.warn(_("copy failed: %s is not a file or a "
1124 1125 "symbolic link\n") % dest)
1125 1126 else:
1126 1127 wlock = self.wlock()
1127 1128 if dest not in self.dirstate:
1128 1129 self.dirstate.add(dest)
1129 1130 self.dirstate.copy(source, dest)
1130 1131 finally:
1131 1132 del wlock
1132 1133
1133 1134 def heads(self, start=None):
1134 1135 heads = self.changelog.heads(start)
1135 1136 # sort the output in rev descending order
1136 1137 heads = [(-self.changelog.rev(h), h) for h in heads]
1137 1138 heads.sort()
1138 1139 return [n for (r, n) in heads]
1139 1140
    def branchheads(self, branch, start=None):
        """Return the head nodes of the named branch, newest first,
        optionally limited to heads reachable from 'start'."""
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        # 'set' is util.set (pre-2.4 compat shim); it shadows the builtin
        # name only inside this method
        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                # already known to be non-head; propagate to its parents
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                # in-branch and not an ancestor of a head: a new head
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1185 1186
1186 1187 def branches(self, nodes):
1187 1188 if not nodes:
1188 1189 nodes = [self.changelog.tip()]
1189 1190 b = []
1190 1191 for n in nodes:
1191 1192 t = n
1192 1193 while 1:
1193 1194 p = self.changelog.parents(n)
1194 1195 if p[1] != nullid or p[0] == nullid:
1195 1196 b.append((t, n, p[0], p[1]))
1196 1197 break
1197 1198 n = p[0]
1198 1199 return b
1199 1200
1200 1201 def between(self, pairs):
1201 1202 r = []
1202 1203
1203 1204 for top, bottom in pairs:
1204 1205 n, l, i = top, [], 0
1205 1206 f = 1
1206 1207
1207 1208 while n != bottom:
1208 1209 p = self.changelog.parents(n)[0]
1209 1210 if i == f:
1210 1211 l.append(n)
1211 1212 f = f * 2
1212 1213 n = p
1213 1214 i += 1
1214 1215
1215 1216 r.append(l)
1216 1217
1217 1218 return r
1218 1219
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}          # roots of the missing set (ersatz set)
        seen = {}           # branch heads already examined
        seenbranch = {}     # incomplete branches already scheduled
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # local repo is empty: everything remote has is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)  # nodes already requested from remote
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # branch root unknown but both its parents are
                            # known: the root is the earliest missing node
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue any unknown, not-yet-requested parents
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                # batch follow-up branch requests ten at a time
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most one node: p is the first unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                          short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # keep bisecting the (p, i) range
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1359 1360
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            # no precomputed common set: discover it now (fills 'base')
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        # start from "every local node" and prune what remote already has
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1407 1408
1408 1409 def pull(self, remote, heads=None, force=False):
1409 1410 lock = self.lock()
1410 1411 try:
1411 1412 fetch = self.findincoming(remote, heads=heads, force=force)
1412 1413 if fetch == [nullid]:
1413 1414 self.ui.status(_("requesting all changes\n"))
1414 1415
1415 1416 if not fetch:
1416 1417 self.ui.status(_("no changes found\n"))
1417 1418 return 0
1418 1419
1419 1420 if heads is None:
1420 1421 cg = remote.changegroup(fetch, 'pull')
1421 1422 else:
1422 1423 if 'changegroupsubset' not in remote.capabilities:
1423 1424 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1424 1425 cg = remote.changegroupsubset(fetch, heads, 'pull')
1425 1426 return self.addchangegroup(cg, 'pull', remote.url())
1426 1427 finally:
1427 1428 del lock
1428 1429
1429 1430 def push(self, remote, force=False, revs=None):
1430 1431 # there are two ways to push to remote repo:
1431 1432 #
1432 1433 # addchangegroup assumes local user can lock remote
1433 1434 # repo (local filesystem, old ssh servers).
1434 1435 #
1435 1436 # unbundle assumes local user cannot lock remote repo (new ssh
1436 1437 # servers, http servers).
1437 1438
1438 1439 if remote.capable('unbundle'):
1439 1440 return self.push_unbundle(remote, force, revs)
1440 1441 return self.push_addchangegroup(remote, force, revs)
1441 1442
    def prepush(self, remote, force, revs):
        """Compute what a push to 'remote' would send.

        Returns (changegroup, remote_heads) when there is something to
        push, or (None, 1) when there is nothing to push or the push
        would create new remote heads without 'force'.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # empty remote: any push is fine
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # known remote head with no outgoing descendant:
                            # it stays a head after the push
                            newheads.append(r)
                    else:
                        # remote head we don't know locally: assume it stays
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1497 1498
1498 1499 def push_addchangegroup(self, remote, force, revs):
1499 1500 lock = remote.lock()
1500 1501 try:
1501 1502 ret = self.prepush(remote, force, revs)
1502 1503 if ret[0] is not None:
1503 1504 cg, remote_heads = ret
1504 1505 return remote.addchangegroup(cg, 'push', self.url())
1505 1506 return ret[1]
1506 1507 finally:
1507 1508 del lock
1508 1509
1509 1510 def push_unbundle(self, remote, force, revs):
1510 1511 # local repo finds heads on server, finds out what revs it
1511 1512 # must push. once revs transferred, if server finds it has
1512 1513 # different heads (someone else won commit/push race), server
1513 1514 # aborts.
1514 1515
1515 1516 ret = self.prepush(remote, force, revs)
1516 1517 if ret[0] is not None:
1517 1518 cg, remote_heads = ret
1518 1519 if force: remote_heads = ['force']
1519 1520 return remote.unbundle(cg, remote_heads, 'push')
1520 1521 return ret[1]
1521 1522
1522 1523 def changegroupinfo(self, nodes, source):
1523 1524 if self.ui.verbose or source == 'bundle':
1524 1525 self.ui.status(_("%d changesets found\n") % len(nodes))
1525 1526 if self.ui.debugflag:
1526 1527 self.ui.debug(_("List of changesets:\n"))
1527 1528 for node in nodes:
1528 1529 self.ui.debug("%s\n" % hex(node))
1529 1530
1530 1531 def changegroupsubset(self, bases, heads, source, extranodes=None):
1531 1532 """This function generates a changegroup consisting of all the nodes
1532 1533 that are descendents of any of the bases, and ancestors of any of
1533 1534 the heads.
1534 1535
1535 1536 It is fairly complex as determining which filenodes and which
1536 1537 manifest nodes need to be included for the changeset to be complete
1537 1538 is non-trivial.
1538 1539
1539 1540 Another wrinkle is doing the reverse, figuring out which changeset in
1540 1541 the changegroup a particular filenode or manifestnode belongs to.
1541 1542
1542 1543 The caller can specify some nodes that must be included in the
1543 1544 changegroup using the extranodes argument. It should be a dict
1544 1545 where the keys are the filenames (or 1 for the manifest), and the
1545 1546 values are lists of (node, linknode) tuples, where node is a wanted
1546 1547 node and linknode is the changelog node that should be transmitted as
1547 1548 the linkrev.
1548 1549 """
1549 1550
1550 1551 self.hook('preoutgoing', throw=True, source=source)
1551 1552
1552 1553 # Set up some initial variables
1553 1554 # Make it easy to refer to self.changelog
1554 1555 cl = self.changelog
1555 1556 # msng is short for missing - compute the list of changesets in this
1556 1557 # changegroup.
1557 1558 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1558 1559 self.changegroupinfo(msng_cl_lst, source)
1559 1560 # Some bases may turn out to be superfluous, and some heads may be
1560 1561 # too. nodesbetween will return the minimal set of bases and heads
1561 1562 # necessary to re-create the changegroup.
1562 1563
1563 1564 # Known heads are the list of heads that it is assumed the recipient
1564 1565 # of this changegroup will know about.
1565 1566 knownheads = {}
1566 1567 # We assume that all parents of bases are known heads.
1567 1568 for n in bases:
1568 1569 for p in cl.parents(n):
1569 1570 if p != nullid:
1570 1571 knownheads[p] = 1
1571 1572 knownheads = knownheads.keys()
1572 1573 if knownheads:
1573 1574 # Now that we know what heads are known, we can compute which
1574 1575 # changesets are known. The recipient must know about all
1575 1576 # changesets required to reach the known heads from the null
1576 1577 # changeset.
1577 1578 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1578 1579 junk = None
1579 1580 # Transform the list into an ersatz set.
1580 1581 has_cl_set = dict.fromkeys(has_cl_set)
1581 1582 else:
1582 1583 # If there were no known heads, the recipient cannot be assumed to
1583 1584 # know about any changesets.
1584 1585 has_cl_set = {}
1585 1586
1586 1587 # Make it easy to refer to self.manifest
1587 1588 mnfst = self.manifest
1588 1589 # We don't know which manifests are missing yet
1589 1590 msng_mnfst_set = {}
1590 1591 # Nor do we know which filenodes are missing.
1591 1592 msng_filenode_set = {}
1592 1593
1593 1594 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1594 1595 junk = None
1595 1596
1596 1597 # A changeset always belongs to itself, so the changenode lookup
1597 1598 # function for a changenode is identity.
1598 1599 def identity(x):
1599 1600 return x
1600 1601
1601 1602 # A function generating function. Sets up an environment for the
1602 1603 # inner function.
1603 1604 def cmp_by_rev_func(revlog):
1604 1605 # Compare two nodes by their revision number in the environment's
1605 1606 # revision history. Since the revision number both represents the
1606 1607 # most efficient order to read the nodes in, and represents a
1607 1608 # topological sorting of the nodes, this function is often useful.
1608 1609 def cmp_by_rev(a, b):
1609 1610 return cmp(revlog.rev(a), revlog.rev(b))
1610 1611 return cmp_by_rev
1611 1612
1612 1613 # If we determine that a particular file or manifest node must be a
1613 1614 # node that the recipient of the changegroup will already have, we can
1614 1615 # also assume the recipient will have all the parents. This function
1615 1616 # prunes them from the set of missing nodes.
1616 1617 def prune_parents(revlog, hasset, msngset):
1617 1618 haslst = hasset.keys()
1618 1619 haslst.sort(cmp_by_rev_func(revlog))
1619 1620 for node in haslst:
1620 1621 parentlst = [p for p in revlog.parents(node) if p != nullid]
1621 1622 while parentlst:
1622 1623 n = parentlst.pop()
1623 1624 if n not in hasset:
1624 1625 hasset[n] = 1
1625 1626 p = [p for p in revlog.parents(n) if p != nullid]
1626 1627 parentlst.extend(p)
1627 1628 for n in hasset:
1628 1629 msngset.pop(n, None)
1629 1630
1630 1631 # This is a function generating function used to set up an environment
1631 1632 # for the inner function to execute in.
1632 1633 def manifest_and_file_collector(changedfileset):
1633 1634 # This is an information gathering function that gathers
1634 1635 # information from each changeset node that goes out as part of
1635 1636 # the changegroup. The information gathered is a list of which
1636 1637 # manifest nodes are potentially required (the recipient may
1637 1638 # already have them) and total list of all files which were
1638 1639 # changed in any changeset in the changegroup.
1639 1640 #
1640 1641 # We also remember the first changenode we saw any manifest
1641 1642 # referenced by so we can later determine which changenode 'owns'
1642 1643 # the manifest.
1643 1644 def collect_manifests_and_files(clnode):
1644 1645 c = cl.read(clnode)
1645 1646 for f in c[3]:
1646 1647 # This is to make sure we only have one instance of each
1647 1648 # filename string for each filename.
1648 1649 changedfileset.setdefault(f, f)
1649 1650 msng_mnfst_set.setdefault(c[0], clnode)
1650 1651 return collect_manifests_and_files
1651 1652
1652 1653 # Figure out which manifest nodes (of the ones we think might be part
1653 1654 # of the changegroup) the recipient must know about and remove them
1654 1655 # from the changegroup.
1655 1656 def prune_manifests():
1656 1657 has_mnfst_set = {}
1657 1658 for n in msng_mnfst_set:
1658 1659 # If a 'missing' manifest thinks it belongs to a changenode
1659 1660 # the recipient is assumed to have, obviously the recipient
1660 1661 # must have that manifest.
1661 1662 linknode = cl.node(mnfst.linkrev(n))
1662 1663 if linknode in has_cl_set:
1663 1664 has_mnfst_set[n] = 1
1664 1665 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1665 1666
1666 1667 # Use the information collected in collect_manifests_and_files to say
1667 1668 # which changenode any manifestnode belongs to.
1668 1669 def lookup_manifest_link(mnfstnode):
1669 1670 return msng_mnfst_set[mnfstnode]
1670 1671
1671 1672 # A function generating function that sets up the initial environment
1672 1673 # the inner function.
1673 1674 def filenode_collector(changedfiles):
1674 1675 next_rev = [0]
1675 1676 # This gathers information from each manifestnode included in the
1676 1677 # changegroup about which filenodes the manifest node references
1677 1678 # so we can include those in the changegroup too.
1678 1679 #
1679 1680 # It also remembers which changenode each filenode belongs to. It
1680 1681 # does this by assuming the a filenode belongs to the changenode
1681 1682 # the first manifest that references it belongs to.
1682 1683 def collect_msng_filenodes(mnfstnode):
1683 1684 r = mnfst.rev(mnfstnode)
1684 1685 if r == next_rev[0]:
1685 1686 # If the last rev we looked at was the one just previous,
1686 1687 # we only need to see a diff.
1687 1688 deltamf = mnfst.readdelta(mnfstnode)
1688 1689 # For each line in the delta
1689 1690 for f, fnode in deltamf.items():
1690 1691 f = changedfiles.get(f, None)
1691 1692 # And if the file is in the list of files we care
1692 1693 # about.
1693 1694 if f is not None:
1694 1695 # Get the changenode this manifest belongs to
1695 1696 clnode = msng_mnfst_set[mnfstnode]
1696 1697 # Create the set of filenodes for the file if
1697 1698 # there isn't one already.
1698 1699 ndset = msng_filenode_set.setdefault(f, {})
1699 1700 # And set the filenode's changelog node to the
1700 1701 # manifest's if it hasn't been set already.
1701 1702 ndset.setdefault(fnode, clnode)
1702 1703 else:
1703 1704 # Otherwise we need a full manifest.
1704 1705 m = mnfst.read(mnfstnode)
1705 1706 # For every file in we care about.
1706 1707 for f in changedfiles:
1707 1708 fnode = m.get(f, None)
1708 1709 # If it's in the manifest
1709 1710 if fnode is not None:
1710 1711 # See comments above.
1711 1712 clnode = msng_mnfst_set[mnfstnode]
1712 1713 ndset = msng_filenode_set.setdefault(f, {})
1713 1714 ndset.setdefault(fnode, clnode)
1714 1715 # Remember the revision we hope to see next.
1715 1716 next_rev[0] = r + 1
1716 1717 return collect_msng_filenodes
1717 1718
1718 1719 # We have a list of filenodes we think we need for a file, lets remove
1719 1720 # all those we now the recipient must have.
1720 1721 def prune_filenodes(f, filerevlog):
1721 1722 msngset = msng_filenode_set[f]
1722 1723 hasset = {}
1723 1724 # If a 'missing' filenode thinks it belongs to a changenode we
1724 1725 # assume the recipient must have, then the recipient must have
1725 1726 # that filenode.
1726 1727 for n in msngset:
1727 1728 clnode = cl.node(filerevlog.linkrev(n))
1728 1729 if clnode in has_cl_set:
1729 1730 hasset[n] = 1
1730 1731 prune_parents(filerevlog, hasset, msngset)
1731 1732
1732 1733 # A function generator function that sets up the a context for the
1733 1734 # inner function.
1734 1735 def lookup_filenode_link_func(fname):
1735 1736 msngset = msng_filenode_set[fname]
1736 1737 # Lookup the changenode the filenode belongs to.
1737 1738 def lookup_filenode_link(fnode):
1738 1739 return msngset[fnode]
1739 1740 return lookup_filenode_link
1740 1741
1741 1742 # Add the nodes that were explicitly requested.
1742 1743 def add_extra_nodes(name, nodes):
1743 1744 if not extranodes or name not in extranodes:
1744 1745 return
1745 1746
1746 1747 for node, linknode in extranodes[name]:
1747 1748 if node not in nodes:
1748 1749 nodes[node] = linknode
1749 1750
1750 1751 # Now that we have all theses utility functions to help out and
1751 1752 # logically divide up the task, generate the group.
1752 1753 def gengroup():
1753 1754 # The set of changed files starts empty.
1754 1755 changedfiles = {}
1755 1756 # Create a changenode group generator that will call our functions
1756 1757 # back to lookup the owning changenode and collect information.
1757 1758 group = cl.group(msng_cl_lst, identity,
1758 1759 manifest_and_file_collector(changedfiles))
1759 1760 for chnk in group:
1760 1761 yield chnk
1761 1762
1762 1763 # The list of manifests has been collected by the generator
1763 1764 # calling our functions back.
1764 1765 prune_manifests()
1765 1766 add_extra_nodes(1, msng_mnfst_set)
1766 1767 msng_mnfst_lst = msng_mnfst_set.keys()
1767 1768 # Sort the manifestnodes by revision number.
1768 1769 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1769 1770 # Create a generator for the manifestnodes that calls our lookup
1770 1771 # and data collection functions back.
1771 1772 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1772 1773 filenode_collector(changedfiles))
1773 1774 for chnk in group:
1774 1775 yield chnk
1775 1776
1776 1777 # These are no longer needed, dereference and toss the memory for
1777 1778 # them.
1778 1779 msng_mnfst_lst = None
1779 1780 msng_mnfst_set.clear()
1780 1781
1781 1782 if extranodes:
1782 1783 for fname in extranodes:
1783 1784 if isinstance(fname, int):
1784 1785 continue
1785 1786 add_extra_nodes(fname,
1786 1787 msng_filenode_set.setdefault(fname, {}))
1787 1788 changedfiles[fname] = 1
1788 1789 changedfiles = changedfiles.keys()
1789 1790 changedfiles.sort()
1790 1791 # Go through all our files in order sorted by name.
1791 1792 for fname in changedfiles:
1792 1793 filerevlog = self.file(fname)
1793 1794 if filerevlog.count() == 0:
1794 1795 raise util.Abort(_("empty or missing revlog for %s") % fname)
1795 1796 # Toss out the filenodes that the recipient isn't really
1796 1797 # missing.
1797 1798 if fname in msng_filenode_set:
1798 1799 prune_filenodes(fname, filerevlog)
1799 1800 msng_filenode_lst = msng_filenode_set[fname].keys()
1800 1801 else:
1801 1802 msng_filenode_lst = []
1802 1803 # If any filenodes are left, generate the group for them,
1803 1804 # otherwise don't bother.
1804 1805 if len(msng_filenode_lst) > 0:
1805 1806 yield changegroup.chunkheader(len(fname))
1806 1807 yield fname
1807 1808 # Sort the filenodes by their revision #
1808 1809 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1809 1810 # Create a group generator and only pass in a changenode
1810 1811 # lookup function as we need to collect no information
1811 1812 # from filenodes.
1812 1813 group = filerevlog.group(msng_filenode_lst,
1813 1814 lookup_filenode_link_func(fname))
1814 1815 for chnk in group:
1815 1816 yield chnk
1816 1817 if fname in msng_filenode_set:
1817 1818 # Don't need this anymore, toss it to free memory.
1818 1819 del msng_filenode_set[fname]
1819 1820 # Signal that no more groups are left.
1820 1821 yield changegroup.closechunk()
1821 1822
1822 1823 if msng_cl_lst:
1823 1824 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1824 1825
1825 1826 return util.chunkbuffer(gengroup())
1826 1827
1827 1828 def changegroup(self, basenodes, source):
1828 1829 """Generate a changegroup of all nodes that we have that a recipient
1829 1830 doesn't.
1830 1831
1831 1832 This is much easier than the previous function as we can assume that
1832 1833 the recipient has any changenode we aren't sending them."""
1833 1834
1834 1835 self.hook('preoutgoing', throw=True, source=source)
1835 1836
1836 1837 cl = self.changelog
1837 1838 nodes = cl.nodesbetween(basenodes, None)[0]
1838 1839 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1839 1840 self.changegroupinfo(nodes, source)
1840 1841
1841 1842 def identity(x):
1842 1843 return x
1843 1844
1844 1845 def gennodelst(revlog):
1845 1846 for r in xrange(0, revlog.count()):
1846 1847 n = revlog.node(r)
1847 1848 if revlog.linkrev(n) in revset:
1848 1849 yield n
1849 1850
1850 1851 def changed_file_collector(changedfileset):
1851 1852 def collect_changed_files(clnode):
1852 1853 c = cl.read(clnode)
1853 1854 for fname in c[3]:
1854 1855 changedfileset[fname] = 1
1855 1856 return collect_changed_files
1856 1857
1857 1858 def lookuprevlink_func(revlog):
1858 1859 def lookuprevlink(n):
1859 1860 return cl.node(revlog.linkrev(n))
1860 1861 return lookuprevlink
1861 1862
1862 1863 def gengroup():
1863 1864 # construct a list of all changed files
1864 1865 changedfiles = {}
1865 1866
1866 1867 for chnk in cl.group(nodes, identity,
1867 1868 changed_file_collector(changedfiles)):
1868 1869 yield chnk
1869 1870 changedfiles = changedfiles.keys()
1870 1871 changedfiles.sort()
1871 1872
1872 1873 mnfst = self.manifest
1873 1874 nodeiter = gennodelst(mnfst)
1874 1875 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1875 1876 yield chnk
1876 1877
1877 1878 for fname in changedfiles:
1878 1879 filerevlog = self.file(fname)
1879 1880 if filerevlog.count() == 0:
1880 1881 raise util.Abort(_("empty or missing revlog for %s") % fname)
1881 1882 nodeiter = gennodelst(filerevlog)
1882 1883 nodeiter = list(nodeiter)
1883 1884 if nodeiter:
1884 1885 yield changegroup.chunkheader(len(fname))
1885 1886 yield fname
1886 1887 lookup = lookuprevlink_func(filerevlog)
1887 1888 for chnk in filerevlog.group(nodeiter, lookup):
1888 1889 yield chnk
1889 1890
1890 1891 yield changegroup.closechunk()
1891 1892
1892 1893 if nodes:
1893 1894 self.hook('outgoing', node=hex(nodes[0]), source=source)
1894 1895
1895 1896 return util.chunkbuffer(gengroup())
1896 1897
1897 1898 def addchangegroup(self, source, srctype, url, emptyok=False):
1898 1899 """add changegroup to repo.
1899 1900
1900 1901 return values:
1901 1902 - nothing changed or no source: 0
1902 1903 - more heads than before: 1+added heads (2..n)
1903 1904 - less heads than before: -1-removed heads (-2..-n)
1904 1905 - number of heads stays the same: 1
1905 1906 """
1906 1907 def csmap(x):
1907 1908 self.ui.debug(_("add changeset %s\n") % short(x))
1908 1909 return cl.count()
1909 1910
1910 1911 def revmap(x):
1911 1912 return cl.rev(x)
1912 1913
1913 1914 if not source:
1914 1915 return 0
1915 1916
1916 1917 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1917 1918
1918 1919 changesets = files = revisions = 0
1919 1920
1920 1921 # write changelog data to temp files so concurrent readers will not see
1921 1922 # inconsistent view
1922 1923 cl = self.changelog
1923 1924 cl.delayupdate()
1924 1925 oldheads = len(cl.heads())
1925 1926
1926 1927 tr = self.transaction()
1927 1928 try:
1928 1929 trp = weakref.proxy(tr)
1929 1930 # pull off the changeset group
1930 1931 self.ui.status(_("adding changesets\n"))
1931 1932 cor = cl.count() - 1
1932 1933 chunkiter = changegroup.chunkiter(source)
1933 1934 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1934 1935 raise util.Abort(_("received changelog group is empty"))
1935 1936 cnr = cl.count() - 1
1936 1937 changesets = cnr - cor
1937 1938
1938 1939 # pull off the manifest group
1939 1940 self.ui.status(_("adding manifests\n"))
1940 1941 chunkiter = changegroup.chunkiter(source)
1941 1942 # no need to check for empty manifest group here:
1942 1943 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1943 1944 # no new manifest will be created and the manifest group will
1944 1945 # be empty during the pull
1945 1946 self.manifest.addgroup(chunkiter, revmap, trp)
1946 1947
1947 1948 # process the files
1948 1949 self.ui.status(_("adding file changes\n"))
1949 1950 while 1:
1950 1951 f = changegroup.getchunk(source)
1951 1952 if not f:
1952 1953 break
1953 1954 self.ui.debug(_("adding %s revisions\n") % f)
1954 1955 fl = self.file(f)
1955 1956 o = fl.count()
1956 1957 chunkiter = changegroup.chunkiter(source)
1957 1958 if fl.addgroup(chunkiter, revmap, trp) is None:
1958 1959 raise util.Abort(_("received file revlog group is empty"))
1959 1960 revisions += fl.count() - o
1960 1961 files += 1
1961 1962
1962 1963 # make changelog see real files again
1963 1964 cl.finalize(trp)
1964 1965
1965 1966 newheads = len(self.changelog.heads())
1966 1967 heads = ""
1967 1968 if oldheads and newheads != oldheads:
1968 1969 heads = _(" (%+d heads)") % (newheads - oldheads)
1969 1970
1970 1971 self.ui.status(_("added %d changesets"
1971 1972 " with %d changes to %d files%s\n")
1972 1973 % (changesets, revisions, files, heads))
1973 1974
1974 1975 if changesets > 0:
1975 1976 self.hook('pretxnchangegroup', throw=True,
1976 1977 node=hex(self.changelog.node(cor+1)), source=srctype,
1977 1978 url=url)
1978 1979
1979 1980 tr.close()
1980 1981 finally:
1981 1982 del tr
1982 1983
1983 1984 if changesets > 0:
1985 # forcefully update the on-disk branch cache
1986 self.ui.debug(_("updating the branch cache\n"))
1987 self.branchcache = None
1988 self.branchtags()
1984 1989 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1985 1990 source=srctype, url=url)
1986 1991
1987 1992 for i in xrange(cor + 1, cnr + 1):
1988 1993 self.hook("incoming", node=hex(self.changelog.node(i)),
1989 1994 source=srctype, url=url)
1990 1995
1991 1996 # never return 0 here:
1992 1997 if newheads < oldheads:
1993 1998 return newheads - oldheads - 1
1994 1999 else:
1995 2000 return newheads - oldheads + 1
1996 2001
1997 2002
1998 2003 def stream_in(self, remote):
1999 2004 fp = remote.stream_out()
2000 2005 l = fp.readline()
2001 2006 try:
2002 2007 resp = int(l)
2003 2008 except ValueError:
2004 2009 raise util.UnexpectedOutput(
2005 2010 _('Unexpected response from remote server:'), l)
2006 2011 if resp == 1:
2007 2012 raise util.Abort(_('operation forbidden by server'))
2008 2013 elif resp == 2:
2009 2014 raise util.Abort(_('locking the remote repository failed'))
2010 2015 elif resp != 0:
2011 2016 raise util.Abort(_('the server sent an unknown error code'))
2012 2017 self.ui.status(_('streaming all changes\n'))
2013 2018 l = fp.readline()
2014 2019 try:
2015 2020 total_files, total_bytes = map(int, l.split(' ', 1))
2016 2021 except ValueError, TypeError:
2017 2022 raise util.UnexpectedOutput(
2018 2023 _('Unexpected response from remote server:'), l)
2019 2024 self.ui.status(_('%d files to transfer, %s of data\n') %
2020 2025 (total_files, util.bytecount(total_bytes)))
2021 2026 start = time.time()
2022 2027 for i in xrange(total_files):
2023 2028 # XXX doesn't support '\n' or '\r' in filenames
2024 2029 l = fp.readline()
2025 2030 try:
2026 2031 name, size = l.split('\0', 1)
2027 2032 size = int(size)
2028 2033 except ValueError, TypeError:
2029 2034 raise util.UnexpectedOutput(
2030 2035 _('Unexpected response from remote server:'), l)
2031 2036 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2032 2037 ofp = self.sopener(name, 'w')
2033 2038 for chunk in util.filechunkiter(fp, limit=size):
2034 2039 ofp.write(chunk)
2035 2040 ofp.close()
2036 2041 elapsed = time.time() - start
2037 2042 if elapsed <= 0:
2038 2043 elapsed = 0.001
2039 2044 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2040 2045 (util.bytecount(total_bytes), elapsed,
2041 2046 util.bytecount(total_bytes / elapsed)))
2042 2047 self.invalidate()
2043 2048 return len(self.heads()) + 1
2044 2049
2045 2050 def clone(self, remote, heads=[], stream=False):
2046 2051 '''clone remote repository.
2047 2052
2048 2053 keyword arguments:
2049 2054 heads: list of revs to clone (forces use of pull)
2050 2055 stream: use streaming clone if possible'''
2051 2056
2052 2057 # now, all clients that can request uncompressed clones can
2053 2058 # read repo formats supported by all servers that can serve
2054 2059 # them.
2055 2060
2056 2061 # if revlog format changes, client will have to check version
2057 2062 # and format flags on "stream" capability, and use
2058 2063 # uncompressed only if compatible.
2059 2064
2060 2065 if stream and not heads and remote.capable('stream'):
2061 2066 return self.stream_in(remote)
2062 2067 return self.pull(remote, heads)
2063 2068
2064 2069 # used to avoid circular references so destructors work
2065 2070 def aftertrans(files):
2066 2071 renamefiles = [tuple(t) for t in files]
2067 2072 def a():
2068 2073 for src, dest in renamefiles:
2069 2074 util.rename(src, dest)
2070 2075 return a
2071 2076
2072 2077 def instance(ui, path, create):
2073 2078 return localrepository(ui, util.drop_scheme('file', path), create)
2074 2079
2075 2080 def islocal(path):
2076 2081 return True
@@ -1,227 +1,238 b''
1 1 # sshrepo.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from remoterepo import *
10 10 from i18n import _
11 11 import repo, os, re, stat, util
12 12
13 13 class sshrepository(remoterepository):
14 14 def __init__(self, ui, path, create=0):
15 15 self._url = path
16 16 self.ui = ui
17 17
18 18 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
19 19 if not m:
20 20 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
21 21
22 22 self.user = m.group(2)
23 23 self.host = m.group(3)
24 24 self.port = m.group(5)
25 25 self.path = m.group(7) or "."
26 26
27 27 sshcmd = self.ui.config("ui", "ssh", "ssh")
28 28 remotecmd = self.ui.config("ui", "remotecmd", "hg")
29 29
30 30 args = util.sshargs(sshcmd, self.host, self.user, self.port)
31 31
32 32 if create:
33 33 cmd = '%s %s "%s init %s"'
34 34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
35 35
36 36 ui.note('running %s\n' % cmd)
37 37 res = util.system(cmd)
38 38 if res != 0:
39 39 self.raise_(repo.RepoError(_("could not create remote repo")))
40 40
41 41 self.validate_repo(ui, sshcmd, args, remotecmd)
42 42
43 43 def url(self):
44 44 return self._url
45 45
46 46 def validate_repo(self, ui, sshcmd, args, remotecmd):
47 47 # cleanup up previous run
48 48 self.cleanup()
49 49
50 50 cmd = '%s %s "%s -R %s serve --stdio"'
51 51 cmd = cmd % (sshcmd, args, remotecmd, self.path)
52 52
53 53 cmd = util.quotecommand(cmd)
54 54 ui.note('running %s\n' % cmd)
55 55 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
56 56
57 57 # skip any noise generated by remote shell
58 58 self.do_cmd("hello")
59 59 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
60 60 lines = ["", "dummy"]
61 61 max_noise = 500
62 62 while lines[-1] and max_noise:
63 63 l = r.readline()
64 64 self.readerr()
65 65 if lines[-1] == "1\n" and l == "\n":
66 66 break
67 67 if l:
68 68 ui.debug(_("remote: "), l)
69 69 lines.append(l)
70 70 max_noise -= 1
71 71 else:
72 72 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
73 73
74 74 self.capabilities = util.set()
75 75 lines.reverse()
76 76 for l in lines:
77 77 if l.startswith("capabilities:"):
78 78 self.capabilities.update(l[:-1].split(":")[1].split())
79 79 break
80 80
81 81 def readerr(self):
82 82 while 1:
83 83 size = util.fstat(self.pipee).st_size
84 84 if size == 0: break
85 85 l = self.pipee.readline()
86 86 if not l: break
87 87 self.ui.status(_("remote: "), l)
88 88
89 89 def raise_(self, exception):
90 90 self.cleanup()
91 91 raise exception
92 92
93 93 def cleanup(self):
94 94 try:
95 95 self.pipeo.close()
96 96 self.pipei.close()
97 97 # read the error descriptor until EOF
98 98 for l in self.pipee:
99 99 self.ui.status(_("remote: "), l)
100 100 self.pipee.close()
101 101 except:
102 102 pass
103 103
104 104 __del__ = cleanup
105 105
106 106 def do_cmd(self, cmd, **args):
107 107 self.ui.debug(_("sending %s command\n") % cmd)
108 108 self.pipeo.write("%s\n" % cmd)
109 109 for k, v in args.items():
110 110 self.pipeo.write("%s %d\n" % (k, len(v)))
111 111 self.pipeo.write(v)
112 112 self.pipeo.flush()
113 113
114 114 return self.pipei
115 115
116 116 def call(self, cmd, **args):
117 r = self.do_cmd(cmd, **args)
118 l = r.readline()
117 self.do_cmd(cmd, **args)
118 return self._recv()
119
120 def _recv(self):
121 l = self.pipei.readline()
119 122 self.readerr()
120 123 try:
121 124 l = int(l)
122 125 except:
123 126 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
124 return r.read(l)
127 return self.pipei.read(l)
128
129 def _send(self, data, flush=False):
130 self.pipeo.write("%d\n" % len(data))
131 if data:
132 self.pipeo.write(data)
133 if flush:
134 self.pipeo.flush()
135 self.readerr()
125 136
126 137 def lock(self):
127 138 self.call("lock")
128 139 return remotelock(self)
129 140
130 141 def unlock(self):
131 142 self.call("unlock")
132 143
133 144 def lookup(self, key):
134 145 self.requirecap('lookup', _('look up remote revision'))
135 146 d = self.call("lookup", key=key)
136 147 success, data = d[:-1].split(" ", 1)
137 148 if int(success):
138 149 return bin(data)
139 150 else:
140 151 self.raise_(repo.RepoError(data))
141 152
142 153 def heads(self):
143 154 d = self.call("heads")
144 155 try:
145 156 return map(bin, d[:-1].split(" "))
146 157 except:
147 158 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
148 159
149 160 def branches(self, nodes):
150 161 n = " ".join(map(hex, nodes))
151 162 d = self.call("branches", nodes=n)
152 163 try:
153 164 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
154 165 return br
155 166 except:
156 167 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
157 168
158 169 def between(self, pairs):
159 170 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
160 171 d = self.call("between", pairs=n)
161 172 try:
162 173 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
163 174 return p
164 175 except:
165 176 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
166 177
167 178 def changegroup(self, nodes, kind):
168 179 n = " ".join(map(hex, nodes))
169 180 return self.do_cmd("changegroup", roots=n)
170 181
171 182 def changegroupsubset(self, bases, heads, kind):
172 183 self.requirecap('changegroupsubset', _('look up remote changes'))
173 184 bases = " ".join(map(hex, bases))
174 185 heads = " ".join(map(hex, heads))
175 186 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
176 187
177 188 def unbundle(self, cg, heads, source):
178 189 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
179 190 if d:
180 191 # remote may send "unsynced changes"
181 192 self.raise_(repo.RepoError(_("push refused: %s") % d))
182 193
183 194 while 1:
184 195 d = cg.read(4096)
185 if not d: break
186 self.pipeo.write(str(len(d)) + '\n')
187 self.pipeo.write(d)
188 self.readerr()
196 if not d:
197 break
198 self._send(d)
189 199
190 self.pipeo.write('0\n')
191 self.pipeo.flush()
200 self._send("", flush=True)
192 201
193 self.readerr()
194 l = int(self.pipei.readline())
195 r = self.pipei.read(l)
202 r = self._recv()
196 203 if r:
197 204 # remote may send "unsynced changes"
198 205 self.raise_(repo.RepoError(_("push failed: %s") % r))
199 206
200 self.readerr()
201 l = int(self.pipei.readline())
202 r = self.pipei.read(l)
203 return int(r)
207 r = self._recv()
208 try:
209 return int(r)
210 except:
211 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
204 212
205 213 def addchangegroup(self, cg, source, url):
206 214 d = self.call("addchangegroup")
207 215 if d:
208 216 self.raise_(repo.RepoError(_("push refused: %s") % d))
209 217 while 1:
210 218 d = cg.read(4096)
211 if not d: break
219 if not d:
220 break
212 221 self.pipeo.write(d)
213 222 self.readerr()
214 223
215 224 self.pipeo.flush()
216 225
217 226 self.readerr()
218 l = int(self.pipei.readline())
219 r = self.pipei.read(l)
227 r = self._recv()
220 228 if not r:
221 229 return 1
222 return int(r)
230 try:
231 return int(r)
232 except:
233 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
223 234
224 235 def stream_out(self):
225 236 return self.do_cmd('stream_out')
226 237
227 238 instance = sshrepository
@@ -1,6 +1,6 b''
1 1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
2 2 <html>
3 3 <head>
4 <link rel="icon" href="#staticurl#hgicon.png" type="image/png">
4 <link rel="icon" href="#staticurl#hgicon.png" type="image/png" />
5 5 <meta name="robots" content="index, nofollow" />
6 6 <link rel="stylesheet" href="#staticurl#style.css" type="text/css" />
@@ -1,6 +1,6 b''
1 1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
2 2 <html>
3 3 <head>
4 <link rel="icon" href="?static=hgicon.png" type="image/png">
4 <link rel="icon" href="?static=hgicon.png" type="image/png" />
5 5 <meta name="robots" content="index, nofollow" />
6 6 <link rel="stylesheet" href="?static=style.css" type="text/css" />
@@ -1,583 +1,588 b''
1 1 3:911600dab2ae
2 2 requesting all changes
3 3 adding changesets
4 4 adding manifests
5 5 adding file changes
6 6 added 1 changesets with 3 changes to 3 files
7 7 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 8
9 9 Extension disabled for lack of a hook
10 10 Pushing as user fred
11 11 hgrc = """
12 12 """
13 13 pushing to ../b
14 14 searching for changes
15 15 common changesets up to 6675d58eff77
16 16 3 changesets found
17 17 List of changesets:
18 18 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
19 19 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
20 20 911600dab2ae7a9baff75958b84fe606851ce955
21 21 adding changesets
22 22 add changeset ef1ea85a6374
23 23 add changeset f9cafe1212c8
24 24 add changeset 911600dab2ae
25 25 adding manifests
26 26 adding file changes
27 27 adding foo/Bar/file.txt revisions
28 28 adding foo/file.txt revisions
29 29 adding quux/file.py revisions
30 30 added 3 changesets with 3 changes to 3 files
31 updating the branch cache
31 32 rolling back last transaction
32 33 0:6675d58eff77
33 34
34 35 Extension disabled for lack of acl.sources
35 36 Pushing as user fred
36 37 hgrc = """
37 38 [hooks]
38 39 pretxnchangegroup.acl = python:hgext.acl.hook
39 40 """
40 41 pushing to ../b
41 42 searching for changes
42 43 common changesets up to 6675d58eff77
43 44 3 changesets found
44 45 List of changesets:
45 46 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
46 47 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
47 48 911600dab2ae7a9baff75958b84fe606851ce955
48 49 adding changesets
49 50 add changeset ef1ea85a6374
50 51 add changeset f9cafe1212c8
51 52 add changeset 911600dab2ae
52 53 adding manifests
53 54 adding file changes
54 55 adding foo/Bar/file.txt revisions
55 56 adding foo/file.txt revisions
56 57 adding quux/file.py revisions
57 58 added 3 changesets with 3 changes to 3 files
58 59 calling hook pretxnchangegroup.acl: hgext.acl.hook
59 60 acl: acl.allow not enabled
60 61 acl: acl.deny not enabled
61 62 acl: changes have source "push" - skipping
63 updating the branch cache
62 64 rolling back last transaction
63 65 0:6675d58eff77
64 66
65 67 No [acl.allow]/[acl.deny]
66 68 Pushing as user fred
67 69 hgrc = """
68 70 [hooks]
69 71 pretxnchangegroup.acl = python:hgext.acl.hook
70 72 [acl]
71 73 sources = push
72 74 """
73 75 pushing to ../b
74 76 searching for changes
75 77 common changesets up to 6675d58eff77
76 78 3 changesets found
77 79 List of changesets:
78 80 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
79 81 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
80 82 911600dab2ae7a9baff75958b84fe606851ce955
81 83 adding changesets
82 84 add changeset ef1ea85a6374
83 85 add changeset f9cafe1212c8
84 86 add changeset 911600dab2ae
85 87 adding manifests
86 88 adding file changes
87 89 adding foo/Bar/file.txt revisions
88 90 adding foo/file.txt revisions
89 91 adding quux/file.py revisions
90 92 added 3 changesets with 3 changes to 3 files
91 93 calling hook pretxnchangegroup.acl: hgext.acl.hook
92 94 acl: acl.allow not enabled
93 95 acl: acl.deny not enabled
94 96 acl: allowing changeset ef1ea85a6374
95 97 acl: allowing changeset f9cafe1212c8
96 98 acl: allowing changeset 911600dab2ae
99 updating the branch cache
97 100 rolling back last transaction
98 101 0:6675d58eff77
99 102
100 103 Empty [acl.allow]
101 104 Pushing as user fred
102 105 hgrc = """
103 106 [hooks]
104 107 pretxnchangegroup.acl = python:hgext.acl.hook
105 108 [acl]
106 109 sources = push
107 110 [acl.allow]
108 111 """
109 112 pushing to ../b
110 113 searching for changes
111 114 common changesets up to 6675d58eff77
112 115 3 changesets found
113 116 List of changesets:
114 117 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
115 118 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
116 119 911600dab2ae7a9baff75958b84fe606851ce955
117 120 adding changesets
118 121 add changeset ef1ea85a6374
119 122 add changeset f9cafe1212c8
120 123 add changeset 911600dab2ae
121 124 adding manifests
122 125 adding file changes
123 126 adding foo/Bar/file.txt revisions
124 127 adding foo/file.txt revisions
125 128 adding quux/file.py revisions
126 129 added 3 changesets with 3 changes to 3 files
127 130 calling hook pretxnchangegroup.acl: hgext.acl.hook
128 131 acl: acl.allow enabled, 0 entries for user fred
129 132 acl: acl.deny not enabled
130 133 acl: user fred not allowed on foo/file.txt
131 134 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
132 135 transaction abort!
133 136 rollback completed
134 137 abort: acl: access denied for changeset ef1ea85a6374
135 138 no rollback information available
136 139 0:6675d58eff77
137 140
138 141 fred is allowed inside foo/
139 142 Pushing as user fred
140 143 hgrc = """
141 144 [hooks]
142 145 pretxnchangegroup.acl = python:hgext.acl.hook
143 146 [acl]
144 147 sources = push
145 148 [acl.allow]
146 149 foo/** = fred
147 150 """
148 151 pushing to ../b
149 152 searching for changes
150 153 common changesets up to 6675d58eff77
151 154 3 changesets found
152 155 List of changesets:
153 156 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
154 157 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
155 158 911600dab2ae7a9baff75958b84fe606851ce955
156 159 adding changesets
157 160 add changeset ef1ea85a6374
158 161 add changeset f9cafe1212c8
159 162 add changeset 911600dab2ae
160 163 adding manifests
161 164 adding file changes
162 165 adding foo/Bar/file.txt revisions
163 166 adding foo/file.txt revisions
164 167 adding quux/file.py revisions
165 168 added 3 changesets with 3 changes to 3 files
166 169 calling hook pretxnchangegroup.acl: hgext.acl.hook
167 170 acl: acl.allow enabled, 1 entries for user fred
168 171 acl: acl.deny not enabled
169 172 acl: allowing changeset ef1ea85a6374
170 173 acl: allowing changeset f9cafe1212c8
171 174 acl: user fred not allowed on quux/file.py
172 175 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
173 176 transaction abort!
174 177 rollback completed
175 178 abort: acl: access denied for changeset 911600dab2ae
176 179 no rollback information available
177 180 0:6675d58eff77
178 181
179 182 Empty [acl.deny]
180 183 Pushing as user barney
181 184 hgrc = """
182 185 [hooks]
183 186 pretxnchangegroup.acl = python:hgext.acl.hook
184 187 [acl]
185 188 sources = push
186 189 [acl.allow]
187 190 foo/** = fred
188 191 [acl.deny]
189 192 """
190 193 pushing to ../b
191 194 searching for changes
192 195 common changesets up to 6675d58eff77
193 196 3 changesets found
194 197 List of changesets:
195 198 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
196 199 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
197 200 911600dab2ae7a9baff75958b84fe606851ce955
198 201 adding changesets
199 202 add changeset ef1ea85a6374
200 203 add changeset f9cafe1212c8
201 204 add changeset 911600dab2ae
202 205 adding manifests
203 206 adding file changes
204 207 adding foo/Bar/file.txt revisions
205 208 adding foo/file.txt revisions
206 209 adding quux/file.py revisions
207 210 added 3 changesets with 3 changes to 3 files
208 211 calling hook pretxnchangegroup.acl: hgext.acl.hook
209 212 acl: acl.allow enabled, 0 entries for user barney
210 213 acl: acl.deny enabled, 0 entries for user barney
211 214 acl: user barney not allowed on foo/file.txt
212 215 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
213 216 transaction abort!
214 217 rollback completed
215 218 abort: acl: access denied for changeset ef1ea85a6374
216 219 no rollback information available
217 220 0:6675d58eff77
218 221
219 222 fred is allowed inside foo/, but not foo/bar/ (case matters)
220 223 Pushing as user fred
221 224 hgrc = """
222 225 [hooks]
223 226 pretxnchangegroup.acl = python:hgext.acl.hook
224 227 [acl]
225 228 sources = push
226 229 [acl.allow]
227 230 foo/** = fred
228 231 [acl.deny]
229 232 foo/bar/** = fred
230 233 """
231 234 pushing to ../b
232 235 searching for changes
233 236 common changesets up to 6675d58eff77
234 237 3 changesets found
235 238 List of changesets:
236 239 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
237 240 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
238 241 911600dab2ae7a9baff75958b84fe606851ce955
239 242 adding changesets
240 243 add changeset ef1ea85a6374
241 244 add changeset f9cafe1212c8
242 245 add changeset 911600dab2ae
243 246 adding manifests
244 247 adding file changes
245 248 adding foo/Bar/file.txt revisions
246 249 adding foo/file.txt revisions
247 250 adding quux/file.py revisions
248 251 added 3 changesets with 3 changes to 3 files
249 252 calling hook pretxnchangegroup.acl: hgext.acl.hook
250 253 acl: acl.allow enabled, 1 entries for user fred
251 254 acl: acl.deny enabled, 1 entries for user fred
252 255 acl: allowing changeset ef1ea85a6374
253 256 acl: allowing changeset f9cafe1212c8
254 257 acl: user fred not allowed on quux/file.py
255 258 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
256 259 transaction abort!
257 260 rollback completed
258 261 abort: acl: access denied for changeset 911600dab2ae
259 262 no rollback information available
260 263 0:6675d58eff77
261 264
262 265 fred is allowed inside foo/, but not foo/Bar/
263 266 Pushing as user fred
264 267 hgrc = """
265 268 [hooks]
266 269 pretxnchangegroup.acl = python:hgext.acl.hook
267 270 [acl]
268 271 sources = push
269 272 [acl.allow]
270 273 foo/** = fred
271 274 [acl.deny]
272 275 foo/bar/** = fred
273 276 foo/Bar/** = fred
274 277 """
275 278 pushing to ../b
276 279 searching for changes
277 280 common changesets up to 6675d58eff77
278 281 3 changesets found
279 282 List of changesets:
280 283 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
281 284 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
282 285 911600dab2ae7a9baff75958b84fe606851ce955
283 286 adding changesets
284 287 add changeset ef1ea85a6374
285 288 add changeset f9cafe1212c8
286 289 add changeset 911600dab2ae
287 290 adding manifests
288 291 adding file changes
289 292 adding foo/Bar/file.txt revisions
290 293 adding foo/file.txt revisions
291 294 adding quux/file.py revisions
292 295 added 3 changesets with 3 changes to 3 files
293 296 calling hook pretxnchangegroup.acl: hgext.acl.hook
294 297 acl: acl.allow enabled, 1 entries for user fred
295 298 acl: acl.deny enabled, 2 entries for user fred
296 299 acl: allowing changeset ef1ea85a6374
297 300 acl: user fred denied on foo/Bar/file.txt
298 301 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
299 302 transaction abort!
300 303 rollback completed
301 304 abort: acl: access denied for changeset f9cafe1212c8
302 305 no rollback information available
303 306 0:6675d58eff77
304 307
305 308 barney is not mentioned => not allowed anywhere
306 309 Pushing as user barney
307 310 hgrc = """
308 311 [hooks]
309 312 pretxnchangegroup.acl = python:hgext.acl.hook
310 313 [acl]
311 314 sources = push
312 315 [acl.allow]
313 316 foo/** = fred
314 317 [acl.deny]
315 318 foo/bar/** = fred
316 319 foo/Bar/** = fred
317 320 """
318 321 pushing to ../b
319 322 searching for changes
320 323 common changesets up to 6675d58eff77
321 324 3 changesets found
322 325 List of changesets:
323 326 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
324 327 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
325 328 911600dab2ae7a9baff75958b84fe606851ce955
326 329 adding changesets
327 330 add changeset ef1ea85a6374
328 331 add changeset f9cafe1212c8
329 332 add changeset 911600dab2ae
330 333 adding manifests
331 334 adding file changes
332 335 adding foo/Bar/file.txt revisions
333 336 adding foo/file.txt revisions
334 337 adding quux/file.py revisions
335 338 added 3 changesets with 3 changes to 3 files
336 339 calling hook pretxnchangegroup.acl: hgext.acl.hook
337 340 acl: acl.allow enabled, 0 entries for user barney
338 341 acl: acl.deny enabled, 0 entries for user barney
339 342 acl: user barney not allowed on foo/file.txt
340 343 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
341 344 transaction abort!
342 345 rollback completed
343 346 abort: acl: access denied for changeset ef1ea85a6374
344 347 no rollback information available
345 348 0:6675d58eff77
346 349
347 350 barney is allowed everywhere
348 351 Pushing as user barney
349 352 hgrc = """
350 353 [hooks]
351 354 pretxnchangegroup.acl = python:hgext.acl.hook
352 355 [acl]
353 356 sources = push
354 357 [acl.allow]
355 358 foo/** = fred
356 359 [acl.deny]
357 360 foo/bar/** = fred
358 361 foo/Bar/** = fred
359 362 [acl.allow]
360 363 ** = barney
361 364 """
362 365 pushing to ../b
363 366 searching for changes
364 367 common changesets up to 6675d58eff77
365 368 3 changesets found
366 369 List of changesets:
367 370 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
368 371 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
369 372 911600dab2ae7a9baff75958b84fe606851ce955
370 373 adding changesets
371 374 add changeset ef1ea85a6374
372 375 add changeset f9cafe1212c8
373 376 add changeset 911600dab2ae
374 377 adding manifests
375 378 adding file changes
376 379 adding foo/Bar/file.txt revisions
377 380 adding foo/file.txt revisions
378 381 adding quux/file.py revisions
379 382 added 3 changesets with 3 changes to 3 files
380 383 calling hook pretxnchangegroup.acl: hgext.acl.hook
381 384 acl: acl.allow enabled, 1 entries for user barney
382 385 acl: acl.deny enabled, 0 entries for user barney
383 386 acl: allowing changeset ef1ea85a6374
384 387 acl: allowing changeset f9cafe1212c8
385 388 acl: allowing changeset 911600dab2ae
389 updating the branch cache
386 390 rolling back last transaction
387 391 0:6675d58eff77
388 392
389 393 wilma can change files with a .txt extension
390 394 Pushing as user wilma
391 395 hgrc = """
392 396 [hooks]
393 397 pretxnchangegroup.acl = python:hgext.acl.hook
394 398 [acl]
395 399 sources = push
396 400 [acl.allow]
397 401 foo/** = fred
398 402 [acl.deny]
399 403 foo/bar/** = fred
400 404 foo/Bar/** = fred
401 405 [acl.allow]
402 406 ** = barney
403 407 **/*.txt = wilma
404 408 """
405 409 pushing to ../b
406 410 searching for changes
407 411 common changesets up to 6675d58eff77
408 412 3 changesets found
409 413 List of changesets:
410 414 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
411 415 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
412 416 911600dab2ae7a9baff75958b84fe606851ce955
413 417 adding changesets
414 418 add changeset ef1ea85a6374
415 419 add changeset f9cafe1212c8
416 420 add changeset 911600dab2ae
417 421 adding manifests
418 422 adding file changes
419 423 adding foo/Bar/file.txt revisions
420 424 adding foo/file.txt revisions
421 425 adding quux/file.py revisions
422 426 added 3 changesets with 3 changes to 3 files
423 427 calling hook pretxnchangegroup.acl: hgext.acl.hook
424 428 acl: acl.allow enabled, 1 entries for user wilma
425 429 acl: acl.deny enabled, 0 entries for user wilma
426 430 acl: allowing changeset ef1ea85a6374
427 431 acl: allowing changeset f9cafe1212c8
428 432 acl: user wilma not allowed on quux/file.py
429 433 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
430 434 transaction abort!
431 435 rollback completed
432 436 abort: acl: access denied for changeset 911600dab2ae
433 437 no rollback information available
434 438 0:6675d58eff77
435 439
436 440 file specified by acl.config does not exist
437 441 Pushing as user barney
438 442 hgrc = """
439 443 [hooks]
440 444 pretxnchangegroup.acl = python:hgext.acl.hook
441 445 [acl]
442 446 sources = push
443 447 [acl.allow]
444 448 foo/** = fred
445 449 [acl.deny]
446 450 foo/bar/** = fred
447 451 foo/Bar/** = fred
448 452 [acl.allow]
449 453 ** = barney
450 454 **/*.txt = wilma
451 455 [acl]
452 456 config = ../acl.config
453 457 """
454 458 pushing to ../b
455 459 searching for changes
456 460 common changesets up to 6675d58eff77
457 461 3 changesets found
458 462 List of changesets:
459 463 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
460 464 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
461 465 911600dab2ae7a9baff75958b84fe606851ce955
462 466 adding changesets
463 467 add changeset ef1ea85a6374
464 468 add changeset f9cafe1212c8
465 469 add changeset 911600dab2ae
466 470 adding manifests
467 471 adding file changes
468 472 adding foo/Bar/file.txt revisions
469 473 adding foo/file.txt revisions
470 474 adding quux/file.py revisions
471 475 added 3 changesets with 3 changes to 3 files
472 476 calling hook pretxnchangegroup.acl: hgext.acl.hook
473 477 error: pretxnchangegroup.acl hook failed: unable to open ../acl.config: No such file or directory
474 478 transaction abort!
475 479 rollback completed
476 480 abort: unable to open ../acl.config: No such file or directory
477 481 no rollback information available
478 482 0:6675d58eff77
479 483
480 484 betty is allowed inside foo/ by a acl.config file
481 485 Pushing as user betty
482 486 hgrc = """
483 487 [hooks]
484 488 pretxnchangegroup.acl = python:hgext.acl.hook
485 489 [acl]
486 490 sources = push
487 491 [acl.allow]
488 492 foo/** = fred
489 493 [acl.deny]
490 494 foo/bar/** = fred
491 495 foo/Bar/** = fred
492 496 [acl.allow]
493 497 ** = barney
494 498 **/*.txt = wilma
495 499 [acl]
496 500 config = ../acl.config
497 501 """
498 502 acl.config = """
499 503 [acl.allow]
500 504 foo/** = betty
501 505 """
502 506 pushing to ../b
503 507 searching for changes
504 508 common changesets up to 6675d58eff77
505 509 3 changesets found
506 510 List of changesets:
507 511 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
508 512 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
509 513 911600dab2ae7a9baff75958b84fe606851ce955
510 514 adding changesets
511 515 add changeset ef1ea85a6374
512 516 add changeset f9cafe1212c8
513 517 add changeset 911600dab2ae
514 518 adding manifests
515 519 adding file changes
516 520 adding foo/Bar/file.txt revisions
517 521 adding foo/file.txt revisions
518 522 adding quux/file.py revisions
519 523 added 3 changesets with 3 changes to 3 files
520 524 calling hook pretxnchangegroup.acl: hgext.acl.hook
521 525 acl: acl.allow enabled, 1 entries for user betty
522 526 acl: acl.deny enabled, 0 entries for user betty
523 527 acl: allowing changeset ef1ea85a6374
524 528 acl: allowing changeset f9cafe1212c8
525 529 acl: user betty not allowed on quux/file.py
526 530 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
527 531 transaction abort!
528 532 rollback completed
529 533 abort: acl: access denied for changeset 911600dab2ae
530 534 no rollback information available
531 535 0:6675d58eff77
532 536
533 537 acl.config can set only [acl.allow]/[acl.deny]
534 538 Pushing as user barney
535 539 hgrc = """
536 540 [hooks]
537 541 pretxnchangegroup.acl = python:hgext.acl.hook
538 542 [acl]
539 543 sources = push
540 544 [acl.allow]
541 545 foo/** = fred
542 546 [acl.deny]
543 547 foo/bar/** = fred
544 548 foo/Bar/** = fred
545 549 [acl.allow]
546 550 ** = barney
547 551 **/*.txt = wilma
548 552 [acl]
549 553 config = ../acl.config
550 554 """
551 555 acl.config = """
552 556 [acl.allow]
553 557 foo/** = betty
554 558 [hooks]
555 559 changegroup.acl = false
556 560 """
557 561 pushing to ../b
558 562 searching for changes
559 563 common changesets up to 6675d58eff77
560 564 3 changesets found
561 565 List of changesets:
562 566 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
563 567 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
564 568 911600dab2ae7a9baff75958b84fe606851ce955
565 569 adding changesets
566 570 add changeset ef1ea85a6374
567 571 add changeset f9cafe1212c8
568 572 add changeset 911600dab2ae
569 573 adding manifests
570 574 adding file changes
571 575 adding foo/Bar/file.txt revisions
572 576 adding foo/file.txt revisions
573 577 adding quux/file.py revisions
574 578 added 3 changesets with 3 changes to 3 files
575 579 calling hook pretxnchangegroup.acl: hgext.acl.hook
576 580 acl: acl.allow enabled, 1 entries for user barney
577 581 acl: acl.deny enabled, 0 entries for user barney
578 582 acl: allowing changeset ef1ea85a6374
579 583 acl: allowing changeset f9cafe1212c8
580 584 acl: allowing changeset 911600dab2ae
585 updating the branch cache
581 586 rolling back last transaction
582 587 0:6675d58eff77
583 588
1 NO CONTENT: modified file, binary diff hidden
@@ -1,139 +1,139 b''
1 1 adding da/foo
2 2 adding foo
3 3 % manifest
4 4 200 Script output follows
5 5
6 6
7 7 drwxr-xr-x da
8 8 -rw-r--r-- 4 foo
9 9
10 10
11 11 200 Script output follows
12 12
13 13
14 14 -rw-r--r-- 4 foo
15 15
16 16
17 17 % plain file
18 18 200 Script output follows
19 19
20 20 foo
21 21 % should give a 404 - static file that does not exist
22 22 404 Not Found
23 23
24 24 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
25 25 <html>
26 26 <head>
27 <link rel="icon" href="/static/hgicon.png" type="image/png">
27 <link rel="icon" href="/static/hgicon.png" type="image/png" />
28 28 <meta name="robots" content="index, nofollow" />
29 29 <link rel="stylesheet" href="/static/style.css" type="text/css" />
30 30
31 31 <title>Mercurial Error</title>
32 32 </head>
33 33 <body>
34 34
35 35 <h2>Mercurial Error</h2>
36 36
37 37 <p>
38 38 An error occurred while processing your request:
39 39 </p>
40 40 <p>
41 41 Not Found
42 42 </p>
43 43
44 44
45 45 <div class="logo">
46 46 powered by<br/>
47 47 <a href="http://www.selenic.com/mercurial/">mercurial</a>
48 48 </div>
49 49
50 50 </body>
51 51 </html>
52 52
53 53 % should give a 404 - bad revision
54 54 404 Not Found
55 55
56 56
57 57 error: revision not found: spam
58 58 % should give a 400 - bad command
59 59 400
60 60
61 61
62 62 error: No such method: spam
63 63 % should give a 404 - file does not exist
64 64 404 Not Found
65 65
66 66
67 67 error: Path not found: bork/
68 68 % stop and restart
69 69 7 log lines written
70 70 % static file
71 71 200 Script output follows
72 72
73 73 body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; }
74 74 a { color:#0000cc; }
75 75 a:hover, a:visited, a:active { color:#880000; }
76 76 div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
77 77 div.page_header a:visited { color:#0000cc; }
78 78 div.page_header a:hover { color:#880000; }
79 79 div.page_nav { padding:8px; }
80 80 div.page_nav a:visited { color:#0000cc; }
81 81 div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
82 82 div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
83 83 div.page_footer_text { float:left; color:#555555; font-style:italic; }
84 84 div.page_body { padding:8px; }
85 85 div.title, a.title {
86 86 display:block; padding:6px 8px;
87 87 font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000;
88 88 }
89 89 a.title:hover { background-color: #d9d8d1; }
90 90 div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; }
91 91 div.log_body { padding:8px 8px 8px 150px; }
92 92 .age { white-space:nowrap; }
93 93 span.age { position:relative; float:left; width:142px; font-style:italic; }
94 94 div.log_link {
95 95 padding:0px 8px;
96 96 font-size:10px; font-family:sans-serif; font-style:normal;
97 97 position:relative; float:left; width:136px;
98 98 }
99 99 div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; }
100 100 a.list { text-decoration:none; color:#000000; }
101 101 a.list:hover { text-decoration:underline; color:#880000; }
102 102 table { padding:8px 4px; }
103 103 th { padding:2px 5px; font-size:12px; text-align:left; }
104 104 tr.light:hover, .parity0:hover { background-color:#edece6; }
105 105 tr.dark, .parity1 { background-color:#f6f6f0; }
106 106 tr.dark:hover, .parity1:hover { background-color:#edece6; }
107 107 td { padding:2px 5px; font-size:12px; vertical-align:top; }
108 108 td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
109 109 div.pre { font-family:monospace; font-size:12px; white-space:pre; }
110 110 div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
111 111 div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
112 112 div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
113 113 .linenr { color:#999999; text-decoration:none }
114 114 a.rss_logo {
115 115 float:right; padding:3px 6px; line-height:10px;
116 116 border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
117 117 color:#ffffff; background-color:#ff6600;
118 118 font-weight:bold; font-family:sans-serif; font-size:10px;
119 119 text-align:center; text-decoration:none;
120 120 }
121 121 a.rss_logo:hover { background-color:#ee5500; }
122 122 pre { margin: 0; }
123 123 span.logtags span {
124 124 padding: 0px 4px;
125 125 font-size: 10px;
126 126 font-weight: normal;
127 127 border: 1px solid;
128 128 background-color: #ffaaff;
129 129 border-color: #ffccff #ff00ee #ff00ee #ffccff;
130 130 }
131 131 span.logtags span.tagtag {
132 132 background-color: #ffffaa;
133 133 border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
134 134 }
135 135 span.logtags span.branchtag {
136 136 background-color: #aaffaa;
137 137 border-color: #ccffcc #00cc33 #00cc33 #ccffcc;
138 138 }
139 139 % errors
@@ -1,197 +1,209 b''
1 1 #!/bin/sh
2 2
3 3 hg init a
4 4 mkdir a/d1
5 5 mkdir a/d1/d2
6 6 echo line 1 > a/a
7 7 echo line 1 > a/d1/d2/a
8 8 hg --cwd a ci -d '0 0' -Ama
9 9
10 10 echo line 2 >> a/a
11 11 hg --cwd a ci -u someone -d '1 0' -m'second change'
12 12
13 13 echo % import exported patch
14 14 hg clone -r0 a b
15 15 hg --cwd a export tip > tip.patch
16 16 hg --cwd b import ../tip.patch
17 17 echo % message should be same
18 18 hg --cwd b tip | grep 'second change'
19 19 echo % committer should be same
20 20 hg --cwd b tip | grep someone
21 21 rm -r b
22 22
23 23 echo % import of plain diff should fail without message
24 24 hg clone -r0 a b
25 25 hg --cwd a diff -r0:1 > tip.patch
26 26 hg --cwd b import ../tip.patch
27 27 rm -r b
28 28
29 29 echo % import of plain diff should be ok with message
30 30 hg clone -r0 a b
31 31 hg --cwd a diff -r0:1 > tip.patch
32 32 hg --cwd b import -mpatch ../tip.patch
33 33 rm -r b
34 34
35 35 echo % import of plain diff with specific date and user
36 36 hg clone -r0 a b
37 37 hg --cwd a diff -r0:1 > tip.patch
38 38 hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../tip.patch
39 39 hg -R b tip -pv
40 40 rm -r b
41 41
42 42 echo % import of plain diff should be ok with --no-commit
43 43 hg clone -r0 a b
44 44 hg --cwd a diff -r0:1 > tip.patch
45 45 hg --cwd b import --no-commit ../tip.patch
46 46 hg --cwd b diff --nodates
47 47 rm -r b
48 48
49 49 echo % hg -R repo import
50 50 # put the clone in a subdir - having a directory named "a"
51 51 # used to hide a bug.
52 52 mkdir dir
53 53 hg clone -r0 a dir/b
54 54 hg --cwd a export tip > dir/tip.patch
55 55 cd dir
56 56 hg -R b import tip.patch
57 57 cd ..
58 58 rm -r dir
59 59
60 60 echo % import from stdin
61 61 hg clone -r0 a b
62 62 hg --cwd a export tip | hg --cwd b import -
63 63 rm -r b
64 64
65 65 echo % override commit message
66 66 hg clone -r0 a b
67 67 hg --cwd a export tip | hg --cwd b import -m 'override' -
68 68 hg --cwd b tip | grep override
69 69 rm -r b
70 70
71 71 cat > mkmsg.py <<EOF
72 72 import email.Message, sys
73 73 msg = email.Message.Message()
74 74 msg.set_payload('email commit message\n' + open('tip.patch', 'rb').read())
75 75 msg['Subject'] = 'email patch'
76 76 msg['From'] = 'email patcher'
77 77 sys.stdout.write(msg.as_string())
78 78 EOF
79 79
80 80 echo % plain diff in email, subject, message body
81 81 hg clone -r0 a b
82 82 hg --cwd a diff -r0:1 > tip.patch
83 83 python mkmsg.py > msg.patch
84 84 hg --cwd b import ../msg.patch
85 85 hg --cwd b tip | grep email
86 86 rm -r b
87 87
88 88 echo % plain diff in email, no subject, message body
89 89 hg clone -r0 a b
90 90 grep -v '^Subject:' msg.patch | hg --cwd b import -
91 91 rm -r b
92 92
93 93 echo % plain diff in email, subject, no message body
94 94 hg clone -r0 a b
95 95 grep -v '^email ' msg.patch | hg --cwd b import -
96 96 rm -r b
97 97
98 98 echo % plain diff in email, no subject, no message body, should fail
99 99 hg clone -r0 a b
100 100 egrep -v '^(Subject|email)' msg.patch | hg --cwd b import -
101 101 rm -r b
102 102
103 103 echo % hg export in email, should use patch header
104 104 hg clone -r0 a b
105 105 hg --cwd a export tip > tip.patch
106 106 python mkmsg.py | hg --cwd b import -
107 107 hg --cwd b tip | grep second
108 108 rm -r b
109 109
110 110 # subject: duplicate detection, removal of [PATCH]
111 111 # The '---' tests the gitsendmail handling without proper mail headers
112 112 cat > mkmsg2.py <<EOF
113 113 import email.Message, sys
114 114 msg = email.Message.Message()
115 115 msg.set_payload('email patch\n\nnext line\n---\n' + open('tip.patch').read())
116 116 msg['Subject'] = '[PATCH] email patch'
117 117 msg['From'] = 'email patcher'
118 118 sys.stdout.write(msg.as_string())
119 119 EOF
120 120
121 121 echo '% plain diff in email, [PATCH] subject, message body with subject'
122 122 hg clone -r0 a b
123 123 hg --cwd a diff -r0:1 > tip.patch
124 124 python mkmsg2.py | hg --cwd b import -
125 125 hg --cwd b tip --template '{desc}\n'
126 126 rm -r b
127 127
128 # We weren't backing up the correct dirstate file when importing many patches
129 # (issue963)
130 echo '% import patch1 patch2; rollback'
131 echo line 3 >> a/a
132 hg --cwd a ci -m'third change'
133 hg --cwd a export -o '../patch%R' 1 2
134 hg clone -qr0 a b
135 hg --cwd b parents --template 'parent: #rev#\n'
136 hg --cwd b import ../patch1 ../patch2
137 hg --cwd b rollback
138 hg --cwd b parents --template 'parent: #rev#\n'
139 rm -r b
128 140
129 141 # bug non regression test
130 142 # importing a patch in a subdirectory failed at the commit stage
131 143 echo line 2 >> a/d1/d2/a
132 144 hg --cwd a ci -u someoneelse -d '1 0' -m'subdir change'
133 145 echo % hg import in a subdirectory
134 146 hg clone -r0 a b
135 147 hg --cwd a export tip | sed -e 's/d1\/d2\///' > tip.patch
136 148 dir=`pwd`
137 149 cd b/d1/d2 2>&1 > /dev/null
138 150 hg import ../../../tip.patch
139 151 cd $dir
140 152 echo "% message should be 'subdir change'"
141 153 hg --cwd b tip | grep 'subdir change'
142 154 echo "% committer should be 'someoneelse'"
143 155 hg --cwd b tip | grep someoneelse
144 156 echo "% should be empty"
145 157 hg --cwd b status
146 158
147 159
148 160 # Test fuzziness (ambiguous patch location, fuzz=2)
149 161 echo % test fuzziness
150 162 hg init fuzzy
151 163 cd fuzzy
152 164 echo line1 > a
153 165 echo line0 >> a
154 166 echo line3 >> a
155 167 hg ci -Am adda
156 168 echo line1 > a
157 169 echo line2 >> a
158 170 echo line0 >> a
159 171 echo line3 >> a
160 172 hg ci -m change a
161 173 hg export tip > tip.patch
162 174 hg up -C 0
163 175 echo line1 > a
164 176 echo line0 >> a
165 177 echo line1 >> a
166 178 echo line0 >> a
167 179 hg ci -m brancha
168 180 hg import -v tip.patch
169 181 cd ..
170 182
171 183 # Test hunk touching empty files (issue906)
172 184 hg init empty
173 185 cd empty
174 186 touch a
175 187 touch b1
176 188 touch c1
177 189 echo d > d
178 190 hg ci -Am init
179 191 echo a > a
180 192 echo b > b1
181 193 hg mv b1 b2
182 194 echo c > c1
183 195 hg copy c1 c2
184 196 rm d
185 197 touch d
186 198 hg diff --git
187 199 hg ci -m empty
188 200 hg export --git tip > empty.diff
189 201 hg up -C 0
190 202 hg import empty.diff
191 203 for name in a b1 b2 c1 c2 d;
192 204 do
193 205 echo % $name file
194 206 test -f $name && cat $name
195 207 done
196 208 cd ..
197 209
@@ -1,219 +1,225 b''
1 1 adding a
2 2 adding d1/d2/a
3 3 % import exported patch
4 4 requesting all changes
5 5 adding changesets
6 6 adding manifests
7 7 adding file changes
8 8 added 1 changesets with 2 changes to 2 files
9 9 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 10 applying ../tip.patch
11 11 % message should be same
12 12 summary: second change
13 13 % committer should be same
14 14 user: someone
15 15 % import of plain diff should fail without message
16 16 requesting all changes
17 17 adding changesets
18 18 adding manifests
19 19 adding file changes
20 20 added 1 changesets with 2 changes to 2 files
21 21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 22 applying ../tip.patch
23 23 transaction abort!
24 24 rollback completed
25 25 abort: empty commit message
26 26 % import of plain diff should be ok with message
27 27 requesting all changes
28 28 adding changesets
29 29 adding manifests
30 30 adding file changes
31 31 added 1 changesets with 2 changes to 2 files
32 32 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
33 33 applying ../tip.patch
34 34 % import of plain diff with specific date and user
35 35 requesting all changes
36 36 adding changesets
37 37 adding manifests
38 38 adding file changes
39 39 added 1 changesets with 2 changes to 2 files
40 40 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 41 applying ../tip.patch
42 42 changeset: 1:ca68f19f3a40
43 43 tag: tip
44 44 user: user@nowhere.net
45 45 date: Thu Jan 01 00:00:01 1970 +0000
46 46 files: a
47 47 description:
48 48 patch
49 49
50 50
51 51 diff -r 80971e65b431 -r ca68f19f3a40 a
52 52 --- a/a Thu Jan 01 00:00:00 1970 +0000
53 53 +++ b/a Thu Jan 01 00:00:01 1970 +0000
54 54 @@ -1,1 +1,2 @@
55 55 line 1
56 56 +line 2
57 57
58 58 % import of plain diff should be ok with --no-commit
59 59 requesting all changes
60 60 adding changesets
61 61 adding manifests
62 62 adding file changes
63 63 added 1 changesets with 2 changes to 2 files
64 64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 65 applying ../tip.patch
66 66 diff -r 80971e65b431 a
67 67 --- a/a
68 68 +++ b/a
69 69 @@ -1,1 +1,2 @@
70 70 line 1
71 71 +line 2
72 72 % hg -R repo import
73 73 requesting all changes
74 74 adding changesets
75 75 adding manifests
76 76 adding file changes
77 77 added 1 changesets with 2 changes to 2 files
78 78 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
79 79 applying tip.patch
80 80 % import from stdin
81 81 requesting all changes
82 82 adding changesets
83 83 adding manifests
84 84 adding file changes
85 85 added 1 changesets with 2 changes to 2 files
86 86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 87 applying patch from stdin
88 88 % override commit message
89 89 requesting all changes
90 90 adding changesets
91 91 adding manifests
92 92 adding file changes
93 93 added 1 changesets with 2 changes to 2 files
94 94 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
95 95 applying patch from stdin
96 96 summary: override
97 97 % plain diff in email, subject, message body
98 98 requesting all changes
99 99 adding changesets
100 100 adding manifests
101 101 adding file changes
102 102 added 1 changesets with 2 changes to 2 files
103 103 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
104 104 applying ../msg.patch
105 105 user: email patcher
106 106 summary: email patch
107 107 % plain diff in email, no subject, message body
108 108 requesting all changes
109 109 adding changesets
110 110 adding manifests
111 111 adding file changes
112 112 added 1 changesets with 2 changes to 2 files
113 113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
114 114 applying patch from stdin
115 115 % plain diff in email, subject, no message body
116 116 requesting all changes
117 117 adding changesets
118 118 adding manifests
119 119 adding file changes
120 120 added 1 changesets with 2 changes to 2 files
121 121 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
122 122 applying patch from stdin
123 123 % plain diff in email, no subject, no message body, should fail
124 124 requesting all changes
125 125 adding changesets
126 126 adding manifests
127 127 adding file changes
128 128 added 1 changesets with 2 changes to 2 files
129 129 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 130 applying patch from stdin
131 131 transaction abort!
132 132 rollback completed
133 133 abort: empty commit message
134 134 % hg export in email, should use patch header
135 135 requesting all changes
136 136 adding changesets
137 137 adding manifests
138 138 adding file changes
139 139 added 1 changesets with 2 changes to 2 files
140 140 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
141 141 applying patch from stdin
142 142 summary: second change
143 143 % plain diff in email, [PATCH] subject, message body with subject
144 144 requesting all changes
145 145 adding changesets
146 146 adding manifests
147 147 adding file changes
148 148 added 1 changesets with 2 changes to 2 files
149 149 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
150 150 applying patch from stdin
151 151 email patch
152 152
153 153 next line
154 154 ---
155 % import patch1 patch2; rollback
156 parent: 0
157 applying ../patch1
158 applying ../patch2
159 rolling back last transaction
160 parent: 1
155 161 % hg import in a subdirectory
156 162 requesting all changes
157 163 adding changesets
158 164 adding manifests
159 165 adding file changes
160 166 added 1 changesets with 2 changes to 2 files
161 167 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
162 168 applying ../../../tip.patch
163 169 % message should be 'subdir change'
164 170 summary: subdir change
165 171 % committer should be 'someoneelse'
166 172 user: someoneelse
167 173 % should be empty
168 174 % test fuzziness
169 175 adding a
170 176 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 177 applying tip.patch
172 178 patching file a
173 179 Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
174 180 a
175 181 adding a
176 182 adding b1
177 183 adding c1
178 184 adding d
179 185 diff --git a/a b/a
180 186 --- a/a
181 187 +++ b/a
182 188 @@ -0,0 +1,1 @@
183 189 +a
184 190 diff --git a/b1 b/b2
185 191 rename from b1
186 192 rename to b2
187 193 --- a/b1
188 194 +++ b/b2
189 195 @@ -0,0 +1,1 @@
190 196 +b
191 197 diff --git a/c1 b/c1
192 198 --- a/c1
193 199 +++ b/c1
194 200 @@ -0,0 +1,1 @@
195 201 +c
196 202 diff --git a/c1 b/c2
197 203 copy from c1
198 204 copy to c2
199 205 --- a/c1
200 206 +++ b/c2
201 207 @@ -0,0 +1,1 @@
202 208 +c
203 209 diff --git a/d b/d
204 210 --- a/d
205 211 +++ b/d
206 212 @@ -1,1 +0,0 @@
207 213 -d
208 214 4 files updated, 0 files merged, 2 files removed, 0 files unresolved
209 215 applying empty.diff
210 216 % a file
211 217 a
212 218 % b1 file
213 219 % b2 file
214 220 b
215 221 % c1 file
216 222 c
217 223 % c2 file
218 224 c
219 225 % d file
@@ -1,476 +1,489 b''
1 1 #!/bin/sh
2 2
3 3 checkundo()
4 4 {
5 5 if [ -f .hg/store/undo ]; then
6 6 echo ".hg/store/undo still exists after $1"
7 7 fi
8 8 }
9 9
10 10 echo "[extensions]" >> $HGRCPATH
11 11 echo "mq=" >> $HGRCPATH
12 12
13 13 echo % help
14 14 hg help mq
15 15
16 16 hg init a
17 17 cd a
18 18 echo a > a
19 19 hg ci -Ama
20 20
21 21 hg clone . ../k
22 22
23 23 mkdir b
24 24 echo z > b/z
25 25 hg ci -Ama
26 26
27 27 echo % qinit
28 28
29 29 hg qinit
30 30
31 31 cd ..
32 32 hg init b
33 33
34 34 echo % -R qinit
35 35
36 36 hg -R b qinit
37 37
38 38 hg init c
39 39
40 40 echo % qinit -c
41 41
42 42 hg --cwd c qinit -c
43 43 hg -R c/.hg/patches st
44 44
45 echo % qnew should refuse bad patch names
46 hg -R c qnew series
47 hg -R c qnew status
48 hg -R c qnew guards
49 hg -R c qnew .hgignore
50
45 51 echo % qnew implies add
46 52
47 53 hg -R c qnew test.patch
48 54 hg -R c/.hg/patches st
49 55
50 56 echo '% qinit; qinit -c'
51 57 hg init d
52 58 cd d
53 59 hg qinit
54 60 hg qinit -c
55 61 # qinit -c should create both files if they don't exist
56 62 echo ' .hgignore:'
57 63 cat .hg/patches/.hgignore
58 64 echo ' series:'
59 65 cat .hg/patches/series
60 66 hg qinit -c 2>&1 | sed -e 's/repository.*already/repository already/'
61 67 cd ..
62 68
63 69 echo '% qinit; <stuff>; qinit -c'
64 70 hg init e
65 71 cd e
66 72 hg qnew A
67 73 checkundo qnew
68 74 echo foo > foo
69 75 hg add foo
70 76 hg qrefresh
71 77 hg qnew B
72 78 echo >> foo
73 79 hg qrefresh
74 80 echo status >> .hg/patches/.hgignore
75 81 echo bleh >> .hg/patches/.hgignore
76 82 hg qinit -c
77 83 hg -R .hg/patches status
78 84 # qinit -c shouldn't touch these files if they already exist
79 85 echo ' .hgignore:'
80 86 cat .hg/patches/.hgignore
81 87 echo ' series:'
82 88 cat .hg/patches/series
83 89 cd ..
84 90
85 91 cd a
86 92
87 93 echo a > somefile
88 94 hg add somefile
89 95
90 96 echo % qnew with uncommitted changes
91 97
92 98 hg qnew uncommitted.patch
93 99 hg st
94 100 hg qseries
95 101
96 102 echo '% qnew with uncommitted changes and missing file (issue 803)'
97 103
98 104 hg qnew issue803.patch someotherfile 2>&1 | \
99 105 sed -e 's/someotherfile:.*/someotherfile: No such file or directory/'
100 106 hg st
101 107 hg qseries
102 108 hg qpop -f
103 109 hg qdel issue803.patch
104 110
105 111 hg revert --no-backup somefile
106 112 rm somefile
107 113
108 114 echo % qnew -m
109 115
110 116 hg qnew -m 'foo bar' test.patch
111 117 cat .hg/patches/test.patch
112 118
113 119 echo % qrefresh
114 120
115 121 echo a >> a
116 122 hg qrefresh
117 123 sed -e "s/^\(diff -r \)\([a-f0-9]* \)/\1 x/" \
118 124 -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
119 125 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/test.patch
120 126
121 127 echo % empty qrefresh
122 128
123 129 hg qrefresh -X a
124 130 echo 'revision:'
125 131 hg diff -r -2 -r -1
126 132 echo 'patch:'
127 133 cat .hg/patches/test.patch
128 134 echo 'working dir diff:'
129 135 hg diff --nodates -q
130 136 # restore things
131 137 hg qrefresh
132 138 checkundo qrefresh
133 139
134 140 echo % qpop
135 141
136 142 hg qpop
137 143 checkundo qpop
138 144
139 145 echo % qpush
140 146
141 147 hg qpush
142 148 checkundo qpush
143 149
144 150 cd ..
145 151
146 152 echo % pop/push outside repo
147 153
148 154 hg -R a qpop
149 155 hg -R a qpush
150 156
151 157 cd a
152 158 hg qnew test2.patch
153 159
154 160 echo % qrefresh in subdir
155 161
156 162 cd b
157 163 echo a > a
158 164 hg add a
159 165 hg qrefresh
160 166
161 167 echo % pop/push -a in subdir
162 168
163 169 hg qpop -a
164 170 hg --traceback qpush -a
165 171
166 172 echo % qseries
167 173 hg qseries
168 174 hg qpop
169 175 hg qseries -vs
170 176 hg qpush
171 177
172 178 echo % qapplied
173 179 hg qapplied
174 180
175 181 echo % qtop
176 182 hg qtop
177 183
178 184 echo % qprev
179 185 hg qprev
180 186
181 187 echo % qnext
182 188 hg qnext
183 189
184 190 echo % pop, qnext, qprev, qapplied
185 191 hg qpop
186 192 hg qnext
187 193 hg qprev
188 194 hg qapplied
189 195
190 196 echo % commit should fail
191 197 hg commit
192 198
193 199 echo % push should fail
194 200 hg push ../../k
195 201
196 202 echo % qunapplied
197 203 hg qunapplied
198 204
199 205 echo % qpush/qpop with index
200 206 hg qnew test1b.patch
201 207 echo 1b > 1b
202 208 hg add 1b
203 209 hg qrefresh
204 210 hg qpush 2
205 211 hg qpop 0
206 212 hg qpush test.patch+1
207 213 hg qpush test.patch+2
208 214 hg qpop test2.patch-1
209 215 hg qpop test2.patch-2
210 216 hg qpush test1b.patch+1
211 217
212 218 echo % push should succeed
213 219 hg qpop -a
214 220 hg push ../../k
215 221
216 222 echo % qpush/qpop error codes
217 223 errorcode()
218 224 {
219 225 hg "$@" && echo " $@ succeeds" || echo " $@ fails"
220 226 }
221 227
222 228 # we want to start with some patches applied
223 229 hg qpush -a
224 230 echo " % pops all patches and succeeds"
225 231 errorcode qpop -a
226 232 echo " % does nothing and succeeds"
227 233 errorcode qpop -a
228 234 echo " % fails - nothing else to pop"
229 235 errorcode qpop
230 236 echo " % pushes a patch and succeeds"
231 237 errorcode qpush
232 238 echo " % pops a patch and succeeds"
233 239 errorcode qpop
234 240 echo " % pushes up to test1b.patch and succeeds"
235 241 errorcode qpush test1b.patch
236 242 echo " % does nothing and succeeds"
237 243 errorcode qpush test1b.patch
238 244 echo " % does nothing and succeeds"
239 245 errorcode qpop test1b.patch
240 246 echo " % fails - can't push to this patch"
241 247 errorcode qpush test.patch
242 248 echo " % fails - can't pop to this patch"
243 249 errorcode qpop test2.patch
244 250 echo " % pops up to test.patch and succeeds"
245 251 errorcode qpop test.patch
246 252 echo " % pushes all patches and succeeds"
247 253 errorcode qpush -a
248 254 echo " % does nothing and succeeds"
249 255 errorcode qpush -a
250 256 echo " % fails - nothing else to push"
251 257 errorcode qpush
252 258 echo " % does nothing and succeeds"
253 259 errorcode qpush test2.patch
254 260
255 261
256 262 echo % strip
257 263 cd ../../b
258 264 echo x>x
259 265 hg ci -Ama
260 266 hg strip tip 2>&1 | sed 's/\(saving bundle to \).*/\1/'
261 267 hg unbundle .hg/strip-backup/*
262 268
263 269 echo '% cd b; hg qrefresh'
264 270 hg init refresh
265 271 cd refresh
266 272 echo a > a
267 273 hg ci -Ama -d'0 0'
268 274 hg qnew -mfoo foo
269 275 echo a >> a
270 276 hg qrefresh
271 277 mkdir b
272 278 cd b
273 279 echo f > f
274 280 hg add f
275 281 hg qrefresh
276 282 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
277 283 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
278 284 echo % hg qrefresh .
279 285 hg qrefresh .
280 286 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
281 287 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" ../.hg/patches/foo
282 288 hg status
283 289
284 290 echo % qpush failure
285 291 cd ..
286 292 hg qrefresh
287 293 hg qnew -mbar bar
288 294 echo foo > foo
289 295 echo bar > bar
290 296 hg add foo bar
291 297 hg qrefresh
292 298 hg qpop -a
293 299 echo bar > foo
294 300 hg qpush -a
295 301 hg st
296 302
297 303 echo % mq tags
298 304 hg log --template '{rev} {tags}\n' -r qparent:qtip
299 305
306 echo % bad node in status
307 hg qpop
308 hg strip -qn tip
309 hg tip 2>&1 | sed -e 's/unknown node .*/unknown node/'
310 hg branches 2>&1 | sed -e 's/unknown node .*/unknown node/'
311 hg qpop
312
300 313 cat >>$HGRCPATH <<EOF
301 314 [diff]
302 315 git = True
303 316 EOF
304 317 cd ..
305 318 hg init git
306 319 cd git
307 320 hg qinit
308 321
309 322 hg qnew -m'new file' new
310 323 echo foo > new
311 324 chmod +x new
312 325 hg add new
313 326 hg qrefresh
314 327 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
315 328 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/new
316 329
317 330 hg qnew -m'copy file' copy
318 331 hg cp new copy
319 332 hg qrefresh
320 333 sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
321 334 -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" .hg/patches/copy
322 335
323 336 hg qpop
324 337 hg qpush
325 338 hg qdiff
326 339 cat >>$HGRCPATH <<EOF
327 340 [diff]
328 341 git = False
329 342 EOF
330 343 hg qdiff --git
331 344
332 345 cd ..
333 346 hg init slow
334 347 cd slow
335 348 hg qinit
336 349 echo foo > foo
337 350 hg add foo
338 351 hg ci -m 'add foo'
339 352 hg qnew bar
340 353 echo bar > bar
341 354 hg add bar
342 355 hg mv foo baz
343 356 hg qrefresh --git
344 357 hg up -C 0
345 358 echo >> foo
346 359 hg ci -m 'change foo'
347 360 hg up -C 1
348 361 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
349 362 cat .hg/patches/bar
350 363 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
351 364 hg qrefresh --git
352 365 cat .hg/patches/bar
353 366 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
354 367 hg qrefresh
355 368 grep 'diff --git' .hg/patches/bar
356 369
357 370 echo
358 371 hg up -C 1
359 372 echo >> foo
360 373 hg ci -m 'change foo again'
361 374 hg up -C 2
362 375 hg mv bar quux
363 376 hg mv baz bleh
364 377 hg qrefresh --git 2>&1 | grep -v 'saving bundle'
365 378 cat .hg/patches/bar
366 379 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
367 380 hg mv quux fred
368 381 hg mv bleh barney
369 382 hg qrefresh --git
370 383 cat .hg/patches/bar
371 384 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
372 385
373 386 echo % refresh omitting an added file
374 387 hg qnew baz
375 388 echo newfile > newfile
376 389 hg add newfile
377 390 hg qrefresh
378 391 hg st -A newfile
379 392 hg qrefresh -X newfile
380 393 hg st -A newfile
381 394 hg revert newfile
382 395 rm newfile
383 396 hg qpop
384 397 hg qdel baz
385 398
386 399 echo % create a git patch
387 400 echo a > alexander
388 401 hg add alexander
389 402 hg qnew -f --git addalexander
390 403 grep diff .hg/patches/addalexander
391 404
392 405 echo % create a git binary patch
393 406 cat > writebin.py <<EOF
394 407 import sys
395 408 path = sys.argv[1]
396 409 open(path, 'wb').write('BIN\x00ARY')
397 410 EOF
398 411 python writebin.py bucephalus
399 412
400 413 python "$TESTDIR/md5sum.py" bucephalus
401 414 hg add bucephalus
402 415 hg qnew -f --git addbucephalus
403 416 grep diff .hg/patches/addbucephalus
404 417
405 418 echo % check binary patches can be popped and pushed
406 419 hg qpop
407 420 test -f bucephalus && echo % bucephalus should not be there
408 421 hg qpush
409 422 test -f bucephalus || echo % bucephalus should be there
410 423 python "$TESTDIR/md5sum.py" bucephalus
411 424
412 425
413 426 echo '% strip again'
414 427 cd ..
415 428 hg init strip
416 429 cd strip
417 430 touch foo
418 431 hg add foo
419 432 hg ci -m 'add foo' -d '0 0'
420 433 echo >> foo
421 434 hg ci -m 'change foo 1' -d '0 0'
422 435 hg up -C 0
423 436 echo 1 >> foo
424 437 hg ci -m 'change foo 2' -d '0 0'
425 438 HGMERGE=true hg merge
426 439 hg ci -m merge -d '0 0'
427 440 hg log
428 441 hg strip 1 2>&1 | sed 's/\(saving bundle to \).*/\1/'
429 442 checkundo strip
430 443 hg log
431 444 cd ..
432 445
433 446 echo '% qclone'
434 447 qlog()
435 448 {
436 449 echo 'main repo:'
437 450 hg log --template ' rev {rev}: {desc}\n'
438 451 echo 'patch repo:'
439 452 hg -R .hg/patches log --template ' rev {rev}: {desc}\n'
440 453 }
441 454 hg init qclonesource
442 455 cd qclonesource
443 456 echo foo > foo
444 457 hg add foo
445 458 hg ci -m 'add foo'
446 459 hg qinit
447 460 hg qnew patch1
448 461 echo bar >> foo
449 462 hg qrefresh -m 'change foo'
450 463 cd ..
451 464
452 465 # repo with unversioned patch dir
453 466 hg qclone qclonesource failure
454 467
455 468 cd qclonesource
456 469 hg qinit -c
457 470 hg qci -m checkpoint
458 471 qlog
459 472 cd ..
460 473
461 474 # repo with patches applied
462 475 hg qclone qclonesource qclonedest
463 476 cd qclonedest
464 477 qlog
465 478 cd ..
466 479
467 480 # repo with patches unapplied
468 481 cd qclonesource
469 482 hg qpop -a
470 483 qlog
471 484 cd ..
472 485 hg qclone qclonesource qclonedest2
473 486 cd qclonedest2
474 487 qlog
475 488 cd ..
476 489
@@ -1,455 +1,472 b''
1 1 % help
2 2 mq extension - patch management and development
3 3
4 4 This extension lets you work with a stack of patches in a Mercurial
5 5 repository. It manages two stacks of patches - all known patches, and
6 6 applied patches (subset of known patches).
7 7
8 8 Known patches are represented as patch files in the .hg/patches
9 9 directory. Applied patches are both patch files and changesets.
10 10
11 11 Common tasks (use "hg help command" for more details):
12 12
13 13 prepare repository to work with patches qinit
14 14 create new patch qnew
15 15 import existing patch qimport
16 16
17 17 print patch series qseries
18 18 print applied patches qapplied
19 19 print name of top applied patch qtop
20 20
21 21 add known patch to applied stack qpush
22 22 remove patch from applied stack qpop
23 23 refresh contents of top applied patch qrefresh
24 24
25 25 list of commands:
26 26
27 27 qapplied print the patches already applied
28 28 qclone clone main and patch repository at same time
29 29 qcommit commit changes in the queue repository
30 30 qdelete remove patches from queue
31 31 qdiff diff of the current patch
32 32 qfold fold the named patches into the current patch
33 33 qgoto push or pop patches until named patch is at top of stack
34 34 qguard set or print guards for a patch
35 35 qheader Print the header of the topmost or specified patch
36 36 qimport import a patch
37 37 qinit init a new queue repository
38 38 qnew create a new patch
39 39 qnext print the name of the next patch
40 40 qpop pop the current patch off the stack
41 41 qprev print the name of the previous patch
42 42 qpush push the next patch onto the stack
43 43 qrefresh update the current patch
44 44 qrename rename a patch
45 45 qrestore restore the queue state saved by a rev
46 46 qsave save current queue state
47 47 qselect set or print guarded patches to push
48 48 qseries print the entire series file
49 49 qtop print the name of the current patch
50 50 qunapplied print the patches not yet applied
51 51 strip strip a revision and all later revs on the same branch
52 52
53 53 use "hg -v help mq" to show aliases and global options
54 54 adding a
55 55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 56 adding b/z
57 57 % qinit
58 58 % -R qinit
59 59 % qinit -c
60 60 A .hgignore
61 61 A series
62 % qnew should refuse bad patch names
63 abort: "series" cannot be used as the name of a patch
64 abort: "status" cannot be used as the name of a patch
65 abort: "guards" cannot be used as the name of a patch
66 abort: ".hgignore" cannot be used as the name of a patch
62 67 % qnew implies add
63 68 A .hgignore
64 69 A series
65 70 A test.patch
66 71 % qinit; qinit -c
67 72 .hgignore:
68 73 syntax: glob
69 74 status
70 75 guards
71 76 series:
72 77 abort: repository already exists!
73 78 % qinit; <stuff>; qinit -c
74 79 adding .hg/patches/A
75 80 adding .hg/patches/B
76 81 A .hgignore
77 82 A A
78 83 A B
79 84 A series
80 85 .hgignore:
81 86 status
82 87 bleh
83 88 series:
84 89 A
85 90 B
86 91 % qnew with uncommitted changes
87 92 abort: local changes found, refresh first
88 93 A somefile
89 94 % qnew with uncommitted changes and missing file (issue 803)
90 95 someotherfile: No such file or directory
91 96 A somefile
92 97 issue803.patch
93 98 Patch queue now empty
94 99 % qnew -m
95 100 foo bar
96 101 % qrefresh
97 102 foo bar
98 103
99 104 diff -r xa
100 105 --- a/a
101 106 +++ b/a
102 107 @@ -1,1 +1,2 @@
103 108 a
104 109 +a
105 110 % empty qrefresh
106 111 revision:
107 112 patch:
108 113 foo bar
109 114
110 115 working dir diff:
111 116 --- a/a
112 117 +++ b/a
113 118 @@ -1,1 +1,2 @@
114 119 a
115 120 +a
116 121 % qpop
117 122 Patch queue now empty
118 123 % qpush
119 124 applying test.patch
120 125 Now at: test.patch
121 126 % pop/push outside repo
122 127 Patch queue now empty
123 128 applying test.patch
124 129 Now at: test.patch
125 130 % qrefresh in subdir
126 131 % pop/push -a in subdir
127 132 Patch queue now empty
128 133 applying test.patch
129 134 applying test2.patch
130 135 Now at: test2.patch
131 136 % qseries
132 137 test.patch
133 138 test2.patch
134 139 Now at: test.patch
135 140 0 A test.patch: foo bar
136 141 1 U test2.patch:
137 142 applying test2.patch
138 143 Now at: test2.patch
139 144 % qapplied
140 145 test.patch
141 146 test2.patch
142 147 % qtop
143 148 test2.patch
144 149 % qprev
145 150 test.patch
146 151 % qnext
147 152 All patches applied
148 153 % pop, qnext, qprev, qapplied
149 154 Now at: test.patch
150 155 test2.patch
151 156 Only one patch applied
152 157 test.patch
153 158 % commit should fail
154 159 abort: cannot commit over an applied mq patch
155 160 % push should fail
156 161 pushing to ../../k
157 162 abort: source has mq patches applied
158 163 % qunapplied
159 164 test2.patch
160 165 % qpush/qpop with index
161 166 applying test2.patch
162 167 Now at: test2.patch
163 168 Now at: test.patch
164 169 applying test1b.patch
165 170 Now at: test1b.patch
166 171 applying test2.patch
167 172 Now at: test2.patch
168 173 Now at: test1b.patch
169 174 Now at: test.patch
170 175 applying test1b.patch
171 176 applying test2.patch
172 177 Now at: test2.patch
173 178 % push should succeed
174 179 Patch queue now empty
175 180 pushing to ../../k
176 181 searching for changes
177 182 adding changesets
178 183 adding manifests
179 184 adding file changes
180 185 added 1 changesets with 1 changes to 1 files
181 186 % qpush/qpop error codes
182 187 applying test.patch
183 188 applying test1b.patch
184 189 applying test2.patch
185 190 Now at: test2.patch
186 191 % pops all patches and succeeds
187 192 Patch queue now empty
188 193 qpop -a succeeds
189 194 % does nothing and succeeds
190 195 no patches applied
191 196 qpop -a succeeds
192 197 % fails - nothing else to pop
193 198 no patches applied
194 199 qpop fails
195 200 % pushes a patch and succeeds
196 201 applying test.patch
197 202 Now at: test.patch
198 203 qpush succeeds
199 204 % pops a patch and succeeds
200 205 Patch queue now empty
201 206 qpop succeeds
202 207 % pushes up to test1b.patch and succeeds
203 208 applying test.patch
204 209 applying test1b.patch
205 210 Now at: test1b.patch
206 211 qpush test1b.patch succeeds
207 212 % does nothing and succeeds
208 213 qpush: test1b.patch is already at the top
209 214 qpush test1b.patch succeeds
210 215 % does nothing and succeeds
211 216 qpop: test1b.patch is already at the top
212 217 qpop test1b.patch succeeds
213 218 % fails - can't push to this patch
214 219 abort: cannot push to a previous patch: test.patch
215 220 qpush test.patch fails
216 221 % fails - can't pop to this patch
217 222 abort: patch test2.patch is not applied
218 223 qpop test2.patch fails
219 224 % pops up to test.patch and succeeds
220 225 Now at: test.patch
221 226 qpop test.patch succeeds
222 227 % pushes all patches and succeeds
223 228 applying test1b.patch
224 229 applying test2.patch
225 230 Now at: test2.patch
226 231 qpush -a succeeds
227 232 % does nothing and succeeds
228 233 all patches are currently applied
229 234 qpush -a succeeds
230 235 % fails - nothing else to push
231 236 patch series already fully applied
232 237 qpush fails
233 238 % does nothing and succeeds
234 239 all patches are currently applied
235 240 qpush test2.patch succeeds
236 241 % strip
237 242 adding x
238 243 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
239 244 saving bundle to
240 245 adding changesets
241 246 adding manifests
242 247 adding file changes
243 248 added 1 changesets with 1 changes to 1 files
244 249 (run 'hg update' to get a working copy)
245 250 % cd b; hg qrefresh
246 251 adding a
247 252 foo
248 253
249 254 diff -r cb9a9f314b8b a
250 255 --- a/a
251 256 +++ b/a
252 257 @@ -1,1 +1,2 @@
253 258 a
254 259 +a
255 260 diff -r cb9a9f314b8b b/f
256 261 --- /dev/null
257 262 +++ b/b/f
258 263 @@ -0,0 +1,1 @@
259 264 +f
260 265 % hg qrefresh .
261 266 foo
262 267
263 268 diff -r cb9a9f314b8b b/f
264 269 --- /dev/null
265 270 +++ b/b/f
266 271 @@ -0,0 +1,1 @@
267 272 +f
268 273 M a
269 274 % qpush failure
270 275 Patch queue now empty
271 276 applying foo
272 277 applying bar
273 278 file foo already exists
274 279 1 out of 1 hunk FAILED -- saving rejects to file foo.rej
275 280 patch failed, unable to continue (try -v)
276 281 patch failed, rejects left in working dir
277 282 Errors during apply, please fix and refresh bar
278 283 ? foo
279 284 ? foo.rej
280 285 % mq tags
281 286 0 qparent
282 287 1 qbase foo
283 288 2 qtip bar tip
289 % bad node in status
290 Now at: foo
291 changeset: 0:cb9a9f314b8b
292 mq status file refers to unknown node
293 tag: tip
294 user: test
295 date: Thu Jan 01 00:00:00 1970 +0000
296 summary: a
297
298 mq status file refers to unknown node
299 default 0:cb9a9f314b8b
300 abort: working directory revision is not qtip
284 301 new file
285 302
286 303 diff --git a/new b/new
287 304 new file mode 100755
288 305 --- /dev/null
289 306 +++ b/new
290 307 @@ -0,0 +1,1 @@
291 308 +foo
292 309 copy file
293 310
294 311 diff --git a/new b/copy
295 312 copy from new
296 313 copy to copy
297 314 Now at: new
298 315 applying copy
299 316 Now at: copy
300 317 diff --git a/new b/copy
301 318 copy from new
302 319 copy to copy
303 320 diff --git a/new b/copy
304 321 copy from new
305 322 copy to copy
306 323 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
307 324 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
308 325 adding branch
309 326 adding changesets
310 327 adding manifests
311 328 adding file changes
312 329 added 1 changesets with 1 changes to 1 files
313 330 Patch queue now empty
314 331 applying bar
315 332 Now at: bar
316 333 diff --git a/bar b/bar
317 334 new file mode 100644
318 335 --- /dev/null
319 336 +++ b/bar
320 337 @@ -0,0 +1,1 @@
321 338 +bar
322 339 diff --git a/foo b/baz
323 340 rename from foo
324 341 rename to baz
325 342 2 baz (foo)
326 343 diff --git a/bar b/bar
327 344 new file mode 100644
328 345 --- /dev/null
329 346 +++ b/bar
330 347 @@ -0,0 +1,1 @@
331 348 +bar
332 349 diff --git a/foo b/baz
333 350 rename from foo
334 351 rename to baz
335 352 2 baz (foo)
336 353 diff --git a/bar b/bar
337 354 diff --git a/foo b/baz
338 355
339 356 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
340 357 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
341 358 adding branch
342 359 adding changesets
343 360 adding manifests
344 361 adding file changes
345 362 added 1 changesets with 1 changes to 1 files
346 363 Patch queue now empty
347 364 applying bar
348 365 Now at: bar
349 366 diff --git a/foo b/bleh
350 367 rename from foo
351 368 rename to bleh
352 369 diff --git a/quux b/quux
353 370 new file mode 100644
354 371 --- /dev/null
355 372 +++ b/quux
356 373 @@ -0,0 +1,1 @@
357 374 +bar
358 375 3 bleh (foo)
359 376 diff --git a/foo b/barney
360 377 rename from foo
361 378 rename to barney
362 379 diff --git a/fred b/fred
363 380 new file mode 100644
364 381 --- /dev/null
365 382 +++ b/fred
366 383 @@ -0,0 +1,1 @@
367 384 +bar
368 385 3 barney (foo)
369 386 % refresh omitting an added file
370 387 C newfile
371 388 A newfile
372 389 Now at: bar
373 390 % create a git patch
374 391 diff --git a/alexander b/alexander
375 392 % create a git binary patch
376 393 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
377 394 diff --git a/bucephalus b/bucephalus
378 395 % check binary patches can be popped and pushed
379 396 Now at: addalexander
380 397 applying addbucephalus
381 398 Now at: addbucephalus
382 399 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
383 400 % strip again
384 401 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
385 402 merging foo
386 403 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
387 404 (branch merge, don't forget to commit)
388 405 changeset: 3:99615015637b
389 406 tag: tip
390 407 parent: 2:20cbbe65cff7
391 408 parent: 1:d2871fc282d4
392 409 user: test
393 410 date: Thu Jan 01 00:00:00 1970 +0000
394 411 summary: merge
395 412
396 413 changeset: 2:20cbbe65cff7
397 414 parent: 0:53245c60e682
398 415 user: test
399 416 date: Thu Jan 01 00:00:00 1970 +0000
400 417 summary: change foo 2
401 418
402 419 changeset: 1:d2871fc282d4
403 420 user: test
404 421 date: Thu Jan 01 00:00:00 1970 +0000
405 422 summary: change foo 1
406 423
407 424 changeset: 0:53245c60e682
408 425 user: test
409 426 date: Thu Jan 01 00:00:00 1970 +0000
410 427 summary: add foo
411 428
412 429 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
413 430 saving bundle to
414 431 saving bundle to
415 432 adding branch
416 433 adding changesets
417 434 adding manifests
418 435 adding file changes
419 436 added 1 changesets with 1 changes to 1 files
420 437 changeset: 1:20cbbe65cff7
421 438 tag: tip
422 439 user: test
423 440 date: Thu Jan 01 00:00:00 1970 +0000
424 441 summary: change foo 2
425 442
426 443 changeset: 0:53245c60e682
427 444 user: test
428 445 date: Thu Jan 01 00:00:00 1970 +0000
429 446 summary: add foo
430 447
431 448 % qclone
432 449 abort: versioned patch repository not found (see qinit -c)
433 450 adding .hg/patches/patch1
434 451 main repo:
435 452 rev 1: change foo
436 453 rev 0: add foo
437 454 patch repo:
438 455 rev 0: checkpoint
439 456 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
440 457 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
441 458 main repo:
442 459 rev 0: add foo
443 460 patch repo:
444 461 rev 0: checkpoint
445 462 Patch queue now empty
446 463 main repo:
447 464 rev 0: add foo
448 465 patch repo:
449 466 rev 0: checkpoint
450 467 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 468 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
452 469 main repo:
453 470 rev 0: add foo
454 471 patch repo:
455 472 rev 0: checkpoint
@@ -1,65 +1,74 b''
1 1 #!/bin/sh
2 2
3 3 hg init t
4 4 cd t
5 5 hg branches
6 6
7 7 echo foo > a
8 8 hg add a
9 9 hg ci -m "initial" -d "1000000 0"
10 10 hg branch foo
11 11 hg branch
12 12 hg ci -m "add branch name" -d "1000000 0"
13 13 hg branch bar
14 14 hg ci -m "change branch name" -d "1000000 0"
15 15 echo % branch shadowing
16 16 hg branch default
17 17 hg branch -f default
18 18 hg ci -m "clear branch name" -d "1000000 0"
19 19
20 20 hg co foo
21 21 hg branch
22 22 echo bleah > a
23 23 hg ci -m "modify a branch" -d "1000000 0"
24 24
25 25 hg merge
26 26 hg branch
27 27 hg ci -m "merge" -d "1000000 0"
28 28 hg log
29 29
30 30 hg branches
31 31 hg branches -q
32 32
33 33 echo % test for invalid branch cache
34 34 hg rollback
35 35 cp .hg/branch.cache .hg/bc-invalid
36 36 hg log -r foo
37 37 cp .hg/bc-invalid .hg/branch.cache
38 38 hg --debug log -r foo
39 39 rm .hg/branch.cache
40 40 echo corrupted > .hg/branch.cache
41 41 hg log -qr foo
42 42 cat .hg/branch.cache
43 43
44 echo % push should update the branch cache
45 hg init ../target
46 echo % pushing just rev 0
47 hg push -qr 0 ../target
48 cat ../target/.hg/branch.cache
49 echo % pushing everything
50 hg push -qf ../target
51 cat ../target/.hg/branch.cache
52
44 53 echo % update with no arguments: tipmost revision of the current branch
45 54 hg up -q -C 0
46 55 hg up -q
47 56 hg id
48 57 hg up -q 1
49 58 hg up -q
50 59 hg id
51 60 hg branch foobar
52 61 hg up
53 62
54 63 echo % fastforward merge
55 64 hg branch ff
56 65 echo ff > ff
57 66 hg ci -Am'fast forward' -d '1000000 0'
58 67 hg up foo
59 68 hg merge ff
60 69 hg branch
61 70 hg commit -m'Merge ff into foo' -d '1000000 0'
62 71 hg parents
63 72 hg manifest
64 73
65 74 exit 0
@@ -1,108 +1,117 b''
1 1 marked working directory as branch foo
2 2 foo
3 3 marked working directory as branch bar
4 4 % branch shadowing
5 5 abort: a branch of the same name already exists (use --force to override)
6 6 marked working directory as branch default
7 7 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 8 foo
9 9 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 10 (branch merge, don't forget to commit)
11 11 foo
12 12 changeset: 5:5f8fb06e083e
13 13 branch: foo
14 14 tag: tip
15 15 parent: 4:4909a3732169
16 16 parent: 3:bf1bc2f45e83
17 17 user: test
18 18 date: Mon Jan 12 13:46:40 1970 +0000
19 19 summary: merge
20 20
21 21 changeset: 4:4909a3732169
22 22 branch: foo
23 23 parent: 1:b699b1cec9c2
24 24 user: test
25 25 date: Mon Jan 12 13:46:40 1970 +0000
26 26 summary: modify a branch
27 27
28 28 changeset: 3:bf1bc2f45e83
29 29 user: test
30 30 date: Mon Jan 12 13:46:40 1970 +0000
31 31 summary: clear branch name
32 32
33 33 changeset: 2:67ec16bde7f1
34 34 branch: bar
35 35 user: test
36 36 date: Mon Jan 12 13:46:40 1970 +0000
37 37 summary: change branch name
38 38
39 39 changeset: 1:b699b1cec9c2
40 40 branch: foo
41 41 user: test
42 42 date: Mon Jan 12 13:46:40 1970 +0000
43 43 summary: add branch name
44 44
45 45 changeset: 0:be8523e69bf8
46 46 user: test
47 47 date: Mon Jan 12 13:46:40 1970 +0000
48 48 summary: initial
49 49
50 50 foo 5:5f8fb06e083e
51 51 default 3:bf1bc2f45e83 (inactive)
52 52 bar 2:67ec16bde7f1 (inactive)
53 53 foo
54 54 default
55 55 bar
56 56 % test for invalid branch cache
57 57 rolling back last transaction
58 58 changeset: 4:4909a3732169
59 59 branch: foo
60 60 tag: tip
61 61 parent: 1:b699b1cec9c2
62 62 user: test
63 63 date: Mon Jan 12 13:46:40 1970 +0000
64 64 summary: modify a branch
65 65
66 66 Invalid branch cache: unknown tip
67 67 changeset: 4:4909a3732169c0c20011c4f4b8fdff4e3d89b23f
68 68 branch: foo
69 69 tag: tip
70 70 parent: 1:b699b1cec9c2966b3700de4fef0dc123cd754c31
71 71 parent: -1:0000000000000000000000000000000000000000
72 72 manifest: 4:d01b250baaa05909152f7ae07d7a649deea0df9a
73 73 user: test
74 74 date: Mon Jan 12 13:46:40 1970 +0000
75 75 files: a
76 76 extra: branch=foo
77 77 description:
78 78 modify a branch
79 79
80 80
81 81 4:4909a3732169
82 82 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
83 83 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
84 84 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
85 85 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
86 % push should update the branch cache
87 % pushing just rev 0
88 be8523e69bf892e25817fc97187516b3c0804ae4 0
89 be8523e69bf892e25817fc97187516b3c0804ae4 default
90 % pushing everything
91 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
92 bf1bc2f45e834c75404d0ddab57d53beab56e2f8 default
93 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
94 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
86 95 % update with no arguments: tipmost revision of the current branch
87 96 bf1bc2f45e83
88 97 4909a3732169 (foo) tip
89 98 marked working directory as branch foobar
90 99 abort: branch foobar not found
91 100 % fastforward merge
92 101 marked working directory as branch ff
93 102 adding ff
94 103 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
95 104 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 105 (branch merge, don't forget to commit)
97 106 foo
98 107 changeset: 6:f0c74f92a385
99 108 branch: foo
100 109 tag: tip
101 110 parent: 4:4909a3732169
102 111 parent: 5:c420d2121b71
103 112 user: test
104 113 date: Mon Jan 12 13:46:40 1970 +0000
105 114 summary: Merge ff into foo
106 115
107 116 a
108 117 ff
@@ -1,103 +1,111 b''
1 1 #!/bin/sh
2 2
3 3 cp "$TESTDIR"/printenv.py .
4 4
5 5 # This test tries to exercise the ssh functionality with a dummy script
6 6
7 7 cat <<EOF > dummyssh
8 8 import sys
9 9 import os
10 10
11 11 os.chdir(os.path.dirname(sys.argv[0]))
12 12 if sys.argv[1] != "user@dummy":
13 13 sys.exit(-1)
14 14
15 15 if not os.path.exists("dummyssh"):
16 16 sys.exit(-1)
17 17
18 18 os.environ["SSH_CLIENT"] = "127.0.0.1 1 2"
19 19
20 20 log = open("dummylog", "ab")
21 21 log.write("Got arguments")
22 22 for i, arg in enumerate(sys.argv[1:]):
23 23 log.write(" %d:%s" % (i+1, arg))
24 24 log.write("\n")
25 25 log.close()
26 26 r = os.system(sys.argv[2])
27 27 sys.exit(bool(r))
28 28 EOF
29 29
30 cat <<EOF > badhook
31 import sys
32 sys.stdout.write("KABOOM")
33 EOF
34
30 35 echo "# creating 'remote'"
31 36 hg init remote
32 37 cd remote
33 38 echo this > foo
34 39 echo this > fooO
35 40 hg ci -A -m "init" -d "1000000 0" foo fooO
36 41 echo '[server]' > .hg/hgrc
37 42 echo 'uncompressed = True' >> .hg/hgrc
38 43 echo '[hooks]' >> .hg/hgrc
39 44 echo 'changegroup = python ../printenv.py changegroup-in-remote 0 ../dummylog' >> .hg/hgrc
40 45
41 46 cd ..
42 47
43 48 echo "# repo not found error"
44 49 hg clone -e "python ./dummyssh" ssh://user@dummy/nonexistent local
45 50
46 51 echo "# clone remote via stream"
47 52 hg clone -e "python ./dummyssh" --uncompressed ssh://user@dummy/remote local-stream 2>&1 | \
48 53 sed -e 's/[0-9][0-9.]*/XXX/g' -e 's/[KM]\(B\/sec\)/X\1/'
49 54 cd local-stream
50 55 hg verify
51 56 cd ..
52 57
53 58 echo "# clone remote via pull"
54 59 hg clone -e "python ./dummyssh" ssh://user@dummy/remote local
55 60
56 61 echo "# verify"
57 62 cd local
58 63 hg verify
59 64
60 65 echo '[hooks]' >> .hg/hgrc
61 66 echo 'changegroup = python ../printenv.py changegroup-in-local 0 ../dummylog' >> .hg/hgrc
62 67
63 68 echo "# empty default pull"
64 69 hg paths
65 70 hg pull -e "python ../dummyssh"
66 71
67 72 echo "# local change"
68 73 echo bleah > foo
69 74 hg ci -m "add" -d "1000000 0"
70 75
71 76 echo "# updating rc"
72 77 echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
73 78 echo "[ui]" >> .hg/hgrc
74 79 echo "ssh = python ../dummyssh" >> .hg/hgrc
75 80
76 81 echo "# find outgoing"
77 82 hg out ssh://user@dummy/remote
78 83
79 84 echo "# find incoming on the remote side"
80 85 hg incoming -R ../remote -e "python ../dummyssh" ssh://user@dummy/local
81 86
82 87 echo "# push"
83 88 hg push
84 89
85 90 cd ../remote
86 91
87 92 echo "# check remote tip"
88 93 hg tip
89 94 hg verify
90 95 hg cat -r tip foo
91 96
92 97 echo z > z
93 98 hg ci -A -m z -d '1000001 0' z
99 # a bad, evil hook that prints to stdout
100 echo 'changegroup.stdout = python ../badhook' >> .hg/hgrc
94 101
95 102 cd ../local
96 103 echo r > r
97 104 hg ci -A -m z -d '1000002 0' r
98 105
99 echo "# push should succeed"
106 echo "# push should succeed even though it has an unexpected response"
100 107 hg push
108 hg -R ../remote heads
101 109
102 110 cd ..
103 111 cat dummylog
@@ -1,90 +1,105 b''
1 1 # creating 'remote'
2 2 # repo not found error
3 3 remote: abort: There is no Mercurial repository here (.hg not found)!
4 4 abort: no suitable response from remote hg!
5 5 # clone remote via stream
6 6 streaming all changes
7 7 XXX files to transfer, XXX bytes of data
8 8 transferred XXX bytes in XXX seconds (XXX XB/sec)
9 9 XXX files updated, XXX files merged, XXX files removed, XXX files unresolved
10 10 checking changesets
11 11 checking manifests
12 12 crosschecking files in changesets and manifests
13 13 checking files
14 14 2 files, 1 changesets, 2 total revisions
15 15 # clone remote via pull
16 16 requesting all changes
17 17 adding changesets
18 18 adding manifests
19 19 adding file changes
20 20 added 1 changesets with 2 changes to 2 files
21 21 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 22 # verify
23 23 checking changesets
24 24 checking manifests
25 25 crosschecking files in changesets and manifests
26 26 checking files
27 27 2 files, 1 changesets, 2 total revisions
28 28 # empty default pull
29 29 default = ssh://user@dummy/remote
30 30 pulling from ssh://user@dummy/remote
31 31 searching for changes
32 32 no changes found
33 33 # local change
34 34 # updating rc
35 35 # find outgoing
36 36 comparing with ssh://user@dummy/remote
37 37 searching for changes
38 38 changeset: 1:572896fe480d
39 39 tag: tip
40 40 user: test
41 41 date: Mon Jan 12 13:46:40 1970 +0000
42 42 summary: add
43 43
44 44 # find incoming on the remote side
45 45 comparing with ssh://user@dummy/local
46 46 searching for changes
47 47 changeset: 1:572896fe480d
48 48 tag: tip
49 49 user: test
50 50 date: Mon Jan 12 13:46:40 1970 +0000
51 51 summary: add
52 52
53 53 # push
54 54 pushing to ssh://user@dummy/remote
55 55 searching for changes
56 56 remote: adding changesets
57 57 remote: adding manifests
58 58 remote: adding file changes
59 59 remote: added 1 changesets with 1 changes to 1 files
60 60 # check remote tip
61 61 changeset: 1:572896fe480d
62 62 tag: tip
63 63 user: test
64 64 date: Mon Jan 12 13:46:40 1970 +0000
65 65 summary: add
66 66
67 67 checking changesets
68 68 checking manifests
69 69 crosschecking files in changesets and manifests
70 70 checking files
71 71 2 files, 2 changesets, 3 total revisions
72 72 bleah
73 # push should succeed
73 # push should succeed even though it has an unexpected response
74 74 pushing to ssh://user@dummy/remote
75 75 searching for changes
76 76 note: unsynced remote changes!
77 77 remote: adding changesets
78 78 remote: adding manifests
79 79 remote: adding file changes
80 80 remote: added 1 changesets with 1 changes to 1 files
81 abort: unexpected response:
82 'KABOOM1\n'
83 changeset: 3:ac7448082955
84 tag: tip
85 parent: 1:572896fe480d
86 user: test
87 date: Mon Jan 12 13:46:42 1970 +0000
88 summary: z
89
90 changeset: 2:187c6caa0d1e
91 parent: 0:e34318c26897
92 user: test
93 date: Mon Jan 12 13:46:41 1970 +0000
94 summary: z
95
81 96 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
82 97 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
83 98 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
84 99 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
85 100 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
86 101 Got arguments 1:user@dummy 2:hg -R local serve --stdio
87 102 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
88 103 changegroup-in-remote hook: HG_NODE=572896fe480d7581849806ee402175c49cb20037 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
89 104 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
90 105 changegroup-in-remote hook: HG_NODE=ac7448082955a0b2ff5cb4512c1e061c779bbc79 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
General Comments 0
You need to be logged in to leave comments. Login now