##// END OF EJS Templates
i18n: mark strings for translation in Mercurial
Martin Geisler
r6953:63b5f4c7 default
parent child Browse files
Show More
@@ -1,294 +1,294 b''
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import hex, nullid, short
14 14 from i18n import _
15 15 import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff
16 16 import repo, localrepo, changelog, manifest, filelog, revlog
17 17
class bundlerevlog(revlog.revlog):
    """A revlog that can also serve revisions straight out of an
    (uncompressed) changegroup bundle file, without applying them to
    the underlying repository."""
    def __init__(self, opener, indexfile, bundlefile,
                 linkmapper=None):
        # How it works:
        # to retrieve a revision, we need to know the offset of
        # the revision in the bundlefile (an opened file).
        #
        # We store this offset in the index (start), to differentiate a
        # rev in the bundle and from a rev in the revlog, we check
        # len(index[r]). If the tuple is bigger than 7, it is a bundle
        # (it is bigger since we store the node to which the delta is)
        #
        revlog.revlog.__init__(self, opener, indexfile)
        self.bundlefile = bundlefile
        # rev number -> node the delta in the bundle is based on
        self.basemap = {}
        def chunkpositer():
            # yield (chunk, start offset of chunk payload in bundlefile)
            for chunk in changegroup.chunkiter(bundlefile):
                pos = bundlefile.tell()
                yield chunk, pos - len(chunk)
        n = len(self)
        prev = None
        for chunk, start in chunkpositer():
            size = len(chunk)
            if size < 80:
                # a chunk must at least hold the 4x20-byte node header
                raise util.Abort(_("invalid changegroup"))
            start += 80
            size -= 80
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            if node in self.nodemap:
                prev = node
                continue
            for p in (p1, p2):
                if not p in self.nodemap:
                    # report the parent that is actually missing
                    # (the original raised with p1 unconditionally)
                    raise revlog.LookupError(p, self.indexfile,
                                             _("unknown parent"))
            if linkmapper is None:
                link = n
            else:
                link = linkmapper(cs)

            if not prev:
                prev = p1
            # start, size, full unc. size, base (unused), link, p1, p2, node
            e = (revlog.offset_type(start, 0), size, -1, -1, link,
                 self.rev(p1), self.rev(p2), node)
            self.basemap[n] = prev
            self.index.insert(-1, e)
            self.nodemap[node] = n
            prev = node
            n += 1

    def bundle(self, rev):
        """is rev from the bundle"""
        if rev < 0:
            return False
        return rev in self.basemap
    def bundlebase(self, rev): return self.basemap[rev]
    def chunk(self, rev, df=None, cachelen=4096):
        # Warning: in case of bundle, the diff is against bundlebase,
        # not against rev - 1
        # XXX: could use some caching
        if not self.bundle(rev):
            return revlog.revlog.chunk(self, rev, df)
        self.bundlefile.seek(self.start(rev))
        return self.bundlefile.read(self.length(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if self.bundle(rev1) and self.bundle(rev2):
            # hot path for bundle
            revb = self.rev(self.bundlebase(rev2))
            if revb == rev1:
                return self.chunk(rev2)
        elif not self.bundle(rev1) and not self.bundle(rev2):
            return revlog.revlog.revdiff(self, rev1, rev2)

        return mdiff.textdiff(self.revision(self.node(rev1)),
                              self.revision(self.node(rev2)))

    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""

        text = None
        chain = []
        iter_node = node
        rev = self.rev(iter_node)
        # reconstruct the revision if it is from a changegroup
        while self.bundle(rev):
            if self._cache and self._cache[0] == iter_node:
                text = self._cache[2]
                break
            chain.append(rev)
            iter_node = self.bundlebase(rev)
            rev = self.rev(iter_node)
        if text is None:
            text = revlog.revlog.revision(self, iter_node)

        while chain:
            delta = self.chunk(chain.pop())
            text = mdiff.patches(text, [delta])

        p1, p2 = self.parents(node)
        if node != revlog.hash(text, p1, p2):
            raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))

        self._cache = (node, self.rev(node), text)
        return text

    # the bundle view is read-only: all mutators are disabled
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
136 136
class bundlechangelog(bundlerevlog, changelog.changelog):
    """Changelog view over a bundle (no linkmapper: revs self-link)."""
    def __init__(self, opener, bundlefile):
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
141 141
class bundlemanifest(bundlerevlog, manifest.manifest):
    """Manifest view over a bundle; linkmapper resolves changeset links."""
    def __init__(self, opener, bundlefile, linkmapper):
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
147 147
class bundlefilelog(bundlerevlog, filelog.filelog):
    """Filelog view over a bundle; linkmapper resolves changeset links."""
    def __init__(self, opener, path, bundlefile, linkmapper):
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
153 153
class bundlerepository(localrepo.localrepository):
    """A read-only repository formed by overlaying a bundle file on a
    local repository (or on a temporary empty one if none is given)."""
    def __init__(self, ui, path, bundlename):
        self._tempparent = None
        try:
            localrepo.localrepository.__init__(self, ui, path)
        except repo.RepoError:
            # no parent repo: back the bundle with a fresh scratch repo
            self._tempparent = tempfile.mkdtemp()
            localrepo.instance(ui, self._tempparent, 1)
            localrepo.localrepository.__init__(self, ui, self._tempparent)

        if path:
            self._url = 'bundle:' + path + '+' + bundlename
        else:
            self._url = 'bundle:' + bundlename

        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif (header == "HG10BZ") or (header == "HG10GZ"):
            # compressed bundle: decompress once into an uncompressed
            # temporary copy so revisions can be seek()ed at random
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            def generator(f):
                if header == "HG10BZ":
                    zd = bz2.BZ2Decompressor()
                    # the stream lacks the stripped "BZ" magic; restore it
                    zd.decompress("BZ")
                elif header == "HG10GZ":
                    zd = zlib.decompressobj()
                for chunk in f:
                    yield zd.decompress(chunk)
            gen = generator(util.filechunkiter(self.bundlefile, 4096))

            try:
                fptemp.write("HG10UN")
                for chunk in gen:
                    fptemp.write(chunk)
            finally:
                fptemp.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}

    def __getattr__(self, name):
        # changelog/manifest/filelogs are parsed lazily, in bundle order
        if name == 'changelog':
            self.changelog = bundlechangelog(self.sopener, self.bundlefile)
            self.manstart = self.bundlefile.tell()
            return self.changelog
        if name == 'manifest':
            self.bundlefile.seek(self.manstart)
            self.manifest = bundlemanifest(self.sopener, self.bundlefile,
                                           self.changelog.rev)
            self.filestart = self.bundlefile.tell()
            return self.manifest
        if name == 'manstart':
            self.changelog  # side effect: sets self.manstart
            return self.manstart
        if name == 'filestart':
            self.manifest  # side effect: sets self.filestart
            return self.filestart
        return localrepo.localrepository.__getattr__(self, name)

    def url(self):
        return self._url

    def file(self, f):
        """Return the filelog for f, from the bundle if it is in there."""
        if not self.bundlefilespos:
            # index the start position of every file group in the bundle
            self.bundlefile.seek(self.filestart)
            while 1:
                chunk = changegroup.getchunk(self.bundlefile)
                if not chunk:
                    break
                self.bundlefilespos[chunk] = self.bundlefile.tell()
                for c in changegroup.chunkiter(self.bundlefile):
                    pass

        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            return filelog.filelog(self.sopener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        bundlefile = getattr(self, 'bundlefile', None)
        if bundlefile and not bundlefile.closed:
            bundlefile.close()
        tempfile = getattr(self, 'tempfile', None)
        if tempfile is not None:
            os.unlink(tempfile)
        if self._tempparent:
            shutil.rmtree(self._tempparent, True)

    def cancopy(self):
        return False
269 269
def instance(ui, path, create):
    """Open a bundlerepository for a 'bundle:' (or plain bundle file) path."""
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    parentpath = ui.config("bundle", "mainreporoot", "")
    if parentpath:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if parentpath == cwd:
            parentpath = ''
        else:
            cwd = os.path.join(cwd,'')
            if parentpath.startswith(cwd):
                parentpath = parentpath[len(cwd):]
    path = util.drop_scheme('file', path)
    if path.startswith('bundle:'):
        path = util.drop_scheme('bundle', path)
        # "repopath+bundlename" or just "bundlename"
        s = path.split("+", 1)
        if len(s) == 1:
            repopath, bundlename = parentpath, s[0]
        else:
            repopath, bundlename = s
    else:
        repopath, bundlename = parentpath, path
    return bundlerepository(ui, repopath, bundlename)
@@ -1,1183 +1,1183 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20 20
def findpossible(ui, cmd, table):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for e in table.keys():
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            # allow unambiguous prefixes unless strict matching is on
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice
49 49
def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand for multiple prefix matches and
    UnknownCommand when nothing matches."""
    choice = findpossible(ui, cmd, table)

    # exact match wins over any prefix matches
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)
66 66
def bail_if_changed(repo):
    """Abort if the working directory has an uncommitted merge or changes."""
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))
73 73
74 74 def logmessage(opts):
75 75 """ get the log message according to -m and -l option """
76 76 message = opts['message']
77 77 logfile = opts['logfile']
78 78
79 79 if message and logfile:
80 80 raise util.Abort(_('options --message and --logfile are mutually '
81 81 'exclusive'))
82 82 if not message and logfile:
83 83 try:
84 84 if logfile == '-':
85 85 message = sys.stdin.read()
86 86 else:
87 87 message = open(logfile).read()
88 88 except IOError, inst:
89 89 raise util.Abort(_("can't read commit message '%s': %s") %
90 90 (logfile, inst.strerror))
91 91 return message
92 92
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no limit given: effectively unlimited
        limit = sys.maxint
    return limit
105 105
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112 112
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # empty spec (but not rev 0) falls back to the default
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
140 140
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec (but not rev 0) falls back to the default
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen, l = {}, []
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
            # ranges may run backwards (e.g. "tip:0")
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                l.append(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            l.append(rev)

    return l
169 169
170 170 def make_filename(repo, pat, node,
171 171 total=None, seqno=None, revwidth=None, pathname=None):
172 172 node_expander = {
173 173 'H': lambda: hex(node),
174 174 'R': lambda: str(repo.changelog.rev(node)),
175 175 'h': lambda: short(node),
176 176 }
177 177 expander = {
178 178 '%': lambda: '%',
179 179 'b': lambda: os.path.basename(repo.root),
180 180 }
181 181
182 182 try:
183 183 if node:
184 184 expander.update(node_expander)
185 185 if node:
186 186 expander['r'] = (lambda:
187 187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 188 if total is not None:
189 189 expander['N'] = lambda: str(total)
190 190 if seqno is not None:
191 191 expander['n'] = lambda: str(seqno)
192 192 if total is not None and seqno is not None:
193 193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 194 if pathname is not None:
195 195 expander['s'] = lambda: os.path.basename(pathname)
196 196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 197 expander['p'] = lambda: pathname
198 198
199 199 newname = []
200 200 patlen = len(pat)
201 201 i = 0
202 202 while i < patlen:
203 203 c = pat[i]
204 204 if c == '%':
205 205 i += 1
206 206 c = pat[i]
207 207 c = expander[c]()
208 208 newname.append(c)
209 209 i += 1
210 210 return ''.join(newname)
211 211 except KeyError, inst:
212 212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 213 inst.args[0])
214 214
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open (or pass through) the output/input file described by pat.

    Empty pat or '-' maps to stdout/stdin; a file-like object matching
    the mode is returned unchanged; otherwise the pattern is expanded
    with make_filename and opened."""
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)
226 226
def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    """Build a matcher from patterns/options, warning on bad files."""
    if not globbed and default == 'relpath':
        pats = util.expand_glob(pats or [])
    m = _match.match(repo.root, repo.getcwd(), pats,
                     opts.get('include'), opts.get('exclude'), default)
    def badfn(f, msg):
        # report unmatchable files but keep walking
        repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
        return False
    m.bad = badfn
    return m
237 237
def matchall(repo):
    """Return a matcher that matches every file in the repository."""
    return _match.always(repo.root, repo.getcwd())
240 240
def matchfiles(repo, files):
    """Return a matcher that matches exactly the given file list."""
    return _match.exact(repo.root, repo.getcwd(), files)
243 243
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo['.']
    for a in added:
        aa = repo.wread(a)
        # splitting the added file's lines does not depend on the
        # removed candidate, so do it once per added file
        alines = mdiff.splitnewlines(aa)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
271 271
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule missing files for removal and unknown files for addition,
    optionally recording similar add/remove pairs as renames.

    NOTE: pats/opts defaults are mutable but are never mutated here;
    kept as-is for interface compatibility."""
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except:
            # path failed auditing (e.g. escapes the repo): treat as bad
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, m.exact(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
314 314
315 315 def copy(ui, repo, pats, opts, rename=False):
316 316 # called with the repo lock held
317 317 #
318 318 # hgsep => pathname that uses "/" to separate directories
319 319 # ossep => pathname that uses os.sep to separate directories
320 320 cwd = repo.getcwd()
321 321 targets = {}
322 322 after = opts.get("after")
323 323 dryrun = opts.get("dry_run")
324 324
325 325 def walkpat(pat):
326 326 srcs = []
327 327 m = match(repo, [pat], opts, globbed=True)
328 328 for abs in repo.walk(m):
329 329 state = repo.dirstate[abs]
330 330 rel = m.rel(abs)
331 331 exact = m.exact(abs)
332 332 if state in '?r':
333 333 if exact and state == '?':
334 334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
335 335 if exact and state == 'r':
336 336 ui.warn(_('%s: not copying - file has been marked for'
337 337 ' remove\n') % rel)
338 338 continue
339 339 # abs: hgsep
340 340 # rel: ossep
341 341 srcs.append((abs, rel, exact))
342 342 return srcs
343 343
344 344 # abssrc: hgsep
345 345 # relsrc: ossep
346 346 # otarget: ossep
347 347 def copyfile(abssrc, relsrc, otarget, exact):
348 348 abstarget = util.canonpath(repo.root, cwd, otarget)
349 349 reltarget = repo.pathto(abstarget, cwd)
350 350 target = repo.wjoin(abstarget)
351 351 src = repo.wjoin(abssrc)
352 352 state = repo.dirstate[abstarget]
353 353
354 354 # check for collisions
355 355 prevsrc = targets.get(abstarget)
356 356 if prevsrc is not None:
357 357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
358 358 (reltarget, repo.pathto(abssrc, cwd),
359 359 repo.pathto(prevsrc, cwd)))
360 360 return
361 361
362 362 # check for overwrites
363 363 exists = os.path.exists(target)
364 364 if (not after and exists or after and state in 'mn'):
365 365 if not opts['force']:
366 366 ui.warn(_('%s: not overwriting - file exists\n') %
367 367 reltarget)
368 368 return
369 369
370 370 if after:
371 371 if not exists:
372 372 return
373 373 elif not dryrun:
374 374 try:
375 375 if exists:
376 376 os.unlink(target)
377 377 targetdir = os.path.dirname(target) or '.'
378 378 if not os.path.isdir(targetdir):
379 379 os.makedirs(targetdir)
380 380 util.copyfile(src, target)
381 381 except IOError, inst:
382 382 if inst.errno == errno.ENOENT:
383 383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
384 384 else:
385 385 ui.warn(_('%s: cannot copy - %s\n') %
386 386 (relsrc, inst.strerror))
387 387 return True # report a failure
388 388
389 389 if ui.verbose or not exact:
390 390 action = rename and "moving" or "copying"
391 391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
392 392
393 393 targets[abstarget] = abssrc
394 394
395 395 # fix up dirstate
396 396 origsrc = repo.dirstate.copied(abssrc) or abssrc
397 397 if abstarget == origsrc: # copying back a copy?
398 398 if state not in 'mn' and not dryrun:
399 399 repo.dirstate.normallookup(abstarget)
400 400 else:
401 401 if repo.dirstate[origsrc] == 'a':
402 402 if not ui.quiet:
403 403 ui.warn(_("%s has not been committed yet, so no copy "
404 404 "data will be stored for %s.\n")
405 405 % (repo.pathto(origsrc, cwd), reltarget))
406 406 if abstarget not in repo.dirstate and not dryrun:
407 407 repo.add([abstarget])
408 408 elif not dryrun:
409 409 repo.copy(origsrc, abstarget)
410 410
411 411 if rename and not dryrun:
412 412 repo.remove([abssrc], not after)
413 413
414 414 # pat: ossep
415 415 # dest ossep
416 416 # srcs: list of (hgsep, hgsep, ossep, bool)
417 417 # return: function that takes hgsep and returns ossep
418 418 def targetpathfn(pat, dest, srcs):
419 419 if os.path.isdir(pat):
420 420 abspfx = util.canonpath(repo.root, cwd, pat)
421 421 abspfx = util.localpath(abspfx)
422 422 if destdirexists:
423 423 striplen = len(os.path.split(abspfx)[0])
424 424 else:
425 425 striplen = len(abspfx)
426 426 if striplen:
427 427 striplen += len(os.sep)
428 428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
429 429 elif destdirexists:
430 430 res = lambda p: os.path.join(dest,
431 431 os.path.basename(util.localpath(p)))
432 432 else:
433 433 res = lambda p: dest
434 434 return res
435 435
436 436 # pat: ossep
437 437 # dest ossep
438 438 # srcs: list of (hgsep, hgsep, ossep, bool)
439 439 # return: function that takes hgsep and returns ossep
440 440 def targetpathafterfn(pat, dest, srcs):
441 441 if util.patkind(pat, None)[0]:
442 442 # a mercurial pattern
443 443 res = lambda p: os.path.join(dest,
444 444 os.path.basename(util.localpath(p)))
445 445 else:
446 446 abspfx = util.canonpath(repo.root, cwd, pat)
447 447 if len(abspfx) < len(srcs[0][0]):
448 448 # A directory. Either the target path contains the last
449 449 # component of the source path or it does not.
450 450 def evalpath(striplen):
451 451 score = 0
452 452 for s in srcs:
453 453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
454 454 if os.path.exists(t):
455 455 score += 1
456 456 return score
457 457
458 458 abspfx = util.localpath(abspfx)
459 459 striplen = len(abspfx)
460 460 if striplen:
461 461 striplen += len(os.sep)
462 462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
463 463 score = evalpath(striplen)
464 464 striplen1 = len(os.path.split(abspfx)[0])
465 465 if striplen1:
466 466 striplen1 += len(os.sep)
467 467 if evalpath(striplen1) > score:
468 468 striplen = striplen1
469 469 res = lambda p: os.path.join(dest,
470 470 util.localpath(p)[striplen:])
471 471 else:
472 472 # a file
473 473 if destdirexists:
474 474 res = lambda p: os.path.join(dest,
475 475 os.path.basename(util.localpath(p)))
476 476 else:
477 477 res = lambda p: dest
478 478 return res
479 479
480 480
481 481 pats = util.expand_glob(pats)
482 482 if not pats:
483 483 raise util.Abort(_('no source or destination specified'))
484 484 if len(pats) == 1:
485 485 raise util.Abort(_('no destination specified'))
486 486 dest = pats.pop()
487 487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
488 488 if not destdirexists:
489 489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
490 490 raise util.Abort(_('with multiple sources, destination must be an '
491 491 'existing directory'))
492 492 if util.endswithsep(dest):
493 493 raise util.Abort(_('destination %s is not a directory') % dest)
494 494
495 495 tfn = targetpathfn
496 496 if after:
497 497 tfn = targetpathafterfn
498 498 copylist = []
499 499 for pat in pats:
500 500 srcs = walkpat(pat)
501 501 if not srcs:
502 502 continue
503 503 copylist.append((tfn(pat, dest, srcs), srcs))
504 504 if not copylist:
505 505 raise util.Abort(_('no files to copy'))
506 506
507 507 errors = 0
508 508 for targetpath, srcs in copylist:
509 509 for abssrc, relsrc, exact in srcs:
510 510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
511 511 errors += 1
512 512
513 513 if errors:
514 514 ui.warn(_('(consider using --after)\n'))
515 515
516 516 return errors
517 517
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent: respawn ourselves detached, wait for the child to
        # signal readiness over the pipe, then exit (or call parentfn)
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child: detach from the terminal, tell the parent we are ready,
        # and redirect the standard fds to the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
570 570
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.header = {}
        self.hunk = {}
        self.lastheader = None

    def flush(self, rev):
        # emit any buffered header/hunk for rev; returns 1 if output was made
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch:      %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag:         %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent:      %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]),
                           hex(changes[0])))
        self.ui.write(_("user:        %s\n") % changes[1])
        self.ui.write(_("date:        %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0],
                                     changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files:       %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies:      %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            for key, value in util.sort(extra.items()):
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        # append the diff for node when --patch was requested
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
694 694
695 695
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        # per-instance copy of the filter table so 'formatnode' can be tuned
        filters = templatefilters.filters.copy()
        # debug mode shows the full node hash; otherwise truncate to 12 chars
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either a revision number or a node
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                # no template for this key: fall back to plain text
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    # v is not a mapping; try a sequence of pairs,
                    # otherwise treat it as a scalar value
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            # NOTE: 'last_' is keyed on the singular name, unlike
            # 'start_'/'end_' which use the plural
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch is not displayed
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            for key, value in util.sort(changes[5].items()):
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        # the status call is expensive; compute it lazily and cache the
        # result in 'files' (mutated in place so the closures see it)
        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        # default template keywords; caller-supplied props take no
        # precedence (defprops overwrites them below)
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template for the current
            # verbosity level, falling back to the generic 'header'
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    # defer header output; flush() decides whether to print it
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # same most-specific-first selection for the changeset body
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
881 881
882 882 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
883 883 """show one changeset using template or regular display.
884 884
885 885 Display format will be the first non-empty hit of:
886 886 1. option 'template'
887 887 2. option 'style'
888 888 3. [ui] setting 'logtemplate'
889 889 4. [ui] setting 'style'
890 890 If all of these values are either the unset or the empty string,
891 891 regular display via changeset_printer() is done.
892 892 """
893 893 # options
894 894 patch = False
895 895 if opts.get('patch'):
896 896 patch = matchfn or matchall(repo)
897 897
898 898 tmpl = opts.get('template')
899 899 mapfile = None
900 900 if tmpl:
901 901 tmpl = templater.parsestring(tmpl, quoted=False)
902 902 else:
903 903 mapfile = opts.get('style')
904 904 # ui settings
905 905 if not mapfile:
906 906 tmpl = ui.config('ui', 'logtemplate')
907 907 if tmpl:
908 908 tmpl = templater.parsestring(tmpl)
909 909 else:
910 910 mapfile = ui.config('ui', 'style')
911 911
912 912 if tmpl or mapfile:
913 913 if mapfile:
914 914 if not os.path.split(mapfile)[0]:
915 915 mapname = (templater.templatepath('map-cmdline.' + mapfile)
916 916 or templater.templatepath(mapfile))
917 917 if mapname: mapfile = mapname
918 918 try:
919 919 t = changeset_templater(ui, repo, patch, mapfile, buffered)
920 920 except SyntaxError, inst:
921 921 raise util.Abort(inst.args[0])
922 922 if tmpl: t.use_template(tmpl)
923 923 return t
924 924 return changeset_printer(ui, repo, patch, buffered)
925 925
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec.

    Returns the revision number as a string, or raises util.Abort when
    no revision matches.
    """
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # record revisions whose commit date satisfies the spec
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter':
            # 'iter' walks tip-first, so the first hit is the tipmost match
            if rev in results:
                # user-facing message must be marked for translation
                ui.status(_("Found revision %s from %s\n") %
                          (rev, util.datestr(results[rev])))
                return str(rev)

    raise util.Abort(_("revision matching date not found"))
944 944
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # yield (start, size) pairs covering the range from start towards
        # end, doubling the window size up to sizelimit; works in both
        # ascending (start < end) and descending directions
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    m = match(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    # empty repository: nothing to iterate
    if not len(repo):
        return [], m

    if follow:
        defrange = '%s:0' % repo['.'].rev()
    else:
        defrange = '-1:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = {}
    # patterns or --removed force the slow path (scan every changeset)
    slowpath = m.anypats() or opts.get('removed')
    fncache = {}

    if not slowpath and not m.files():
        # No files, no patterns. Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = len(repo)
            if node is None:
                last = len(filelog) - 1
            else:
                last = filelog.rev(node)
            # walk the filelog in descending windows, yielding
            # (linkrev, renamed-source-or-False) pairs newest-first
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # explicit files first, then any copy sources discovered
            # while following renames (copies grows during iteration)
            for filename in m.files():
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            if not len(filelog):
                if node is None:
                    # A zero count may be a directory or deleted file, so
                    # try to find matching entries on the slow path.
                    slowpath = True
                    break
                else:
                    ui.warn(_('%s:%s copy source revision cannot be found!\n')
                            % (file_, short(node)))
                    continue
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(len(repo) - 1, nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(m, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        # incrementally decides, as match() is called on successive revs,
        # whether each rev is reachable from the first rev seen
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                # --follow-first only tracks the first parent of merges
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                # first call establishes the reference revision
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        if follow and not m.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            # forward pass: emit 'add' events in ascending order
            for rev in util.sort(list(nrevs)):
                fns = fncache.get(rev)
                if not fns:
                    # no cached file list: compute lazily from the changelog
                    def fns_generator():
                        for f in change(rev)[3]:
                            if m(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            # display pass: emit 'iter' events in the requested order
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), m
1140 1140
1141 1141 def commit(ui, repo, commitfunc, pats, opts):
1142 1142 '''commit the specified files or all outstanding changes'''
1143 1143 date = opts.get('date')
1144 1144 if date:
1145 1145 opts['date'] = util.parsedate(date)
1146 1146 message = logmessage(opts)
1147 1147
1148 1148 # extract addremove carefully -- this function can be called from a command
1149 1149 # that doesn't support addremove
1150 1150 if opts.get('addremove'):
1151 1151 addremove(repo, pats, opts)
1152 1152
1153 1153 m = match(repo, pats, opts)
1154 1154 if pats:
1155 1155 modified, added, removed = repo.status(match=m)[:3]
1156 1156 files = util.sort(modified + added + removed)
1157 1157 slist = None
1158 1158 for f in m.files():
1159 1159 if f == '.':
1160 1160 continue
1161 1161 if f not in files:
1162 1162 rf = repo.wjoin(f)
1163 1163 rel = repo.pathto(f)
1164 1164 try:
1165 1165 mode = os.lstat(rf)[stat.ST_MODE]
1166 1166 except OSError:
1167 1167 raise util.Abort(_("file %s not found!") % rel)
1168 1168 if stat.S_ISDIR(mode):
1169 1169 name = f + '/'
1170 1170 i = bisect.bisect(files, name)
1171 1171 if i >= len(files) or not files[i].startswith(name):
1172 1172 raise util.Abort(_("no match under directory %s!")
1173 1173 % rel)
1174 1174 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1175 1175 raise util.Abort(_("can't commit %s: "
1176 1176 "unsupported file type!") % rel)
1177 1177 elif f not in repo.dirstate:
1178 1178 raise util.Abort(_("file %s not tracked!") % rel)
1179 1179 m = matchfiles(repo, files)
1180 1180 try:
1181 1181 return commitfunc(ui, repo, message, m, opts)
1182 1182 except ValueError, inst:
1183 1183 raise util.Abort(str(inst))
@@ -1,3320 +1,3320 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from repo import RepoError, NoCapability
10 10 from i18n import _
11 11 import os, re, sys, urllib
12 12 import hg, util, revlog, bundlerepo, extensions, copies
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    rejected = None
    exacts = {}
    names = []
    matcher = cmdutil.match(repo, pats, opts)
    # swallow complaints about exact patterns that match nothing
    matcher.bad = lambda x, y: True
    for abs in repo.walk(matcher):
        if matcher.exact(abs):
            # explicitly named: always scheduled, message only when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % matcher.rel(abs))
            names.append(abs)
            exacts[abs] = 1
        elif abs not in repo.dirstate:
            # untracked file picked up by a pattern walk
            ui.status(_('adding %s\n') % matcher.rel(abs))
            names.append(abs)
    if not opts.get('dry_run'):
        # only explicitly named files count as rejections
        rejected = [p for p in repo.add(names) if p in exacts]
    return rejected and 1 or 0
49 49
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if not (0 <= sim <= 100):
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil works with a 0..1 ratio rather than a percentage
    return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
71 71
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> formatter for one annotation column
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        # no column selected: default to revision numbers
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":lineno" to the last selected column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts['rev']]

    m = cmdutil.match(repo, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                # bug fix: the column width must not be assigned to 'm',
                # which would clobber the matcher still used by m.rel()
                # for subsequent files in this walk
                width = max(map(len, l))
                pieces.append(["%*s" % (width, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
132 132
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo[opts['rev']]
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    # archiving the repository onto itself would be destructive
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    matchfn = cmdutil.match(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']

    if dest == '-':
        # stream the archive to stdout (impossible for a plain directory)
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
175 175
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See \'hg help dates\' for a list of formats valid for -d/--date.
    '''
    # 'node' and 'rev' are alternate spellings of the same argument
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # the backed-out changeset must be an ancestor of the working directory
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires choosing which parent to revert to
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    # update to the target, revert it to its parent, then commit the result
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        # "rev:shortnode" for status messages
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # restore the pre-backout working directory parent
        hg.clean(repo, op1, show_stats=False)
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
262 262
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing (unless the --noupdate option is specified).
    Once you have performed tests, mark the working directory as bad
    or good and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    # NOTE(review): the '+' below assumes the option values are numeric
    # (0/1 from the command table), not None -- confirm against callers
    elif extra or good + bad + skip + reset > 1:
        raise util.Abort(_('incompatible arguments'))
    elif not (good or bad or skip or reset):
        ui.status(_('(no action selected)\n'))
        return

    if reset:
        # forget all recorded state and start over
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    # load state: one "<kind> <hexnode>" line per marked revision
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # update state with the revision marked by this invocation
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # save state
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hex(node)))
        f.rename()
    finally:
        del wlock

    # bisection needs at least one good and one bad revision
    if not state['good'] or not state['bad']:
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        displayer = cmdutil.show_changeset(ui, repo, {})
        transition = (good and "good" or "bad")
        if len(nodes) == 1:
            # narrowed it down to a single revision
            ui.write(_("The first %s revision is:\n") % transition)
            displayer.show(changenode=nodes[0])
        else:
            # multiple possible revisions
            ui.write(_("Due to skipped revisions, the first "
                       "%s revision could be any of:\n") % transition)
            for n in nodes:
                displayer.show(changenode=n)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            # check out the next candidate for the user to test
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
363 363
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """

    if not label:
        # query mode: report the working directory's branch and stop
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    # refuse to shadow an existing branch unless --force was given or
    # the name already belongs to one of the working dir's parents
    if not opts.get('force') and label in repo.branchtags():
        parentnames = [p.branch() for p in repo.parents()]
        if label not in parentnames:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
386 386
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    hexfunc = ui.debugflag and hex or short
    # a branch is "active" when at least one repository head carries it
    liveset = [util.tolocal(repo[h].branch())
               for h in repo.heads()]
    # sort by (activity, revision, name) and show newest first
    entries = util.sort([(name in liveset, repo.changelog.rev(n), name)
                         for name, n in repo.branchtags().items()])
    entries.reverse()

    for islive, rev, name in entries:
        if active and not islive:
            continue
        if ui.quiet:
            ui.write("%s\n" % name)
            continue
        # pad the rev so the hash column lines up across rows
        revcol = str(rev).rjust(31 - util.locallen(name))
        note = ((not islive) and " (inactive)") or ''
        ui.write("%s %s:%s%s\n" % (name, revcol,
                                   hexfunc(repo.lookup(rev)), note))
413 413
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        # --all is shorthand for "the destination has only the null rev"
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # fixed typo in user-visible message ("specifiying")
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk ancestors of the requested revs (or all heads) until we
        # hit nodes reachable from the bases; 'o' collects the roots of
        # the outgoing set, oldest first.
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                # all parents are known to the destination: n is a root
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the (possibly default) remote repo
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
486 486
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo[opts['rev']]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    matched = False
    for path in ctx.walk(matcher):
        out = cmdutil.make_file(repo, opts['output'], ctx.node(),
                                pathname=path)
        contents = ctx[path].data()
        if opts.get('decode'):
            # apply the repo's decode filters (e.g. EOL conversion)
            contents = repo.wwritedata(path, contents)
        out.write(contents)
        matched = True
    # exit status 0 only if at least one file was printed
    if matched:
        return 0
    return 1
513 513
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd overrides before contacting the source
    cmdutil.setremoteconfig(ui, opts)
    # all real work (hardlink vs pull decision, update) happens in hg.clone
    hg.clone(ui, source, dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])
564 564
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If you are committing the result of a merge, do not provide any
    file names or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # actual commit logic lives in localrepo; this closure just adapts
    # the cmdutil.commit calling convention to repo.commit
    def commitfunc(ui, repo, message, match, opts):
        return repo.commit(match.files(), message, opts['user'], opts['date'],
                           match, force_editor=opts.get('force_editor'))

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        # nothing was committed (e.g. no changes)
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    # decide whether the new changeset created an additional head
    if rev - 1 in parents:
        # one of the parents was the old tip
        pass
    elif (parents == (nullrev, nullrev) or
          len(cl.heads(cl.node(parents[0]))) > 1 and
          (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
        # either a new root, or every parent branch already had another
        # head, so this commit added one
        ui.status(_('created new head\n'))

    # echo the new changeset id: full hash when debugging, short otherwise
    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
603 603
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # take the working-dir lock while cmdutil.copy mutates the dirstate;
    # 'del wlock' releases it even if cmdutil.copy raises
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        del wlock
623 623
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # two calling conventions: (rev1, rev2) against the current repo's
    # changelog, or (indexfile, rev1, rev2) against an arbitrary revlog
    if len(args) == 2:
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        log = repo.changelog
        lookup = repo.lookup
    elif len(args) == 3:
        index, rev1, rev2 = args
        log = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = log.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    anc = log.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
641 641
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts['options']:
        # complete command names rather than flags
        ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table))))
        return

    # collect the global flags plus, when a command is given, its own
    flags = []
    tables = [globalopts]
    if cmd:
        aliases, entry = cmdutil.findcmd(ui, cmd, table)
        tables.append(entry[1])
    for tbl in tables:
        for opt in tbl:
            if opt[0]:
                flags.append('-%s' % opt[0])
            flags.append('--%s' % opt[1])
    ui.write("%s\n" % "\n".join(flags))
660 660
def debugfsinfo(ui, path = "."):
    # Report filesystem capabilities (exec bit, symlinks, case
    # sensitivity) for the given path.
    # Fixes: the probe file handle was created via file(...).write('')
    # and never closed, and the probe file was leaked if any check
    # raised; close explicitly and unlink in a finally block.
    probe = '.debugfsinfo'
    fh = open(probe, 'w')
    fh.write('')
    fh.close()
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkcase(probe)
                                           and 'yes' or 'no'))
    finally:
        os.unlink(probe)
668 668
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    ctx = repo[rev]
    # hold the working-dir lock while rewriting the dirstate;
    # 'del wlock' releases it even on error
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        del wlock
677 677
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests;
    # states: n=normal, a=added, r=removed, m=merged
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and the reverse direction: manifest files must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
704 704
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name item may be requested
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, key, value in ui.walkconfig(untrusted=untrusted):
        fullname = section + '.' + key
        if not values:
            ui.write('%s=%s\n' % (fullname, value))
            continue
        for wanted in values:
            if wanted == section:
                # whole-section match: show name=value
                ui.write('%s=%s\n' % (fullname, value))
            elif wanted == fullname:
                # exact item match: show the bare value
                ui.write(value, '\n')
730 730
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # a missing second parent defaults to the null revision
    rev2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        del wlock
746 746
def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    # dirstate entries are (state, mode, size, mtime) tuples keyed by path
    for file_, ent in util.sort(repo.dirstate._map.items()):
        if showdate:
            if ent[3] == -1:
                # mtime unset (e.g. after a merge)
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        # 020000 is the symlink bit in the recorded mode (octal)
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
767 767
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the ".i" index name from the ".d" data file name
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
    try:
        node = rlog.lookup(rev)
        ui.write(rlog.revision(node))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
775 775
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # parsed is an (unixtime, tzoffset) pair
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
787 787
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except Exception:
            # a corrupt entry can make parents() fail; substitute null
            # parents so the rest of the index is still dumped.
            # (was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit)
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
            short(node), short(pp[0]), short(pp[1])))
802 802
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in rlog:
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        # one edge per parent; the second parent is omitted when null
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
814 814
def debuginstall(ui):
    '''test Mercurial installation'''

    # write 'contents' to a fresh temp file and return its path
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # running total of detected problems; also the return value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    # round-trip a generated diff through the configured patch tool and
    # verify the patched file matches the expected contents
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    # accept either a bare editor name or a command line with arguments
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
            problems += 1

    # check username
    # mirrors the HGUSER -> ui.username -> EMAIL resolution order
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
926 926
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or None
        origin = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(path)
        if not origin:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, origin[0], hex(origin[1])))
940 940
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = cmdutil.match(repo, pats, opts)
    paths = list(repo.walk(matcher))
    if not paths:
        return
    # size the two leading columns to the widest entries
    abswidth = max([len(p) for p in paths])
    relwidth = max([len(matcher.rel(p)) for p in paths])
    fmt = 'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in paths:
        exact = matcher.exact(p) and 'exact' or ''
        ui.write("%s\n" % (fmt % (p, matcher.rel(p), exact)).rstrip())
953 953
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve -r arguments into a pair of changelog nodes
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    matcher = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, match=matcher, opts=diffopts)
979 979
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note based on how many revs were resolved
    if len(revs) > 1:
        note = _('exporting patches:\n')
    else:
        note = _('exporting patch:\n')
    ui.note(note)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1020 1020
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # --print0 uses NUL as both field and record separator
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # memoize filelog objects; one per file across all revisions
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    # yield (lineno, colstart, colend, line text) for each regexp match
    # in 'body'
    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    # one matched line; equality is on line text only so that
    # difflinestates can diff match sets between revisions
    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    # matches[rev][fn] -> list of linestate; copies[rev][fn] -> rename source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    # diff two lists of linestates, yielding ('+'/'-', linestate) pairs
    # for lines whose match status changed (used by --all)
    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev[fn] -> last revision in which fn was seen
    prev = {}
    # print the matches for one file/revision; returns True if anything
    # was written
    def display(fn, rev, states, prevstates):
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts['files_with_matches']:
                # with -l, emit each (file, rev) pair only once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    # walk the changelog newest-to-oldest in windows; 'add' collects
    # matches for a revision, 'iter' compares them against the state
    # from the previously visited (newer) revision
    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo[rev]
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        # with -f, remember rename sources so matches
                        # can be tracked across copies
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except revlog.LookupError:
                    # file not present in this revision
                    pass
        elif st == 'iter':
            for fn, m in util.sort(matches[rev].items()):
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first revision
                        # with a match for this file (and its source)
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush files whose oldest visited revision still had matches
    for fn, state in util.sort(fstate.items()):
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1186 1186
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # With --rev, only heads reachable from that revision are considered.
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # Each argument may be a branch name or a revision on a branch;
        # visitedset deduplicates so each branch is handled only once.
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # Distinguish "named a revision" from "named the branch
                # itself" in the warning text.
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        # Exit status 1 signals "no heads found".
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1234 1234
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs accumulated by the nested helpers below and
    # rendered in one formatting pass at the end of this function.
    option_lists = []

    def addglobalopts(aliases):
        # Append either the global options (verbose mode) or a one-line
        # hint telling the user how to see more help.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Help for a single command; raises cmdutil.UnknownCommand when no
        # command matches, letting the caller fall through to topics and
        # extensions.
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(ui, name, table)
        except cmdutil.AmbiguousCommand, inst:
            # Ambiguous prefix: list every command that starts with it.
            select = lambda c: c.lstrip('^').startswith(inst.args[0])
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first docstring line only
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # Print a one-line summary for each command accepted by select().
        h = {}      # command name -> first line of its docstring
        cmds = {}   # command name -> full "name|alias" spec
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                # shortlist shows only commands marked with a leading "^"
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = util.sort(h)
        # column width for aligned output
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Help for a named topic from help.helptable; raises
        # cmdutil.UnknownCommand when the topic does not exist.
        v = None
        for i, d in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
                doc = d
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # topic text may be generated lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Help for an enabled extension, followed by its command list.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; keep the last
        # UnknownCommand so it can be re-raised if nothing matched.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            # hide deprecated options unless --verbose
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if ui.verbose:
        ui.write(_("\nspecial help topics:\n"))
        topics = []
        for i, d in help.helptable:
            l = i.split('|')
            topics.append((", ".join(l[:-1]), l[-1]))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        # align descriptions on the longest option string
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1431 1431
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # short hashes by default, full 40-digit hashes with --debug
    hexfunc = ui.debugflag and hex or short
    # no selector flags at all -> print the default summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote lookup: only the changeset id can be queried
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # working directory: show parent hash(es), "+" when there are
        # uncommitted changes
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # only non-default branch names are shown in the summary
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1501 1501
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --exact always requires a clean working dir; otherwise only when
    # --force was not given.
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # take the working-dir lock, then the store lock, for the whole run
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            # patch source: stdin, a local file, or a URL
            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            # patch.extract writes the diff to a temp file and returns the
            # metadata parsed from the message/patch header
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message priority: -m/-l, then the patch itself,
                # then (None) launch the editor
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    # --exact: update to the patch's recorded parent(s)
                    # before applying
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # best effort: adopt the patch's parents when they
                    # match the working dir; unknown nodes are ignored
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record adds/removes even if patching failed part-way
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # --exact: the resulting node must match the patch
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-to-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1617 1617
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # nothing incoming: remove the --bundle file if one was named.
        # Deliberately best-effort (opts["bundle"] may be unset/absent),
        # hence the bare except.
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # path of a temporary bundle to delete on the way out, if any
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            # honour --limit
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1683 1683
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # honour --ssh/--remotecmd for ssh:// destinations
    cmdutil.setremoteconfig(ui, opts)
    # create=1 makes hg.repository initialize instead of open the repo
    hg.repository(ui, dest, create=1)
1698 1698
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # line terminator: NUL with --print0 (for xargs -0), newline otherwise
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts.get('rev') or None

    found = False
    m = cmdutil.match(repo, pats, opts, default='relglob')
    # silence complaints about names that match nothing
    m.bad = lambda f, msg: False
    for abs in repo[rev].walk(m):
        # with no revision, report only files the dirstate knows about
        if not rev and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        else:
            name = (pats and m.rel(abs)) or abs
        ui.write(name, end)
        found = True

    # exit status 0 when at least one file matched, 1 otherwise
    if found:
        return 0
    return 1
1733 1733
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized access to the changeset tuple for a revision.
    # NOTE(review): from the uses below, index [2] holds the (date, tz)
    # pair, [3] the file list, [5] an extra dict with 'branch' — confirm
    # against repo[r].changeset() before relying on other indices.
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # highest revision for which rename data may be needed (for --copies)
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = len(repo)
    rcache = {}   # filename -> {linkrev: rename info}
    ncache = {}   # filename -> {filenode: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for fn: scan its filelog once, up to endrev
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except revlog.LookupError:
            pass
        return None

    # compiled --date matcher, or False when no date filter was given
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            # merge filters: a merge has exactly two real parents
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # every keyword must appear in user, description or files
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # count only changesets the displayer actually emitted
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1856 1856
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    node = node or rev

    # verbose-mode prefix, keyed on the file's flags character
    flagdecor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = repo[node]
    for path in ctx:
        if ui.debugflag:
            # file revision hash, right-aligned in a 40-char column
            ui.write("%40s " % hex(ctx.manifest()[path]))
        if ui.verbose:
            ui.write(flagdecor[ctx.flags(path)])
        ui.write("%s\n" % path)
1885 1885
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    # node and rev are two spellings of the same argument
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no explicit revision: pick the other head of the current branch,
        # but only when that choice is unambiguous
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # one head on this branch: nothing to merge here
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # exactly two heads, and we are on one of them: merge the other
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
1929 1929
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    # resolve the destination: explicit argument, default-push, or default
    url = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, revs, checkout = hg.parseurl(url, opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(r) for r in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    outgoing = repo.findoutgoing(other, force=opts['force'])
    if not outgoing:
        ui.status(_("no changes found\n"))
        return 1
    nodes = repo.changelog.nodesbetween(outgoing, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for node in nodes:
        # honour --limit
        if shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            # a merge has two real parents; skip it with --no-merges
            continue
        shown += 1
        displayer.show(changenode=node)
1965 1965
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    # repo[None] is the working directory context
    ctx = repo[opts.get('rev') or None]

    if file_:
        # a single literal file name is required; patterns are rejected
        matcher = cmdutil.match(repo, (file_,), opts)
        if matcher.anypats() or len(matcher.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = matcher.files()[0]
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except revlog.LookupError:
                # the file does not exist in this parent
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        flog = repo.file(file_)
        # map each file node to the changeset that introduced it
        nodes = [repo.lookup(flog.linkrev(fn)) for fn in filenodes]
    else:
        nodes = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(changenode=node)
2005 2005
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    pathitems = ui.configitems("paths")
    if not search:
        # no name given: dump every definition
        for alias, location in pathitems:
            ui.write("%s = %s\n" % (alias, util.hidepassword(location)))
        return
    for alias, location in pathitems:
        if alias == search:
            ui.write("%s\n" % util.hidepassword(location))
            return
    ui.warn(_("not found!\n"))
    return 1
2025 2025
def postincoming(ui, repo, modheads, optupdate, checkout):
    # After changesets have come in: update the working copy when that was
    # requested and is unambiguous, otherwise print a hint about what to
    # run next. modheads is the number of heads added/modified.
    if modheads == 0:
        return
    if optupdate and (modheads <= 1 or checkout):
        return hg.update(repo, checkout)
    if optupdate:
        # an update was requested, but new heads make the target ambiguous
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2038 2038
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]
      ssh://[user[:pass]@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        # resolve the requested revisions remotely; older servers may not
        # be able to do that
        try:
            revs = [other.lookup(r) for r in revs]
        except NoCapability:
            raise util.Abort(_("Other repository doesn't support revision "
                               "lookup, so a rev cannot be specified."))

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    # possibly update the working copy and/or print a follow-up hint
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2095 2095
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user[:pass]@]host[:port]/[path]
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # no explicit destination: try the 'default-push' path, then 'default'
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        # resolve -r arguments locally; they limit what is pushed
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # convert repo.push's result into a command exit code; r == 0
    # presumably means nothing was pushed -- confirm against repo.push
    return r == 0
2137 2137
2138 2138 def rawcommit(ui, repo, *pats, **opts):
2139 2139 """raw commit interface (DEPRECATED)
2140 2140
2141 2141 (DEPRECATED)
2142 2142 Lowlevel commit, for use in helper scripts.
2143 2143
2144 2144 This command is not intended to be used by normal users, as it is
2145 2145 primarily useful for importing from other SCMs.
2146 2146
2147 2147 This command is now deprecated and will be removed in a future
2148 2148 release, please use debugsetparents and commit instead.
2149 2149 """
2150 2150
2151 2151 ui.warn(_("(the rawcommit command is deprecated)\n"))
2152 2152
2153 2153 message = cmdutil.logmessage(opts)
2154 2154
2155 2155 files = cmdutil.match(repo, pats, opts).files()
2156 2156 if opts['files']:
2157 2157 files += open(opts['files']).read().splitlines()
2158 2158
2159 2159 parents = [repo.lookup(p) for p in opts['parent']]
2160 2160
2161 2161 try:
2162 2162 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2163 2163 except ValueError, inst:
2164 2164 raise util.Abort(str(inst))
2165 2165
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # nothing to recover (or recovery failed): report error exit code
    if not repo.recover():
        return 1
    # after a successful recovery, verify the repository
    return hg.verify(repo)
2177 2177
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after = opts.get('after')
    force = opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    st = repo.status(match=m, clean=True)
    modified, added, deleted, clean = st[0], st[1], st[3], st[6]

    def warn(files, reason):
        # explain why each skipped file was left alone
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    if force:
        # -f: remove everything, forget files pending add
        remove = modified + deleted + clean
        forget = added
    elif after:
        # -A: only record files already missing from disk
        remove = deleted
        forget = []
        warn(modified + added + clean, _('still exists'))
    else:
        # default: refuse to touch modified or freshly-added files
        remove = deleted + clean
        forget = []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in util.sort(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    repo.remove(remove, unlink=not after)
2232 2232
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # all the work is done by cmdutil.copy with rename=True, under the
    # working-directory lock
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # dropping the last reference releases the lock
        del wlock
2252 2252
def resolve(ui, repo, *pats, **opts):
    """resolve file merges from a branch merge or update

    This command will attempt to resolve unresolved merges from the
    last update or merge command. This will use the local file
    revision preserved at the last update or merge to cleanly retry
    the file merge attempt. With no file or options specified, this
    command will attempt to resolve all unresolved files.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    # --list/--mark/--unmark are mutually exclusive
    if len([o for o in opts if opts[o]]) > 1:
        raise util.Abort(_("too many options specified"))

    ms = merge_.mergestate(repo)
    matcher = cmdutil.match(repo, pats, opts)

    for f in ms:
        if not matcher(f):
            continue
        if opts.get("list"):
            ui.write("%s %s\n" % (ms[f].upper(), f))
        elif opts.get("mark"):
            ms.mark(f, "r")
        elif opts.get("unmark"):
            ms.mark(f, "u")
        else:
            # re-run the file merge against the working context and
            # the other merge parent
            wctx = repo[None]
            mctx = wctx.parents()[-1]
            ms.resolve(f, wctx, mctx)
2285 2285
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    # --date is translated to a revision number before anything else
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        # two parents means an uncommitted merge; which parent to revert
        # to is ambiguous, so an explicit -r is required
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts['rev']]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # parent manifest is loaded lazily further down, only if needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names maps abs path -> (relative path, exact-match flag)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        files = []

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # suppress complaints for paths (or prefixes of paths)
            # already found by the dirstate walk
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        # dicts give O(1) membership tests in the dispatch loop below
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # status message depends on whether the file was only added
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # each action is a pair of (accumulated file list, status message);
        # the message may be a callable computing the message per file
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in util.sort(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for/else: no dispatch entry matched, i.e. the file is
                # unchanged according to repo.status
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # write the target revision's content into the working dir
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    # file was never committed: just drop the pending add
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # file already gone from disk; that is fine
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        # dropping the last reference releases the lock
        del wlock
2503 2503
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (including user feedback) happens in localrepo
    repo.rollback()
2531 2531
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # emit the repository root path followed by a newline
    ui.write(repo.root + "\n")
2538 2538
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # --stdio: speak the wire protocol over stdin/stdout (used by
        # ssh clients) instead of starting an HTTP server
        if repo is None:
            raise RepoError(_("There is no Mercurial repository here"
                              " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    # mirror the relevant command line options into the [web] config
    # section so hgweb picks them up
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise RepoError(_("There is no Mercurial repository here"
                          " (.hg not found)"))

    class service:
        # adapter passed to cmdutil.service so serving can optionally be
        # daemonized
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            if not ui.verbose: return

            # the rest of init only builds the verbose "listening at"
            # status line
            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                # default HTTP port is omitted from the URL
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2602 2602
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    # --print0 terminates entries with NUL instead of newline
    end = opts['print0'] and '\0' or '\n'
    copy = {}
    # order matches the 'MAR!?IC' status characters below
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts[k]]
    if opts['all']:
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # no explicit state options: default set depends on quiet mode
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts['all'] or opts['copies']) and not opts['no_status']:
        # compute copy/rename information for the second output column
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts['no_status']:
                # -n: omit the leading status character
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2673 2673
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    # default to tagging the working directory parent
    rev_ = "."
    names = (name1,) + names
    # duplicate names collapse in dict.fromkeys, revealing repeats
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # a tag must be removed from the same store (local or global)
        # that it lives in
        expectedtype = opts['local'] and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        # removal is recorded as tagging the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts['force']:
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts['local'], opts['user'], date)
2737 2737
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # short hashes by default, full hashes with --debug
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            # quiet mode: names only
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # NOTE(review): on this path r is computed from the hn bound
            # just above, but the ui.write below sits in the else branch,
            # so this fallback line is never printed -- confirm whether
            # tags pointing at unknown nodes should produce output
            r = "    ?:%s" % hn
        else:
            # pad the tag name into a fixed-width column
            spaces = " " * (30 - util.locallen(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2770 2770
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # len(repo) - 1 is the highest (most recent) revision number
    cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
2784 2784
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            if os.path.exists(fname):
                # local bundle file
                f = open(fname, "rb")
            else:
                # anything else is treated as a URL
                f = urllib.urlopen(fname)
            try:
                gen = changegroup.readbundle(f, fname)
                modheads = repo.addchangegroup(gen, 'unbundle',
                                               'bundle:' + fname)
            finally:
                # addchangegroup has consumed the stream by now; close it
                # so we do not leak one file handle (or network
                # connection) per bundle applied
                f.close()
    finally:
        # dropping the last reference releases the lock
        del lock

    return postincoming(ui, repo, modheads, opts['update'], None)
2807 2807
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified.

    If the requested revision is a descendant of the working
    directory, any outstanding changes in the working directory will
    be merged into the result. If it is not directly descended but is
    on the same named branch, update aborts with a suggestion to use
    merge or update -C instead.

    If the requested revision is on a different named branch and the
    working directory is clean, update quietly switches branches.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for --date.
    """
    # the positional argument and -r are two spellings of the same thing
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # resolve --date to the latest revision matching that date
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        # -C: discard uncommitted changes
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2842 2842
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegate to hg.verify; its result becomes the command exit status
    return hg.verify(repo)
2854 2854
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # the copyright notice goes through ui.status so -q suppresses it
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2865 2865
2866 2866 # Command options and aliases are listed here, alphabetically
2867 2867
# options accepted by every command; each entry is
# (short flag, long flag, default, help text)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option groups, spliced into individual command tables below

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts
2922 2922
2923 2923 diffopts = [
2924 2924 ('a', 'text', None, _('treat all files as text')),
2925 2925 ('g', 'git', None, _('use git extended diff format')),
2926 2926 ('', 'nodates', None, _("don't include dates in diff headers"))
2927 2927 ]
2928 2928
2929 2929 diffopts2 = [
2930 2930 ('p', 'show-function', None, _('show which function each change is in')),
2931 2931 ('w', 'ignore-all-space', None,
2932 2932 _('ignore white space when comparing lines')),
2933 2933 ('b', 'ignore-space-change', None,
2934 2934 _('ignore changes in the amount of white space')),
2935 2935 ('B', 'ignore-blank-lines', None,
2936 2936 _('ignore changes whose lines are all blank')),
2937 2937 ('U', 'unified', '', _('number of lines of context to show'))
2938 2938 ]
2939 2939
# Command table: maps "name|alias..." to (function, options, synopsis).
# A leading "^" marks a command shown in the short help listing; "|"
# separates a command name from its aliases.
#
# Fix: the "resolve" synopsis string was the only one not wrapped in
# _() and was therefore invisible to translators; it is now marked for
# translation like every other synopsis.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [],
                      _('hg debugancestor [INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('hg debugstate [OPTS]')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
         ] + templateopts,
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))],
         _('hg resolve [OPTION] [FILES...]')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('p', 'patch', None, _('show patch')),
         ] + templateopts,
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3317 3317
# Space-separated names of commands that run without any repository.
norepo = ("clone init version help debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
# Commands for which a repository is used when present but not required.
optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,294 +1,294 b''
1 1 # hgweb/server.py - The standalone hg web server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
10 10 from mercurial import hg, util
11 11 from mercurial.repo import RepoError
12 12 from hgweb_mod import hgweb
13 13 from hgwebdir_mod import hgwebdir
14 14 from mercurial.i18n import gettext as _
15 15
def _splitURI(uri):
    """ Return path and query splited from uri

    Just like CGI environment, the path is unquoted, the query is
    not.
    """
    # str.partition yields ('', '', '') semantics we want: when there
    # is no '?', the query part comes back as the empty string.
    path, _sep, query = uri.partition('?')
    return urllib.unquote(path), query
27 27
28 28 class _error_logger(object):
29 29 def __init__(self, handler):
30 30 self.handler = handler
31 31 def flush(self):
32 32 pass
33 33 def write(self, str):
34 34 self.writelines(str.split('\n'))
35 35 def writelines(self, seq):
36 36 for msg in seq:
37 37 self.handler.log_error("HG error: %s", msg)
38 38
class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler that adapts BaseHTTPServer requests to the WSGI
    application stored on the owning server (self.server.application)."""

    # Overridden to 'https' by the SSL subclass below.
    url_scheme = 'http'

    def __init__(self, *args, **kargs):
        # HTTP/1.1 allows persistent connections; whether a connection
        # is actually kept alive is decided in send_headers().
        self.protocol_version = 'HTTP/1.1'
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)

    def _log_any(self, fp, format, *args):
        # Common-log-style line: "<client> - - [<timestamp>] <message>".
        fp.write("%s - - [%s] %s\n" % (self.client_address[0],
                                       self.log_date_time_string(),
                                       format % args))
        fp.flush()

    def log_error(self, format, *args):
        self._log_any(self.server.errorlog, format, *args)

    def log_message(self, format, *args):
        self._log_any(self.server.accesslog, format, *args)

    def do_write(self):
        # Swallow EPIPE: the client hung up early, which is not an error
        # worth reporting; re-raise everything else.
        try:
            self.do_hgweb()
        except socket.error, inst:
            if inst[0] != errno.EPIPE:
                raise

    def do_POST(self):
        # Top-level request boundary: any unexpected failure becomes a
        # 500 response and a logged traceback instead of a crash.
        try:
            self.do_write()
        except StandardError, inst:
            self._start_response("500 Internal Server Error", [])
            self._write("Internal Server Error")
            tb = "".join(traceback.format_exception(*sys.exc_info()))
            self.log_error("Exception happened during processing request '%s':\n%s",
                           self.path, tb)

    def do_GET(self):
        # GET and POST follow the same path through the WSGI app.
        self.do_POST()

    def do_hgweb(self):
        """Build a CGI/WSGI environ from the request and run the app,
        writing each chunk the application yields back to the client."""
        path, query = _splitURI(self.path)

        env = {}
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['REQUEST_METHOD'] = self.command
        env['SERVER_NAME'] = self.server.server_name
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_URI'] = self.path
        env['SCRIPT_NAME'] = self.server.prefix
        # PATH_INFO is the request path with the mount prefix stripped.
        env['PATH_INFO'] = path[len(self.server.prefix):]
        env['REMOTE_HOST'] = self.client_address[0]
        env['REMOTE_ADDR'] = self.client_address[0]
        if query:
            env['QUERY_STRING'] = query

        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        # All remaining headers become HTTP_* environ entries.
        for header in [h for h in self.headers.keys()
                       if h not in ('content-type', 'content-length')]:
            hkey = 'HTTP_' + header.replace('-', '_').upper()
            hval = self.headers.getheader(header)
            hval = hval.replace('\n', '').strip()
            if hval:
                env[hkey] = hval
        env['SERVER_PROTOCOL'] = self.request_version
        env['wsgi.version'] = (1, 0)
        env['wsgi.url_scheme'] = self.url_scheme
        env['wsgi.input'] = self.rfile
        env['wsgi.errors'] = _error_logger(self)
        env['wsgi.multithread'] = isinstance(self.server,
                                             SocketServer.ThreadingMixIn)
        env['wsgi.multiprocess'] = isinstance(self.server,
                                              SocketServer.ForkingMixIn)
        env['wsgi.run_once'] = 0

        # Per-request response state used by _start_response/_write.
        self.close_connection = True
        self.saved_status = None
        self.saved_headers = []
        self.sent_headers = False
        self.length = None
        for chunk in self.server.application(env, self._start_response):
            self._write(chunk)

    def send_headers(self):
        """Flush the status line and headers saved by _start_response,
        deciding whether the connection can be kept alive."""
        if not self.saved_status:
            raise AssertionError("Sending headers before start_response() called")
        saved_status = self.saved_status.split(None, 1)
        saved_status[0] = int(saved_status[0])
        self.send_response(*saved_status)
        # Keep-alive is only possible when a Content-Length was given.
        should_close = True
        for h in self.saved_headers:
            self.send_header(*h)
            if h[0].lower() == 'content-length':
                should_close = False
                self.length = int(h[1])
        # The value of the Connection header is a list of case-insensitive
        # tokens separated by commas and optional whitespace.
        if 'close' in [token.strip().lower() for token in
                       self.headers.get('connection', '').split(',')]:
            should_close = True
        if should_close:
            self.send_header('Connection', 'close')
        self.close_connection = should_close
        self.end_headers()
        self.sent_headers = True

    def _start_response(self, http_status, headers, exc_info=None):
        """WSGI start_response: record status/headers for later sending.

        Connection management headers are stripped because this handler
        controls connection reuse itself (see send_headers)."""
        code, msg = http_status.split(None, 1)
        code = int(code)
        self.saved_status = http_status
        bad_headers = ('connection', 'transfer-encoding')
        self.saved_headers = [h for h in headers
                              if h[0].lower() not in bad_headers]
        return self._write

    def _write(self, data):
        """WSGI write callable: send headers on first use, then body
        data, enforcing the declared Content-Length if there was one."""
        if not self.saved_status:
            raise AssertionError("data written before start_response() called")
        elif not self.sent_headers:
            self.send_headers()
        if self.length is not None:
            if len(data) > self.length:
                raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
            self.length = self.length - len(data)
        self.wfile.write(data)
        self.wfile.flush()
171 171
class _shgwebhandler(_hgwebhandler):
    """Variant of _hgwebhandler for connections wrapped by pyOpenSSL."""

    url_scheme = 'https'

    def setup(self):
        # The SSL connection object has no makefile(); wrap the raw
        # request in buffered file objects manually instead of relying
        # on the base class setup().
        self.connection = self.request
        self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
        self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)

    def do_write(self):
        from OpenSSL.SSL import SysCallError
        try:
            super(_shgwebhandler, self).do_write()
        except SysCallError, inst:
            # As in the plain handler, EPIPE just means the client left.
            if inst.args[0] != errno.EPIPE:
                raise

    def handle_one_request(self):
        from OpenSSL.SSL import SysCallError, ZeroReturnError
        try:
            super(_shgwebhandler, self).handle_one_request()
        except (SysCallError, ZeroReturnError):
            # SSL-level disconnects end the conversation quietly.
            self.close_connection = True
            pass
196 196
def create_server(ui, repo):
    """Create and return a configured HTTP(S) server for hgweb.

    Reads the [web] configuration from repo.ui (or ui when repo is
    None), picks a threading/forking mixin, wires up SSL when a
    certificate is configured, and binds to the configured address and
    port.  Raises util.Abort if the socket cannot be bound.
    """
    use_threads = True

    def openlog(opt, default):
        # '-' (or empty) means "use the default stream"; anything else
        # is appended to as a log file.
        if opt and opt != '-':
            return open(opt, 'a')
        return default

    if repo is None:
        myui = ui
    else:
        myui = repo.ui
    address = myui.config("web", "address", "")
    port = int(myui.config("web", "port", 8000))
    prefix = myui.config("web", "prefix", "")
    if prefix:
        # Normalize to a single leading slash and no trailing slash.
        prefix = "/" + prefix.strip("/")
    use_ipv6 = myui.configbool("web", "ipv6")
    webdir_conf = myui.config("web", "webdir_conf")
    ssl_cert = myui.config("web", "certificate")
    accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
    errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)

    # Prefer threads; fall back to forking (or a no-op mixin on
    # platforms without fork) when threading is unavailable.
    if use_threads:
        try:
            from threading import activeCount
        except ImportError:
            use_threads = False

    if use_threads:
        _mixin = SocketServer.ThreadingMixIn
    else:
        if hasattr(os, "fork"):
            _mixin = SocketServer.ForkingMixIn
        else:
            class _mixin:
                pass

    class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):

        # SO_REUSEADDR has broken semantics on windows
        if os.name == 'nt':
            allow_reuse_address = 0

        def __init__(self, *args, **kargs):
            BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
            self.accesslog = accesslog
            self.errorlog = errorlog
            self.daemon_threads = True
            def make_handler():
                # Serve either a multi-repo webdir or the single repo;
                # with neither configured there is nothing to serve.
                if webdir_conf:
                    hgwebobj = hgwebdir(webdir_conf, ui)
                elif repo is not None:
                    hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
                else:
                    raise RepoError(_("There is no Mercurial repository here"
                                      " (.hg not found)"))
                return hgwebobj
            self.application = make_handler()

            if ssl_cert:
                try:
                    from OpenSSL import SSL
                    ctx = SSL.Context(SSL.SSLv23_METHOD)
                except ImportError:
                    raise util.Abort(_("SSL support is unavailable"))
                ctx.use_privatekey_file(ssl_cert)
                ctx.use_certificate_file(ssl_cert)
                sock = socket.socket(self.address_family, self.socket_type)
                self.socket = SSL.Connection(ctx, sock)
                # Redo bind/activate on the SSL-wrapped socket.
                self.server_bind()
                self.server_activate()

            self.addr, self.port = self.socket.getsockname()[0:2]
            self.prefix = prefix
            self.fqaddr = socket.getfqdn(address)

    class IPv6HTTPServer(MercurialHTTPServer):
        address_family = getattr(socket, 'AF_INET6', None)

        def __init__(self, *args, **kwargs):
            if self.address_family is None:
                raise RepoError(_('IPv6 not available on this system'))
            super(IPv6HTTPServer, self).__init__(*args, **kwargs)

    if ssl_cert:
        handler = _shgwebhandler
    else:
        handler = _hgwebhandler

    try:
        if use_ipv6:
            return IPv6HTTPServer((address, port), handler)
        else:
            return MercurialHTTPServer((address, port), handler)
    except socket.error, inst:
        raise util.Abort(_("cannot start server at '%s:%d': %s")
                         % (address, port, inst.args[1]))
@@ -1,2070 +1,2070 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import lock, transaction, stat, errno, ui, store
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14 import match as match_
15 15 import merge as merge_
16 16
class localrepository(repo.repository):
    """Read/write access to a repository on the local filesystem."""
    # wire-protocol capabilities this repository advertises to peers
    capabilities = util.set(('lookup', 'changegroupsubset'))
    # on-disk format requirements this class knows how to read
    supported = ('revlogv1', 'store')
20 20
    def __init__(self, parentui, path=None, create=0):
        """Open the repository at *path*, or initialize it when create=1.

        parentui: ui object the repository's configuration inherits from.
        Raises repo.RepoError when the repository does not exist, already
        exists (while creating), or declares an unsupported requirement.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)     # opens files under .hg
        self.wopener = util.opener(self.root)    # opens working-dir files

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            requirements = []
            try:
                requirements = self.opener("requires").read().splitlines()
                for r in requirements:
                    if r not in self.supported:
                        raise repo.RepoError(_("requirement '%s' not supported") % r)
            except IOError, inst:
                # a missing requires file means an old repo; anything
                # else is a real I/O failure
                if inst.errno != errno.ENOENT:
                    raise

        self.store = store.store(requirements, self.path, util.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no per-repo hgrc is fine
            pass

        # lazily populated caches; see invalidate() for the reset path
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        # weakrefs so an open transaction/lock dies with its last user
        self._transref = self._lockref = self._wlockref = None
85 85
    def __getattr__(self, name):
        """Create the expensive changelog/manifest/dirstate attributes lazily.

        The first access constructs the object and caches it on the
        instance, so __getattr__ only fires once per attribute.
        """
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            # the store opener must use the changelog's revlog version
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # ensure the changelog exists first so defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError, name
100 100
101 101 def __getitem__(self, changeid):
102 102 if changeid == None:
103 103 return context.workingctx(self)
104 104 return context.changectx(self, changeid)
105 105
106 106 def __nonzero__(self):
107 107 return True
108 108
109 109 def __len__(self):
110 110 return len(self.changelog)
111 111
112 112 def __iter__(self):
113 113 for i in xrange(len(self)):
114 114 yield i
115 115
116 116 def url(self):
117 117 return 'file:' + self.root
118 118
119 119 def hook(self, name, throw=False, **args):
120 120 return hook.hook(self.ui, self, name, throw, **args)
121 121
    # characters that may never appear in a tag name
    tag_disallowed = ':\r\n'
123 123
    def _tag(self, names, node, message, local, user, date, parent=None,
             extra={}):
        """Record *names* as tags for *node*.

        names may be a single string or a sequence.  With local=True the
        tags go into .hg/localtags and no commit is made; otherwise
        .hgtags is updated and committed (returning the new changeset
        node).  parent, when given, commits against that revision
        instead of the dirstate parent.  Runs the pretag/tag hooks.
        NOTE(review): extra={} is a shared mutable default — safe only
        because it is passed through unmodified.
        """
        use_dirstate = parent is None

        # validate every name against the disallowed character set
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)

        def writetags(fp, names, munge, prevtags):
            # append at EOF, making sure the previous content ends in \n
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagstypecache and name in self._tagstypecache:
                    # re-record the old node so the new entry supersedes it
                    old = self.tagscache.get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError, err:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError, err:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            # committing against an explicit parent: start from that
            # revision's .hgtags content (if any)
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except revlog.LookupError:
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetags(fp, names, util.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
197 197
    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse to commit over uncommitted .hgtags changes; status()[:5]
        # covers modified/added/removed/deleted/unknown
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self._tag(names, node, message, local, user, date)
225 225
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}   # tag name -> (node, list of superseded nodes)
        tagtypes = {}     # tag name -> 'global' or 'local'

        def readtags(lines, fn, tagtype):
            # parse one tags file and merge it into globaltags/tagtypes
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # within one file, a later entry supersedes earlier ones;
                # keep the history of superseded nodes in h
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            # no localtags file is fine
            pass

        # flatten into the public caches, dropping tags deleted by
        # pointing them at nullid
        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
                self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()
        return self.tagscache
309 309
310 310 def tagtype(self, tagname):
311 311 '''
312 312 return the type of the given tag. result can be:
313 313
314 314 'local' : a local tag
315 315 'global' : a global tag
316 316 None : tag does not exist
317 317 '''
318 318
319 319 self.tags()
320 320
321 321 return self._tagstypecache.get(tagname)
322 322
    def _hgtagsnodes(self):
        """Return (rev, node, filenode) for the .hgtags file on each head.

        Heads are returned oldest first so later entries override earlier
        ones; heads sharing the same .hgtags filenode are deduplicated,
        keeping only the last occurrence.
        """
        heads = self.heads()
        heads.reverse()
        last = {}   # .hgtags filenode -> index of its entry in ret
        ret = []
        for node in heads:
            c = self[node]
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # null out the duplicate entry seen earlier
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
340 340
341 341 def tagslist(self):
342 342 '''return a list of tags ordered by revision'''
343 343 l = []
344 344 for t, n in self.tags().items():
345 345 try:
346 346 r = self.changelog.rev(n)
347 347 except:
348 348 r = -2 # sort to the beginning of the list if unknown
349 349 l.append((r, t, n))
350 350 return [(t, n) for r, t, n in util.sort(l)]
351 351
352 352 def nodetags(self, node):
353 353 '''return the tags associated with a node'''
354 354 if not self.nodetagscache:
355 355 self.nodetagscache = {}
356 356 for t, n in self.tags().items():
357 357 self.nodetagscache.setdefault(n, []).append(t)
358 358 return self.nodetagscache.get(node, [])
359 359
    def _branchtags(self, partial, lrev):
        """Bring the branch-head cache *partial* (valid up to rev lrev)
        up to the current tip, writing the refreshed cache to disk."""
        tiprev = len(self) - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
367 367
    def branchtags(self):
        """Return a mapping of branch name (local charset) to tip node,
        refreshing the on-disk branch cache as needed."""
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            # cache is current
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no usable in-memory state (e.g. after a strip): reload
            partial, last, lrev = self._readbranchcache()
        else:
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
393 393
    def _readbranchcache(self):
        """Read .hg/branch.cache from disk.

        Returns (partial, last, lrev) where partial maps branch name
        (UTF-8) to node, and last/lrev identify the tip the cache was
        valid for.  Any problem yields an empty cache.
        """
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no cache file: start from scratch
            return {}, nullid, nullrev

        try:
            # first line records the tip the cache was computed against
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # a corrupt cache is not fatal; rebuild from nothing
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
420 420
    def _writebranchcache(self, branches, tip, tiprev):
        """Write *branches* to .hg/branch.cache, headed by tip/tiprev.

        Best-effort: I/O errors are ignored since the cache can always
        be rebuilt.
        """
        try:
            # atomictemp so readers never see a half-written cache
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
430 430
431 431 def _updatebranchcache(self, partial, start, end):
432 432 for r in xrange(start, end):
433 433 c = self[r]
434 434 b = c.branch()
435 435 partial[b] = c.node()
436 436
437 437 def lookup(self, key):
438 438 if key == '.':
439 439 return self.dirstate.parents()[0]
440 440 elif key == 'null':
441 441 return nullid
442 442 n = self.changelog._match(key)
443 443 if n:
444 444 return n
445 445 if key in self.tags():
446 446 return self.tags()[key]
447 447 if key in self.branchtags():
448 448 return self.branchtags()[key]
449 449 n = self.changelog._partialmatch(key)
450 450 if n:
451 451 return n
452 452 try:
453 453 if len(key) == 20:
454 454 key = hex(key)
455 455 except:
456 456 pass
457 457 raise repo.RepoError(_("unknown revision '%s'") % key)
458 458
459 459 def local(self):
460 460 return True
461 461
462 462 def join(self, f):
463 463 return os.path.join(self.path, f)
464 464
465 465 def wjoin(self, f):
466 466 return os.path.join(self.root, f)
467 467
468 468 def rjoin(self, f):
469 469 return os.path.join(self.root, util.pconvert(f))
470 470
471 471 def file(self, f):
472 472 if f[0] == '/':
473 473 f = f[1:]
474 474 return filelog.filelog(self.sopener, f)
475 475
476 476 def changectx(self, changeid):
477 477 return self[changeid]
478 478
479 479 def parents(self, changeid=None):
480 480 '''get list of changectxs for parents of changeid'''
481 481 return self[changeid].parents()
482 482
483 483 def filectx(self, path, changeid=None, fileid=None):
484 484 """changeid can be a changeset revision, node, or tag.
485 485 fileid can be a file revision or node."""
486 486 return context.filectx(self, path, changeid, fileid)
487 487
488 488 def getcwd(self):
489 489 return self.dirstate.getcwd()
490 490
491 491 def pathto(self, f, cwd=None):
492 492 return self.dirstate.pathto(f, cwd)
493 493
494 494 def wfile(self, f, mode='r'):
495 495 return self.wopener(f, mode)
496 496
497 497 def _link(self, f):
498 498 return os.path.islink(self.wjoin(f))
499 499
    def _filter(self, filter, filename, data):
        """Run *data* for *filename* through the configured encode/decode
        filters.

        filter is a config section name ('encode' or 'decode'); each
        entry maps a file pattern to a filter command or registered
        data-filter name.  The compiled pattern list is cached per
        section in self.filterpats.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                # prefer a registered in-process data filter whose name
                # prefixes the command; the remainder becomes its params
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to running the shell command
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        # first matching pattern wins
        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
528 528
529 529 def adddatafilter(self, name, filter):
530 530 self._datafilters[name] = filter
531 531
532 532 def wread(self, filename):
533 533 if self._link(filename):
534 534 data = os.readlink(self.wjoin(filename))
535 535 else:
536 536 data = self.wopener(filename, 'r').read()
537 537 return self._filter("encode", filename, data)
538 538
    def wwrite(self, filename, data, flags):
        """Write *data* to working-dir file *filename*, applying decode
        filters and the 'l' (symlink) / 'x' (executable) flags."""
        data = self._filter("decode", filename, data)
        try:
            # remove any existing file/symlink first so a type change
            # (file <-> link) takes effect
            os.unlink(self.wjoin(filename))
        except OSError:
            # deliberately ignored: the file may simply not exist yet
            pass
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener(filename, 'w').write(data)
            if 'x' in flags:
                util.set_flags(self.wjoin(filename), False, True)
551 551
552 552 def wwritedata(self, filename, data):
553 553 return self._filter("decode", filename, data)
554 554
    def transaction(self):
        """Open (or nest into) a store transaction.

        Saves dirstate/branch snapshots for rollback, then returns a
        transaction object journaling into .hg/store/journal; the
        journal is renamed to undo* on close.  Only a weakref is kept,
        so the transaction aborts when the caller drops it.
        """
        if self._transref and self._transref():
            # a transaction is already running: nest inside it
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on successful close, journal.* files become the undo.* files
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
580 580
    def recover(self):
        """Roll back an interrupted transaction from its journal.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                # drop caches that may reference undone revisions
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            # dropping the last reference releases the lock
            del l
594 594
    def rollback(self):
        """Undo the last committed transaction using the undo* files,
        restoring the saved dirstate and branch as well."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # older repos have no undo.branch file
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            # releasing the references releases the locks
            del lock, wlock
617 617
618 618 def invalidate(self):
619 619 for a in "changelog manifest".split():
620 620 if a in self.__dict__:
621 621 delattr(self, a)
622 622 self.tagscache = None
623 623 self._tagstypecache = None
624 624 self.nodetagscache = None
625 625 self.branchcache = None
626 626 self._ubranchcache = None
627 627 self._branchcachetip = None
628 628
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file *lockname*.

        With wait=True, retries (up to the ui.timeout, default 600s)
        after warning about the current holder; otherwise LockHeld
        propagates.  acquirefn, when given, runs after acquisition.
        """
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
643 643
    def lock(self, wait=True):
        """Acquire the store lock; returns the existing lock if one is
        already held by this process (via the kept weakref)."""
        if self._lockref and self._lockref():
            return self._lockref()

        # invalidate caches on acquisition: another process may have
        # changed the store while we were unlocked
        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
652 652
    def wlock(self, wait=True):
        """Acquire the working-directory lock; reuses a live lock held
        by this process.

        The dirstate is written on release and invalidated on
        acquisition so it always reflects the on-disk state.
        """
        if self._wlockref and self._wlockref():
            return self._wlockref()

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
662 662
    def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new filelog node (or the existing parent node when
        the file is unchanged).  Appends the file name to *changelist*
        when a new revision is actually created.
        """

        fn = fctx.path()
        t = fctx.data()
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = fctx.renamed()
        if cp and cp[0] != fn:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #

            cf = cp[0]
            cr = manifest1.get(cf)
            nfp = fp2

            if manifest2: # branch merge
                if fp2 == nullid: # copied on remote side
                    if fp1 != nullid or cf in manifest2:
                        cr = manifest2[cf]
                        nfp = fp1

            # find source in nearest ancestor if we've lost track
            if not cr:
                self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                              (fn, cf))
                for a in self['.'].ancestors():
                    if cf in a:
                        cr = a[cf].filenode()
                        break

            self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
            meta["copy"] = cf
            meta["copyrev"] = hex(cr)
            fp1, fp2 = nullid, nfp
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
733 733
    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
        """Commit *files* against explicit parents, allowing an empty
        commit (used by import-style operations).

        NOTE(review): extra={} is a shared mutable default — safe only
        because it is passed through unmodified.
        """
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, extra=extra, empty_ok=True)
739 739
    def commit(self, files=None, text="", user=None, date=None,
               match=None, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Create a new changeset.

        With p1 unset the dirstate parents are committed ('normal'
        commit); otherwise the given parents are used (rawcommit).
        files limits the commit to those paths; match selects files when
        files is not given.  Returns the new changeset node, or None
        when nothing changed.
        """
        wlock = lock = None
        if files:
            files = util.unique(files)
        try:
            wlock = self.wlock()
            lock = self.lock()
            use_dirstate = (p1 is None) # not rawcommit

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True

                # a merge may only be committed whole
                if (not force and p2 != nullid and
                    (match and (match.files() or match.anypats()))):
                    raise util.Abort(_('cannot partially commit a merge '
                                       '(do not specify files or patterns)'))

                if files:
                    # classify the explicit file list via the dirstate
                    modified, removed = [], []
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            modified.append(f)
                        elif s == 'r':
                            removed.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                    changes = [modified, [], removed, [], []]
                else:
                    changes = self.status(match=match)
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)
                changes = [files, [], [], [], []]

            # refuse to commit while merge conflicts are unresolved
            ms = merge_.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))
            wctx = context.workingctx(self, (p1, p2), text, user, date,
                                      extra, changes)
            return self._commitctx(wctx, force, force_editor, empty_ok,
                                   use_dirstate, update_dirstate)
        finally:
            del lock, wlock
789 789
    def commitctx(self, ctx):
        """Commit a prepared context verbatim, without consulting or
        updating the dirstate."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            return self._commitctx(ctx, force=True, force_editor=False,
                                   empty_ok=True, use_dirstate=False,
                                   update_dirstate=False)
        finally:
            del lock, wlock
800 800
    def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
                   use_dirstate=True, update_dirstate=True):
        """Shared backend for commit()/commitctx(): write file revisions,
        the manifest and the changelog entry inside one transaction.

        Caller must hold both wlock and lock.  Returns the new changeset
        node, or None when there is nothing to commit.
        """
        tr = None
        valid = 0 # don't save the dirstate if this isn't set
        try:
            commit = util.sort(wctx.modified() + wctx.added())
            remove = wctx.removed()
            extra = wctx.extra().copy()
            branchname = extra['branch']
            user = wctx.user()
            text = wctx.description()

            p1, p2 = [p.node() for p in wctx.parents()]
            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                # an empty commit is only meaningful when it changes the
                # branch name or closes a merge
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            tr = self.transaction()
            # hand callees a proxy so they cannot keep the transaction alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    fctx = wctx.filectx(f)
                    newflags = fctx.flags()
                    new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        if m1.flags(f) != newflags:
                            changed.append(f)
                    m1.set(f, newflags)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # rawcommit: an unreadable file becomes a removal
                        remove.append(f)

            # update manifest
            m1.update(new)
            removed = []

            for f in util.sort(remove):
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append("") # Empty line between message and comments.
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            # normalize the message: strip trailing whitespace and
            # leading blank lines
            lines = [line.rstrip() for line in text.rstrip().splitlines()]
            while lines and not lines[0]:
                del lines[0]
            if not lines and use_dirstate:
                raise util.Abort(_("empty commit message"))
            text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, wctx.date(), extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache:
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
            if use_dirstate:
                for f in removed:
                    self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            # dropping the reference aborts an unclosed transaction
            del tr
931 931
932 932 def walk(self, match, node=None):
933 933 '''
934 934 walk recursively through the directory tree or a given
935 935 changeset, finding all files matched by the match
936 936 function
937 937 '''
938 938 return self[node].walk(match)
939 939
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns (modified, added, removed, deleted, unknown, ignored,
        clean), each a sorted list of file names.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        ctx1 = self[node1]
        ctx2 = self[node2]
        working = ctx2 == self[None]
        parentworking = working and ctx1 == self['.']
        match = match or match_.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        if working: # we need to scan the working dir
            s = self.dirstate.status(match, listignored, listclean, listunknown)
            # NB: this deliberately shadows the builtin cmp and the
            # unknown/ignored/clean parameters
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in cmp:
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    wlock = None
                    try:
                        try:
                            # best-effort: skip the fixup when the wlock
                            # cannot be taken without waiting
                            wlock = self.wlock(False)
                            for f in fixup:
                                self.dirstate.normal(f)
                        except lock.LockException:
                            pass
                    finally:
                        del wlock

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    # flag change, or node change with real content diff
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            # anything left in mf1 was not in mf2
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r
1027 1027
1028 1028 def add(self, list):
1029 1029 wlock = self.wlock()
1030 1030 try:
1031 1031 rejected = []
1032 1032 for f in list:
1033 1033 p = self.wjoin(f)
1034 1034 try:
1035 1035 st = os.lstat(p)
1036 1036 except:
1037 1037 self.ui.warn(_("%s does not exist!\n") % f)
1038 1038 rejected.append(f)
1039 1039 continue
1040 1040 if st.st_size > 10000000:
1041 1041 self.ui.warn(_("%s: files over 10MB may cause memory and"
1042 1042 " performance problems\n"
1043 1043 "(use 'hg revert %s' to unadd the file)\n")
1044 1044 % (f, f))
1045 1045 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1046 1046 self.ui.warn(_("%s not added: only files and symlinks "
1047 1047 "supported currently\n") % f)
1048 1048 rejected.append(p)
1049 1049 elif self.dirstate[f] in 'amn':
1050 1050 self.ui.warn(_("%s already tracked!\n") % f)
1051 1051 elif self.dirstate[f] == 'r':
1052 1052 self.dirstate.normallookup(f)
1053 1053 else:
1054 1054 self.dirstate.add(f)
1055 1055 return rejected
1056 1056 finally:
1057 1057 del wlock
1058 1058
1059 1059 def forget(self, list):
1060 1060 wlock = self.wlock()
1061 1061 try:
1062 1062 for f in list:
1063 1063 if self.dirstate[f] != 'a':
1064 1064 self.ui.warn(_("%s not added!\n") % f)
1065 1065 else:
1066 1066 self.dirstate.forget(f)
1067 1067 finally:
1068 1068 del wlock
1069 1069
1070 1070 def remove(self, list, unlink=False):
1071 1071 wlock = None
1072 1072 try:
1073 1073 if unlink:
1074 1074 for f in list:
1075 1075 try:
1076 1076 util.unlink(self.wjoin(f))
1077 1077 except OSError, inst:
1078 1078 if inst.errno != errno.ENOENT:
1079 1079 raise
1080 1080 wlock = self.wlock()
1081 1081 for f in list:
1082 1082 if unlink and os.path.exists(self.wjoin(f)):
1083 1083 self.ui.warn(_("%s still exists!\n") % f)
1084 1084 elif self.dirstate[f] == 'a':
1085 1085 self.dirstate.forget(f)
1086 1086 elif f not in self.dirstate:
1087 1087 self.ui.warn(_("%s not tracked!\n") % f)
1088 1088 else:
1089 1089 self.dirstate.remove(f)
1090 1090 finally:
1091 1091 del wlock
1092 1092
1093 1093 def undelete(self, list):
1094 1094 wlock = None
1095 1095 try:
1096 1096 manifests = [self.manifest.read(self.changelog.read(p)[0])
1097 1097 for p in self.dirstate.parents() if p != nullid]
1098 1098 wlock = self.wlock()
1099 1099 for f in list:
1100 1100 if self.dirstate[f] != 'r':
1101 self.ui.warn("%s not removed!\n" % f)
1101 self.ui.warn(_("%s not removed!\n") % f)
1102 1102 else:
1103 1103 m = f in manifests[0] and manifests[0] or manifests[1]
1104 1104 t = self.file(f).read(m[f])
1105 1105 self.wwrite(f, t, m.flags(f))
1106 1106 self.dirstate.normal(f)
1107 1107 finally:
1108 1108 del wlock
1109 1109
1110 1110 def copy(self, source, dest):
1111 1111 wlock = None
1112 1112 try:
1113 1113 p = self.wjoin(dest)
1114 1114 if not (os.path.exists(p) or os.path.islink(p)):
1115 1115 self.ui.warn(_("%s does not exist!\n") % dest)
1116 1116 elif not (os.path.isfile(p) or os.path.islink(p)):
1117 1117 self.ui.warn(_("copy failed: %s is not a file or a "
1118 1118 "symbolic link\n") % dest)
1119 1119 else:
1120 1120 wlock = self.wlock()
1121 1121 if dest not in self.dirstate:
1122 1122 self.dirstate.add(dest)
1123 1123 self.dirstate.copy(source, dest)
1124 1124 finally:
1125 1125 del wlock
1126 1126
1127 1127 def heads(self, start=None):
1128 1128 heads = self.changelog.heads(start)
1129 1129 # sort the output in rev descending order
1130 1130 heads = [(-self.changelog.rev(h), h) for h in heads]
1131 1131 return [n for (r, n) in util.sort(heads)]
1132 1132
    def branchheads(self, branch=None, start=None):
        """Return the head nodes of the named branch (default: the
        working directory's branch).  When start is given, the result
        is filtered through nodesbetween([start], heads)."""
        if branch is None:
            branch = self[None].branch()
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                # known non-head: replace it by its parents in the set
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self[rev].branch() == branch:
                # in-branch and not an ancestor of any head found so
                # far: a new head
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1180 1180
1181 1181 def branches(self, nodes):
1182 1182 if not nodes:
1183 1183 nodes = [self.changelog.tip()]
1184 1184 b = []
1185 1185 for n in nodes:
1186 1186 t = n
1187 1187 while 1:
1188 1188 p = self.changelog.parents(n)
1189 1189 if p[1] != nullid or p[0] == nullid:
1190 1190 b.append((t, n, p[0], p[1]))
1191 1191 break
1192 1192 n = p[0]
1193 1193 return b
1194 1194
1195 1195 def between(self, pairs):
1196 1196 r = []
1197 1197
1198 1198 for top, bottom in pairs:
1199 1199 n, l, i = top, [], 0
1200 1200 f = 1
1201 1201
1202 1202 while n != bottom:
1203 1203 p = self.changelog.parents(n)[0]
1204 1204 if i == f:
1205 1205 l.append(n)
1206 1206 f = f * 2
1207 1207 n = p
1208 1208 i += 1
1209 1209
1210 1210 r.append(l)
1211 1211
1212 1212 return r
1213 1213
1214 1214 def findincoming(self, remote, base=None, heads=None, force=False):
1215 1215 """Return list of roots of the subsets of missing nodes from remote
1216 1216
1217 1217 If base dict is specified, assume that these nodes and their parents
1218 1218 exist on the remote side and that no child of a node of base exists
1219 1219 in both remote and self.
1220 1220 Furthermore base will be updated to include the nodes that exists
1221 1221 in self and remote but no children exists in self and remote.
1222 1222 If a list of heads is specified, return only nodes which are heads
1223 1223 or ancestors of these heads.
1224 1224
1225 1225 All the ancestors of base are in self and in remote.
1226 1226 All the descendants of the list returned are missing in self.
1227 1227 (and so we know that the rest of the nodes are missing in remote, see
1228 1228 outgoing)
1229 1229 """
1230 1230 m = self.changelog.nodemap
1231 1231 search = []
1232 1232 fetch = {}
1233 1233 seen = {}
1234 1234 seenbranch = {}
1235 1235 if base == None:
1236 1236 base = {}
1237 1237
1238 1238 if not heads:
1239 1239 heads = remote.heads()
1240 1240
1241 1241 if self.changelog.tip() == nullid:
1242 1242 base[nullid] = 1
1243 1243 if heads != [nullid]:
1244 1244 return [nullid]
1245 1245 return []
1246 1246
1247 1247 # assume we're closer to the tip than the root
1248 1248 # and start by examining the heads
1249 1249 self.ui.status(_("searching for changes\n"))
1250 1250
1251 1251 unknown = []
1252 1252 for h in heads:
1253 1253 if h not in m:
1254 1254 unknown.append(h)
1255 1255 else:
1256 1256 base[h] = 1
1257 1257
1258 1258 if not unknown:
1259 1259 return []
1260 1260
1261 1261 req = dict.fromkeys(unknown)
1262 1262 reqcnt = 0
1263 1263
1264 1264 # search through remote branches
1265 1265 # a 'branch' here is a linear segment of history, with four parts:
1266 1266 # head, root, first parent, second parent
1267 1267 # (a branch always has two parents (or none) by definition)
1268 1268 unknown = remote.branches(unknown)
1269 1269 while unknown:
1270 1270 r = []
1271 1271 while unknown:
1272 1272 n = unknown.pop(0)
1273 1273 if n[0] in seen:
1274 1274 continue
1275 1275
1276 1276 self.ui.debug(_("examining %s:%s\n")
1277 1277 % (short(n[0]), short(n[1])))
1278 1278 if n[0] == nullid: # found the end of the branch
1279 1279 pass
1280 1280 elif n in seenbranch:
1281 1281 self.ui.debug(_("branch already found\n"))
1282 1282 continue
1283 1283 elif n[1] and n[1] in m: # do we know the base?
1284 1284 self.ui.debug(_("found incomplete branch %s:%s\n")
1285 1285 % (short(n[0]), short(n[1])))
1286 1286 search.append(n) # schedule branch range for scanning
1287 1287 seenbranch[n] = 1
1288 1288 else:
1289 1289 if n[1] not in seen and n[1] not in fetch:
1290 1290 if n[2] in m and n[3] in m:
1291 1291 self.ui.debug(_("found new changeset %s\n") %
1292 1292 short(n[1]))
1293 1293 fetch[n[1]] = 1 # earliest unknown
1294 1294 for p in n[2:4]:
1295 1295 if p in m:
1296 1296 base[p] = 1 # latest known
1297 1297
1298 1298 for p in n[2:4]:
1299 1299 if p not in req and p not in m:
1300 1300 r.append(p)
1301 1301 req[p] = 1
1302 1302 seen[n[0]] = 1
1303 1303
1304 1304 if r:
1305 1305 reqcnt += 1
1306 1306 self.ui.debug(_("request %d: %s\n") %
1307 1307 (reqcnt, " ".join(map(short, r))))
1308 1308 for p in xrange(0, len(r), 10):
1309 1309 for b in remote.branches(r[p:p+10]):
1310 1310 self.ui.debug(_("received %s:%s\n") %
1311 1311 (short(b[0]), short(b[1])))
1312 1312 unknown.append(b)
1313 1313
1314 1314 # do binary search on the branches we found
1315 1315 while search:
1316 1316 n = search.pop(0)
1317 1317 reqcnt += 1
1318 1318 l = remote.between([(n[0], n[1])])[0]
1319 1319 l.append(n[1])
1320 1320 p = n[0]
1321 1321 f = 1
1322 1322 for i in l:
1323 1323 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1324 1324 if i in m:
1325 1325 if f <= 2:
1326 1326 self.ui.debug(_("found new branch changeset %s\n") %
1327 1327 short(p))
1328 1328 fetch[p] = 1
1329 1329 base[i] = 1
1330 1330 else:
1331 1331 self.ui.debug(_("narrowed branch search to %s:%s\n")
1332 1332 % (short(p), short(i)))
1333 1333 search.append((p, i))
1334 1334 break
1335 1335 p, f = i, f * 2
1336 1336
1337 1337 # sanity check our fetch list
1338 1338 for f in fetch.keys():
1339 1339 if f in m:
1340 1340 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1341 1341
1342 1342 if base.keys() == [nullid]:
1343 1343 if force:
1344 1344 self.ui.warn(_("warning: repository is unrelated\n"))
1345 1345 else:
1346 1346 raise util.Abort(_("repository is unrelated"))
1347 1347
1348 1348 self.ui.debug(_("found new changesets starting at ") +
1349 1349 " ".join([short(f) for f in fetch]) + "\n")
1350 1350
1351 1351 self.ui.debug(_("%d total queries\n") % reqcnt)
1352 1352
1353 1353 return fetch.keys()
1354 1354
1355 1355 def findoutgoing(self, remote, base=None, heads=None, force=False):
1356 1356 """Return list of nodes that are roots of subsets not in remote
1357 1357
1358 1358 If base dict is specified, assume that these nodes and their parents
1359 1359 exist on the remote side.
1360 1360 If a list of heads is specified, return only nodes which are heads
1361 1361 or ancestors of these heads, and return a second element which
1362 1362 contains all remote heads which get new children.
1363 1363 """
1364 1364 if base == None:
1365 1365 base = {}
1366 1366 self.findincoming(remote, base, heads, force=force)
1367 1367
1368 1368 self.ui.debug(_("common changesets up to ")
1369 1369 + " ".join(map(short, base.keys())) + "\n")
1370 1370
1371 1371 remain = dict.fromkeys(self.changelog.nodemap)
1372 1372
1373 1373 # prune everything remote has from the tree
1374 1374 del remain[nullid]
1375 1375 remove = base.keys()
1376 1376 while remove:
1377 1377 n = remove.pop(0)
1378 1378 if n in remain:
1379 1379 del remain[n]
1380 1380 for p in self.changelog.parents(n):
1381 1381 remove.append(p)
1382 1382
1383 1383 # find every node whose parents have been pruned
1384 1384 subset = []
1385 1385 # find every remote head that will get new children
1386 1386 updated_heads = {}
1387 1387 for n in remain:
1388 1388 p1, p2 = self.changelog.parents(n)
1389 1389 if p1 not in remain and p2 not in remain:
1390 1390 subset.append(n)
1391 1391 if heads:
1392 1392 if p1 in heads:
1393 1393 updated_heads[p1] = True
1394 1394 if p2 in heads:
1395 1395 updated_heads[p2] = True
1396 1396
1397 1397 # this is the set of all roots we have to push
1398 1398 if heads:
1399 1399 return subset, updated_heads.keys()
1400 1400 else:
1401 1401 return subset
1402 1402
1403 1403 def pull(self, remote, heads=None, force=False):
1404 1404 lock = self.lock()
1405 1405 try:
1406 1406 fetch = self.findincoming(remote, heads=heads, force=force)
1407 1407 if fetch == [nullid]:
1408 1408 self.ui.status(_("requesting all changes\n"))
1409 1409
1410 1410 if not fetch:
1411 1411 self.ui.status(_("no changes found\n"))
1412 1412 return 0
1413 1413
1414 1414 if heads is None:
1415 1415 cg = remote.changegroup(fetch, 'pull')
1416 1416 else:
1417 1417 if 'changegroupsubset' not in remote.capabilities:
1418 1418 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1419 1419 cg = remote.changegroupsubset(fetch, heads, 'pull')
1420 1420 return self.addchangegroup(cg, 'pull', remote.url())
1421 1421 finally:
1422 1422 del lock
1423 1423
1424 1424 def push(self, remote, force=False, revs=None):
1425 1425 # there are two ways to push to remote repo:
1426 1426 #
1427 1427 # addchangegroup assumes local user can lock remote
1428 1428 # repo (local filesystem, old ssh servers).
1429 1429 #
1430 1430 # unbundle assumes local user cannot lock remote repo (new ssh
1431 1431 # servers, http servers).
1432 1432
1433 1433 if remote.capable('unbundle'):
1434 1434 return self.push_unbundle(remote, force, revs)
1435 1435 return self.push_addchangegroup(remote, force, revs)
1436 1436
    def prepush(self, remote, force, revs):
        """Figure out what to push and refuse pushes that would create
        new remote heads (unless force).

        Returns (changegroup, remote_heads) when there is something to
        push, otherwise (None, 1) for "no changes found" or (None, 0)
        when the push was refused.
        """
        base = {}
        remote_heads = remote.heads()
        # inc: roots of changesets the remote has that we don't
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # empty remote repo: pushing cannot add extra heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        # remote head known locally; it stays a head
                        # only if no outgoing head descends from it
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
        elif inc:
            # forced push over unseen remote changesets: warn only
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1492 1492
1493 1493 def push_addchangegroup(self, remote, force, revs):
1494 1494 lock = remote.lock()
1495 1495 try:
1496 1496 ret = self.prepush(remote, force, revs)
1497 1497 if ret[0] is not None:
1498 1498 cg, remote_heads = ret
1499 1499 return remote.addchangegroup(cg, 'push', self.url())
1500 1500 return ret[1]
1501 1501 finally:
1502 1502 del lock
1503 1503
1504 1504 def push_unbundle(self, remote, force, revs):
1505 1505 # local repo finds heads on server, finds out what revs it
1506 1506 # must push. once revs transferred, if server finds it has
1507 1507 # different heads (someone else won commit/push race), server
1508 1508 # aborts.
1509 1509
1510 1510 ret = self.prepush(remote, force, revs)
1511 1511 if ret[0] is not None:
1512 1512 cg, remote_heads = ret
1513 1513 if force: remote_heads = ['force']
1514 1514 return remote.unbundle(cg, remote_heads, 'push')
1515 1515 return ret[1]
1516 1516
1517 1517 def changegroupinfo(self, nodes, source):
1518 1518 if self.ui.verbose or source == 'bundle':
1519 1519 self.ui.status(_("%d changesets found\n") % len(nodes))
1520 1520 if self.ui.debugflag:
1521 1521 self.ui.debug(_("List of changesets:\n"))
1522 1522 for node in nodes:
1523 1523 self.ui.debug("%s\n" % hex(node))
1524 1524
    def changegroupsubset(self, bases, heads, source, extranodes=None):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        The caller can specify some nodes that must be included in the
        changegroup using the extranodes argument.  It should be a dict
        where the keys are the filenames (or 1 for the manifest), and the
        values are lists of (node, linknode) tuples, where node is a wanted
        node and linknode is the changelog node that should be transmitted as
        the linkrev.

        Returns a util.chunkbuffer streaming the generated changegroup.
        """

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst, source)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.items():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                    else:
                        # Otherwise we need a full manifest.
                        m = mnfst.read(mnfstnode)
                        # For every file we care about.
                        for f in changedfiles:
                            fnode = m.get(f, None)
                            # If it's in the manifest
                            if fnode is not None:
                                # See comments above.
                                clnode = msng_mnfst_set[mnfstnode]
                                ndset = msng_filenode_set.setdefault(f, {})
                                ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    # the manifest key (1) is not a filename; skip it here
                    if isinstance(fname, int):
                        continue
                    add_extra_nodes(fname,
                                    msng_filenode_set.setdefault(fname, {}))
                    changedfiles[fname] = 1
            # Go through all our files in order sorted by name.
            for fname in util.sort(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1819 1819
1820 1820 def changegroup(self, basenodes, source):
1821 1821 """Generate a changegroup of all nodes that we have that a recipient
1822 1822 doesn't.
1823 1823
1824 1824 This is much easier than the previous function as we can assume that
1825 1825 the recipient has any changenode we aren't sending them."""
1826 1826
1827 1827 self.hook('preoutgoing', throw=True, source=source)
1828 1828
1829 1829 cl = self.changelog
1830 1830 nodes = cl.nodesbetween(basenodes, None)[0]
1831 1831 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1832 1832 self.changegroupinfo(nodes, source)
1833 1833
1834 1834 def identity(x):
1835 1835 return x
1836 1836
1837 1837 def gennodelst(log):
1838 1838 for r in log:
1839 1839 n = log.node(r)
1840 1840 if log.linkrev(n) in revset:
1841 1841 yield n
1842 1842
1843 1843 def changed_file_collector(changedfileset):
1844 1844 def collect_changed_files(clnode):
1845 1845 c = cl.read(clnode)
1846 1846 for fname in c[3]:
1847 1847 changedfileset[fname] = 1
1848 1848 return collect_changed_files
1849 1849
1850 1850 def lookuprevlink_func(revlog):
1851 1851 def lookuprevlink(n):
1852 1852 return cl.node(revlog.linkrev(n))
1853 1853 return lookuprevlink
1854 1854
1855 1855 def gengroup():
1856 1856 # construct a list of all changed files
1857 1857 changedfiles = {}
1858 1858
1859 1859 for chnk in cl.group(nodes, identity,
1860 1860 changed_file_collector(changedfiles)):
1861 1861 yield chnk
1862 1862
1863 1863 mnfst = self.manifest
1864 1864 nodeiter = gennodelst(mnfst)
1865 1865 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1866 1866 yield chnk
1867 1867
1868 1868 for fname in util.sort(changedfiles):
1869 1869 filerevlog = self.file(fname)
1870 1870 if not len(filerevlog):
1871 1871 raise util.Abort(_("empty or missing revlog for %s") % fname)
1872 1872 nodeiter = gennodelst(filerevlog)
1873 1873 nodeiter = list(nodeiter)
1874 1874 if nodeiter:
1875 1875 yield changegroup.chunkheader(len(fname))
1876 1876 yield fname
1877 1877 lookup = lookuprevlink_func(filerevlog)
1878 1878 for chnk in filerevlog.group(nodeiter, lookup):
1879 1879 yield chnk
1880 1880
1881 1881 yield changegroup.closechunk()
1882 1882
1883 1883 if nodes:
1884 1884 self.hook('outgoing', node=hex(nodes[0]), source=source)
1885 1885
1886 1886 return util.chunkbuffer(gengroup())
1887 1887
1888 1888 def addchangegroup(self, source, srctype, url, emptyok=False):
1889 1889 """add changegroup to repo.
1890 1890
1891 1891 return values:
1892 1892 - nothing changed or no source: 0
1893 1893 - more heads than before: 1+added heads (2..n)
1894 1894 - less heads than before: -1-removed heads (-2..-n)
1895 1895 - number of heads stays the same: 1
1896 1896 """
1897 1897 def csmap(x):
1898 1898 self.ui.debug(_("add changeset %s\n") % short(x))
1899 1899 return len(cl)
1900 1900
1901 1901 def revmap(x):
1902 1902 return cl.rev(x)
1903 1903
1904 1904 if not source:
1905 1905 return 0
1906 1906
1907 1907 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1908 1908
1909 1909 changesets = files = revisions = 0
1910 1910
1911 1911 # write changelog data to temp files so concurrent readers will not see
1912 1912 # inconsistent view
1913 1913 cl = self.changelog
1914 1914 cl.delayupdate()
1915 1915 oldheads = len(cl.heads())
1916 1916
1917 1917 tr = self.transaction()
1918 1918 try:
1919 1919 trp = weakref.proxy(tr)
1920 1920 # pull off the changeset group
1921 1921 self.ui.status(_("adding changesets\n"))
1922 1922 cor = len(cl) - 1
1923 1923 chunkiter = changegroup.chunkiter(source)
1924 1924 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1925 1925 raise util.Abort(_("received changelog group is empty"))
1926 1926 cnr = len(cl) - 1
1927 1927 changesets = cnr - cor
1928 1928
1929 1929 # pull off the manifest group
1930 1930 self.ui.status(_("adding manifests\n"))
1931 1931 chunkiter = changegroup.chunkiter(source)
1932 1932 # no need to check for empty manifest group here:
1933 1933 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1934 1934 # no new manifest will be created and the manifest group will
1935 1935 # be empty during the pull
1936 1936 self.manifest.addgroup(chunkiter, revmap, trp)
1937 1937
1938 1938 # process the files
1939 1939 self.ui.status(_("adding file changes\n"))
1940 1940 while 1:
1941 1941 f = changegroup.getchunk(source)
1942 1942 if not f:
1943 1943 break
1944 1944 self.ui.debug(_("adding %s revisions\n") % f)
1945 1945 fl = self.file(f)
1946 1946 o = len(fl)
1947 1947 chunkiter = changegroup.chunkiter(source)
1948 1948 if fl.addgroup(chunkiter, revmap, trp) is None:
1949 1949 raise util.Abort(_("received file revlog group is empty"))
1950 1950 revisions += len(fl) - o
1951 1951 files += 1
1952 1952
1953 1953 # make changelog see real files again
1954 1954 cl.finalize(trp)
1955 1955
1956 1956 newheads = len(self.changelog.heads())
1957 1957 heads = ""
1958 1958 if oldheads and newheads != oldheads:
1959 1959 heads = _(" (%+d heads)") % (newheads - oldheads)
1960 1960
1961 1961 self.ui.status(_("added %d changesets"
1962 1962 " with %d changes to %d files%s\n")
1963 1963 % (changesets, revisions, files, heads))
1964 1964
1965 1965 if changesets > 0:
1966 1966 self.hook('pretxnchangegroup', throw=True,
1967 1967 node=hex(self.changelog.node(cor+1)), source=srctype,
1968 1968 url=url)
1969 1969
1970 1970 tr.close()
1971 1971 finally:
1972 1972 del tr
1973 1973
1974 1974 if changesets > 0:
1975 1975 # forcefully update the on-disk branch cache
1976 1976 self.ui.debug(_("updating the branch cache\n"))
1977 1977 self.branchtags()
1978 1978 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1979 1979 source=srctype, url=url)
1980 1980
1981 1981 for i in xrange(cor + 1, cnr + 1):
1982 1982 self.hook("incoming", node=hex(self.changelog.node(i)),
1983 1983 source=srctype, url=url)
1984 1984
1985 1985 # never return 0 here:
1986 1986 if newheads < oldheads:
1987 1987 return newheads - oldheads - 1
1988 1988 else:
1989 1989 return newheads - oldheads + 1
1990 1990
1991 1991
1992 1992 def stream_in(self, remote):
1993 1993 fp = remote.stream_out()
1994 1994 l = fp.readline()
1995 1995 try:
1996 1996 resp = int(l)
1997 1997 except ValueError:
1998 1998 raise util.UnexpectedOutput(
1999 1999 _('Unexpected response from remote server:'), l)
2000 2000 if resp == 1:
2001 2001 raise util.Abort(_('operation forbidden by server'))
2002 2002 elif resp == 2:
2003 2003 raise util.Abort(_('locking the remote repository failed'))
2004 2004 elif resp != 0:
2005 2005 raise util.Abort(_('the server sent an unknown error code'))
2006 2006 self.ui.status(_('streaming all changes\n'))
2007 2007 l = fp.readline()
2008 2008 try:
2009 2009 total_files, total_bytes = map(int, l.split(' ', 1))
2010 2010 except (ValueError, TypeError):
2011 2011 raise util.UnexpectedOutput(
2012 2012 _('Unexpected response from remote server:'), l)
2013 2013 self.ui.status(_('%d files to transfer, %s of data\n') %
2014 2014 (total_files, util.bytecount(total_bytes)))
2015 2015 start = time.time()
2016 2016 for i in xrange(total_files):
2017 2017 # XXX doesn't support '\n' or '\r' in filenames
2018 2018 l = fp.readline()
2019 2019 try:
2020 2020 name, size = l.split('\0', 1)
2021 2021 size = int(size)
2022 2022 except ValueError, TypeError:
2023 2023 raise util.UnexpectedOutput(
2024 2024 _('Unexpected response from remote server:'), l)
2025 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2025 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2026 2026 ofp = self.sopener(name, 'w')
2027 2027 for chunk in util.filechunkiter(fp, limit=size):
2028 2028 ofp.write(chunk)
2029 2029 ofp.close()
2030 2030 elapsed = time.time() - start
2031 2031 if elapsed <= 0:
2032 2032 elapsed = 0.001
2033 2033 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2034 2034 (util.bytecount(total_bytes), elapsed,
2035 2035 util.bytecount(total_bytes / elapsed)))
2036 2036 self.invalidate()
2037 2037 return len(self.heads()) + 1
2038 2038
2039 2039 def clone(self, remote, heads=[], stream=False):
2040 2040 '''clone remote repository.
2041 2041
2042 2042 keyword arguments:
2043 2043 heads: list of revs to clone (forces use of pull)
2044 2044 stream: use streaming clone if possible'''
2045 2045
2046 2046 # now, all clients that can request uncompressed clones can
2047 2047 # read repo formats supported by all servers that can serve
2048 2048 # them.
2049 2049
2050 2050 # if revlog format changes, client will have to check version
2051 2051 # and format flags on "stream" capability, and use
2052 2052 # uncompressed only if compatible.
2053 2053
2054 2054 if stream and not heads and remote.capable('stream'):
2055 2055 return self.stream_in(remote)
2056 2056 return self.pull(remote, heads)
2057 2057
2058 2058 # used to avoid circular references so destructors work
2059 2059 def aftertrans(files):
2060 2060 renamefiles = [tuple(t) for t in files]
2061 2061 def a():
2062 2062 for src, dest in renamefiles:
2063 2063 util.rename(src, dest)
2064 2064 return a
2065 2065
2066 2066 def instance(ui, path, create):
2067 2067 return localrepository(ui, util.drop_scheme('file', path), create)
2068 2068
2069 2069 def islocal(path):
2070 2070 return True
@@ -1,499 +1,499 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, hex, bin
9 9 from i18n import _
10 10 import errno, util, os, filemerge, copies, shutil
11 11
12 12 class mergestate(object):
13 13 '''track 3-way merge state of individual files'''
14 14 def __init__(self, repo):
15 15 self._repo = repo
16 16 self._read()
17 17 def reset(self, node):
18 18 self._state = {}
19 19 self._local = node
20 20 shutil.rmtree(self._repo.join("merge"), True)
21 21 def _read(self):
22 22 self._state = {}
23 23 try:
24 24 localnode = None
25 25 f = self._repo.opener("merge/state")
26 26 for i, l in enumerate(f):
27 27 if i == 0:
28 28 localnode = l[:-1]
29 29 else:
30 30 bits = l[:-1].split("\0")
31 31 self._state[bits[0]] = bits[1:]
32 32 self._local = bin(localnode)
33 33 except IOError, err:
34 34 if err.errno != errno.ENOENT:
35 35 raise
36 36 def _write(self):
37 37 f = self._repo.opener("merge/state", "w")
38 38 f.write(hex(self._local) + "\n")
39 39 for d, v in self._state.items():
40 40 f.write("\0".join([d] + v) + "\n")
41 41 def add(self, fcl, fco, fca, fd, flags):
42 42 hash = util.sha1(fcl.path()).hexdigest()
43 43 self._repo.opener("merge/" + hash, "w").write(fcl.data())
44 44 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
45 45 hex(fca.filenode()), fco.path(), flags]
46 46 self._write()
47 47 def __contains__(self, dfile):
48 48 return dfile in self._state
49 49 def __getitem__(self, dfile):
50 50 return self._state[dfile][0]
51 51 def __iter__(self):
52 52 l = self._state.keys()
53 53 l.sort()
54 54 for f in l:
55 55 yield f
56 56 def mark(self, dfile, state):
57 57 self._state[dfile][0] = state
58 58 self._write()
59 59 def resolve(self, dfile, wctx, octx):
60 60 if self[dfile] == 'r':
61 61 return 0
62 62 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
63 63 f = self._repo.opener("merge/" + hash)
64 64 self._repo.wwrite(dfile, f.read(), flags)
65 65 fcd = wctx[dfile]
66 66 fco = octx[ofile]
67 67 fca = self._repo.filectx(afile, fileid=anode)
68 68 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
69 69 if not r:
70 70 self.mark(dfile, 'r')
71 71 return r
72 72
73 73 def _checkunknown(wctx, mctx):
74 74 "check for collisions between unknown files and files in mctx"
75 75 for f in wctx.unknown():
76 76 if f in mctx and mctx[f].cmp(wctx[f].data()):
77 77 raise util.Abort(_("untracked file in working directory differs"
78 78 " from file in requested revision: '%s'") % f)
79 79
80 80 def _checkcollision(mctx):
81 81 "check for case folding collisions in the destination context"
82 82 folded = {}
83 83 for fn in mctx:
84 84 fold = fn.lower()
85 85 if fold in folded:
86 86 raise util.Abort(_("case-folding collision between %s and %s")
87 87 % (fn, folded[fold]))
88 88 folded[fold] = fn
89 89
90 90 def _forgetremoved(wctx, mctx, branchmerge):
91 91 """
92 92 Forget removed files
93 93
94 94 If we're jumping between revisions (as opposed to merging), and if
95 95 neither the working directory nor the target rev has the file,
96 96 then we need to remove it from the dirstate, to prevent the
97 97 dirstate from listing the file when it is no longer in the
98 98 manifest.
99 99
100 100 If we're merging, and the other revision has removed a file
101 101 that is not present in the working directory, we need to mark it
102 102 as removed.
103 103 """
104 104
105 105 action = []
106 106 state = branchmerge and 'r' or 'f'
107 107 for f in wctx.deleted():
108 108 if f not in mctx:
109 109 action.append((f, state))
110 110
111 111 if not branchmerge:
112 112 for f in wctx.removed():
113 113 if f not in mctx:
114 114 action.append((f, "f"))
115 115
116 116 return action
117 117
118 118 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
119 119 """
120 120 Merge p1 and p2 with ancestor ma and generate merge action list
121 121
122 122 overwrite = whether we clobber working files
123 123 partial = function to filter file lists
124 124 """
125 125
126 126 repo.ui.note(_("resolving manifests\n"))
127 127 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
128 128 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
129 129
130 130 m1 = p1.manifest()
131 131 m2 = p2.manifest()
132 132 ma = pa.manifest()
133 133 backwards = (pa == p2)
134 134 action = []
135 135 copy, copied, diverge = {}, {}, {}
136 136
137 137 def fmerge(f, f2=None, fa=None):
138 138 """merge flags"""
139 139 if not f2:
140 140 f2 = f
141 141 fa = f
142 142 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
143 143 if m == n: # flags agree
144 144 return m # unchanged
145 145 if m and n: # flags are set but don't agree
146 146 if not a: # both differ from parent
147 147 r = repo.ui.prompt(
148 148 _(" conflicting flags for %s\n"
149 149 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
150 150 return r != "n" and r or ''
151 151 if m == a:
152 152 return n # changed from m to n
153 153 return m # changed from n to m
154 154 if m and m != a: # changed from a to m
155 155 return m
156 156 if n and n != a: # changed from a to n
157 157 return n
158 158 return '' # flag was cleared
159 159
160 160 def act(msg, m, f, *args):
161 161 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
162 162 action.append((f, m) + args)
163 163
164 164 if pa and not (backwards or overwrite):
165 165 if repo.ui.configbool("merge", "followcopies", True):
166 166 dirs = repo.ui.configbool("merge", "followdirs", True)
167 167 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
168 168 copied = dict.fromkeys(copy.values())
169 169 for of, fl in diverge.items():
170 170 act("divergent renames", "dr", of, fl)
171 171
172 172 # Compare manifests
173 173 for f, n in m1.iteritems():
174 174 if partial and not partial(f):
175 175 continue
176 176 if f in m2:
177 177 if overwrite or backwards:
178 178 rflags = m2.flags(f)
179 179 else:
180 180 rflags = fmerge(f)
181 181 # are files different?
182 182 if n != m2[f]:
183 183 a = ma.get(f, nullid)
184 184 # are we clobbering?
185 185 if overwrite:
186 186 act("clobbering", "g", f, rflags)
187 187 # or are we going back in time and clean?
188 188 elif backwards and not n[20:]:
189 189 act("reverting", "g", f, rflags)
190 190 # are both different from the ancestor?
191 191 elif n != a and m2[f] != a:
192 192 act("versions differ", "m", f, f, f, rflags, False)
193 193 # is remote's version newer?
194 194 elif m2[f] != a:
195 195 act("remote is newer", "g", f, rflags)
196 196 # local is newer, not overwrite, check mode bits
197 197 elif m1.flags(f) != rflags:
198 198 act("update permissions", "e", f, rflags)
199 199 # contents same, check mode bits
200 200 elif m1.flags(f) != rflags:
201 201 act("update permissions", "e", f, rflags)
202 202 elif f in copied:
203 203 continue
204 204 elif f in copy:
205 205 f2 = copy[f]
206 206 if f2 not in m2: # directory rename
207 207 act("remote renamed directory to " + f2, "d",
208 208 f, None, f2, m1.flags(f))
209 209 elif f2 in m1: # case 2 A,B/B/B
210 210 act("local copied to " + f2, "m",
211 211 f, f2, f, fmerge(f, f2, f2), False)
212 212 else: # case 4,21 A/B/B
213 213 act("local moved to " + f2, "m",
214 214 f, f2, f, fmerge(f, f2, f2), False)
215 215 elif f in ma:
216 216 if n != ma[f] and not overwrite:
217 217 if repo.ui.prompt(
218 218 _(" local changed %s which remote deleted\n"
219 219 "use (c)hanged version or (d)elete?") % f,
220 220 _("[cd]"), _("c")) == _("d"):
221 221 act("prompt delete", "r", f)
222 222 else:
223 223 act("other deleted", "r", f)
224 224 else:
225 225 # file is created on branch or in working directory
226 226 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
227 227 act("remote deleted", "r", f)
228 228
229 229 for f, n in m2.iteritems():
230 230 if partial and not partial(f):
231 231 continue
232 232 if f in m1:
233 233 continue
234 234 if f in copied:
235 235 continue
236 236 if f in copy:
237 237 f2 = copy[f]
238 238 if f2 not in m1: # directory rename
239 239 act("local renamed directory to " + f2, "d",
240 240 None, f, f2, m2.flags(f))
241 241 elif f2 in m2: # rename case 1, A/A,B/A
242 242 act("remote copied to " + f, "m",
243 243 f2, f, f, fmerge(f2, f, f2), False)
244 244 else: # case 3,20 A/B/A
245 245 act("remote moved to " + f, "m",
246 246 f2, f, f, fmerge(f2, f, f2), True)
247 247 elif f in ma:
248 248 if overwrite or backwards:
249 249 act("recreating", "g", f, m2.flags(f))
250 250 elif n != ma[f]:
251 251 if repo.ui.prompt(
252 252 _("remote changed %s which local deleted\n"
253 253 "use (c)hanged version or leave (d)eleted?") % f,
254 254 _("[cd]"), _("c")) == _("c"):
255 255 act("prompt recreating", "g", f, m2.flags(f))
256 256 else:
257 257 act("remote created", "g", f, m2.flags(f))
258 258
259 259 return action
260 260
261 261 def actioncmp(a1, a2):
262 262 m1 = a1[1]
263 263 m2 = a2[1]
264 264 if m1 == m2:
265 265 return cmp(a1, a2)
266 266 if m1 == 'r':
267 267 return -1
268 268 if m2 == 'r':
269 269 return 1
270 270 return cmp(a1, a2)
271 271
272 272 def applyupdates(repo, action, wctx, mctx):
273 273 "apply the merge action list to the working directory"
274 274
275 275 updated, merged, removed, unresolved = 0, 0, 0, 0
276 276 ms = mergestate(repo)
277 277 ms.reset(wctx.parents()[0].node())
278 278 moves = []
279 279 action.sort(actioncmp)
280 280
281 281 # prescan for merges
282 282 for a in action:
283 283 f, m = a[:2]
284 284 if m == 'm': # merge
285 285 f2, fd, flags, move = a[2:]
286 286 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
287 287 fcl = wctx[f]
288 288 fco = mctx[f2]
289 289 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
290 290 ms.add(fcl, fco, fca, fd, flags)
291 291 if f != fd and move:
292 292 moves.append(f)
293 293
294 294 # remove renamed files after safely stored
295 295 for f in moves:
296 296 if util.lexists(repo.wjoin(f)):
297 297 repo.ui.debug(_("removing %s\n") % f)
298 298 os.unlink(repo.wjoin(f))
299 299
300 300 audit_path = util.path_auditor(repo.root)
301 301
302 302 for a in action:
303 303 f, m = a[:2]
304 304 if f and f[0] == "/":
305 305 continue
306 306 if m == "r": # remove
307 307 repo.ui.note(_("removing %s\n") % f)
308 308 audit_path(f)
309 309 try:
310 310 util.unlink(repo.wjoin(f))
311 311 except OSError, inst:
312 312 if inst.errno != errno.ENOENT:
313 313 repo.ui.warn(_("update failed to remove %s: %s!\n") %
314 314 (f, inst.strerror))
315 315 removed += 1
316 316 elif m == "m": # merge
317 317 f2, fd, flags, move = a[2:]
318 318 r = ms.resolve(fd, wctx, mctx)
319 319 if r > 0:
320 320 unresolved += 1
321 321 else:
322 322 if r is None:
323 323 updated += 1
324 324 else:
325 325 merged += 1
326 326 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
327 327 if f != fd and move and util.lexists(repo.wjoin(f)):
328 328 repo.ui.debug(_("removing %s\n") % f)
329 329 os.unlink(repo.wjoin(f))
330 330 elif m == "g": # get
331 331 flags = a[2]
332 332 repo.ui.note(_("getting %s\n") % f)
333 333 t = mctx.filectx(f).data()
334 334 repo.wwrite(f, t, flags)
335 335 updated += 1
336 336 elif m == "d": # directory rename
337 337 f2, fd, flags = a[2:]
338 338 if f:
339 339 repo.ui.note(_("moving %s to %s\n") % (f, fd))
340 340 t = wctx.filectx(f).data()
341 341 repo.wwrite(fd, t, flags)
342 342 util.unlink(repo.wjoin(f))
343 343 if f2:
344 344 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
345 345 t = mctx.filectx(f2).data()
346 346 repo.wwrite(fd, t, flags)
347 347 updated += 1
348 348 elif m == "dr": # divergent renames
349 349 fl = a[2]
350 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
350 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
351 351 for nf in fl:
352 352 repo.ui.warn(" %s\n" % nf)
353 353 elif m == "e": # exec
354 354 flags = a[2]
355 355 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
356 356
357 357 return updated, merged, removed, unresolved
358 358
359 359 def recordupdates(repo, action, branchmerge):
360 360 "record merge actions to the dirstate"
361 361
362 362 for a in action:
363 363 f, m = a[:2]
364 364 if m == "r": # remove
365 365 if branchmerge:
366 366 repo.dirstate.remove(f)
367 367 else:
368 368 repo.dirstate.forget(f)
369 369 elif m == "f": # forget
370 370 repo.dirstate.forget(f)
371 371 elif m in "ge": # get or exec change
372 372 if branchmerge:
373 373 repo.dirstate.normaldirty(f)
374 374 else:
375 375 repo.dirstate.normal(f)
376 376 elif m == "m": # merge
377 377 f2, fd, flag, move = a[2:]
378 378 if branchmerge:
379 379 # We've done a branch merge, mark this file as merged
380 380 # so that we properly record the merger later
381 381 repo.dirstate.merge(fd)
382 382 if f != f2: # copy/rename
383 383 if move:
384 384 repo.dirstate.remove(f)
385 385 if f != fd:
386 386 repo.dirstate.copy(f, fd)
387 387 else:
388 388 repo.dirstate.copy(f2, fd)
389 389 else:
390 390 # We've update-merged a locally modified file, so
391 391 # we set the dirstate to emulate a normal checkout
392 392 # of that file some time in the past. Thus our
393 393 # merge will appear as a normal local file
394 394 # modification.
395 395 repo.dirstate.normallookup(fd)
396 396 if move:
397 397 repo.dirstate.forget(f)
398 398 elif m == "d": # directory rename
399 399 f2, fd, flag = a[2:]
400 400 if not f2 and f not in repo.dirstate:
401 401 # untracked file moved
402 402 continue
403 403 if branchmerge:
404 404 repo.dirstate.add(fd)
405 405 if f:
406 406 repo.dirstate.remove(f)
407 407 repo.dirstate.copy(f, fd)
408 408 if f2:
409 409 repo.dirstate.copy(f2, fd)
410 410 else:
411 411 repo.dirstate.normal(fd)
412 412 if f:
413 413 repo.dirstate.forget(f)
414 414
415 415 def update(repo, node, branchmerge, force, partial):
416 416 """
417 417 Perform a merge between the working directory and the given node
418 418
419 419 branchmerge = whether to merge between branches
420 420 force = whether to force branch merging or file overwriting
421 421 partial = a function to filter file lists (dirstate not updated)
422 422 """
423 423
424 424 wlock = repo.wlock()
425 425 try:
426 426 wc = repo[None]
427 427 if node is None:
428 428 # tip of current branch
429 429 try:
430 430 node = repo.branchtags()[wc.branch()]
431 431 except KeyError:
432 432 if wc.branch() == "default": # no default branch!
433 433 node = repo.lookup("tip") # update to tip
434 434 else:
435 435 raise util.Abort(_("branch %s not found") % wc.branch())
436 436 overwrite = force and not branchmerge
437 437 pl = wc.parents()
438 438 p1, p2 = pl[0], repo[node]
439 439 pa = p1.ancestor(p2)
440 440 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
441 441 fastforward = False
442 442
443 443 ### check phase
444 444 if not overwrite and len(pl) > 1:
445 445 raise util.Abort(_("outstanding uncommitted merges"))
446 446 if branchmerge:
447 447 if pa == p2:
448 448 raise util.Abort(_("can't merge with ancestor"))
449 449 elif pa == p1:
450 450 if p1.branch() != p2.branch():
451 451 fastforward = True
452 452 else:
453 453 raise util.Abort(_("nothing to merge (use 'hg update'"
454 454 " or check 'hg heads')"))
455 455 if not force and (wc.files() or wc.deleted()):
456 456 raise util.Abort(_("outstanding uncommitted changes"))
457 457 elif not overwrite:
458 458 if pa == p1 or pa == p2: # linear
459 459 pass # all good
460 460 elif p1.branch() == p2.branch():
461 461 if wc.files() or wc.deleted():
462 462 raise util.Abort(_("crosses branches (use 'hg merge' or "
463 463 "'hg update -C' to discard changes)"))
464 464 raise util.Abort(_("crosses branches (use 'hg merge' "
465 465 "or 'hg update -C')"))
466 466 elif wc.files() or wc.deleted():
467 467 raise util.Abort(_("crosses named branches (use "
468 468 "'hg update -C' to discard changes)"))
469 469 else:
470 470 # Allow jumping branches if there are no changes
471 471 overwrite = True
472 472
473 473 ### calculate phase
474 474 action = []
475 475 if not force:
476 476 _checkunknown(wc, p2)
477 477 if not util.checkcase(repo.path):
478 478 _checkcollision(p2)
479 479 action += _forgetremoved(wc, p2, branchmerge)
480 480 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
481 481
482 482 ### apply phase
483 483 if not branchmerge: # just jump to the new rev
484 484 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
485 485 if not partial:
486 486 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
487 487
488 488 stats = applyupdates(repo, action, wc, p2)
489 489
490 490 if not partial:
491 491 recordupdates(repo, action, branchmerge)
492 492 repo.dirstate.setparents(fp1, fp2)
493 493 if not branchmerge and not fastforward:
494 494 repo.dirstate.setbranch(p2.branch())
495 495 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
496 496
497 497 return stats
498 498 finally:
499 499 del wlock
@@ -1,1330 +1,1330 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 12 import cStringIO, email.Parser, os, popen2, re, errno
13 13 import sys, tempfile, zlib
14 14
class PatchError(Exception):
    """Base exception for errors raised while parsing or applying patches."""
17 17
class NoHunks(PatchError):
    """Raised when a patch turns out to contain no applicable hunks."""
20 20
21 21 # helper functions
22 22
def copyfile(src, dst, basedir=None):
    """Copy src to dst, both interpreted relative to basedir (cwd by default).

    Creates any missing intermediate directories for the destination and
    aborts if the destination already exists.
    """
    basedir = basedir or os.getcwd()

    abssrc = os.path.join(basedir, src)
    absdst = os.path.join(basedir, dst)
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    # make sure the parent directory of the target exists
    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    # the extracted patch is accumulated in a temp file which is
    # returned to (and unlinked by) the caller
    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            # strip a leading "[PATCH n/m]"-style tag from the subject
            if subject.startswith('[PATCH'):
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend+1:].lstrip()
            subject = subject.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        # walk all MIME parts, collecting commit message text and diff data
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # everything before the diff start is (candidate) commit
                # message; "# HG changeset patch" headers override it
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatch:
                        # parse the metadata lines emitted by "hg export"
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and gitsendmail:
                        # git-send-email separates message from diffstat
                        # with a bare '---'; ignore everything after it
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                # plain-text part before any diff: append to the message
                message += '\n' + payload
    except:
        # clean up the temp file on any failure, then re-raise
        tmpfp.close()
        os.unlink(tmpname)
        raise
141 141
# Bitmask flags returned by readgitpatch() describing what work a
# git-style patch requires.
GP_PATCH  = 1 << 0 # we have to run patch
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch
145 145
def readgitpatch(fp, firstline=None):
    """extract git-style metadata about patches from <patchname>

    Returns a (dopatch, gitpatches) pair: dopatch is a bitmask of the
    GP_* flags, gitpatches a list of gitpatch records (one per file).
    """
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path
            self.oldpath = None
            self.mode = None
            self.op = 'MODIFY'
            self.lineno = 0
            self.binary = False

    def reader(fp, firstline):
        # iterate fp, optionally re-yielding a line already consumed
        # by the caller
        if firstline is not None:
            yield firstline
        for line in fp:
            yield line

    # Filter patch for git information
    gitre = re.compile('diff --git a/(.*) b/(.*)')
    gp = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in reader(fp, firstline):
        lineno += 1
        if line.startswith('diff --git'):
            # start of a new per-file section; flush the previous record
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                src, dst = m.group(1, 2)
                gp = gitpatch(dst)
                gp.lineno = lineno
        elif gp:
            if line.startswith('--- '):
                # '--- ' marks the start of hunk data: this file has a
                # textual diff, so patch must run
                if gp.op in ('COPY', 'RENAME'):
                    dopatch |= GP_FILTER
                gitpatches.append(gp)
                gp = None
                dopatch |= GP_PATCH
                continue
            # extended header lines carry the git metadata
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                gp.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                gp.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('new mode '):
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    if not gitpatches:
        # not a git patch at all: plain patch(1) input
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
217 217
def patch(patchname, ui, strip=1, cwd=None, files={}):
    """apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    files maps patched file names to (ctype, gitpatch) info and is
    mutated in place.
    NOTE(review): the mutable default for 'files' is shared across
    calls that omit it -- callers in this file appear to always pass
    their own dict; confirm before relying on the default.
    """
    # a user-configured external patcher takes precedence; otherwise
    # try the builtin patcher and fall back to gpatch/patch when the
    # builtin one finds nothing it can handle
    patcher = ui.config('ui', 'patch')
    args = []
    try:
        if patcher:
            return externalpatch(patcher, args, patchname, ui, strip, cwd,
                                 files)
        else:
            try:
                return internalpatch(patchname, ui, strip, cwd, files)
            except NoHunks:
                patcher = util.find_exe('gpatch') or util.find_exe('patch')
                ui.debug(_('no valid hunks found; trying with %r instead\n') %
                         patcher)
                if util.needbinarypatch():
                    args.append('--binary')
                return externalpatch(patcher, args, patchname, ui, strip, cwd,
                                     files)
    except PatchError, err:
        # surface patch failures as an Abort so the caller's UI shows
        # a clean message rather than a traceback
        s = str(err)
        if s:
            raise util.Abort(s)
        else:
            raise util.Abort(_('patch failed to apply'))
244 244
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Scrapes the external command's output to record patched files in
    'files' and to report fuzz/rejects/failures.
    """

    fuzz = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            # NOTE(review): 'pf'/'printed_file' are only bound once a
            # "patching file" line has been seen; patch(1) appears to
            # always print one first -- confirm before changing.
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        # non-zero exit status from the external patcher
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
280 280
def internalpatch(patchobj, ui, strip, cwd, files={}):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor.

    patchobj may be a file name or an already-open file-like object.
    """
    try:
        fp = file(patchobj, 'rb')
    except TypeError:
        # not a path: assume it is already a file-like object
        fp = patchobj
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        # always restore the original working directory
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    return ret > 0
299 299
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
# context-diff range lines: '--- start,end ----' / '*** start,end ****'
contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303 303
class patchfile:
    """A target file being patched.

    Holds the file's lines in memory, applies hunks to them (with
    offset and fuzz search), and writes the result plus any .rej file
    for failed hunks.
    """
    def __init__(self, ui, fname, missing=False):
        self.fname = fname
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = missing
        if not missing:
            try:
                fp = file(fname, 'rb')
                self.lines = fp.readlines()
                self.exists = True
            except IOError:
                pass
        else:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        if not self.exists:
            # file will be created: make sure its directory exists
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                os.makedirs(dirname)

        self.hash = {}          # line content -> list of line numbers
        self.dirty = 0          # set when lines have been modified
        self.offset = 0         # cumulative line-count drift from applied hunks
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def printfile(self, warn):
        # print the file name once, loudly (warn) or only in verbose mode
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(sorter)
        return cand

    def hashlines(self):
        # build the content -> line-number index used by findlines()
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'wb')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        # write the (possibly modified) lines back out, breaking hard
        # links so we do not clobber other names for the same inode
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                st = os.lstat(dest)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise
            if st and st.st_nlink > 1:
                os.unlink(dest)
            fp = file(dest, 'wb')
            if st and st.st_nlink > 1:
                # preserve the original mode after the unlink/recreate
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        """Apply hunk h; return 0 on clean apply, -1 on reject, or the
        fuzz factor (>0) when it applied with fuzz."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace (or delete) the file wholesale
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
            self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
            self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        # try increasing fuzz, trimming context from the top only first,
        # then from both ends
        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
502 502
class hunk:
    """One textual hunk of a patch (unified or context format).

    self.hunk holds the raw hunk lines (first entry is the @@/*** header);
    self.a holds the old-side lines WITH their '-'/' ' prefix, while
    self.b holds the new-side lines WITHOUT a prefix.
    """
    def __init__(self, desc, num, lr, context, create=False, remove=False):
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)
        self.create = create
        self.remove = remove and not create

    def read_unified_hunk(self, lr):
        # parse a '@@ -s,l +s,l @@' header and read the hunk body
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        # parse a context-format hunk and convert it to unified form
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # read the old-side lines ('- ', '! ' or '  ' prefixes)
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # read the new-side lines and merge them into self.hunk in order
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        # swap the old and new sides so the hunk un-applies the change
        self.create, self.remove = self.remove, self.create
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): assigning 'o' back is a no-op; the computed
            # reversed line 'n' looks like the intended value here --
            # confirm against diffhelpers/testsuite before changing.
            self.hunk[x] = o

    def fix_newline(self):
        # handle a trailing '\ No newline at end of file' marker
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        # True when both sides were fully read
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        return self.starta == 0 and self.lena == 0 and self.create

    def rmfile(self):
        return self.startb == 0 and self.lenb == 0 and self.remove

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        # old-side lines, optionally fuzzed
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        # raw hunk lines that belong to the new side (context + additions)
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        # new-side lines, optionally fuzzed
        return self.fuzzit(self.b, fuzz, toponly)
726 726
class binhunk:
    """A binary patch file. Only understands literals so far.

    The decoded file content ends up in self.text; self.hunk keeps the
    raw lines so a failed hunk can be written to a .rej file.
    """
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        return self.text is not None

    def new(self):
        # whole-file replacement: a single "line" holding all content
        return [self.text]

    def extract(self, fp):
        """Read and decode a 'literal N' base85 section from fp."""
        line = fp.readline()
        self.hunk.append(line)
        # skip ahead to the 'literal <size>' marker
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first character encodes this line's decoded byte count:
            # 'A'-'Z' -> 1-26, 'a'-'z' -> 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # fix: the format arguments must be a tuple -- previously
            # written as "% len(text), size" which made the second value
            # an extra PatchError argument and raised TypeError instead
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
772 772
def parsefilename(str):
    """Return the filename from a '--- filename<TAB or SPACE>junk' line."""
    # drop the 4-character '--- '/'+++ ' marker and the line ending
    remainder = str[4:].rstrip('\r\n')
    # a tab separates name from metadata; fall back to the first space
    for sep in ('\t', ' '):
        pos = remainder.find(sep)
        if pos >= 0:
            return remainder[:pos]
    return remainder
782 782
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose which file a hunk should be applied to.

    Returns (fname, missing): the path to patch after stripping 'strip'
    leading directories, and whether that file is expected but absent.
    """
    def pathstrip(path, count=1):
        # strip 'count' leading path components; returns (stripped
        # prefix, remaining path)
        pathlen = len(path)
        i = 0
        if count == 0:
            return '', path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[:i].lstrip(), path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    abase, afile = pathstrip(afile_orig, strip)
    gooda = not nulla and os.path.exists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    # when reversing, "creating" a file corresponds to the hunk's rmfile
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    missing = not goodb and not gooda and not createfunc()
    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        # neither side exists on disk: fall back to whichever side is
        # not /dev/null
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    return fname, missing
834 834
class linereader:
    """Wrap a file object so lines can be pushed back onto the stream.

    Pushed-back lines are returned in the order they were pushed,
    before any further lines are read from the underlying file.
    """
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        """Queue *line* to be returned by a later readline() call."""
        self.buf.append(line)

    def readline(self):
        """Return the next pushed-back line, or read one from the file."""
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()
850 850
def iterhunks(ui, fp, sourcefile=None):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """

    def scangitpatch(fp, firstline):
        '''git patches can modify a file, then copy that file to
        a new file, but expect the source to be the unmodified form.
        So we scan the patch looking for that case so we can do
        the copies ahead of time.'''

        pos = 0
        try:
            pos = fp.tell()
        except IOError:
            # unseekable input: buffer the whole patch in memory
            fp = cStringIO.StringIO(fp.read())

        (dopatch, gitpatches) = readgitpatch(fp, firstline)
        fp.seek(pos)

        return fp, dopatch, gitpatches

    changed = {}            # path -> (op, gitpatch) for git patches
    current_hunk = None
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = False        # a 'file' event is pending for the next hunk

    git = False
    gitre = re.compile('diff --git (a/.*) (b/.*)')

    # our states
    BFILE = 1
    context = None          # None: unknown, True: context diff, False: unified
    lr = linereader(fp)
    dopatch = True
    # gitworkdone is True if a git operation (copy, rename, ...) was
    # performed already for the current file. Useful when the file
    # section may have no hunk.
    gitworkdone = False

    while True:
        newfile = False
        x = lr.readline()
        if not x:
            break
        if current_hunk:
            # flush the previous hunk, handling a trailing
            # '\ No newline at end of file' marker
            if x.startswith('\ '):
                current_hunk.fix_newline()
            yield 'hunk', current_hunk
            current_hunk = None
            gitworkdone = False
        if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
            ((context or context == None) and x.startswith('***************')))):
            # start of a textual hunk (unified '@@' or context '***')
            try:
                if context == None and x.startswith('***************'):
                    context = True
                gpatch = changed.get(bfile[2:], (None, None))[1]
                create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
                remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
                current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
            except PatchError, err:
                ui.debug(err)
                current_hunk = None
                continue
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
        elif state == BFILE and x.startswith('GIT binary patch'):
            current_hunk = binhunk(changed[bfile[2:]][1])
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
            current_hunk.extract(fp)
        elif x.startswith('diff --git'):
            # check for git diff, scanning the whole patch file if needed
            m = gitre.match(x)
            if m:
                afile, bfile = m.group(1, 2)
                if not git:
                    # first git section: pre-scan the whole patch for
                    # copy/rename metadata and emit the single 'git' event
                    git = True
                    fp, dopatch, gitpatches = scangitpatch(fp, x)
                    yield 'git', gitpatches
                    for gp in gitpatches:
                        changed[gp.path] = (gp.op, gp)
                # else error?
                # copy/rename + modify should modify target, not source
                gitop = changed.get(bfile[2:], (None, None))[0]
                if gitop in ('COPY', 'DELETE', 'RENAME'):
                    afile = bfile
                    gitworkdone = True
                newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            emitfile = True
            state = BFILE
            hunknum = 0
    if current_hunk:
        # flush the final hunk
        if current_hunk.complete():
            yield 'hunk', current_hunk
        else:
            raise PatchError(_("malformed patch %s %s") % (afile,
                             current_hunk.desc))

    if hunknum == 0 and dopatch and not gitworkdone:
        raise NoHunks
990 990
991 991 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
992 992 rejmerge=None, updatedir=None):
993 993 """reads a patch from fp and tries to apply it. The dict 'changed' is
994 994 filled in with all of the filenames changed by the patch. Returns 0
995 995 for a clean patch, -1 if any rejects were found and 1 if there was
996 996 any fuzz."""
997 997
998 998 rejects = 0
999 999 err = 0
1000 1000 current_file = None
1001 1001 gitpatches = None
1002 1002
1003 1003 def closefile():
1004 1004 if not current_file:
1005 1005 return 0
1006 1006 current_file.close()
1007 1007 if rejmerge:
1008 1008 rejmerge(current_file)
1009 1009 return len(current_file.rej)
1010 1010
1011 1011 for state, values in iterhunks(ui, fp, sourcefile):
1012 1012 if state == 'hunk':
1013 1013 if not current_file:
1014 1014 continue
1015 1015 current_hunk = values
1016 1016 ret = current_file.apply(current_hunk, reverse)
1017 1017 if ret >= 0:
1018 1018 changed.setdefault(current_file.fname, (None, None))
1019 1019 if ret > 0:
1020 1020 err = 1
1021 1021 elif state == 'file':
1022 1022 rejects += closefile()
1023 1023 afile, bfile, first_hunk = values
1024 1024 try:
1025 1025 if sourcefile:
1026 1026 current_file = patchfile(ui, sourcefile)
1027 1027 else:
1028 1028 current_file, missing = selectfile(afile, bfile, first_hunk,
1029 1029 strip, reverse)
1030 1030 current_file = patchfile(ui, current_file, missing)
1031 1031 except PatchError, err:
1032 1032 ui.warn(str(err) + '\n')
1033 1033 current_file, current_hunk = None, None
1034 1034 rejects += 1
1035 1035 continue
1036 1036 elif state == 'git':
1037 1037 gitpatches = values
1038 1038 cwd = os.getcwd()
1039 1039 for gp in gitpatches:
1040 1040 if gp.op in ('COPY', 'RENAME'):
1041 1041 src, dst = [util.canonpath(cwd, cwd, x)
1042 1042 for x in [gp.oldpath, gp.path]]
1043 1043 copyfile(src, dst)
1044 1044 changed[gp.path] = (gp.op, gp)
1045 1045 else:
1046 1046 raise util.Abort(_('unsupported parser state: %s') % state)
1047 1047
1048 1048 rejects += closefile()
1049 1049
1050 1050 if updatedir and gitpatches:
1051 1051 updatedir(gitpatches)
1052 1052 if rejects:
1053 1053 return -1
1054 1054 return err
1055 1055
1056 1056 def diffopts(ui, opts={}, untrusted=False):
1057 1057 def get(key, name=None, getter=ui.configbool):
1058 1058 return (opts.get(key) or
1059 1059 getter('diff', name or key, None, untrusted=untrusted))
1060 1060 return mdiff.diffopts(
1061 1061 text=opts.get('text'),
1062 1062 git=get('git'),
1063 1063 nodates=get('nodates'),
1064 1064 showfunc=get('show_function', 'showfunc'),
1065 1065 ignorews=get('ignore_all_space', 'ignorews'),
1066 1066 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1067 1067 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1068 1068 context=get('unified', getter=ui.config))
1069 1069
1070 1070 def updatedir(ui, repo, patches):
1071 1071 '''Update dirstate after patch application according to metadata'''
1072 1072 if not patches:
1073 1073 return
1074 1074 copies = []
1075 1075 removes = {}
1076 1076 cfiles = patches.keys()
1077 1077 cwd = repo.getcwd()
1078 1078 if cwd:
1079 1079 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1080 1080 for f in patches:
1081 1081 ctype, gp = patches[f]
1082 1082 if ctype == 'RENAME':
1083 1083 copies.append((gp.oldpath, gp.path))
1084 1084 removes[gp.oldpath] = 1
1085 1085 elif ctype == 'COPY':
1086 1086 copies.append((gp.oldpath, gp.path))
1087 1087 elif ctype == 'DELETE':
1088 1088 removes[gp.path] = 1
1089 1089 for src, dst in copies:
1090 1090 repo.copy(src, dst)
1091 1091 removes = removes.keys()
1092 1092 if removes:
1093 1093 repo.remove(util.sort(removes), True)
1094 1094 for f in patches:
1095 1095 ctype, gp = patches[f]
1096 1096 if gp and gp.mode:
1097 1097 flags = ''
1098 1098 if gp.mode & 0100:
1099 1099 flags = 'x'
1100 1100 elif gp.mode & 020000:
1101 1101 flags = 'l'
1102 1102 dst = os.path.join(repo.root, gp.path)
1103 1103 # patch won't create empty files
1104 1104 if ctype == 'ADD' and not os.path.exists(dst):
1105 1105 repo.wwrite(gp.path, '', flags)
1106 1106 else:
1107 1107 util.set_flags(dst, 'l' in flags, 'x' in flags)
1108 1108 cmdutil.addremove(repo, cfiles)
1109 1109 files = patches.keys()
1110 1110 files.extend([r for r in removes if r not in files])
1111 1111 return util.sort(files)
1112 1112
1113 1113 def b85diff(to, tn):
1114 1114 '''print base85-encoded binary diff'''
1115 1115 def gitindex(text):
1116 1116 if not text:
1117 1117 return '0' * 40
1118 1118 l = len(text)
1119 1119 s = util.sha1('blob %d\0' % l)
1120 1120 s.update(text)
1121 1121 return s.hexdigest()
1122 1122
1123 1123 def fmtline(line):
1124 1124 l = len(line)
1125 1125 if l <= 26:
1126 1126 l = chr(ord('A') + l - 1)
1127 1127 else:
1128 1128 l = chr(l - 26 + ord('a') - 1)
1129 1129 return '%c%s\n' % (l, base85.b85encode(line, True))
1130 1130
1131 1131 def chunk(text, csize=52):
1132 1132 l = len(text)
1133 1133 i = 0
1134 1134 while i < l:
1135 1135 yield text[i:i+csize]
1136 1136 i += csize
1137 1137
1138 1138 tohash = gitindex(to)
1139 1139 tnhash = gitindex(tn)
1140 1140 if tohash == tnhash:
1141 1141 return ""
1142 1142
1143 1143 # TODO: deltas
1144 1144 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1145 1145 (tohash, tnhash, len(tn))]
1146 1146 for l in chunk(zlib.compress(tn)):
1147 1147 ret.append(fmtline(l))
1148 1148 ret.append('\n')
1149 1149 return ''.join(ret)
1150 1150
1151 1151 def diff(repo, node1=None, node2=None, match=None,
1152 1152 fp=None, changes=None, opts=None):
1153 1153 '''print diff of changes to files between two nodes, or node and
1154 1154 working directory.
1155 1155
1156 1156 if node1 is None, use first dirstate parent instead.
1157 1157 if node2 is None, compare node1 with working directory.'''
1158 1158
1159 1159 if not match:
1160 1160 match = cmdutil.matchall(repo)
1161 1161
1162 1162 if opts is None:
1163 1163 opts = mdiff.defaultopts
1164 1164 if fp is None:
1165 1165 fp = repo.ui
1166 1166
1167 1167 if not node1:
1168 1168 node1 = repo.dirstate.parents()[0]
1169 1169
1170 1170 flcache = {}
1171 1171 def getfilectx(f, ctx):
1172 1172 flctx = ctx.filectx(f, filelog=flcache.get(f))
1173 1173 if f not in flcache:
1174 1174 flcache[f] = flctx._filelog
1175 1175 return flctx
1176 1176
1177 1177 # reading the data for node1 early allows it to play nicely
1178 1178 # with repo.status and the revlog cache.
1179 1179 ctx1 = repo[node1]
1180 1180 # force manifest reading
1181 1181 man1 = ctx1.manifest()
1182 1182 date1 = util.datestr(ctx1.date())
1183 1183
1184 1184 if not changes:
1185 1185 changes = repo.status(node1, node2, match=match)
1186 1186 modified, added, removed = changes[:3]
1187 1187
1188 1188 if not modified and not added and not removed:
1189 1189 return
1190 1190
1191 1191 ctx2 = repo[node2]
1192 1192
1193 1193 if repo.ui.quiet:
1194 1194 r = None
1195 1195 else:
1196 1196 hexfunc = repo.ui.debugflag and hex or short
1197 1197 r = [hexfunc(node) for node in [node1, node2] if node]
1198 1198
1199 1199 if opts.git:
1200 1200 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1201 1201 for k, v in copy.items():
1202 1202 copy[v] = k
1203 1203
1204 1204 gone = {}
1205 1205 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1206 1206
1207 1207 for f in util.sort(modified + added + removed):
1208 1208 to = None
1209 1209 tn = None
1210 1210 dodiff = True
1211 1211 header = []
1212 1212 if f in man1:
1213 1213 to = getfilectx(f, ctx1).data()
1214 1214 if f not in removed:
1215 1215 tn = getfilectx(f, ctx2).data()
1216 1216 a, b = f, f
1217 1217 if opts.git:
1218 1218 def addmodehdr(header, omode, nmode):
1219 1219 if omode != nmode:
1220 1220 header.append('old mode %s\n' % omode)
1221 1221 header.append('new mode %s\n' % nmode)
1222 1222
1223 1223 if f in added:
1224 1224 mode = gitmode[ctx2.flags(f)]
1225 1225 if f in copy:
1226 1226 a = copy[f]
1227 1227 omode = gitmode[man1.flags(a)]
1228 1228 addmodehdr(header, omode, mode)
1229 1229 if a in removed and a not in gone:
1230 1230 op = 'rename'
1231 1231 gone[a] = 1
1232 1232 else:
1233 1233 op = 'copy'
1234 1234 header.append('%s from %s\n' % (op, a))
1235 1235 header.append('%s to %s\n' % (op, f))
1236 1236 to = getfilectx(a, ctx1).data()
1237 1237 else:
1238 1238 header.append('new file mode %s\n' % mode)
1239 1239 if util.binary(tn):
1240 1240 dodiff = 'binary'
1241 1241 elif f in removed:
1242 1242 # have we already reported a copy above?
1243 1243 if f in copy and copy[f] in added and copy[copy[f]] == f:
1244 1244 dodiff = False
1245 1245 else:
1246 1246 header.append('deleted file mode %s\n' %
1247 1247 gitmode[man1.flags(f)])
1248 1248 else:
1249 1249 omode = gitmode[man1.flags(f)]
1250 1250 nmode = gitmode[ctx2.flags(f)]
1251 1251 addmodehdr(header, omode, nmode)
1252 1252 if util.binary(to) or util.binary(tn):
1253 1253 dodiff = 'binary'
1254 1254 r = None
1255 1255 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1256 1256 if dodiff:
1257 1257 if dodiff == 'binary':
1258 1258 text = b85diff(to, tn)
1259 1259 else:
1260 1260 text = mdiff.unidiff(to, date1,
1261 1261 # ctx2 date may be dynamic
1262 1262 tn, util.datestr(ctx2.date()),
1263 1263 a, b, r, opts=opts)
1264 1264 if text or len(header) > 1:
1265 1265 fp.write(''.join(header))
1266 1266 fp.write(text)
1267 1267
1268 1268 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1269 1269 opts=None):
1270 1270 '''export changesets as hg patches.'''
1271 1271
1272 1272 total = len(revs)
1273 1273 revwidth = max([len(str(rev)) for rev in revs])
1274 1274
1275 1275 def single(rev, seqno, fp):
1276 1276 ctx = repo[rev]
1277 1277 node = ctx.node()
1278 1278 parents = [p.node() for p in ctx.parents() if p]
1279 1279 branch = ctx.branch()
1280 1280 if switch_parent:
1281 1281 parents.reverse()
1282 1282 prev = (parents and parents[0]) or nullid
1283 1283
1284 1284 if not fp:
1285 1285 fp = cmdutil.make_file(repo, template, node, total=total,
1286 1286 seqno=seqno, revwidth=revwidth)
1287 1287 if fp != sys.stdout and hasattr(fp, 'name'):
1288 1288 repo.ui.note("%s\n" % fp.name)
1289 1289
1290 1290 fp.write("# HG changeset patch\n")
1291 1291 fp.write("# User %s\n" % ctx.user())
1292 1292 fp.write("# Date %d %d\n" % ctx.date())
1293 1293 if branch and (branch != 'default'):
1294 1294 fp.write("# Branch %s\n" % branch)
1295 1295 fp.write("# Node ID %s\n" % hex(node))
1296 1296 fp.write("# Parent %s\n" % hex(prev))
1297 1297 if len(parents) > 1:
1298 1298 fp.write("# Parent %s\n" % hex(parents[1]))
1299 1299 fp.write(ctx.description().rstrip())
1300 1300 fp.write("\n\n")
1301 1301
1302 1302 diff(repo, prev, node, fp=fp, opts=opts)
1303 1303 if fp not in (sys.stdout, repo.ui):
1304 1304 fp.close()
1305 1305
1306 1306 for seqno, rev in enumerate(revs):
1307 1307 single(rev, seqno+1, fp)
1308 1308
1309 1309 def diffstat(patchlines):
1310 1310 if not util.find_exe('diffstat'):
1311 1311 return
1312 1312 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1313 1313 try:
1314 1314 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1315 1315 try:
1316 1316 for line in patchlines:
1317 1317 p.tochild.write(line + "\n")
1318 1318 p.tochild.close()
1319 1319 if p.wait(): return
1320 1320 fp = os.fdopen(fd, 'r')
1321 1321 stat = []
1322 1322 for line in fp: stat.append(line.lstrip())
1323 1323 last = stat.pop()
1324 1324 stat.insert(0, last)
1325 1325 stat = ''.join(stat)
1326 1326 return stat
1327 1327 except: raise
1328 1328 finally:
1329 1329 try: os.unlink(name)
1330 1330 except: pass
@@ -1,135 +1,136 b''
1 1 # repair.py - functions for repository repair for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 # Copyright 2007 Matt Mackall
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import changegroup, os
10 10 from node import nullrev, short
11 from i18n import _
11 12
12 13 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
13 14 """create a bundle with the specified revisions as a backup"""
14 15 cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
15 16 backupdir = repo.join("strip-backup")
16 17 if not os.path.isdir(backupdir):
17 18 os.mkdir(backupdir)
18 19 name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
19 repo.ui.warn("saving bundle to %s\n" % name)
20 repo.ui.warn(_("saving bundle to %s\n") % name)
20 21 return changegroup.writebundle(cg, name, "HG10BZ")
21 22
22 23 def _collectfiles(repo, striprev):
23 24 """find out the filelogs affected by the strip"""
24 25 files = {}
25 26
26 27 for x in xrange(striprev, len(repo)):
27 28 for name in repo[x].files():
28 29 if name in files:
29 30 continue
30 31 files[name] = 1
31 32
32 33 files = files.keys()
33 34 files.sort()
34 35 return files
35 36
36 37 def _collectextranodes(repo, files, link):
37 38 """return the nodes that have to be saved before the strip"""
38 39 def collectone(revlog):
39 40 extra = []
40 41 startrev = count = len(revlog)
41 42 # find the truncation point of the revlog
42 43 for i in xrange(0, count):
43 44 node = revlog.node(i)
44 45 lrev = revlog.linkrev(node)
45 46 if lrev >= link:
46 47 startrev = i + 1
47 48 break
48 49
49 50 # see if any revision after that point has a linkrev less than link
50 51 # (we have to manually save these guys)
51 52 for i in xrange(startrev, count):
52 53 node = revlog.node(i)
53 54 lrev = revlog.linkrev(node)
54 55 if lrev < link:
55 56 extra.append((node, cl.node(lrev)))
56 57
57 58 return extra
58 59
59 60 extranodes = {}
60 61 cl = repo.changelog
61 62 extra = collectone(repo.manifest)
62 63 if extra:
63 64 extranodes[1] = extra
64 65 for fname in files:
65 66 f = repo.file(fname)
66 67 extra = collectone(f)
67 68 if extra:
68 69 extranodes[fname] = extra
69 70
70 71 return extranodes
71 72
72 73 def strip(ui, repo, node, backup="all"):
73 74 cl = repo.changelog
74 75 # TODO delete the undo files, and handle undo of merge sets
75 76 striprev = cl.rev(node)
76 77
77 78 # Some revisions with rev > striprev may not be descendants of striprev.
78 79 # We have to find these revisions and put them in a bundle, so that
79 80 # we can restore them after the truncations.
80 81 # To create the bundle we use repo.changegroupsubset which requires
81 82 # the list of heads and bases of the set of interesting revisions.
82 83 # (head = revision in the set that has no descendant in the set;
83 84 # base = revision in the set that has no ancestor in the set)
84 85 tostrip = {striprev: 1}
85 86 saveheads = {}
86 87 savebases = []
87 88 for r in xrange(striprev + 1, len(cl)):
88 89 parents = cl.parentrevs(r)
89 90 if parents[0] in tostrip or parents[1] in tostrip:
90 91 # r is a descendant of striprev
91 92 tostrip[r] = 1
92 93 # if this is a merge and one of the parents does not descend
93 94 # from striprev, mark that parent as a savehead.
94 95 if parents[1] != nullrev:
95 96 for p in parents:
96 97 if p not in tostrip and p > striprev:
97 98 saveheads[p] = 1
98 99 else:
99 100 # if no parents of this revision will be stripped, mark it as
100 101 # a savebase
101 102 if parents[0] < striprev and parents[1] < striprev:
102 103 savebases.append(cl.node(r))
103 104
104 105 for p in parents:
105 106 if p in saveheads:
106 107 del saveheads[p]
107 108 saveheads[r] = 1
108 109
109 110 saveheads = [cl.node(r) for r in saveheads]
110 111 files = _collectfiles(repo, striprev)
111 112
112 113 extranodes = _collectextranodes(repo, files, striprev)
113 114
114 115 # create a changegroup for all the branches we need to keep
115 116 if backup == "all":
116 117 _bundle(repo, [node], cl.heads(), node, 'backup')
117 118 if saveheads or extranodes:
118 119 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
119 120 extranodes)
120 121
121 122 cl.strip(striprev)
122 123 repo.manifest.strip(striprev)
123 124 for name in files:
124 125 f = repo.file(name)
125 126 f.strip(striprev)
126 127
127 128 if saveheads or extranodes:
128 ui.status("adding branch\n")
129 ui.status(_("adding branch\n"))
129 130 f = open(chgrpfile, "rb")
130 131 gen = changegroup.readbundle(f, chgrpfile)
131 132 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
132 133 f.close()
133 134 if backup != "strip":
134 135 os.unlink(chgrpfile)
135 136
@@ -1,247 +1,247 b''
1 1 # sshrepo.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex
9 9 from i18n import _
10 10 import repo, os, re, util
11 11
12 12 class remotelock(object):
13 13 def __init__(self, repo):
14 14 self.repo = repo
15 15 def release(self):
16 16 self.repo.unlock()
17 17 self.repo = None
18 18 def __del__(self):
19 19 if self.repo:
20 20 self.release()
21 21
22 22 class sshrepository(repo.repository):
23 23 def __init__(self, ui, path, create=0):
24 24 self._url = path
25 25 self.ui = ui
26 26
27 27 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
28 28 if not m:
29 29 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
30 30
31 31 self.user = m.group(2)
32 32 self.host = m.group(3)
33 33 self.port = m.group(5)
34 34 self.path = m.group(7) or "."
35 35
36 36 sshcmd = self.ui.config("ui", "ssh", "ssh")
37 37 remotecmd = self.ui.config("ui", "remotecmd", "hg")
38 38
39 39 args = util.sshargs(sshcmd, self.host, self.user, self.port)
40 40
41 41 if create:
42 42 cmd = '%s %s "%s init %s"'
43 43 cmd = cmd % (sshcmd, args, remotecmd, self.path)
44 44
45 ui.note('running %s\n' % cmd)
45 ui.note(_('running %s\n') % cmd)
46 46 res = util.system(cmd)
47 47 if res != 0:
48 48 self.raise_(repo.RepoError(_("could not create remote repo")))
49 49
50 50 self.validate_repo(ui, sshcmd, args, remotecmd)
51 51
52 52 def url(self):
53 53 return self._url
54 54
55 55 def validate_repo(self, ui, sshcmd, args, remotecmd):
56 56 # cleanup up previous run
57 57 self.cleanup()
58 58
59 59 cmd = '%s %s "%s -R %s serve --stdio"'
60 60 cmd = cmd % (sshcmd, args, remotecmd, self.path)
61 61
62 62 cmd = util.quotecommand(cmd)
63 ui.note('running %s\n' % cmd)
63 ui.note(_('running %s\n') % cmd)
64 64 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
65 65
66 66 # skip any noise generated by remote shell
67 67 self.do_cmd("hello")
68 68 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
69 69 lines = ["", "dummy"]
70 70 max_noise = 500
71 71 while lines[-1] and max_noise:
72 72 l = r.readline()
73 73 self.readerr()
74 74 if lines[-1] == "1\n" and l == "\n":
75 75 break
76 76 if l:
77 77 ui.debug(_("remote: "), l)
78 78 lines.append(l)
79 79 max_noise -= 1
80 80 else:
81 81 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
82 82
83 83 self.capabilities = util.set()
84 84 lines.reverse()
85 85 for l in lines:
86 86 if l.startswith("capabilities:"):
87 87 self.capabilities.update(l[:-1].split(":")[1].split())
88 88 break
89 89
90 90 def readerr(self):
91 91 while 1:
92 92 size = util.fstat(self.pipee).st_size
93 93 if size == 0: break
94 94 l = self.pipee.readline()
95 95 if not l: break
96 96 self.ui.status(_("remote: "), l)
97 97
98 98 def raise_(self, exception):
99 99 self.cleanup()
100 100 raise exception
101 101
102 102 def cleanup(self):
103 103 try:
104 104 self.pipeo.close()
105 105 self.pipei.close()
106 106 # read the error descriptor until EOF
107 107 for l in self.pipee:
108 108 self.ui.status(_("remote: "), l)
109 109 self.pipee.close()
110 110 except:
111 111 pass
112 112
113 113 __del__ = cleanup
114 114
115 115 def do_cmd(self, cmd, **args):
116 116 self.ui.debug(_("sending %s command\n") % cmd)
117 117 self.pipeo.write("%s\n" % cmd)
118 118 for k, v in args.items():
119 119 self.pipeo.write("%s %d\n" % (k, len(v)))
120 120 self.pipeo.write(v)
121 121 self.pipeo.flush()
122 122
123 123 return self.pipei
124 124
125 125 def call(self, cmd, **args):
126 126 self.do_cmd(cmd, **args)
127 127 return self._recv()
128 128
129 129 def _recv(self):
130 130 l = self.pipei.readline()
131 131 self.readerr()
132 132 try:
133 133 l = int(l)
134 134 except:
135 135 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
136 136 return self.pipei.read(l)
137 137
138 138 def _send(self, data, flush=False):
139 139 self.pipeo.write("%d\n" % len(data))
140 140 if data:
141 141 self.pipeo.write(data)
142 142 if flush:
143 143 self.pipeo.flush()
144 144 self.readerr()
145 145
146 146 def lock(self):
147 147 self.call("lock")
148 148 return remotelock(self)
149 149
150 150 def unlock(self):
151 151 self.call("unlock")
152 152
153 153 def lookup(self, key):
154 154 self.requirecap('lookup', _('look up remote revision'))
155 155 d = self.call("lookup", key=key)
156 156 success, data = d[:-1].split(" ", 1)
157 157 if int(success):
158 158 return bin(data)
159 159 else:
160 160 self.raise_(repo.RepoError(data))
161 161
162 162 def heads(self):
163 163 d = self.call("heads")
164 164 try:
165 165 return map(bin, d[:-1].split(" "))
166 166 except:
167 167 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
168 168
169 169 def branches(self, nodes):
170 170 n = " ".join(map(hex, nodes))
171 171 d = self.call("branches", nodes=n)
172 172 try:
173 173 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
174 174 return br
175 175 except:
176 176 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
177 177
178 178 def between(self, pairs):
179 179 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
180 180 d = self.call("between", pairs=n)
181 181 try:
182 182 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
183 183 return p
184 184 except:
185 185 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
186 186
187 187 def changegroup(self, nodes, kind):
188 188 n = " ".join(map(hex, nodes))
189 189 return self.do_cmd("changegroup", roots=n)
190 190
191 191 def changegroupsubset(self, bases, heads, kind):
192 192 self.requirecap('changegroupsubset', _('look up remote changes'))
193 193 bases = " ".join(map(hex, bases))
194 194 heads = " ".join(map(hex, heads))
195 195 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
196 196
197 197 def unbundle(self, cg, heads, source):
198 198 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
199 199 if d:
200 200 # remote may send "unsynced changes"
201 201 self.raise_(repo.RepoError(_("push refused: %s") % d))
202 202
203 203 while 1:
204 204 d = cg.read(4096)
205 205 if not d:
206 206 break
207 207 self._send(d)
208 208
209 209 self._send("", flush=True)
210 210
211 211 r = self._recv()
212 212 if r:
213 213 # remote may send "unsynced changes"
214 214 self.raise_(repo.RepoError(_("push failed: %s") % r))
215 215
216 216 r = self._recv()
217 217 try:
218 218 return int(r)
219 219 except:
220 220 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
221 221
222 222 def addchangegroup(self, cg, source, url):
223 223 d = self.call("addchangegroup")
224 224 if d:
225 225 self.raise_(repo.RepoError(_("push refused: %s") % d))
226 226 while 1:
227 227 d = cg.read(4096)
228 228 if not d:
229 229 break
230 230 self.pipeo.write(d)
231 231 self.readerr()
232 232
233 233 self.pipeo.flush()
234 234
235 235 self.readerr()
236 236 r = self._recv()
237 237 if not r:
238 238 return 1
239 239 try:
240 240 return int(r)
241 241 except:
242 242 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
243 243
244 244 def stream_out(self):
245 245 return self.do_cmd('stream_out')
246 246
247 247 instance = sshrepository
@@ -1,64 +1,65 b''
1 1 # streamclone.py - streaming clone server support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import util, lock
9 from i18n import _
9 10
10 11 class StreamException(Exception):
11 12 def __init__(self, code):
12 13 Exception.__init__(self)
13 14 self.code = code
14 15 def __str__(self):
15 16 return '%i\n' % self.code
16 17
17 18 # if server supports streaming clone, it advertises "stream"
18 19 # capability with value that is version+flags of repo it is serving.
19 20 # client only streams if it can read that repo format.
20 21
21 22 # stream file format is simple.
22 23 #
23 24 # server writes out line that says how many files, how many total
24 25 # bytes. separator is ascii space, byte counts are strings.
25 26 #
26 27 # then for each file:
27 28 #
28 29 # server writes out line that says file name, how many bytes in
29 30 # file. separator is ascii nul, byte count is string.
30 31 #
31 32 # server writes out raw file data.
32 33
33 34 def stream_out(repo, untrusted=False):
34 35 '''stream out all metadata files in repository.
35 36 writes to file-like object, must support write() and optional flush().'''
36 37
37 38 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
38 39 raise StreamException(1)
39 40
40 41 entries = []
41 42 total_bytes = 0
42 43 try:
43 44 l = None
44 45 try:
45 repo.ui.debug('scanning\n')
46 repo.ui.debug(_('scanning\n'))
46 47 # get consistent snapshot of repo, lock during scan
47 48 l = repo.lock()
48 49 for name, ename, size in repo.store.walk():
49 50 entries.append((name, size))
50 51 total_bytes += size
51 52 finally:
52 53 del l
53 54 except (lock.LockHeld, lock.LockUnavailable), inst:
54 55 raise StreamException(2)
55 56
56 57 yield '0\n'
57 repo.ui.debug('%d files, %d bytes to transfer\n' %
58 repo.ui.debug(_('%d files, %d bytes to transfer\n') %
58 59 (len(entries), total_bytes))
59 60 yield '%d %d\n' % (len(entries), total_bytes)
60 61 for name, size in entries:
61 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
62 repo.ui.debug(_('sending %s (%d bytes)\n') % (name, size))
62 63 yield '%s\0%d\n' % (name, size)
63 64 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
64 65 yield chunk
General Comments 0
You need to be logged in to leave comments. Login now