##// END OF EJS Templates
Start using manifestflags methods
Matt Mackall
r2832:e196aa1d default
parent child Browse files
Show More
@@ -1,174 +1,174 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import *
9 9 from i18n import gettext as _
10 10 from node import *
11 11 demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
12 12
def tidyprefix(dest, prefix, suffixes):
    '''pick a safe prefix for archive member names.

    if no explicit prefix is given, derive one from the basename of
    dest, stripping any recognized archive suffix.  abort if the
    resulting prefix could escape the archive root.'''
    if not prefix:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in suffixes:
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    else:
        # normalize backslashes so windows paths behave
        prefix = prefix.replace('\\', '/')
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix = prefix + '/'
    # refuse prefixes that would write outside the archive root
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix
35 35
class tarit:
    '''write archive to tar file or stream.  can write uncompressed,
    or compress with gzip or bzip2.'''

    def __init__(self, dest, prefix, mtime, kind=''):
        # bug fix: the bare 'tbz2' suffix (no leading dot) stripped
        # 'foo.tbz2' down to 'foo.' instead of 'foo'; every suffix
        # needs its dot, so use '.tbz2'
        self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                '.tgz', '.tbz2'])
        self.mtime = mtime
        if isinstance(dest, str):
            # named file: seekable, so tarfile may use 'w:' mode
            self.z = tarfile.open(dest, mode='w:'+kind)
        else:
            # stream (e.g. http response): non-seekable 'w|' mode
            self.z = tarfile.open(mode='w|'+kind, fileobj=dest)

    def addfile(self, name, mode, data):
        '''add one member with the given path, permissions and contents.'''
        i = tarfile.TarInfo(self.prefix + name)
        i.mtime = self.mtime
        i.size = len(data)
        i.mode = mode
        self.z.addfile(i, cStringIO.StringIO(data))

    def done(self):
        '''finish the archive and flush the underlying file.'''
        self.z.close()
58 58
class tellable:
    '''add a tell() method to a write-only file object, as needed by
    zipfile.ZipFile when writing to an http response.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # anything we don't implement falls through to the wrapped file
        return getattr(self.fp, key)

    def write(self, s):
        # write first so a failed write leaves the offset untouched
        self.fp.write(s)
        self.offset = self.offset + len(s)

    def tell(self):
        return self.offset
76 76
class zipit:
    '''write archive to zip file or stream.  can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, prefix, mtime, compress=True):
        self.prefix = tidyprefix(dest, prefix, ('.zip',))
        if not isinstance(dest, str):
            # zipfile requires a working tell(); wrap stream objects
            # (e.g. http responses) that cannot seek
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)
        # ZipInfo wants a (year, month, day, hour, minute, second) tuple
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, data):
        i = zipfile.ZipInfo(self.prefix + name, self.date_time)
        i.compress_type = self.z.compression
        # general-purpose bit 3: sizes/crc follow the data (streaming)
        i.flag_bits = 0x08
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        # unix permission bits live in the high 16 bits of external_attr
        i.external_attr = (mode | stat.S_IFREG) << 16L
        self.z.writestr(i, data)

    def done(self):
        self.z.close()
105 105
class fileit:
    '''write archive as files in directory.'''

    def __init__(self, name, prefix, mtime):
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
        self.basedir = name
        self.dirs = {}  # directories already created (used as a set)
        # O_EXCL: never clobber an existing file; O_NOFOLLOW: never
        # write through a symlink (where the platform supports it)
        self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
                       getattr(os, 'O_BINARY', 0) |
                       getattr(os, 'O_NOFOLLOW', 0))

    def addfile(self, name, mode, data):
        '''create the file under basedir, making directories as needed.'''
        destfile = os.path.join(self.basedir, name)
        destdir = os.path.dirname(destfile)
        if destdir not in self.dirs:
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            self.dirs[destdir] = 1
        # close explicitly instead of relying on refcount GC; an
        # unflushed buffer could otherwise lose data on error paths
        f = os.fdopen(os.open(destfile, self.oflags, mode), 'wb')
        try:
            f.write(data)
        finally:
            f.close()

    def done(self):
        pass
129 129
# map archive type name -> factory callable taking (dest, prefix, mtime);
# the lambdas curry the compression kind into tarit/zipit
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
    'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
    'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
    'zip': zipit,
    }
138 138
139 139 def archive(repo, dest, node, kind, decode=True, matchfn=None,
140 140 prefix=None, mtime=None):
141 141 '''create archive of repo as it was at node.
142 142
143 143 dest can be name of directory, name of archive file, or file
144 144 object to write archive to.
145 145
146 146 kind is type of archive to create.
147 147
148 148 decode tells whether to put files through decode filters from
149 149 hgrc.
150 150
151 151 matchfn is function to filter names of files to write to archive.
152 152
153 153 prefix is name of path to put before every archive member.'''
154 154
155 155 def write(name, mode, data):
156 156 if matchfn and not matchfn(name): return
157 157 if decode:
158 158 fp = cStringIO.StringIO()
159 159 repo.wwrite(name, data, fp)
160 160 data = fp.getvalue()
161 161 archiver.addfile(name, mode, data)
162 162
163 163 change = repo.changelog.read(node)
164 164 mn = change[0]
165 165 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
166 166 mf = repo.manifest.read(mn).items()
167 167 mff = repo.manifest.readflags(mn)
168 168 mf.sort()
169 169 write('.hg_archival.txt', 0644,
170 170 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
171 171 for filename, filenode in mf:
172 write(filename, mff[filename] and 0755 or 0644,
172 write(filename, mff.execf(filename) and 0755 or 0644,
173 173 repo.file(filename).read(filenode))
174 174 archiver.done()
@@ -1,3507 +1,3508 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 16 demandload(globals(), "hgweb.server sshserver")
17 17
class UnknownCommand(Exception):
    """Raised when a command name is not found in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command shortcut matches more than one command."""
22 22
def bail_if_changed(repo):
    """abort if the working directory has uncommitted changes
    (unknown files do not count)."""
    status = repo.changes()
    modified, added, removed, deleted = status[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))
27 27
def filterfiles(filters, files):
    """return the files that match a filter exactly or live under a
    filter treated as a directory prefix."""
    matched = [f for f in files if f in filters]
    for filt in filters:
        prefix = filt
        # treat each non-empty filter as a directory name
        if prefix and not prefix.endswith("/"):
            prefix = prefix + "/"
        matched.extend(f for f in files if f.startswith(prefix))
    return matched
36 36
def relpath(repo, args):
    """rebase the given paths onto the repo's working-directory cwd;
    paths are returned unchanged when cwd is the repo root."""
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, a)) for a in args]
42 42
def matchpats(repo, pats=[], opts={}, head=''):
    """build a cmdmatcher for pats relative to the repo cwd.

    NOTE: when pats is empty, opts['include']/opts['exclude'] are
    rewritten in place to be cwd-relative."""
    cwd = repo.getcwd()
    if cwd and not pats:
        opts['include'] = [os.path.join(cwd, p) for p in opts['include']]
        opts['exclude'] = [os.path.join(cwd, p) for p in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'],
                           opts.get('include'), opts.get('exclude'), head)
51 51
def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    """return (files, matchfn, iterator) where the iterator yields
    (src, abspath, relpath, exact) tuples for the matched files."""
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    # fast membership test: a file is 'exact' if it was named literally
    exact = dict.fromkeys(files)
    def iterate():
        for src, fn in repo.walk(node=node, files=files, match=matchfn,
                                 badmatch=badmatch):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, iterate()
60 60
def walk(repo, pats, opts, node=None, head='', badmatch=None):
    """yield the walk results produced by makewalk."""
    results = makewalk(repo, pats, opts, node, head, badmatch)[2]
    for result in results:
        yield result
65 65
def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision.  The iterator yields 3-tuples.  They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # yield (start, size) pairs covering the half-open span from
        # start towards end, doubling the window up to sizelimit;
        # handles both forward (start < end) and backward walks
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)

    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    slowpath = anypats
    fncache = {}

    # memoize changelog entries; revs are read repeatedly below
    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # yield the linkrevs of the filelog newest-first, windowed
            for i, window in increasing_windows(filelog.count()-1, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            # yield (rev, files-touched) for every changeset, windowed
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        # announce each window, gather data in ascending rev order,
        # then replay the revs in the caller's requested order
        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
180 180
# separator between start and end in a revision range spec, e.g. "2:5"
revrangesep = ':'
182 182
def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        # reject things like '01' or '1 ' that int() would accept
        if str(num) != val:
            raise ValueError
        total = repo.changelog.count()
        if num < 0:
            num += total
        if num < 0:
            num = 0
        elif num >= total:
            raise ValueError
    except ValueError:
        # not a plain rev number: resolve as tag/changeset id
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s'), val)
    return num
205 205
def revpair(ui, repo, revs):
    '''return pair of nodes for a list of revision specs.  the second
    node may be None, meaning use the working directory.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    if len(revs) > 2:
        raise util.Abort(_('too many revisions specified'))
    end = None
    if len(revs) == 2:
        # two explicit revs: neither may itself be a range
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        spec = revs[0]
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, spec, None)
    if end is not None:
        end = repo.lookup(str(end))
    return repo.lookup(str(start)), end
229 229
def revrange(ui, repo, revs):
    """Yield revisions as strings from a list of revision
    specifications, expanding ranges and skipping duplicates."""
    seen = {}
    for spec in revs:
        if revrangesep not in spec:
            rev = revfix(repo, spec, None)
            if rev not in seen:
                seen[rev] = 1
                yield str(rev)
            continue
        start, end = spec.split(revrangesep, 1)
        start = revfix(repo, start, 0)
        end = revfix(repo, end, repo.changelog.count() - 1)
        # walk the range in whichever direction it was written
        step = start > end and -1 or 1
        for rev in xrange(start, end + step, step):
            if rev not in seen:
                seen[rev] = 1
                yield str(rev)
250 250
251 251 def make_filename(repo, pat, node,
252 252 total=None, seqno=None, revwidth=None, pathname=None):
253 253 node_expander = {
254 254 'H': lambda: hex(node),
255 255 'R': lambda: str(repo.changelog.rev(node)),
256 256 'h': lambda: short(node),
257 257 }
258 258 expander = {
259 259 '%': lambda: '%',
260 260 'b': lambda: os.path.basename(repo.root),
261 261 }
262 262
263 263 try:
264 264 if node:
265 265 expander.update(node_expander)
266 266 if node and revwidth is not None:
267 267 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
268 268 if total is not None:
269 269 expander['N'] = lambda: str(total)
270 270 if seqno is not None:
271 271 expander['n'] = lambda: str(seqno)
272 272 if total is not None and seqno is not None:
273 273 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
274 274 if pathname is not None:
275 275 expander['s'] = lambda: os.path.basename(pathname)
276 276 expander['d'] = lambda: os.path.dirname(pathname) or '.'
277 277 expander['p'] = lambda: pathname
278 278
279 279 newname = []
280 280 patlen = len(pat)
281 281 i = 0
282 282 while i < patlen:
283 283 c = pat[i]
284 284 if c == '%':
285 285 i += 1
286 286 c = pat[i]
287 287 c = expander[c]()
288 288 newname.append(c)
289 289 i += 1
290 290 return ''.join(newname)
291 291 except KeyError, inst:
292 292 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
293 293 inst.args[0])
294 294
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """open a file for the given pattern; '-' (or empty) means
    stdout/stdin, and file-like objects pass straight through."""
    writing = 'w' in mode
    if not pat or pat == '-':
        if writing:
            return sys.stdout
        return sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
306 306
def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        # stand-in "compressor" used when compression is disabled
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        # while cleanup is set, the finally clause removes the file
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
        fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        # success: disarm the cleanup so the bundle survives
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)
358 358
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    '''write a unified diff between node1 and node2 (or the working
    directory when node2 is None) to the file object fp.'''
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        # comparing two committed revisions
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        _date2 = util.datestr(change[2])
        def date2(f):
            return _date2
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # comparing against the working directory
        tz = util.makedate()[1]
        _date2 = util.datestr()
        def date2(f):
            try:
                # prefer the working file's mtime when it exists
                return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
            except OSError, err:
                if err.errno != errno.ENOENT: raise
                return _date2
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    # command-line options override the [diff] hgrc settings
    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    ignorewsamount = opts.get('ignore_space_change') or \
                     diffopts['ignorewsamount']
    ignoreblanklines = opts.get('ignore_blank_lines') or \
                       diffopts['ignoreblanklines']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
435 435
def trimuser(ui, name, rev, revcache):
    """return (and cache, keyed by rev) the shortened user name."""
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
442 442
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # caller may identify the changeset by rev or by node
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        # suppress the trivial parent (rev-1) unless debugging
        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        # brinfo maps nodes to branch name lists when branch display
        # was requested
        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            # in debug mode, show added/removed files separately
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")
510 510
def show_changeset(ui, repo, opts):
    '''show one changeset.  uses template or regular display.  caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                # try the bundled map-cmdline.<style> file before
                # treating mapfile as a plain template path
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    # no template configured: fall back to the plain printer
    return changeset_printer(ui, repo)
535 535
def show_version(ui):
    """output version and copyright information"""
    # the version line uses write() so it always appears; the copyright
    # blurb uses status() so --quiet suppresses it
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
546 546
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        # detailed help for a single command; raises UnknownCommand
        # (via findcmd) when the name does not resolve
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        # one-line help for each (optionally filtered) command;
        # "shortlist" only shows commands flagged with a leading '^'
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        # help text for an extension plus the commands it provides
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # try command help first, then fall back to extension help
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        # align descriptions on the widest option spelling
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
681 681
682 682 # Commands start here, listed alphabetically
683 683
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # skip files that were not named explicitly and are already tracked
        if not exact and repo.dirstate.state(abs) != '?':
            continue
        # exact matches are only announced in verbose mode
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
705 705
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    (DEPRECATED)
    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use add and remove --after instead.
    """
    # warn on every use, then delegate to the lock-aware implementation
    msg = _('(the addremove command is deprecated; use add and remove '
            '--after instead)\n')
    ui.warn(msg)
    return addremove_lock(ui, repo, pats, opts)
721 721
def addremove_lock(ui, repo, pats, opts, wlock=None):
    """add untracked files and remove vanished ones, reusing an
    existing working-dir lock when one is passed in."""
    to_add, to_remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        name = (pats and rel) or abs
        if src == 'f' and repo.dirstate.state(abs) == '?':
            to_add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % name)
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            to_remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % name)
    if not opts.get('dry_run'):
        repo.add(to_add, wlock=wlock)
        repo.remove(to_remove, wlock=wlock)
736 736
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # Column formatter for -c/--changeset: short hash of the revision.
    def getnode(rev):
        return short(repo.changelog.node(rev))

    # Column formatter for -u/--user, memoized per revision in ucache.
    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    # Column formatter for -d/--date, memoized per revision in dcache.
    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # Maps each column option name to the formatter that renders it.
    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    # With no explicit column requested, default to revision numbers.
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
        fctx = ctx.filectx(abs)
        # Skip binary files unless -a/--text forces annotation.
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        # Build one right-aligned column per enabled option; m is the
        # widest cell, used to pad the whole column.
        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        # Emit each annotated line prefixed by its joined column cells.
        # NOTE(review): no explicit newline is written, so l[1] presumably
        # still carries its trailing newline — confirm against annotate().
        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
797 797
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    # Resolve the revision to archive; default to the working directory's
    # first parent, refusing an uncommitted merge as ambiguous.
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))

    # Expand format-string escapes (%h etc.) in the destination name.
    dest = make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # Stream to stdout; a plain-files archive needs a real directory.
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
844 844
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    # Refuse to run with local modifications or an uncommitted merge.
    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    parent, p2 = repo.changelog.parents(node)
    if parent == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        raise util.Abort(_('cannot back out a merge'))
    # Check out the target revision, then revert it to its parent and
    # commit the result — this is what reverses the change.
    repo.update(node, force=True, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        # Open the editor so the user can adjust the generated message.
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    # If the backout created a new head, either auto-merge with the
    # previous working-directory parent (--merge) or tell the user to.
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            doupdate(ui, repo, hex(op1), **opts)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
895 895
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    # Resolve the comparison repository, compute what it is missing,
    # and serialize those changesets into fname.
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    outgoing = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(outgoing, 'bundle')
    write_bundle(cg, fname)
916 916
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    # Pin the revision once, then dump each matched file's contents to
    # the (possibly per-file) output stream.
    ctx = repo.changectx(opts['rev'] or -1)
    allpats = (file1,) + pats
    for src, abs, rel, exact in walk(repo, allpats, opts, ctx.node()):
        out = make_file(repo, opts['output'], ctx.node(), pathname=abs)
        out.write(ctx.filectx(abs).data())
935 935
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem. Some filesystems,
    such as AFS, implement hardlinking incorrectly, but do not report
    errors. In these cases, use the --pull option to avoid
    hardlinking.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc will be created on the remote side. Look at the help text
    for the pull command for important details about ssh:// URLs.
    """
    # Propagate --ssh/--remotecmd style options, then hand off to hg.clone.
    ui.setconfig_remoteopts(**opts)
    src = ui.expandpath(source)
    hg.clone(ui, src, dest, pull=opts['pull'], rev=opts['rev'],
             update=not opts['noupdate'])
964 964
965 965 def commit(ui, repo, *pats, **opts):
966 966 """commit the specified files or all outstanding changes
967 967
968 968 Commit changes to the given files into the repository.
969 969
970 970 If a list of files is omitted, all changes reported by "hg status"
971 971 will be committed.
972 972
973 973 If no commit message is specified, the editor configured in your hgrc
974 974 or in the EDITOR environment variable is started to enter a message.
975 975 """
976 976 message = opts['message']
977 977 logfile = opts['logfile']
978 978
979 979 if message and logfile:
980 980 raise util.Abort(_('options --message and --logfile are mutually '
981 981 'exclusive'))
982 982 if not message and logfile:
983 983 try:
984 984 if logfile == '-':
985 985 message = sys.stdin.read()
986 986 else:
987 987 message = open(logfile).read()
988 988 except IOError, inst:
989 989 raise util.Abort(_("can't read commit message '%s': %s") %
990 990 (logfile, inst.strerror))
991 991
992 992 if opts['addremove']:
993 993 addremove_lock(ui, repo, pats, opts)
994 994 fns, match, anypats = matchpats(repo, pats, opts)
995 995 if pats:
996 996 modified, added, removed, deleted, unknown = (
997 997 repo.changes(files=fns, match=match))
998 998 files = modified + added + removed
999 999 else:
1000 1000 files = []
1001 1001 try:
1002 1002 repo.commit(files, message, opts['user'], opts['date'], match,
1003 1003 force_editor=opts.get('force_editor'))
1004 1004 except ValueError, inst:
1005 1005 raise util.Abort(str(inst))
1006 1006
1007 1007 def docopy(ui, repo, pats, opts, wlock):
1008 1008 # called with the repo lock held
1009 1009 cwd = repo.getcwd()
1010 1010 errors = 0
1011 1011 copied = []
1012 1012 targets = {}
1013 1013
1014 1014 def okaytocopy(abs, rel, exact):
1015 1015 reasons = {'?': _('is not managed'),
1016 1016 'a': _('has been marked for add'),
1017 1017 'r': _('has been marked for remove')}
1018 1018 state = repo.dirstate.state(abs)
1019 1019 reason = reasons.get(state)
1020 1020 if reason:
1021 1021 if state == 'a':
1022 1022 origsrc = repo.dirstate.copied(abs)
1023 1023 if origsrc is not None:
1024 1024 return origsrc
1025 1025 if exact:
1026 1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1027 1027 else:
1028 1028 return abs
1029 1029
1030 1030 def copy(origsrc, abssrc, relsrc, target, exact):
1031 1031 abstarget = util.canonpath(repo.root, cwd, target)
1032 1032 reltarget = util.pathto(cwd, abstarget)
1033 1033 prevsrc = targets.get(abstarget)
1034 1034 if prevsrc is not None:
1035 1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1036 1036 (reltarget, abssrc, prevsrc))
1037 1037 return
1038 1038 if (not opts['after'] and os.path.exists(reltarget) or
1039 1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1040 1040 if not opts['force']:
1041 1041 ui.warn(_('%s: not overwriting - file exists\n') %
1042 1042 reltarget)
1043 1043 return
1044 1044 if not opts['after'] and not opts.get('dry_run'):
1045 1045 os.unlink(reltarget)
1046 1046 if opts['after']:
1047 1047 if not os.path.exists(reltarget):
1048 1048 return
1049 1049 else:
1050 1050 targetdir = os.path.dirname(reltarget) or '.'
1051 1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1052 1052 os.makedirs(targetdir)
1053 1053 try:
1054 1054 restore = repo.dirstate.state(abstarget) == 'r'
1055 1055 if restore and not opts.get('dry_run'):
1056 1056 repo.undelete([abstarget], wlock)
1057 1057 try:
1058 1058 if not opts.get('dry_run'):
1059 1059 shutil.copyfile(relsrc, reltarget)
1060 1060 shutil.copymode(relsrc, reltarget)
1061 1061 restore = False
1062 1062 finally:
1063 1063 if restore:
1064 1064 repo.remove([abstarget], wlock)
1065 1065 except shutil.Error, inst:
1066 1066 raise util.Abort(str(inst))
1067 1067 except IOError, inst:
1068 1068 if inst.errno == errno.ENOENT:
1069 1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1070 1070 else:
1071 1071 ui.warn(_('%s: cannot copy - %s\n') %
1072 1072 (relsrc, inst.strerror))
1073 1073 errors += 1
1074 1074 return
1075 1075 if ui.verbose or not exact:
1076 1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1077 1077 targets[abstarget] = abssrc
1078 1078 if abstarget != origsrc and not opts.get('dry_run'):
1079 1079 repo.copy(origsrc, abstarget, wlock)
1080 1080 copied.append((abssrc, relsrc, exact))
1081 1081
1082 1082 def targetpathfn(pat, dest, srcs):
1083 1083 if os.path.isdir(pat):
1084 1084 abspfx = util.canonpath(repo.root, cwd, pat)
1085 1085 if destdirexists:
1086 1086 striplen = len(os.path.split(abspfx)[0])
1087 1087 else:
1088 1088 striplen = len(abspfx)
1089 1089 if striplen:
1090 1090 striplen += len(os.sep)
1091 1091 res = lambda p: os.path.join(dest, p[striplen:])
1092 1092 elif destdirexists:
1093 1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1094 1094 else:
1095 1095 res = lambda p: dest
1096 1096 return res
1097 1097
1098 1098 def targetpathafterfn(pat, dest, srcs):
1099 1099 if util.patkind(pat, None)[0]:
1100 1100 # a mercurial pattern
1101 1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1102 1102 else:
1103 1103 abspfx = util.canonpath(repo.root, cwd, pat)
1104 1104 if len(abspfx) < len(srcs[0][0]):
1105 1105 # A directory. Either the target path contains the last
1106 1106 # component of the source path or it does not.
1107 1107 def evalpath(striplen):
1108 1108 score = 0
1109 1109 for s in srcs:
1110 1110 t = os.path.join(dest, s[0][striplen:])
1111 1111 if os.path.exists(t):
1112 1112 score += 1
1113 1113 return score
1114 1114
1115 1115 striplen = len(abspfx)
1116 1116 if striplen:
1117 1117 striplen += len(os.sep)
1118 1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1119 1119 score = evalpath(striplen)
1120 1120 striplen1 = len(os.path.split(abspfx)[0])
1121 1121 if striplen1:
1122 1122 striplen1 += len(os.sep)
1123 1123 if evalpath(striplen1) > score:
1124 1124 striplen = striplen1
1125 1125 res = lambda p: os.path.join(dest, p[striplen:])
1126 1126 else:
1127 1127 # a file
1128 1128 if destdirexists:
1129 1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1130 1130 else:
1131 1131 res = lambda p: dest
1132 1132 return res
1133 1133
1134 1134
1135 1135 pats = list(pats)
1136 1136 if not pats:
1137 1137 raise util.Abort(_('no source or destination specified'))
1138 1138 if len(pats) == 1:
1139 1139 raise util.Abort(_('no destination specified'))
1140 1140 dest = pats.pop()
1141 1141 destdirexists = os.path.isdir(dest)
1142 1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1143 1143 raise util.Abort(_('with multiple sources, destination must be an '
1144 1144 'existing directory'))
1145 1145 if opts['after']:
1146 1146 tfn = targetpathafterfn
1147 1147 else:
1148 1148 tfn = targetpathfn
1149 1149 copylist = []
1150 1150 for pat in pats:
1151 1151 srcs = []
1152 1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1153 1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1154 1154 if origsrc:
1155 1155 srcs.append((origsrc, abssrc, relsrc, exact))
1156 1156 if not srcs:
1157 1157 continue
1158 1158 copylist.append((tfn(pat, dest, srcs), srcs))
1159 1159 if not copylist:
1160 1160 raise util.Abort(_('no files to copy'))
1161 1161
1162 1162 for targetpath, srcs in copylist:
1163 1163 for origsrc, abssrc, relsrc, exact in srcs:
1164 1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1165 1165
1166 1166 if errors:
1167 1167 ui.warn(_('(consider using --after)\n'))
1168 1168 return errors, copied
1169 1169
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # Take the working-directory lock and let docopy do the real work;
    # only the error count is reported back as the exit status.
    wlock = repo.wlock(0)
    errs, copied_files = docopy(ui, repo, pats, opts, wlock)
    return errs
1190 1190
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    opener = util.opener(os.getcwd(), audit=False)
    rlog = revlog.revlog(opener, index, "", 0)
    ancestor = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(ancestor), hex(ancestor)))
1196 1196
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    # --options: list the flags (global plus, if a command was named,
    # that command's own) instead of command names.
    if opts['options']:
        tables = [globalopts]
        if cmd:
            aliases, entry = findcmd(cmd)
            tables.append(entry[1])
        options = []
        for table in tables:
            for opt in table:
                shortname, longname = opt[0], opt[1]
                if shortname:
                    options.append('-%s' % shortname)
                options.append('--%s' % longname)
        ui.write("%s\n" % "\n".join(options))
        return

    candidates = findpossible(cmd).keys()
    candidates.sort()
    ui.write("%s\n" % "\n".join(candidates))
1217 1217
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    # Default to tip when no revision is named.
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    # change[0] is the manifest node of the target changeset.
    n = change[0]
    # NOTE(review): readflags() presumably yields the manifest's
    # per-file flag map; rebuild() is fed its (file, flag) pairs —
    # confirm against manifest.readflags and dirstate.rebuild.
    files = repo.manifest.readflags(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files.iteritems())
1229 1229
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check every dirstate entry against the manifests of both
    # working-directory parents, and every manifest1 file against the
    # dirstate; abort if any inconsistency was found.
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        # 'n'ormal/'r'emoved entries must be in manifest1; 'a'dded ones
        # must not; 'm'erged ones must be in at least one parent.
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            # bug fix: this warning was missing its trailing newline,
            # unlike every other warning emitted here.
            ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
1262 1262
def debugconfig(ui, repo, *values):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    # At most one fully-qualified section.name argument is allowed.
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig():
        sectname = section + '.' + name
        if not values:
            # No filters: dump everything.
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # Section match: show name=value.
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # Exact item match: show the bare value.
                ui.write(value, '\n')
1287 1287
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # A missing second parent defaults to the null revision.
    p1 = repo.lookup(rev1)
    p2 = repo.lookup(rev2 or hex(nullid))
    repo.dirstate.setparents(p1, p2)
1299 1299
1300 1300 def debugstate(ui, repo):
1301 1301 """show the contents of the current dirstate"""
1302 1302 repo.dirstate.read()
1303 1303 dc = repo.dirstate.map
1304 1304 keys = dc.keys()
1305 1305 keys.sort()
1306 1306 for file_ in keys:
1307 1307 ui.write("%c %3o %10d %s %s\n"
1308 1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1309 1309 time.strftime("%x %X",
1310 1310 time.localtime(dc[file_][3])), file_))
1311 1311 for f in repo.dirstate.copies:
1312 1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1313 1313
def debugdata(ui, file_, rev):
    """dump the contents of an data file revision"""
    # Open the revlog via its index file (swap the ".d" suffix for ".i").
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        # bug fix: rev was passed as a second argument to Abort, leaving
        # the '%s' placeholder unexpanded; format it into the message.
        raise util.Abort(_('invalid revision identifier %s') % rev)
1322 1322
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    # Header, then one row per revision in the revlog.
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for rev in range(r.count()):
        node = r.node(rev)
        p1, p2 = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            rev, r.start(rev), r.length(rev), r.base(rev), r.linkrev(node),
            short(node), short(p1), short(p2)))
1334 1334
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    # Emit one edge per parent link; null second parents are omitted.
    ui.write("digraph G {\n")
    for rev in range(r.count()):
        node = r.node(rev)
        p1, p2 = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write("}\n")
1346 1346
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    # Resolve the file's filelog, then the filelog node for rev (or tip).
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            # Fall back to interpreting rev as a filelog revision.
            n = r.lookup(rev)
    else:
        n = r.tip()
    # renamed() reports the copy source (path, node) or a false value.
    m = r.renamed(n)
    if m:
        ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
    else:
        ui.write(_("not renamed\n"))
1366 1366
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    # Pad the abs/rel columns to the widest entry for aligned output.
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1378 1378
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Resolve the revision pair and file matcher, then emit the diff
    # straight to stdout.
    node1, node2 = revpair(ui, repo, opts['rev'])
    filenames, matcher, anypats = matchpats(repo, pats, opts)
    dodiff(sys.stdout, ui, repo, node1, node2, filenames, match=matcher,
           text=opts['text'], opts=opts)
1402 1402
def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    # Export a single changeset as a patch with an "# HG changeset patch"
    # header, to stdout or a formatted filename.
    node = repo.lookup(changeset)
    parents = [p for p in repo.changelog.parents(node) if p != nullid]
    # --switch-parent diffs against the second parent of a merge.
    if opts['switch_parent']:
        parents.reverse()
    prev = parents and parents[0] or nullid
    change = repo.changelog.read(node)

    fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    # Header: user, date, node, parent(s), then the commit message.
    fp.write("# HG changeset patch\n")
    fp.write("# User %s\n" % change[1])
    fp.write("# Date %d %d\n" % change[2])
    fp.write("# Node ID %s\n" % hex(node))
    fp.write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        fp.write("# Parent %s\n" % hex(parents[1]))
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()
1429 1429
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    # revwidth pads %r/%n escapes to the widest revision string.
    revwidth = max([len(r) for r in revs])
    if total > 1:
        ui.note(_("Exporting patches:\n"))
    else:
        ui.note(_("Exporting patch:\n"))
    # Sequence numbers start at 1.
    for idx, cset in enumerate(revs):
        doexport(ui, repo, cset, idx + 1, total, revwidth, opts)
1468 1468
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit (DEPRECATED)

    (DEPRECATED)
    Undo an 'hg add' scheduled for the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use revert instead.
    """
    ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
    # Collect only files currently in the 'a'dded state.
    forgotten = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if repo.dirstate.state(abs) != 'a':
            continue
        forgotten.append(abs)
        if ui.verbose or not exact:
            ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
    repo.forget(forgotten)
1486 1486
1487 1487 def grep(ui, repo, pattern, *pats, **opts):
1488 1488 """search for a pattern in specified files and revisions
1489 1489
1490 1490 Search revisions of files for a regular expression.
1491 1491
1492 1492 This command behaves differently than Unix grep. It only accepts
1493 1493 Python/Perl regexps. It searches repository history, not the
1494 1494 working directory. It always prints the revision number in which
1495 1495 a match appears.
1496 1496
1497 1497 By default, grep only prints output for the first revision of a
1498 1498 file in which it finds a match. To get it to print every revision
1499 1499 that contains a change in match status ("-" for a match that
1500 1500 becomes a non-match, or "+" for a non-match that becomes a match),
1501 1501 use the --all flag.
1502 1502 """
1503 1503 reflags = 0
1504 1504 if opts['ignore_case']:
1505 1505 reflags |= re.I
1506 1506 regexp = re.compile(pattern, reflags)
1507 1507 sep, eol = ':', '\n'
1508 1508 if opts['print0']:
1509 1509 sep = eol = '\0'
1510 1510
1511 1511 fcache = {}
1512 1512 def getfile(fn):
1513 1513 if fn not in fcache:
1514 1514 fcache[fn] = repo.file(fn)
1515 1515 return fcache[fn]
1516 1516
1517 1517 def matchlines(body):
1518 1518 begin = 0
1519 1519 linenum = 0
1520 1520 while True:
1521 1521 match = regexp.search(body, begin)
1522 1522 if not match:
1523 1523 break
1524 1524 mstart, mend = match.span()
1525 1525 linenum += body.count('\n', begin, mstart) + 1
1526 1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1527 1527 lend = body.find('\n', mend)
1528 1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1529 1529 begin = lend + 1
1530 1530
1531 1531 class linestate(object):
1532 1532 def __init__(self, line, linenum, colstart, colend):
1533 1533 self.line = line
1534 1534 self.linenum = linenum
1535 1535 self.colstart = colstart
1536 1536 self.colend = colend
1537 1537 def __eq__(self, other):
1538 1538 return self.line == other.line
1539 1539 def __hash__(self):
1540 1540 return hash(self.line)
1541 1541
1542 1542 matches = {}
1543 1543 def grepbody(fn, rev, body):
1544 1544 matches[rev].setdefault(fn, {})
1545 1545 m = matches[rev][fn]
1546 1546 for lnum, cstart, cend, line in matchlines(body):
1547 1547 s = linestate(line, lnum, cstart, cend)
1548 1548 m[s] = s
1549 1549
1550 1550 # FIXME: prev isn't used, why ?
1551 1551 prev = {}
1552 1552 ucache = {}
1553 1553 def display(fn, rev, states, prevstates):
1554 1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1555 1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1556 1556 counts = {'-': 0, '+': 0}
1557 1557 filerevmatches = {}
1558 1558 for l in diff:
1559 1559 if incrementing or not opts['all']:
1560 1560 change = ((l in prevstates) and '-') or '+'
1561 1561 r = rev
1562 1562 else:
1563 1563 change = ((l in states) and '-') or '+'
1564 1564 r = prev[fn]
1565 1565 cols = [fn, str(rev)]
1566 1566 if opts['line_number']:
1567 1567 cols.append(str(l.linenum))
1568 1568 if opts['all']:
1569 1569 cols.append(change)
1570 1570 if opts['user']:
1571 1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1572 1572 ucache))
1573 1573 if opts['files_with_matches']:
1574 1574 c = (fn, rev)
1575 1575 if c in filerevmatches:
1576 1576 continue
1577 1577 filerevmatches[c] = 1
1578 1578 else:
1579 1579 cols.append(l.line)
1580 1580 ui.write(sep.join(cols), eol)
1581 1581 counts[change] += 1
1582 1582 return counts['+'], counts['-']
1583 1583
1584 1584 fstate = {}
1585 1585 skip = {}
1586 1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1587 1587 count = 0
1588 1588 incrementing = False
1589 1589 for st, rev, fns in changeiter:
1590 1590 if st == 'window':
1591 1591 incrementing = rev
1592 1592 matches.clear()
1593 1593 elif st == 'add':
1594 1594 change = repo.changelog.read(repo.lookup(str(rev)))
1595 1595 mf = repo.manifest.read(change[0])
1596 1596 matches[rev] = {}
1597 1597 for fn in fns:
1598 1598 if fn in skip:
1599 1599 continue
1600 1600 fstate.setdefault(fn, {})
1601 1601 try:
1602 1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1603 1603 except KeyError:
1604 1604 pass
1605 1605 elif st == 'iter':
1606 1606 states = matches[rev].items()
1607 1607 states.sort()
1608 1608 for fn, m in states:
1609 1609 if fn in skip:
1610 1610 continue
1611 1611 if incrementing or not opts['all'] or fstate[fn]:
1612 1612 pos, neg = display(fn, rev, m, fstate[fn])
1613 1613 count += pos + neg
1614 1614 if pos and not opts['all']:
1615 1615 skip[fn] = True
1616 1616 fstate[fn] = m
1617 1617 prev[fn] = rev
1618 1618
1619 1619 if not incrementing:
1620 1620 fstate = fstate.items()
1621 1621 fstate.sort()
1622 1622 for fn, state in fstate:
1623 1623 if fn in skip:
1624 1624 continue
1625 1625 display(fn, rev, {}, state)
1626 1626 return (count == 0 and 1) or 0
1627 1627
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    rev = opts['rev']
    if rev:
        # restrict to heads reachable from the given revision
        nodes = repo.heads(repo.lookup(rev))
    else:
        nodes = repo.heads()
    branchinfo = None
    if opts['branches']:
        # branch names are only computed on request (can be expensive)
        branchinfo = repo.branchlookup(nodes)
    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node, brinfo=branchinfo)
1647 1647
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        # no parents: fresh or broken working directory
        ui.write(_("unknown\n"))
        return

    # short hashes normally, full hashes in verbose mode
    hexfunc = ui.verbose and hex or short
    modified, added, removed, deleted, unknown = repo.changes()
    dirty = ""
    if modified or added or removed or deleted:
        # "+" marks uncommitted changes in the working dir
        dirty = "+"
    output = ['+'.join([hexfunc(p) for p in parents]) + dirty]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1677 1677
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    # NOTE: a `mailre` regexp used to be compiled here but was never
    # referenced; the dead local has been removed.

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    for patch in patches:
        pf = os.path.join(d, patch)

        # commit metadata gathered from the message / patch headers
        message = None
        user = None
        date = None
        hgpatch = False

        p = email.Parser.Parser()
        if pf == '-':
            msg = p.parse(sys.stdin)
            ui.status(_("applying patch from stdin\n"))
        else:
            msg = p.parse(file(pf))
            ui.status(_("applying %s\n") % patch)

        # the actual diff content is accumulated into a temp file
        # that util.patch() is pointed at below
        fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
        tmpfp = os.fdopen(fd, 'w')
        try:
            message = msg['Subject']
            if message:
                # unfold RFC 2822 continuation lines
                message = message.replace('\n\t', ' ')
                ui.debug('Subject: %s\n' % message)
            user = msg['From']
            if user:
                ui.debug('From: %s\n' % user)
            diffs_seen = 0
            ok_types = ('text/plain', 'text/x-patch')
            for part in msg.walk():
                content_type = part.get_content_type()
                ui.debug('Content-Type: %s\n' % content_type)
                if content_type not in ok_types:
                    continue
                payload = part.get_payload(decode=True)
                m = diffre.search(payload)
                if m:
                    ui.debug(_('found patch at byte %d\n') % m.start(0))
                    diffs_seen += 1
                    hgpatch = False
                    fp = cStringIO.StringIO()
                    if message:
                        fp.write(message)
                        fp.write('\n')
                    # text before the diff becomes (part of) the commit
                    # message; "hg export" headers override From/Subject
                    for line in payload[:m.start(0)].splitlines():
                        if line.startswith('# HG changeset patch'):
                            ui.debug(_('patch generated by hg export\n'))
                            hgpatch = True
                            # drop earlier commit message content
                            fp.seek(0)
                            fp.truncate()
                        elif hgpatch:
                            if line.startswith('# User '):
                                user = line[7:]
                                ui.debug('From: %s\n' % user)
                            elif line.startswith("# Date "):
                                date = line[7:]
                        if not line.startswith('# '):
                            fp.write(line)
                            fp.write('\n')
                    message = fp.getvalue()
                    if tmpfp:
                        tmpfp.write(payload)
                        if not payload.endswith('\n'):
                            tmpfp.write('\n')
                elif not diffs_seen and message and content_type == 'text/plain':
                    # plain-text body parts before the first diff are
                    # appended to the commit message
                    message += '\n' + payload

            if opts['message']:
                # pickup the cmdline msg
                message = opts['message']
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            tmpfp.close()
            if not diffs_seen:
                raise util.Abort(_('no diffs found'))

            files = util.patch(strip, tmpname, ui)
            if len(files) > 0:
                addremove_lock(ui, repo, files, {})
            repo.commit(files, message, user, date)
        finally:
            # always remove the temporary patch file
            os.unlink(tmpname)
1803 1803
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    # roots of the incoming changesets, or empty if already up to date
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    # path of a temporary bundle file to delete on exit (None = keep/none)
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        # changesets between the incoming roots and the requested heads
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                # two real parents == merge changeset; skip when asked
                continue
            displayer.show(changenode=n)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(ui, ui, other, prev, n)
                ui.write("\n")
    finally:
        # bundlerepository holds the bundle file open; close before unlink
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1860 1860
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd options first: dest may be an ssh:// URL
    ui.setconfig_remoteopts(**opts)
    hg.repository(ui, dest, create=1)
1875 1875
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        if not node and repo.dirstate.state(abs) == '?':
            # no revision given: skip files unknown to the dirstate
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        elif pats and rel:
            name = rel
        else:
            name = abs
        ui.write(name, end)
1909 1909
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        # walkchangerevs yields revisions out of order within a window;
        # buffering per-rev output lets us replay it in display order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}      # rev -> list of write() arg tuples
            self.header = {}    # rev -> list of write_header() arg tuples
        def bump(self, rev):
            # switch the buffer target to a new revision
            self.rev = rev
            self.hunk[rev] = []
            self.header[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def write_header(self, *args):
            self.header[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # everything else (verbose, quiet, debugflag, ...) comes
            # from the wrapped ui
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no limit: Python 2 int ceiling acts as "unbounded"
        limit = sys.maxint
    count = 0

    displayer = show_changeset(ui, repo, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            # new revision window: start a fresh output buffer
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            # buffer the output for this revision
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                # every keyword must appear in user, description, or the
                # first 20 changed file names (case-insensitive)
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            displayer.show(rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            # flush buffered output for this revision in display order
            if count == limit: break
            if du.header[rev]:
                for args in du.header[rev]:
                    # NOTE(review): write_header is defined on dui, not on
                    # the real ui -- verify this should not be du/ui.write
                    ui.write_header(*args)
            if du.hunk[rev]:
                count += 1
                for args in du.hunk[rev]:
                    ui.write(*args)
2004 2004
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            # fall back to treating rev as a manifest revision
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()
    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    files = m.keys()
    files.sort()

    for f in files:
        # query the exec bit through the manifestflags API rather than
        # indexing the flags dict directly
        ui.write("%40s %3s %s\n" % (hex(m[f]),
                                    mf.execf(f) and "755" or "644", f))
2030 2031
def merge(ui, repo, node=None, **opts):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.
    """
    # merge is implemented as an update with merging enabled
    return doupdate(ui, repo, node=node, merge=True, **opts)
2040 2041
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(r) for r in opts['rev']]

    other = hg.repository(ui, dest)
    nodes = repo.findoutgoing(other, force=opts['force'])
    if not nodes:
        ui.status(_("no changes found\n"))
        return
    # expand the outgoing roots to the full changeset list
    nodes = repo.changelog.nodesbetween(nodes, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = show_changeset(ui, repo, opts)
    for n in nodes:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            # merge changesets have two real parents; skip when asked
            continue
        displayer.show(changenode=n)
        if opts['patch']:
            prev = parents and parents[0] or nullid
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")
2074 2075
def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(nodes)
    displayer = show_changeset(ui, repo, opts)
    for n in nodes:
        # a null parent means "no second parent"; don't display it
        if n == nullid:
            continue
        displayer.show(changenode=n, brinfo=br)
2092 2093
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # no name given: dump every definition
        for name, path in entries:
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in entries:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2112 2113
def postincoming(ui, repo, modheads, optupdate):
    # After changesets came in: either update the working dir (when it is
    # unambiguous) or tell the user what to run next.
    if modheads == 0:
        return
    if optupdate and modheads == 1:
        # exactly one head modified: safe to update automatically
        return doupdate(ui, repo)
    if optupdate:
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2125 2126
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

    local/filesystem/path
    http://[user@]host[:port]/[path]
    https://[user@]host[:port]/[path]
    ssh://[user@]host[:port]/[path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
    Use an extra slash at the start of a path to specify an absolute path:
    ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
    to do is to configure it in your ~/.ssh/ssh_config, e.g.:
    Host *.mylocalnetwork.example.com
    Compression off
    Host *
    Compression on
    Alternatively specify "ssh -C" as your ssh command in your hgrc or
    with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        # -r needs server-side lookup support, which only local repos have
        if not other.local():
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
2169 2170
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

    local/filesystem/path
    ssh://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is possible, too, if this
    feature is enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = map(repo.lookup, opts['rev'])
    # push returns 0 on success; invert for the shell exit convention
    return repo.push(other, opts['force'], revs=revs) == 0
2205 2206
2206 2207 def rawcommit(ui, repo, *flist, **rc):
2207 2208 """raw commit interface (DEPRECATED)
2208 2209
2209 2210 (DEPRECATED)
2210 2211 Lowlevel commit, for use in helper scripts.
2211 2212
2212 2213 This command is not intended to be used by normal users, as it is
2213 2214 primarily useful for importing from other SCMs.
2214 2215
2215 2216 This command is now deprecated and will be removed in a future
2216 2217 release, please use debugsetparents and commit instead.
2217 2218 """
2218 2219
2219 2220 ui.warn(_("(the rawcommit command is deprecated)\n"))
2220 2221
2221 2222 message = rc['message']
2222 2223 if not message and rc['logfile']:
2223 2224 try:
2224 2225 message = open(rc['logfile']).read()
2225 2226 except IOError:
2226 2227 pass
2227 2228 if not message and not rc['logfile']:
2228 2229 raise util.Abort(_("missing commit message"))
2229 2230
2230 2231 files = relpath(repo, list(flist))
2231 2232 if rc['files']:
2232 2233 files += open(rc['files']).read().splitlines()
2233 2234
2234 2235 rc['parent'] = map(repo.lookup, rc['parent'])
2235 2236
2236 2237 try:
2237 2238 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2238 2239 except ValueError, inst:
2239 2240 raise util.Abort(str(inst))
2240 2241
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if not repo.recover():
        # nothing was recovered: signal failure
        return 1
    # verify the repository after a successful recovery
    return repo.verify()
2252 2253
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    # 'exact' dict of explicitly named files; NOTE it is shadowed by the
    # per-file 'exact' flag in the walk loop below
    exact = dict.fromkeys(files)
    # mardu: modified/added/removed/deleted/unknown as membership dicts
    mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # decide the file's fate; 'reason' set means it is NOT removed
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file just forgets the add
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            # already scheduled for removal
            continue
        if reason:
            # only warn for files the user named explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # --after: the files are already gone, so don't unlink
    repo.remove(remove, unlink=not opts['after'])
2299 2300
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    # report the removals first, then schedule them all at once
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
    removals = [abs for abs, rel, exact in copied]
    if not opts.get('dry_run'):
        repo.remove(removals, True, wlock)
    return errs
2327 2328
2328 2329 def revert(ui, repo, *pats, **opts):
2329 2330 """revert files or dirs to their states as of some revision
2330 2331
2331 2332 With no revision specified, revert the named files or directories
2332 2333 to the contents they had in the parent of the working directory.
2333 2334 This restores the contents of the affected files to an unmodified
2334 2335 state. If the working directory has two parents, you must
2335 2336 explicitly specify the revision to revert to.
2336 2337
2337 2338 Modified files are saved with a .orig suffix before reverting.
2338 2339 To disable these backups, use --no-backup.
2339 2340
2340 2341 Using the -r option, revert the given files or directories to
2341 2342 their contents as of a specific revision. This can be helpful to"roll
2342 2343 back" some or all of a change that should not have been committed.
2343 2344
2344 2345 Revert modifies the working directory. It does not commit any
2345 2346 changes, or change the parent of the working directory. If you
2346 2347 revert to a revision other than the parent of the working
2347 2348 directory, the reverted files will thus appear modified
2348 2349 afterwards.
2349 2350
2350 2351 If a file has been deleted, it is recreated. If the executable
2351 2352 mode of a file was changed, it is reset.
2352 2353
2353 2354 If names are given, all files matching the names are reverted.
2354 2355
2355 2356 If no arguments are given, all files in the repository are reverted.
2356 2357 """
2357 2358 parent, p2 = repo.dirstate.parents()
2358 2359 if opts['rev']:
2359 2360 node = repo.lookup(opts['rev'])
2360 2361 elif p2 != nullid:
2361 2362 raise util.Abort(_('working dir has two parents; '
2362 2363 'you must specify the revision to revert to'))
2363 2364 else:
2364 2365 node = parent
2365 2366 mf = repo.manifest.read(repo.changelog.read(node)[0])
2366 2367 if node == parent:
2367 2368 pmf = mf
2368 2369 else:
2369 2370 pmf = None
2370 2371
2371 2372 wlock = repo.wlock()
2372 2373
2373 2374 # need all matching names in dirstate and manifest of target rev,
2374 2375 # so have to walk both. do not print errors if files exist in one
2375 2376 # but not other.
2376 2377
2377 2378 names = {}
2378 2379 target_only = {}
2379 2380
2380 2381 # walk dirstate.
2381 2382
2382 2383 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2383 2384 names[abs] = (rel, exact)
2384 2385 if src == 'b':
2385 2386 target_only[abs] = True
2386 2387
2387 2388 # walk target manifest.
2388 2389
2389 2390 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2390 2391 badmatch=names.has_key):
2391 2392 if abs in names: continue
2392 2393 names[abs] = (rel, exact)
2393 2394 target_only[abs] = True
2394 2395
2395 2396 changes = repo.changes(match=names.has_key, wlock=wlock)
2396 2397 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2397 2398
2398 2399 revert = ([], _('reverting %s\n'))
2399 2400 add = ([], _('adding %s\n'))
2400 2401 remove = ([], _('removing %s\n'))
2401 2402 forget = ([], _('forgetting %s\n'))
2402 2403 undelete = ([], _('undeleting %s\n'))
2403 2404 update = {}
2404 2405
2405 2406 disptable = (
2406 2407 # dispatch table:
2407 2408 # file state
2408 2409 # action if in target manifest
2409 2410 # action if not in target manifest
2410 2411 # make backup if in target manifest
2411 2412 # make backup if not in target manifest
2412 2413 (modified, revert, remove, True, True),
2413 2414 (added, revert, forget, True, False),
2414 2415 (removed, undelete, None, False, False),
2415 2416 (deleted, revert, remove, False, False),
2416 2417 (unknown, add, None, True, False),
2417 2418 (target_only, add, None, False, False),
2418 2419 )
2419 2420
2420 2421 entries = names.items()
2421 2422 entries.sort()
2422 2423
2423 2424 for abs, (rel, exact) in entries:
2424 2425 mfentry = mf.get(abs)
2425 2426 def handle(xlist, dobackup):
2426 2427 xlist[0].append(abs)
2427 2428 update[abs] = 1
2428 2429 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2429 2430 bakname = "%s.orig" % rel
2430 2431 ui.note(_('saving current version of %s as %s\n') %
2431 2432 (rel, bakname))
2432 2433 if not opts.get('dry_run'):
2433 2434 shutil.copyfile(rel, bakname)
2434 2435 shutil.copymode(rel, bakname)
2435 2436 if ui.verbose or not exact:
2436 2437 ui.status(xlist[1] % rel)
2437 2438 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2438 2439 if abs not in table: continue
2439 2440 # file has changed in dirstate
2440 2441 if mfentry:
2441 2442 handle(hitlist, backuphit)
2442 2443 elif misslist is not None:
2443 2444 handle(misslist, backupmiss)
2444 2445 else:
2445 2446 if exact: ui.warn(_('file not managed: %s\n' % rel))
2446 2447 break
2447 2448 else:
2448 2449 # file has not changed in dirstate
2449 2450 if node == parent:
2450 2451 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2451 2452 continue
2452 2453 if pmf is None:
2453 2454 # only need parent manifest in this unlikely case,
2454 2455 # so do not read by default
2455 2456 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2456 2457 if abs in pmf:
2457 2458 if mfentry:
2458 2459 # if version of file is same in parent and target
2459 2460 # manifests, do nothing
2460 2461 if pmf[abs] != mfentry:
2461 2462 handle(revert, False)
2462 2463 else:
2463 2464 handle(remove, False)
2464 2465
2465 2466 if not opts.get('dry_run'):
2466 2467 repo.dirstate.forget(forget[0])
2467 2468 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2468 2469 show_stats=False)
2469 2470 repo.dirstate.update(add[0], 'a')
2470 2471 repo.dirstate.update(undelete[0], 'n')
2471 2472 repo.dirstate.update(remove[0], 'r')
2472 2473 return r
2473 2474
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All undo state lives in the repository object; delegate to it.
    repo.rollback()
2502 2503
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # Emit the repository root path followed by a newline.
    ui.write("%s\n" % repo.root)
2509 2510
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the ssh wire protocol on stdin/stdout and never
    # return (sshserver.serve_forever loops).
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Copy the relevant command-line options into the [web] config
    # section so hgweb picks them up.
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    # --daemon (first pass): re-exec ourselves with --daemon-pipefds,
    # then block until the child writes one byte to the pipe to signal
    # that it is ready, and exit the foreground process.
    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            # bound to all interfaces; show the host name instead
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    # --daemon-pipefds (second pass, inside the spawned child): tell the
    # waiting parent we are up, then detach stdio onto the null device.
    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
2582 2583
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    """

    # normalize --ignored to a plain boolean
    show_ignored = bool(opts['ignored'])
    files, matchfn, anypats = matchpats(repo, pats, opts)
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    # repo.changes returns six name lists; make every name relative to
    # the directory the command was run from
    statelists = [[util.pathto(cwd, name) for name in names]
                  for names in repo.changes(files=files, match=matchfn,
                                            show_ignored=show_ignored)]
    modified, added, removed, deleted, unknown, ignored = statelists

    changetypes = [('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored)]

    end = opts['print0'] and '\0' or '\n'

    # show only the states selected with -m/-a/-r/..., or all of them
    # when no state option was given
    selected = [ct for ct in changetypes if opts[ct[0]]]
    if not selected:
        selected = changetypes

    for state, code, names in selected:
        if opts['no_status']:
            line = "%%s%s" % end
        else:
            line = "%s %%s%s" % (code, end)
        for name in names:
            ui.write(line % name)
2624 2625
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the tip is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # "tip" is a reserved symbolic name and can never be a real tag
    if name == "tip":
        raise util.Abort(_("the name 'tip' is reserved"))
    # a positional REV is the historical calling form; -r is preferred,
    # and giving both is an error
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        node = repo.lookup(rev_)
    else:
        node = repo.changelog.tip()

    repo.tag(name, hex(node), opts['local'], opts['message'], opts['user'],
             opts['date'])
2658 2659
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    # tagslist is ordered oldest-first; show newest first instead
    for name, node in repo.tagslist()[::-1]:
        try:
            info = "%5d:%s" % (repo.changelog.rev(node), hex(node))
        except KeyError:
            # node not present in the changelog
            info = " ?:?"
        if ui.quiet:
            ui.write("%s\n" % name)
        else:
            ui.write("%-30s %s\n" % (name, info))
2678 2679
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    tipnode = repo.changelog.tip()
    brinfo = None
    if opts['branches']:
        brinfo = repo.branchlookup([tipnode])
    show_changeset(ui, repo, opts).show(changenode=tipnode, brinfo=brinfo)
    if opts['patch']:
        # diff the tip against its first parent
        dodiff(ui, ui, repo, repo.changelog.parents(tipnode)[0], tipnode)
2691 2692
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # the six-byte magic identifies both the format and the compression
    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    if not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)

    if header == "HG10BZ":
        def generator(stream):
            # the stream is missing the "BZ" magic that was consumed as
            # part of the header, so prime the decompressor with it
            decomp = bz2.BZ2Decompressor()
            decomp.decompress("BZ")
            for piece in stream:
                yield decomp.decompress(piece)
    elif header == "HG10UN":
        def generator(stream):
            # uncompressed bundle: pass the chunks straight through
            for piece in stream:
                yield piece
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
    return postincoming(ui, repo, modheads, opts['update'])
2721 2722
def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    # deprecated alias: warn, then behave exactly like "hg rollback"
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()
2732 2733
def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None, **opts):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    # -m/--merge is deprecated in favor of the merge command; warn but
    # still honor it for backwards compatibility
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
    # all the real work happens in doupdate (shared with merge)
    return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2753 2754
def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
             branch=None, **opts):
    # shared worker for the update and merge commands
    if branch:
        # resolve the branch name to a head; refuse to pick one when
        # the name is ambiguous
        heads = repo.branchlookup(branch=branch)
        found = [h for h in heads if branch in heads[h]]
        if len(found) > 1:
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for h in found:
                show_changeset(ui, repo, opts).show(changenode=h, brinfo=heads)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
        else:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)
2776 2777
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all checking logic lives in the repository object
    return repo.verify()
2788 2789
2789 2790 # Command options and aliases are listed here, alphabetically
2790 2791
# Each entry maps "name|alias|..." (a "^" prefix marks the command for
# inclusion in short help) to a (function, options, synopsis) tuple.
# Each option is (short flag, long flag, default value, help text).
# Fix: "commiter" -> "committer" in the commit and tag entries, matching
# the spelling already used by the backout entry.
table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg add [OPTION]... [FILE]...')),
    "debugaddremove|addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg backout [OPTION]... REV')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated'))],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "debugforget|forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         [('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "merge":
        (merge,
         [('b', 'branch', '', _('merge with head of a specific branch')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-b TAG] [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-b] [REV]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update the working directory to tip after pull')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list')),
          ('m', 'message', '', _('commit message')),
          ('l', 'logfile', '', _('commit message file'))],
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
          ('I', 'include', [], _('include names matching given patterns')),
          ('X', 'exclude', [], _('exclude names matching given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg revert [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('r', 'rev', '', _('revision to tag'))],
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-b] [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update the working directory to tip after unbundle'))],
         _('hg unbundle [-u] FILE')),
    "debugundo|undo": (undo, [], _('hg undo')),
    "^update|up|checkout|co":
        (update,
         [('b', 'branch', '', _('checkout the head of a specific branch')),
          ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
          ('C', 'clean', None, _('overwrite locally modified files')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (show_version, [], _('hg version')),
    }
3147 3148
# options accepted by every hg command, parsed before the command name
# and merged into each command's own option list by parse()
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# commands that never need a repository object
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot")
# commands that use a repository when one is present, but run without one
optionalrepo = ("paths serve debugconfig")
3170 3171
def findpossible(cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}
    for entry in table.keys():
        aliases = entry.lstrip("^").split("|")
        # exact alias match wins; otherwise accept a unique-ish prefix
        match = None
        if cmd in aliases:
            match = cmd
        else:
            for alias in aliases:
                if alias.startswith(cmd):
                    match = alias
                    break
        if match is None:
            continue
        # debug commands are kept in a separate bucket so they only
        # surface when nothing else matched
        if aliases[0].startswith("debug"):
            debug[match] = (aliases, table[entry])
        else:
            normal[match] = (aliases, table[entry])

    if not normal and debug:
        normal = debug

    return normal
3199 3200
def findcmd(cmd):
    """Return (aliases, command table entry) for command string."""
    matches = findpossible(cmd)

    # an exact match takes precedence over any prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        names = matches.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)

    if matches:
        return matches.values()[0]

    raise UnknownCommand(cmd)
3216 3217
def catchterm(*args):
    # signal handler: turn termination signals into a Python exception
    # so that cleanup code gets a chance to run
    raise util.SignalInterrupt

def run():
    # command-line entry point: exit with dispatch()'s return code
    sys.exit(dispatch(sys.argv[1:]))

class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""
3225 3226
3226 3227 def parse(ui, args):
3227 3228 options = {}
3228 3229 cmdoptions = {}
3229 3230
3230 3231 try:
3231 3232 args = fancyopts.fancyopts(args, globalopts, options)
3232 3233 except fancyopts.getopt.GetoptError, inst:
3233 3234 raise ParseError(None, inst)
3234 3235
3235 3236 if args:
3236 3237 cmd, args = args[0], args[1:]
3237 3238 aliases, i = findcmd(cmd)
3238 3239 cmd = aliases[0]
3239 3240 defaults = ui.config("defaults", cmd)
3240 3241 if defaults:
3241 3242 args = defaults.split() + args
3242 3243 c = list(i[1])
3243 3244 else:
3244 3245 cmd = None
3245 3246 c = []
3246 3247
3247 3248 # combine global options into local
3248 3249 for o in globalopts:
3249 3250 c.append((o[0], o[1], options[o[1]], o[3]))
3250 3251
3251 3252 try:
3252 3253 args = fancyopts.fancyopts(args, c, cmdoptions)
3253 3254 except fancyopts.getopt.GetoptError, inst:
3254 3255 raise ParseError(cmd, inst)
3255 3256
3256 3257 # separate global options back out
3257 3258 for o in globalopts:
3258 3259 n = o[1]
3259 3260 options[n] = cmdoptions[n]
3260 3261 del cmdoptions[n]
3261 3262
3262 3263 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3263 3264
# loaded extensions: maps extension name -> module name in sys.modules
external = {}
3265 3266
def findext(name):
    '''return module with given extension name'''
    try:
        # fast path: exact extension name already registered
        return sys.modules[external[name]]
    except KeyError:
        # fall back to matching the last dotted component, or an
        # extension registered under its module name
        dotname = '.' + name
        for k, v in external.iteritems():
            if k.endswith(dotname) or v == name:
                return sys.modules[v]
        raise KeyError(name)
3276 3277
3277 3278 def dispatch(args):
3278 3279 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3279 3280 num = getattr(signal, name, None)
3280 3281 if num: signal.signal(num, catchterm)
3281 3282
3282 3283 try:
3283 3284 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3284 3285 except util.Abort, inst:
3285 3286 sys.stderr.write(_("abort: %s\n") % inst)
3286 3287 return -1
3287 3288
3288 3289 for ext_name, load_from_name in u.extensions():
3289 3290 try:
3290 3291 if load_from_name:
3291 3292 # the module will be loaded in sys.modules
3292 3293 # choose an unique name so that it doesn't
3293 3294 # conflicts with other modules
3294 3295 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 3296 mod = imp.load_source(module_name, load_from_name)
3296 3297 else:
3297 3298 def importh(name):
3298 3299 mod = __import__(name)
3299 3300 components = name.split('.')
3300 3301 for comp in components[1:]:
3301 3302 mod = getattr(mod, comp)
3302 3303 return mod
3303 3304 try:
3304 3305 mod = importh("hgext.%s" % ext_name)
3305 3306 except ImportError:
3306 3307 mod = importh(ext_name)
3307 3308 external[ext_name] = mod.__name__
3308 3309 except (util.SignalInterrupt, KeyboardInterrupt):
3309 3310 raise
3310 3311 except Exception, inst:
3311 3312 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3312 3313 if u.print_exc():
3313 3314 return 1
3314 3315
3315 3316 for name in external.itervalues():
3316 3317 mod = sys.modules[name]
3317 3318 uisetup = getattr(mod, 'uisetup', None)
3318 3319 if uisetup:
3319 3320 uisetup(u)
3320 3321 cmdtable = getattr(mod, 'cmdtable', {})
3321 3322 for t in cmdtable:
3322 3323 if t in table:
3323 3324 u.warn(_("module %s overrides %s\n") % (name, t))
3324 3325 table.update(cmdtable)
3325 3326
3326 3327 try:
3327 3328 cmd, func, args, options, cmdoptions = parse(u, args)
3328 3329 if options["time"]:
3329 3330 def get_times():
3330 3331 t = os.times()
3331 3332 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3332 3333 t = (t[0], t[1], t[2], t[3], time.clock())
3333 3334 return t
3334 3335 s = get_times()
3335 3336 def print_time():
3336 3337 t = get_times()
3337 3338 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3338 3339 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3339 3340 atexit.register(print_time)
3340 3341
3341 3342 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3342 3343 not options["noninteractive"], options["traceback"],
3343 3344 options["config"])
3344 3345
3345 3346 # enter the debugger before command execution
3346 3347 if options['debugger']:
3347 3348 pdb.set_trace()
3348 3349
3349 3350 try:
3350 3351 if options['cwd']:
3351 3352 try:
3352 3353 os.chdir(options['cwd'])
3353 3354 except OSError, inst:
3354 3355 raise util.Abort('%s: %s' %
3355 3356 (options['cwd'], inst.strerror))
3356 3357
3357 3358 path = u.expandpath(options["repository"]) or ""
3358 3359 repo = path and hg.repository(u, path=path) or None
3359 3360
3360 3361 if options['help']:
3361 3362 return help_(u, cmd, options['version'])
3362 3363 elif options['version']:
3363 3364 return show_version(u)
3364 3365 elif not cmd:
3365 3366 return help_(u, 'shortlist')
3366 3367
3367 3368 if cmd not in norepo.split():
3368 3369 try:
3369 3370 if not repo:
3370 3371 repo = hg.repository(u, path=path)
3371 3372 u = repo.ui
3372 3373 for name in external.itervalues():
3373 3374 mod = sys.modules[name]
3374 3375 if hasattr(mod, 'reposetup'):
3375 3376 mod.reposetup(u, repo)
3376 3377 except hg.RepoError:
3377 3378 if cmd not in optionalrepo.split():
3378 3379 raise
3379 3380 d = lambda: func(u, repo, *args, **cmdoptions)
3380 3381 else:
3381 3382 d = lambda: func(u, *args, **cmdoptions)
3382 3383
3383 3384 try:
3384 3385 if options['profile']:
3385 3386 import hotshot, hotshot.stats
3386 3387 prof = hotshot.Profile("hg.prof")
3387 3388 try:
3388 3389 try:
3389 3390 return prof.runcall(d)
3390 3391 except:
3391 3392 try:
3392 3393 u.warn(_('exception raised - generating '
3393 3394 'profile anyway\n'))
3394 3395 except:
3395 3396 pass
3396 3397 raise
3397 3398 finally:
3398 3399 prof.close()
3399 3400 stats = hotshot.stats.load("hg.prof")
3400 3401 stats.strip_dirs()
3401 3402 stats.sort_stats('time', 'calls')
3402 3403 stats.print_stats(40)
3403 3404 elif options['lsprof']:
3404 3405 try:
3405 3406 from mercurial import lsprof
3406 3407 except ImportError:
3407 3408 raise util.Abort(_(
3408 3409 'lsprof not available - install from '
3409 3410 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3410 3411 p = lsprof.Profiler()
3411 3412 p.enable(subcalls=True)
3412 3413 try:
3413 3414 return d()
3414 3415 finally:
3415 3416 p.disable()
3416 3417 stats = lsprof.Stats(p.getstats())
3417 3418 stats.sort()
3418 3419 stats.pprint(top=10, file=sys.stderr, climit=5)
3419 3420 else:
3420 3421 return d()
3421 3422 finally:
3422 3423 u.flush()
3423 3424 except:
3424 3425 # enter the debugger when we hit an exception
3425 3426 if options['debugger']:
3426 3427 pdb.post_mortem(sys.exc_info()[2])
3427 3428 u.print_exc()
3428 3429 raise
3429 3430 except ParseError, inst:
3430 3431 if inst.args[0]:
3431 3432 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3432 3433 help_(u, inst.args[0])
3433 3434 else:
3434 3435 u.warn(_("hg: %s\n") % inst.args[1])
3435 3436 help_(u, 'shortlist')
3436 3437 except AmbiguousCommand, inst:
3437 3438 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3438 3439 (inst.args[0], " ".join(inst.args[1])))
3439 3440 except UnknownCommand, inst:
3440 3441 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3441 3442 help_(u, 'shortlist')
3442 3443 except hg.RepoError, inst:
3443 3444 u.warn(_("abort: %s!\n") % inst)
3444 3445 except lock.LockHeld, inst:
3445 3446 if inst.errno == errno.ETIMEDOUT:
3446 3447 reason = _('timed out waiting for lock held by %s') % inst.locker
3447 3448 else:
3448 3449 reason = _('lock held by %s') % inst.locker
3449 3450 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3450 3451 except lock.LockUnavailable, inst:
3451 3452 u.warn(_("abort: could not lock %s: %s\n") %
3452 3453 (inst.desc or inst.filename, inst.strerror))
3453 3454 except revlog.RevlogError, inst:
3454 3455 u.warn(_("abort: "), inst, "!\n")
3455 3456 except util.SignalInterrupt:
3456 3457 u.warn(_("killed!\n"))
3457 3458 except KeyboardInterrupt:
3458 3459 try:
3459 3460 u.warn(_("interrupted!\n"))
3460 3461 except IOError, inst:
3461 3462 if inst.errno == errno.EPIPE:
3462 3463 if u.debugflag:
3463 3464 u.warn(_("\nbroken pipe\n"))
3464 3465 else:
3465 3466 raise
3466 3467 except IOError, inst:
3467 3468 if hasattr(inst, "code"):
3468 3469 u.warn(_("abort: %s\n") % inst)
3469 3470 elif hasattr(inst, "reason"):
3470 3471 u.warn(_("abort: error: %s\n") % inst.reason[1])
3471 3472 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3472 3473 if u.debugflag:
3473 3474 u.warn(_("broken pipe\n"))
3474 3475 elif getattr(inst, "strerror", None):
3475 3476 if getattr(inst, "filename", None):
3476 3477 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3477 3478 else:
3478 3479 u.warn(_("abort: %s\n") % inst.strerror)
3479 3480 else:
3480 3481 raise
3481 3482 except OSError, inst:
3482 3483 if hasattr(inst, "filename"):
3483 3484 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3484 3485 else:
3485 3486 u.warn(_("abort: %s\n") % inst.strerror)
3486 3487 except util.Abort, inst:
3487 3488 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3488 3489 except TypeError, inst:
3489 3490 # was this an argument error?
3490 3491 tb = traceback.extract_tb(sys.exc_info()[2])
3491 3492 if len(tb) > 2: # no
3492 3493 raise
3493 3494 u.debug(inst, "\n")
3494 3495 u.warn(_("%s: invalid arguments\n") % cmd)
3495 3496 help_(u, cmd)
3496 3497 except SystemExit, inst:
3497 3498 # Commands shouldn't sys.exit directly, but give a return code.
3498 3499 # Just in case catch this and and pass exit code to caller.
3499 3500 return inst.code
3500 3501 except:
3501 3502 u.warn(_("** unknown exception encountered, details follow\n"))
3502 3503 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3503 3504 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 3505 % version.get_version())
3505 3506 raise
3506 3507
3507 3508 return -1
@@ -1,483 +1,483 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "struct os time bisect stat util re errno")
14 14
class dirstate(object):
    """Working directory tracking: maps each tracked file to a state
    ('n'ormal, 'a'dded, 'r'emoved, 'm'erge-needed) plus stat data, and
    remembers the working directory's parent changesets and copies."""

    # on-disk entry format: state char, mode, size, mtime (big-endian longs);
    # each entry is followed by the filename length and the filename itself
    format = ">cllll"

    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        # map and pl stay None until lazyread() parses the dirstate file
        self.map = None
        self.pl = None
        self.copies = {}
        self.ignorefunc = None
        self.blockignore = False

    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)

    def getcwd(self):
        # current directory relative to the repository root ('' at root)
        cwd = os.getcwd()
        if cwd == self.root: return ''
        return cwd[len(self.root) + 1:]

    def hgignore(self):
        '''return the contents of .hgignore files as a list of patterns.

        the files parsed for patterns include:
        .hgignore in the repository root
        any additional files specified in the [ui] section of ~/.hgrc

        trailing white space is dropped.
        the escape character is backslash.
        comments start with #.
        empty lines are skipped.

        lines can be of the following formats:

        syntax: regexp # defaults following lines to non-rooted regexps
        syntax: glob   # defaults following lines to non-rooted globs
        re:pattern     # non-rooted regular expression
        glob:pattern   # non-rooted glob
        pattern        # pattern of the current default type'''
        syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
        def parselines(fp):
            # yield lines stripped of comments (honouring backslash escapes)
            # and trailing whitespace, skipping empty lines
            for line in fp:
                escape = False
                for i in xrange(len(line)):
                    if escape: escape = False
                    elif line[i] == '\\': escape = True
                    elif line[i] == '#': break
                line = line[:i].rstrip()
                if line: yield line
        repoignore = self.wjoin('.hgignore')
        files = [repoignore]
        files.extend(self.ui.hgignorefiles())
        pats = {}
        for f in files:
            try:
                pats[f] = []
                fp = open(f)
                syntax = 'relre:'
                for line in parselines(fp):
                    if line.startswith('syntax:'):
                        s = line[7:].strip()
                        try:
                            syntax = syntaxes[s]
                        except KeyError:
                            self.ui.warn(_("%s: ignoring invalid "
                                           "syntax '%s'\n") % (f, s))
                        continue
                    # prepend the current default syntax unless the line
                    # carries an explicit syntax prefix of its own
                    pat = syntax + line
                    for s in syntaxes.values():
                        if line.startswith(s):
                            pat = line
                            break
                    pats[f].append(pat)
            except IOError, inst:
                # a missing repository .hgignore is normal; warn about
                # any other unreadable ignore file
                if f != repoignore:
                    self.ui.warn(_("skipping unreadable ignore file"
                                   " '%s': %s\n") % (f, inst.strerror))
        return pats

    def ignore(self, fn):
        '''default match function used by dirstate and
        localrepository. this honours the repository .hgignore file
        and any other files specified in the [ui] section of .hgrc.'''
        if self.blockignore:
            return False
        if not self.ignorefunc:
            # build the matcher lazily on first use
            ignore = self.hgignore()
            allpats = []
            [allpats.extend(patlist) for patlist in ignore.values()]
            if allpats:
                try:
                    files, self.ignorefunc, anypats = (
                        util.matcher(self.root, inc=allpats, src='.hgignore'))
                except util.Abort:
                    # Re-raise an exception where the src is the right file
                    for f, patlist in ignore.items():
                        files, self.ignorefunc, anypats = (
                            util.matcher(self.root, inc=patlist, src=f))
            else:
                self.ignorefunc = util.never
        return self.ignorefunc(fn)

    def __del__(self):
        # flush pending changes when the object goes away
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            # self.map is still None: load the dirstate, then retry
            self.lazyread()
            return self[key]

    def __contains__(self, key):
        self.lazyread()
        return key in self.map

    def parents(self):
        self.lazyread()
        return self.pl

    def markdirty(self):
        if not self.dirty:
            self.dirty = 1

    def setparents(self, p1, p2=nullid):
        self.lazyread()
        self.markdirty()
        self.pl = p1, p2

    def state(self, key):
        # state char for key, or '?' for untracked files
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def lazyread(self):
        if self.map is None:
            self.read()

    def parse(self, st):
        """Parse the raw dirstate file contents in st into map/pl/copies."""
        # first 40 bytes are the two parent nodeids
        self.pl = [st[:20], st[20: 40]]

        # deref fields so they will be local in loop
        map = self.map
        copies = self.copies
        format = self.format
        unpack = struct.unpack

        pos = 40
        e_size = struct.calcsize(format)

        while pos < len(st):
            newpos = pos + e_size
            e = unpack(format, st[pos:newpos])
            l = e[4]
            pos = newpos
            newpos = pos + l
            f = st[pos:newpos]
            # a NUL in the filename separates it from its copy source
            if '\0' in f:
                f, c = f.split('\0')
                copies[f] = c
            map[f] = e[:4]
            pos = newpos

    def read(self):
        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if st:
                self.parse(st)
        except IOError, err:
            # a missing dirstate file simply means an empty state
            if err.errno != errno.ENOENT: raise

    def copy(self, source, dest):
        # record that dest was copied from source
        self.lazyread()
        self.markdirty()
        self.copies[dest] = source

    def copied(self, file):
        # return the copy source of file, or None
        return self.copies.get(file, None)

    def update(self, files, state, **kw):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.lazyread()
        self.markdirty()
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                # record current stat data; callers may override size/mtime
                s = os.lstat(self.wjoin(f))
                st_size = kw.get('st_size', s.st_size)
                st_mtime = kw.get('st_mtime', s.st_mtime)
                self.map[f] = (state, s.st_mode, st_size, st_mtime)
            if self.copies.has_key(f):
                del self.copies[f]

    def forget(self, files):
        # drop files from tracking entirely
        if not files: return
        self.lazyread()
        self.markdirty()
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn(_("not in dirstate: %s!\n") % f)
                pass

    def clear(self):
        self.map = {}
        self.copies = {}
        self.markdirty()

    def rebuild(self, parent, files):
        """Reset the dirstate to parent, marking everything in files
        normal but unhashed (size -1 forces a later content compare)."""
        self.clear()
        # read the current umask without changing it
        umask = os.umask(0)
        os.umask(umask)
        for f in files:
            # NOTE(review): files appears to be a manifest-like object
            # providing execf() -- confirm against callers
            if files.execf(f):
                self.map[f] = ('n', ~umask, -1, 0)
            else:
                self.map[f] = ('n', ~umask & 0666, -1, 0)
        self.pl = (parent, nullid)
        self.markdirty()

    def write(self):
        if not self.dirty:
            return
        st = self.opener("dirstate", "w", atomic=True)
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                # copies are stored as "dest\0source" in the name field
                f = f + "\0" + c
            e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def filterfiles(self, files):
        # return the subset of the dirstate map under the given files;
        # a name may be a tracked file or a directory prefix
        ret = {}
        unknown = []

        for x in files:
            if x == '.':
                return self.map.copy()
            if x not in self.map:
                unknown.append(x)
            else:
                ret[x] = self.map[x]

        if not unknown:
            return ret

        # use bisect on the sorted key list to find entries under each
        # unknown name treated as a directory prefix
        b = self.map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            bs = bisect.bisect(b, "%s%s" % (x, '/'))
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x):
                    ret[s] = self.map[s]
                else:
                    break
                bs += 1
        return ret

    def supported_type(self, f, st, verbose=False):
        # only regular files can be tracked; optionally explain why
        # something else is being skipped
        if stat.S_ISREG(st.st_mode):
            return True
        if verbose:
            kind = 'unknown'
            if stat.S_ISCHR(st.st_mode): kind = _('character device')
            elif stat.S_ISBLK(st.st_mode): kind = _('block device')
            elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
            elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
            elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
            elif stat.S_ISDIR(st.st_mode): kind = _('directory')
            self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
                util.pathto(self.getcwd(), f),
                kind))
        return False

    def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
                 badmatch=None):
        # walk files applying the .hgignore rules on top of match,
        # unless ignored=True
        self.lazyread()

        # walk all files by default
        if not files:
            files = [self.root]
            if not dc:
                dc = self.map.copy()
        elif not dc:
            dc = self.filterfiles(files)

        def statmatch(file_, stat):
            file_ = util.pconvert(file_)
            # ignored files are skipped unless they are already tracked
            if not ignored and file_ not in dc and self.ignore(file_):
                return False
            return match(file_)

        return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
                               badmatch=badmatch)

    def walk(self, files=None, match=util.always, dc=None, badmatch=None):
        # filter out the stat
        for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
            yield src, f

    # walk recursively through the directory tree, finding all files
    # matched by the statmatch function
    #
    # results are yielded in a tuple (src, filename, st), where src
    # is one of:
    # 'f' the file was found in the directory tree
    # 'm' the file was only in the dirstate and not in the tree
    # and st is the stat result if the file was found in the directory.
    #
    # dc is an optional arg for the current dirstate.  dc is not modified
    # directly by this function, but might be modified by your statmatch call.
    #
    def walkhelper(self, files, statmatch, dc, badmatch=None):
        # recursion free walker, faster than os.walk.
        def findfiles(s):
            work = [s]
            while work:
                top = work.pop()
                names = os.listdir(top)
                names.sort()
                # nd is the top of the repository dir tree
                nd = util.normpath(top[len(self.root) + 1:])
                if nd == '.':
                    nd = ''
                else:
                    # do not recurse into a repo contained in this
                    # one. use bisect to find .hg directory so speed
                    # is good on big directory.
                    hg = bisect.bisect_left(names, '.hg')
                    if hg < len(names) and names[hg] == '.hg':
                        if os.path.isdir(os.path.join(top, '.hg')):
                            continue
                for f in names:
                    np = util.pconvert(os.path.join(nd, f))
                    if seen(np):
                        continue
                    p = os.path.join(top, f)
                    # don't trip over symlinks
                    st = os.lstat(p)
                    if stat.S_ISDIR(st.st_mode):
                        ds = os.path.join(nd, f +'/')
                        if statmatch(ds, st):
                            work.append(p)
                        if statmatch(np, st) and np in dc:
                            yield 'm', np, st
                    elif statmatch(np, st):
                        if self.supported_type(np, st):
                            yield 'f', np, st
                        elif np in dc:
                            yield 'm', np, st

        known = {'.hg': 1}
        def seen(fn):
            if fn in known: return True
            known[fn] = 1

        # step one, find all files that match our criteria
        files.sort()
        for ff in util.unique(files):
            f = self.wjoin(ff)
            try:
                st = os.lstat(f)
            except OSError, inst:
                # file is gone: still report it if the dirstate knows
                # anything at or below that name
                nf = util.normpath(ff)
                found = False
                for fn in dc:
                    if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
                        found = True
                        break
                if not found:
                    if inst.errno != errno.ENOENT or not badmatch:
                        self.ui.warn('%s: %s\n' % (
                            util.pathto(self.getcwd(), ff),
                            inst.strerror))
                    elif badmatch and badmatch(ff) and statmatch(ff, None):
                        yield 'b', ff, None
                continue
            if stat.S_ISDIR(st.st_mode):
                cmp1 = (lambda x, y: cmp(x[1], y[1]))
                sorted_ = [ x for x in findfiles(f) ]
                sorted_.sort(cmp1)
                for e in sorted_:
                    yield e
            else:
                ff = util.normpath(ff)
                if seen(ff):
                    continue
                # explicitly named files bypass the ignore rules
                self.blockignore = True
                if statmatch(ff, st):
                    if self.supported_type(ff, st, verbose=True):
                        yield 'f', ff, st
                    elif ff in dc:
                        yield 'm', ff, st
                self.blockignore = False

        # step two run through anything left in the dc hash and yield
        # if we haven't already seen it
        ks = dc.keys()
        ks.sort()
        for k in ks:
            if not seen(k) and (statmatch(k, None)):
                yield 'm', k, None

    def changes(self, files=None, match=util.always, show_ignored=None):
        """Compare the working directory against the dirstate and return
        (lookup, modified, added, removed, deleted, unknown, ignored)."""
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted = [], []

        for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
            try:
                type_, mode, size, time = self[fn]
            except KeyError:
                # not tracked: either ignored or unknown
                if show_ignored and self.ignore(fn):
                    ignored.append(fn)
                else:
                    unknown.append(fn)
                continue
            if src == 'm':
                nonexistent = True
                if not st:
                    try:
                        st = os.lstat(self.wjoin(fn))
                    except OSError, inst:
                        if inst.errno != errno.ENOENT:
                            raise
                        st = None
                    # We need to re-check that it is a valid file
                    if st and self.supported_type(fn, st):
                        nonexistent = False
                # XXX: what to do with file no longer present in the fs
                # who are not removed in the dirstate ?
                if nonexistent and type_ in "nm":
                    deleted.append(fn)
                    continue
            # check the common case first
            if type_ == 'n':
                if not st:
                    st = os.lstat(self.wjoin(fn))
                if size >= 0 and (size != st.st_size
                                  or (mode ^ st.st_mode) & 0100):
                    modified.append(fn)
                elif time != st.st_mtime:
                    # same size/mode but different mtime: needs a
                    # content comparison by the caller
                    lookup.append(fn)
            elif type_ == 'm':
                modified.append(fn)
            elif type_ == 'a':
                added.append(fn)
            elif type_ == 'r':
                removed.append(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored)
@@ -1,2212 +1,2211 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "appendfile changegroup")
12 12 demandload(globals(), "changelog dirstate filelog manifest repo context")
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 14 demandload(globals(), "os revlog util")
15 15
16 16 class localrepository(object):
17 17 capabilities = ()
18 18
    def __del__(self):
        # drop the reference to any pending transaction on teardown
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or with create=1, initialize) the repository at 'path'.

        With no path, walk upward from the current directory until a
        '.hg' directory is found.  Raises repo.RepoError if no
        repository exists and create is false.
        """
        if not path:
            # search upward for a .hg directory
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # reached the filesystem root without finding one
                    raise repo.RepoError(_("no repo found"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError(_("repository %s not found") % path)

        self.root = os.path.abspath(path)
        self.origroot = path
        self.ui = ui.ui(parentui=parentui)
        self.opener = util.opener(self.path)    # opens files under .hg
        self.wopener = util.opener(self.root)   # opens working-dir files

        # a missing per-repo hgrc is fine; other IOErrors are ignored too
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            pass

        # revlog format version and flags from the [revlog] config section
        v = self.ui.revlogopts
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.opener, v)
        self.changelog = changelog.changelog(self.opener, v)

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches, filled on first use
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        if create:
            if not os.path.exists(path):
                os.mkdir(path)
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
83 83
    def hook(self, name, throw=False, **args):
        """Run every configured hook whose key matches 'name'.

        Hooks whose command starts with "python:" are called in-process;
        anything else runs as a shell command with the keyword args
        exported as HG_* environment variables.  Returns the OR of the
        individual hook results; with throw=True a failing hook raises
        util.Abort instead of just warning.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                try:
                    # extensions are loaded with hgext_ prefix
                    obj = __import__("hgext_%s" % modname)
                except ImportError:
                    raise util.Abort(_('%s hook is invalid '
                                       '(import of "%s" failed)') %
                                     (hname, modname))
            try:
                # descend the remaining dotted path to the callable
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                # never swallow a user interrupt
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0] % exc.args[1:]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # shell hook: keyword args travel as HG_* environment vars
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # "name" and "name.suffix" keys both match; run in sorted order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
164 164
    tag_disallowed = ':\r\n'    # characters rejected by tag() below
166 166
    def tag(self, name, node, local=False, message=None, user=None, date=None):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # reject names containing ':', CR or LF (see tag_disallowed)
        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=node, tag=name, local=local)

        if local:
            # local tags live outside history, in .hg/localtags
            self.opener('localtags', 'a').write('%s %s\n' % (node, name))
            self.hook('tag', node=node, tag=name, local=local)
            return

        # refuse to clobber uncommitted edits to .hgtags
        for x in self.changes():
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
        if self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        if not message:
            message = _('Added tag %s for changeset %s') % (name, node)

        self.commit(['.hgtags'], message, user, date)
        self.hook('tag', node=node, tag=name, local=local)
210 210
211 211 def tags(self):
212 212 '''return a mapping of tag to node'''
213 213 if not self.tagscache:
214 214 self.tagscache = {}
215 215
216 216 def parsetag(line, context):
217 217 if not line:
218 218 return
219 219 s = l.split(" ", 1)
220 220 if len(s) != 2:
221 221 self.ui.warn(_("%s: cannot parse entry\n") % context)
222 222 return
223 223 node, key = s
224 224 key = key.strip()
225 225 try:
226 226 bin_n = bin(node)
227 227 except TypeError:
228 228 self.ui.warn(_("%s: node '%s' is not well formed\n") %
229 229 (context, node))
230 230 return
231 231 if bin_n not in self.changelog.nodemap:
232 232 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
233 233 (context, key))
234 234 return
235 235 self.tagscache[key] = bin_n
236 236
237 237 # read the tags file from each head, ending with the tip,
238 238 # and add each tag found to the map, with "newer" ones
239 239 # taking precedence
240 240 heads = self.heads()
241 241 heads.reverse()
242 242 fl = self.file(".hgtags")
243 243 for node in heads:
244 244 change = self.changelog.read(node)
245 245 rev = self.changelog.rev(node)
246 246 fn, ff = self.manifest.find(change[0], '.hgtags')
247 247 if fn is None: continue
248 248 count = 0
249 249 for l in fl.read(fn).splitlines():
250 250 count += 1
251 251 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
252 252 (rev, short(node), count))
253 253 try:
254 254 f = self.opener("localtags")
255 255 count = 0
256 256 for l in f:
257 257 count += 1
258 258 parsetag(l, _("localtags, line %d") % count)
259 259 except IOError:
260 260 pass
261 261
262 262 self.tagscache['tip'] = self.changelog.tip()
263 263
264 264 return self.tagscache
265 265
266 266 def tagslist(self):
267 267 '''return a list of tags ordered by revision'''
268 268 l = []
269 269 for t, n in self.tags().items():
270 270 try:
271 271 r = self.changelog.rev(n)
272 272 except:
273 273 r = -2 # sort to the beginning of the list if unknown
274 274 l.append((r, t, n))
275 275 l.sort()
276 276 return [(t, n) for r, t, n in l]
277 277
278 278 def nodetags(self, node):
279 279 '''return the tags associated with a node'''
280 280 if not self.nodetagscache:
281 281 self.nodetagscache = {}
282 282 for t, n in self.tags().items():
283 283 self.nodetagscache.setdefault(n, []).append(t)
284 284 return self.nodetagscache.get(node, [])
285 285
286 286 def lookup(self, key):
287 287 try:
288 288 return self.tags()[key]
289 289 except KeyError:
290 290 try:
291 291 return self.changelog.lookup(key)
292 292 except:
293 293 raise repo.RepoError(_("unknown revision '%s'") % key)
294 294
    def dev(self):
        # device number of the .hg directory (used to compare repo identity)
        return os.lstat(self.path).st_dev
297 297
    def local(self):
        # this is a local (direct filesystem) repository
        return True
300 300
    def join(self, f):
        # path of f inside the .hg metadata directory
        return os.path.join(self.path, f)
303 303
    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)
306 306
    def file(self, f):
        """Return the filelog for path f (one leading '/' is stripped)."""
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.opener, f, self.revlogversion)
311 311
    def changectx(self, changeid):
        """Return a change context for the given changeset id."""
        return context.changectx(self, changeid)
314 314
    def filectx(self, path, changeid=None, fileid=None):
        """Return a file context for path.

        changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
319 319
    def getcwd(self):
        # delegate to the dirstate's notion of the current directory
        return self.dirstate.getcwd()
322 322
    def wfile(self, f, mode='r'):
        # open a working-directory file with the given mode
        return self.wopener(f, mode)
325 325
326 326 def wread(self, filename):
327 327 if self.encodepats == None:
328 328 l = []
329 329 for pat, cmd in self.ui.configitems("encode"):
330 330 mf = util.matcher(self.root, "", [pat], [], [])[1]
331 331 l.append((mf, cmd))
332 332 self.encodepats = l
333 333
334 334 data = self.wopener(filename, 'r').read()
335 335
336 336 for mf, cmd in self.encodepats:
337 337 if mf(filename):
338 338 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
339 339 data = util.filter(data, cmd)
340 340 break
341 341
342 342 return data
343 343
344 344 def wwrite(self, filename, data, fd=None):
345 345 if self.decodepats == None:
346 346 l = []
347 347 for pat, cmd in self.ui.configitems("decode"):
348 348 mf = util.matcher(self.root, "", [pat], [], [])[1]
349 349 l.append((mf, cmd))
350 350 self.decodepats = l
351 351
352 352 for mf, cmd in self.decodepats:
353 353 if mf(filename):
354 354 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
355 355 data = util.filter(data, cmd)
356 356 break
357 357
358 358 if fd:
359 359 return fd.write(data)
360 360 return self.wopener(filename, 'w').write(data)
361 361
    def transaction(self):
        """Return a transaction; nests inside one already running."""
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repo): journal an empty one
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        tr = transaction.transaction(self.ui.warn, self.opener,
                                     self.join("journal"),
                                     aftertrans(self.path))
        self.transhandle = tr
        return tr
379 379
    def recover(self):
        """Roll back an interrupted transaction, if its journal exists.

        Returns True when a rollback was performed, False otherwise."""
        l = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.opener, self.join("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
390 390
    def rollback(self, wlock=None):
        """Undo the last committed transaction and restore the saved
        dirstate that went with it."""
        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.opener, self.join("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            # refresh both store and working-dir state from disk
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))
403 403
    def wreload(self):
        # re-read the dirstate from disk
        self.dirstate.read()
406 406
    def reload(self):
        # re-read store data and invalidate the tag caches
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None
412 412
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the named lock file; with wait, retry with a timeout
        instead of failing when the lock is held elsewhere."""
        try:
            # first attempt: fail immediately if held (timeout 0)
            l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %s\n") %
                         (desc, inst.args[0]))
            # default to 600 seconds timeout
            l = lock.lock(self.join(lockname),
                          int(self.ui.config("ui", "timeout") or 600),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
429 429
    def lock(self, wait=1):
        """Acquire the repository (store) lock, reloading on acquire."""
        return self.do_lock("lock", wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)
433 433
    def wlock(self, wait=1):
        """Acquire the working-directory lock; writes the dirstate on
        release and re-reads it on acquire."""
        return self.do_lock("wlock", wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
438 438
    def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
        "determine whether a new filenode is needed"
        # filenodes in each parent manifest (nullid when absent)
        fp1 = manifest1.get(filename, nullid)
        fp2 = manifest2.get(filename, nullid)

        if fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = filelog.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and text == filelog.read(fp1):
            return (fp1, None, None)

        # a new filenode is needed; return the parents it should record
        return (None, fp1, fp2)
457 457
458 458 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
459 459 orig_parent = self.dirstate.parents()[0] or nullid
460 460 p1 = p1 or self.dirstate.parents()[0] or nullid
461 461 p2 = p2 or self.dirstate.parents()[1] or nullid
462 462 c1 = self.changelog.read(p1)
463 463 c2 = self.changelog.read(p2)
464 464 m1 = self.manifest.read(c1[0])
465 465 mf1 = self.manifest.readflags(c1[0])
466 466 m2 = self.manifest.read(c2[0])
467 467 changed = []
468 468
469 469 if orig_parent == p1:
470 470 update_dirstate = 1
471 471 else:
472 472 update_dirstate = 0
473 473
474 474 if not wlock:
475 475 wlock = self.wlock()
476 476 l = self.lock()
477 477 tr = self.transaction()
478 478 mm = m1.copy()
479 479 mfm = mf1.copy()
480 480 linkrev = self.changelog.count()
481 481 for f in files:
482 482 try:
483 483 t = self.wread(f)
484 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
484 mfm.set(f, util.is_exec(self.wjoin(f), mfm.execf(f)))
485 485 r = self.file(f)
486 mfm[f] = tm
487 486
488 487 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
489 488 if entry:
490 489 mm[f] = entry
491 490 continue
492 491
493 492 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
494 493 changed.append(f)
495 494 if update_dirstate:
496 495 self.dirstate.update([f], "n")
497 496 except IOError:
498 497 try:
499 498 del mm[f]
500 499 del mfm[f]
501 500 if update_dirstate:
502 501 self.dirstate.forget([f])
503 502 except:
504 503 # deleted from p2?
505 504 pass
506 505
507 506 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
508 507 user = user or self.ui.username()
509 508 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
510 509 tr.close()
511 510 if update_dirstate:
512 511 self.dirstate.setparents(n, nullid)
513 512
514 513 def commit(self, files=None, text="", user=None, date=None,
515 514 match=util.always, force=False, lock=None, wlock=None,
516 515 force_editor=False):
517 516 commit = []
518 517 remove = []
519 518 changed = []
520 519
521 520 if files:
522 521 for f in files:
523 522 s = self.dirstate.state(f)
524 523 if s in 'nmai':
525 524 commit.append(f)
526 525 elif s == 'r':
527 526 remove.append(f)
528 527 else:
529 528 self.ui.warn(_("%s not tracked!\n") % f)
530 529 else:
531 530 modified, added, removed, deleted, unknown = self.changes(match=match)
532 531 commit = modified + added
533 532 remove = removed
534 533
535 534 p1, p2 = self.dirstate.parents()
536 535 c1 = self.changelog.read(p1)
537 536 c2 = self.changelog.read(p2)
538 537 m1 = self.manifest.read(c1[0])
539 538 mf1 = self.manifest.readflags(c1[0])
540 539 m2 = self.manifest.read(c2[0])
541 540
542 541 if not commit and not remove and not force and p2 == nullid:
543 542 self.ui.status(_("nothing changed\n"))
544 543 return None
545 544
546 545 xp1 = hex(p1)
547 546 if p2 == nullid: xp2 = ''
548 547 else: xp2 = hex(p2)
549 548
550 549 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
551 550
552 551 if not wlock:
553 552 wlock = self.wlock()
554 553 if not lock:
555 554 lock = self.lock()
556 555 tr = self.transaction()
557 556
558 557 # check in files
559 558 new = {}
560 559 linkrev = self.changelog.count()
561 560 commit.sort()
562 561 for f in commit:
563 562 self.ui.note(f + "\n")
564 563 try:
565 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
564 mf1.set(f, util.is_exec(self.wjoin(f), mf1.execf(f)))
566 565 t = self.wread(f)
567 566 except IOError:
568 567 self.ui.warn(_("trouble committing %s!\n") % f)
569 568 raise
570 569
571 570 r = self.file(f)
572 571
573 572 meta = {}
574 573 cp = self.dirstate.copied(f)
575 574 if cp:
576 575 meta["copy"] = cp
577 576 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
578 577 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
579 578 fp1, fp2 = nullid, nullid
580 579 else:
581 580 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
582 581 if entry:
583 582 new[f] = entry
584 583 continue
585 584
586 585 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
587 586 # remember what we've added so that we can later calculate
588 587 # the files to pull from a set of changesets
589 588 changed.append(f)
590 589
591 590 # update manifest
592 591 m1 = m1.copy()
593 592 m1.update(new)
594 593 for f in remove:
595 594 if f in m1:
596 595 del m1[f]
597 596 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
598 597 (new, remove))
599 598
600 599 # add changeset
601 600 new = new.keys()
602 601 new.sort()
603 602
604 603 user = user or self.ui.username()
605 604 if not text or force_editor:
606 605 edittext = []
607 606 if text:
608 607 edittext.append(text)
609 608 edittext.append("")
610 609 if p2 != nullid:
611 610 edittext.append("HG: branch merge")
612 611 edittext.extend(["HG: changed %s" % f for f in changed])
613 612 edittext.extend(["HG: removed %s" % f for f in remove])
614 613 if not changed and not remove:
615 614 edittext.append("HG: no files changed")
616 615 edittext.append("")
617 616 # run editor in the repository root
618 617 olddir = os.getcwd()
619 618 os.chdir(self.root)
620 619 text = self.ui.edit("\n".join(edittext), user)
621 620 os.chdir(olddir)
622 621
623 622 lines = [line.rstrip() for line in text.rstrip().splitlines()]
624 623 while lines and not lines[0]:
625 624 del lines[0]
626 625 if not lines:
627 626 return None
628 627 text = '\n'.join(lines)
629 628 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
630 629 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
631 630 parent2=xp2)
632 631 tr.close()
633 632
634 633 self.dirstate.setparents(n)
635 634 self.dirstate.update(new, "n")
636 635 self.dirstate.forget(remove)
637 636
638 637 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
639 638 return n
640 639
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        """Yield (source, filename) pairs for files matching 'match'.

        With a node, walk that revision's manifest (source 'm'); files
        requested but absent either trigger a warning or, with badmatch,
        are yielded as 'b'.  Without a node, defer to the dirstate walk.
        Note: files=[] is a shared mutable default; it is only read here.
        """
        if node:
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                # remove manifest entries from the requested set
                fdict.pop(fn, None)
                if match(fn):
                    yield 'm', fn
            # whatever is left was requested but not in the manifest
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
658 657
    def changes(self, node1=None, node2=None, files=[], match=util.always,
                wlock=None, show_ignored=None):
        """return changes between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns (modified, added, removed, deleted, unknown) lists, with
        an extra 'ignored' list appended when show_ignored is not None.
        """

        def fcmp(fn, mf):
            # compare working-dir contents against the manifest revision
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            # manifest of 'node' restricted to files accepted by 'match'
            change = self.changelog.read(node)
            mf = dict(self.manifest.read(change[0]))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    # could not lock: proceed read-only (no dirstate fixups)
                    wlock = None
            lookup, modified, added, removed, deleted, unknown, ignored = (
                self.dirstate.changes(files, match, show_ignored))

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        elif wlock is not None:
                            # file is clean: refresh its dirstate entry
                            self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown, ignored = [], [], []
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added = [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    # "" marks a working-dir pseudo-entry: force a content
                    # compare instead of trusting the filenode
                    if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                        modified.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left in mf1 is missing from mf2
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored:
            l.sort()
        if show_ignored is None:
            return (modified, added, removed, deleted, unknown)
        else:
            return (modified, added, removed, deleted, unknown, ignored)
750 749
751 750 def add(self, list, wlock=None):
752 751 if not wlock:
753 752 wlock = self.wlock()
754 753 for f in list:
755 754 p = self.wjoin(f)
756 755 if not os.path.exists(p):
757 756 self.ui.warn(_("%s does not exist!\n") % f)
758 757 elif not os.path.isfile(p):
759 758 self.ui.warn(_("%s not added: only files supported currently\n")
760 759 % f)
761 760 elif self.dirstate.state(f) in 'an':
762 761 self.ui.warn(_("%s already tracked!\n") % f)
763 762 else:
764 763 self.dirstate.update([f], "a")
765 764
766 765 def forget(self, list, wlock=None):
767 766 if not wlock:
768 767 wlock = self.wlock()
769 768 for f in list:
770 769 if self.dirstate.state(f) not in 'ai':
771 770 self.ui.warn(_("%s not added!\n") % f)
772 771 else:
773 772 self.dirstate.forget([f])
774 773
    def remove(self, list, unlink=False, wlock=None):
        """Mark files removed in the dirstate; with unlink=True, delete
        them from the working directory first."""
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    # already gone is fine
                    if inst.errno != errno.ENOENT:
                        raise
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                # refuse to mark a still-present file as removed
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                # never committed: just drop the pending add
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                self.dirstate.update([f], "r")
795 794
796 795 def undelete(self, list, wlock=None):
797 796 p = self.dirstate.parents()[0]
798 797 mn = self.changelog.read(p)[0]
799 798 mf = self.manifest.readflags(mn)
800 799 m = self.manifest.read(mn)
801 800 if not wlock:
802 801 wlock = self.wlock()
803 802 for f in list:
804 803 if self.dirstate.state(f) not in "r":
805 804 self.ui.warn("%s not removed!\n" % f)
806 805 else:
807 806 t = self.file(f).read(m[f])
808 807 self.wwrite(f, t)
809 util.set_exec(self.wjoin(f), mf[f])
808 util.set_exec(self.wjoin(f), mf.execf(f))
810 809 self.dirstate.update([f], "n")
811 810
812 811 def copy(self, source, dest, wlock=None):
813 812 p = self.wjoin(dest)
814 813 if not os.path.exists(p):
815 814 self.ui.warn(_("%s does not exist!\n") % dest)
816 815 elif not os.path.isfile(p):
817 816 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
818 817 else:
819 818 if not wlock:
820 819 wlock = self.wlock()
821 820 if self.dirstate.state(dest) == '?':
822 821 self.dirstate.update([dest], "a")
823 822 self.dirstate.copy(source, dest)
824 823
825 824 def heads(self, start=None):
826 825 heads = self.changelog.heads(start)
827 826 # sort the output in rev descending order
828 827 heads = [(-self.changelog.rev(h), h) for h in heads]
829 828 heads.sort()
830 829 return [n for (r, n) in heads]
831 830
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph: a->b->c->d->e
    # \ /
    # aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Map each head to the list of branch tags visible from it
        (see the block comment above for the elimination rules)."""
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}       # node -> {tag-node: 1} visibility map
        merges = []         # pending (second-parent, found-so-far) walks
        seenmerge = {}
        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        # every node found so far can see this tag node
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch: stop descending
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # queue the second parent for a later walk
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # memoized set of tag nodes reachable from 'node'
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
937 936
938 937 def branches(self, nodes):
939 938 if not nodes:
940 939 nodes = [self.changelog.tip()]
941 940 b = []
942 941 for n in nodes:
943 942 t = n
944 943 while 1:
945 944 p = self.changelog.parents(n)
946 945 if p[1] != nullid or p[0] == nullid:
947 946 b.append((t, n, p[0], p[1]))
948 947 break
949 948 n = p[0]
950 949 return b
951 950
952 951 def between(self, pairs):
953 952 r = []
954 953
955 954 for top, bottom in pairs:
956 955 n, l, i = top, [], 0
957 956 f = 1
958 957
959 958 while n != bottom:
960 959 p = self.changelog.parents(n)[0]
961 960 if i == f:
962 961 l.append(n)
963 962 f = f * 2
964 963 n = p
965 964 i += 1
966 965
967 966 r.append(l)
968 967
969 968 return r
970 969
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    # node -> rev map for the local changelog; membership tests below
    # ("x in m") mean "do we have this changeset locally?"
    m = self.changelog.nodemap
    search = []       # (head, base) branch ranges to binary-search later
    fetch = {}        # earliest unknown nodes found so far (the result set)
    seen = {}         # branch heads already examined
    seenbranch = {}   # whole branch tuples already examined
    # NOTE(review): '== None' predates the 'is None' idiom; behavior is
    # the same for the dicts passed here.
    if base == None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything the remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    # nodes we have already requested from the remote
    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # branch root is unknown but both its parents are
                        # known: the root is the earliest unknown changeset
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            # ask the remote about the still-unknown parents, batched
            # ten at a time to bound request size
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        # sample nodes at power-of-two distances within the range,
        # then narrow on the first one we already know
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # gap between p and i is 1: p is the earliest unknown
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # NOTE(review): f[:4] truncates the binary node before
            # short(); looks unintended (short(f) expected) — confirm
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1111 1110
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    # 'is None' is the correct identity test (PEP 8) and cannot be
    # fooled by a mapping type overriding __eq__
    if base is None:
        # caller gave us no common-node information: discover it
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start with every local node, then prune everything the remote
    # is known to have; what remains is missing on the remote side
    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1159 1158
def pull(self, remote, heads=None, force=False):
    """Pull missing changesets from remote into this repository.

    Returns 0 when nothing was pulled, otherwise the return value of
    addchangegroup (number of heads modified or added + 1).
    """
    # hold the repo lock for the whole operation; it is released when
    # the lock object is reclaimed
    l = self.lock()

    fetch = self.findincoming(remote, force=force)
    if not fetch:
        self.ui.status(_("no changes found\n"))
        return 0
    if fetch == [nullid]:
        # nothing in common: ask for the remote's full history
        self.ui.status(_("requesting all changes\n"))

    if heads is None:
        group = remote.changegroup(fetch, 'pull')
    else:
        group = remote.changegroupsubset(fetch, heads, 'pull')
    return self.addchangegroup(group, 'pull')
1176 1175
def push(self, remote, force=False, revs=None):
    """Push outgoing changesets to remote, picking a transport.

    Two transports exist: 'unbundle' for servers whose repo we cannot
    lock ourselves (new ssh servers, http servers), and addchangegroup
    for servers whose repo we can lock directly (local filesystem, old
    ssh servers). Use whichever capability the remote advertises.
    """
    can_unbundle = 'unbundle' in remote.capabilities
    if can_unbundle:
        return self.push_unbundle(remote, force, revs)
    return self.push_addchangegroup(remote, force, revs)
1189 1188
def prepush(self, remote, force, revs):
    """Compute the changegroup needed to push to remote.

    Returns (changegroup, remote_heads) when the push may proceed, or
    (None, 1) when it must not: unsynced remote changes, nothing to
    push, or the push would create new remote branches without force.
    """
    base = {}
    remote_heads = remote.heads()
    incoming = self.findincoming(remote, base, remote_heads, force=force)
    if incoming and not force:
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync?"
                         " use push -f to force)\n"))
        return None, 1

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is None:
        bases, heads = update, self.changelog.heads()
    else:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1

    if not force and not revs:
        # FIXME we don't properly detect creation of new heads
        # in the push -r case, assume the user knows what he's doing
        if len(remote_heads) < len(heads) and remote_heads != [nullid]:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1224 1223
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote repo and feeding it a changegroup.

    Returns the remote's addchangegroup result, or prepush's error
    code when there is nothing to push.
    """
    # hold the remote lock for the duration; released on reclaim
    lock = remote.lock()

    cg, status = self.prepush(remote, force, revs)
    if cg is None:
        return status
    return remote.addchangegroup(cg, 'push')
1233 1232
def push_unbundle(self, remote, force, revs):
    """Push via the unbundle protocol (remote repo we cannot lock).

    We compute the remote's heads locally, work out what to push, and
    send the heads along with the bundle; if the server's heads changed
    meanwhile (someone else won the commit/push race), the server
    aborts. Returns the remote's unbundle result, or prepush's error
    code when there is nothing to push.
    """
    cg, remote_heads = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push; remote_heads carries the error code
        return remote_heads
    if force:
        # a forced push tells the server to skip its heads check
        remote_heads = ['force']
    return remote.unbundle(cg, remote_heads, 'push')
1246 1245
def changegroupsubset(self, bases, heads, source):
    """This function generates a changegroup consisting of all the nodes
    that are descendents of any of the bases, and ancestors of any of
    the heads.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to."""

    self.hook('preoutgoing', throw=True, source=source)

    # Set up some initial variables
    # Make it easy to refer to self.changelog
    cl = self.changelog
    # msng is short for missing - compute the list of changesets in this
    # changegroup.
    msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
    # Some bases may turn out to be superfluous, and some heads may be
    # too.  nodesbetween will return the minimal set of bases and heads
    # necessary to re-create the changegroup.

    # Known heads are the list of heads that it is assumed the recipient
    # of this changegroup will know about.
    knownheads = {}
    # We assume that all parents of bases are known heads.
    for n in bases:
        for p in cl.parents(n):
            if p != nullid:
                knownheads[p] = 1
    knownheads = knownheads.keys()
    if knownheads:
        # Now that we know what heads are known, we can compute which
        # changesets are known.  The recipient must know about all
        # changesets required to reach the known heads from the null
        # changeset.
        has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
        junk = None
        # Transform the list into an ersatz set.
        has_cl_set = dict.fromkeys(has_cl_set)
    else:
        # If there were no known heads, the recipient cannot be assumed to
        # know about any changesets.
        has_cl_set = {}

    # Make it easy to refer to self.manifest
    mnfst = self.manifest
    # We don't know which manifests are missing yet
    msng_mnfst_set = {}
    # Nor do we know which filenodes are missing.
    msng_filenode_set = {}

    junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
    junk = None

    # A changeset always belongs to itself, so the changenode lookup
    # function for a changenode is identity.
    def identity(x):
        return x

    # A function generating function.  Sets up an environment for the
    # inner function.
    def cmp_by_rev_func(revlog):
        # Compare two nodes by their revision number in the environment's
        # revision history.  Since the revision number both represents the
        # most efficient order to read the nodes in, and represents a
        # topological sorting of the nodes, this function is often useful.
        def cmp_by_rev(a, b):
            return cmp(revlog.rev(a), revlog.rev(b))
        return cmp_by_rev

    # If we determine that a particular file or manifest node must be a
    # node that the recipient of the changegroup will already have, we can
    # also assume the recipient will have all the parents.  This function
    # prunes them from the set of missing nodes.
    def prune_parents(revlog, hasset, msngset):
        haslst = hasset.keys()
        haslst.sort(cmp_by_rev_func(revlog))
        for node in haslst:
            parentlst = [p for p in revlog.parents(node) if p != nullid]
            while parentlst:
                n = parentlst.pop()
                if n not in hasset:
                    hasset[n] = 1
                    p = [p for p in revlog.parents(n) if p != nullid]
                    parentlst.extend(p)
        for n in hasset:
            msngset.pop(n, None)

    # This is a function generating function used to set up an environment
    # for the inner function to execute in.
    def manifest_and_file_collector(changedfileset):
        # This is an information gathering function that gathers
        # information from each changeset node that goes out as part of
        # the changegroup.  The information gathered is a list of which
        # manifest nodes are potentially required (the recipient may
        # already have them) and total list of all files which were
        # changed in any changeset in the changegroup.
        #
        # We also remember the first changenode we saw any manifest
        # referenced by so we can later determine which changenode 'owns'
        # the manifest.
        def collect_manifests_and_files(clnode):
            c = cl.read(clnode)
            for f in c[3]:
                # This is to make sure we only have one instance of each
                # filename string for each filename.
                changedfileset.setdefault(f, f)
            msng_mnfst_set.setdefault(c[0], clnode)
        return collect_manifests_and_files

    # Figure out which manifest nodes (of the ones we think might be part
    # of the changegroup) the recipient must know about and remove them
    # from the changegroup.
    def prune_manifests():
        has_mnfst_set = {}
        for n in msng_mnfst_set:
            # If a 'missing' manifest thinks it belongs to a changenode
            # the recipient is assumed to have, obviously the recipient
            # must have that manifest.
            linknode = cl.node(mnfst.linkrev(n))
            if linknode in has_cl_set:
                has_mnfst_set[n] = 1
        prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

    # Use the information collected in collect_manifests_and_files to say
    # which changenode any manifestnode belongs to.
    def lookup_manifest_link(mnfstnode):
        return msng_mnfst_set[mnfstnode]

    # A function generating function that sets up the initial environment
    # for the inner function.
    def filenode_collector(changedfiles):
        next_rev = [0]
        # This gathers information from each manifestnode included in the
        # changegroup about which filenodes the manifest node references
        # so we can include those in the changegroup too.
        #
        # It also remembers which changenode each filenode belongs to.  It
        # does this by assuming that a filenode belongs to the changenode
        # the first manifest that references it belongs to.
        def collect_msng_filenodes(mnfstnode):
            r = mnfst.rev(mnfstnode)
            if r == next_rev[0]:
                # If the last rev we looked at was the one just previous,
                # we only need to see a diff.
                delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                # For each line in the delta
                for dline in delta.splitlines():
                    # get the filename and filenode for that line
                    f, fnode = dline.split('\0')
                    fnode = bin(fnode[:40])
                    f = changedfiles.get(f, None)
                    # And if the file is in the list of files we care
                    # about.
                    if f is not None:
                        # Get the changenode this manifest belongs to
                        clnode = msng_mnfst_set[mnfstnode]
                        # Create the set of filenodes for the file if
                        # there isn't one already.
                        ndset = msng_filenode_set.setdefault(f, {})
                        # And set the filenode's changelog node to the
                        # manifest's if it hasn't been set already.
                        ndset.setdefault(fnode, clnode)
            else:
                # Otherwise we need a full manifest.
                m = mnfst.read(mnfstnode)
                # For every file we care about.
                for f in changedfiles:
                    fnode = m.get(f, None)
                    # If it's in the manifest
                    if fnode is not None:
                        # See comments above.
                        clnode = msng_mnfst_set[mnfstnode]
                        ndset = msng_filenode_set.setdefault(f, {})
                        ndset.setdefault(fnode, clnode)
            # Remember the revision we hope to see next.
            next_rev[0] = r + 1
        return collect_msng_filenodes

    # We have a list of filenodes we think we need for a file, let's remove
    # all those we know the recipient must have.
    def prune_filenodes(f, filerevlog):
        msngset = msng_filenode_set[f]
        hasset = {}
        # If a 'missing' filenode thinks it belongs to a changenode we
        # assume the recipient must have, then the recipient must have
        # that filenode.
        for n in msngset:
            clnode = cl.node(filerevlog.linkrev(n))
            if clnode in has_cl_set:
                hasset[n] = 1
        prune_parents(filerevlog, hasset, msngset)

    # A function generator function that sets up a context for the
    # inner function.
    def lookup_filenode_link_func(fname):
        msngset = msng_filenode_set[fname]
        # Lookup the changenode the filenode belongs to.
        def lookup_filenode_link(fnode):
            return msngset[fnode]
        return lookup_filenode_link

    # Now that we have all these utility functions to help out and
    # logically divide up the task, generate the group.
    def gengroup():
        # The set of changed files starts empty.
        changedfiles = {}
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        group = cl.group(msng_cl_lst, identity,
                         manifest_and_file_collector(changedfiles))
        for chnk in group:
            yield chnk

        # The list of manifests has been collected by the generator
        # calling our functions back.
        prune_manifests()
        msng_mnfst_lst = msng_mnfst_set.keys()
        # Sort the manifestnodes by revision number.
        msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                            filenode_collector(changedfiles))
        for chnk in group:
            yield chnk

        # These are no longer needed, dereference and toss the memory for
        # them.
        msng_mnfst_lst = None
        msng_mnfst_set.clear()

        changedfiles = changedfiles.keys()
        changedfiles.sort()
        # Go through all our files in order sorted by name.
        for fname in changedfiles:
            filerevlog = self.file(fname)
            # Toss out the filenodes that the recipient isn't really
            # missing.
            if msng_filenode_set.has_key(fname):
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
            else:
                msng_filenode_lst = []
            # If any filenodes are left, generate the group for them,
            # otherwise don't bother.
            if len(msng_filenode_lst) > 0:
                yield changegroup.genchunk(fname)
                # Sort the filenodes by their revision #
                msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                # Create a group generator and only pass in a changenode
                # lookup function as we need to collect no information
                # from filenodes.
                group = filerevlog.group(msng_filenode_lst,
                                         lookup_filenode_link_func(fname))
                for chnk in group:
                    yield chnk
            if msng_filenode_set.has_key(fname):
                # Don't need this anymore, toss it to free memory.
                del msng_filenode_set[fname]
        # Signal that no more groups are left.
        yield changegroup.closechunk()

    if msng_cl_lst:
        self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

    return util.chunkbuffer(gengroup())
1517 1516
def changegroup(self, basenodes, source):
    """Generate a changegroup of all nodes that we have that a recipient
    doesn't.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them."""

    self.hook('preoutgoing', throw=True, source=source)

    cl = self.changelog
    # every changeset descending from the bases goes out
    nodes = cl.nodesbetween(basenodes, None)[0]
    # ersatz set of the outgoing changelog revision numbers
    revset = dict.fromkeys([cl.rev(n) for n in nodes])

    def identity(x):
        # a changeset 'belongs' to itself
        return x

    def gennodelst(revlog):
        # yield revlog's nodes whose linked changeset is outgoing,
        # in revision (storage) order
        for r in xrange(0, revlog.count()):
            n = revlog.node(r)
            if revlog.linkrev(n) in revset:
                yield n

    def changed_file_collector(changedfileset):
        # record every file touched by an outgoing changeset as the
        # changelog group is generated
        def collect_changed_files(clnode):
            c = cl.read(clnode)
            for fname in c[3]:
                changedfileset[fname] = 1
        return collect_changed_files

    def lookuprevlink_func(revlog):
        # map a revlog node to the changeset node that introduced it
        def lookuprevlink(n):
            return cl.node(revlog.linkrev(n))
        return lookuprevlink

    def gengroup():
        # construct a list of all changed files
        changedfiles = {}

        for chnk in cl.group(nodes, identity,
                             changed_file_collector(changedfiles)):
            yield chnk
        changedfiles = changedfiles.keys()
        changedfiles.sort()

        mnfst = self.manifest
        nodeiter = gennodelst(mnfst)
        for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
            yield chnk

        for fname in changedfiles:
            filerevlog = self.file(fname)
            nodeiter = gennodelst(filerevlog)
            nodeiter = list(nodeiter)
            if nodeiter:
                # file groups are preceded by a chunk naming the file
                yield changegroup.genchunk(fname)
                lookup = lookuprevlink_func(filerevlog)
                for chnk in filerevlog.group(nodeiter, lookup):
                    yield chnk

        # signal that no more groups are left
        yield changegroup.closechunk()

    if nodes:
        self.hook('outgoing', node=hex(nodes[0]), source=source)

    return util.chunkbuffer(gengroup())
1583 1582
def addchangegroup(self, source, srctype):
    """add changegroup to repo.
    returns number of heads modified or added + 1."""

    def csmap(x):
        # during addgroup the next changeset to be added gets revision
        # number cl.count()
        self.ui.debug(_("add changeset %s\n") % short(x))
        return cl.count()

    def revmap(x):
        # map a changelog node to its already-assigned revision number
        return cl.rev(x)

    if not source:
        return 0

    self.hook('prechangegroup', throw=True, source=srctype)

    changesets = files = revisions = 0

    tr = self.transaction()

    # write changelog data to temp files so concurrent readers will not see
    # inconsistent view
    cl = None
    try:
        cl = appendfile.appendchangelog(self.opener, self.changelog.version)

        oldheads = len(cl.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        cor = cl.count() - 1 # last rev before the pull
        chunkiter = changegroup.chunkiter(source)
        if cl.addgroup(chunkiter, csmap, tr, 1) is None:
            raise util.Abort(_("received changelog group is empty"))
        cnr = cl.count() - 1 # last rev after the pull
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        chunkiter = changegroup.chunkiter(source)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifest.addgroup(chunkiter, revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            # each file revlog group is preceded by a chunk carrying the
            # filename; an empty chunk marks the end of the stream
            f = changegroup.getchunk(source)
            if not f:
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            chunkiter = changegroup.chunkiter(source)
            if fl.addgroup(chunkiter, revmap, tr) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += fl.count() - o
            files += 1

        cl.writedata()
    finally:
        # discard the temp files whether or not writedata succeeded
        if cl:
            cl.cleanup()

    # make changelog see real files again
    self.changelog = changelog.changelog(self.opener, self.changelog.version)
    self.changelog.checkinlinesize(tr)

    newheads = len(self.changelog.heads())
    heads = ""
    if oldheads and newheads != oldheads:
        heads = _(" (%+d heads)") % (newheads - oldheads)

    self.ui.status(_("added %d changesets"
                     " with %d changes to %d files%s\n")
                   % (changesets, revisions, files, heads))

    if changesets > 0:
        self.hook('pretxnchangegroup', throw=True,
                  node=hex(self.changelog.node(cor+1)), source=srctype)

    tr.close()

    if changesets > 0:
        self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                  source=srctype)

        for i in range(cor + 1, cnr + 1):
            self.hook("incoming", node=hex(self.changelog.node(i)),
                      source=srctype)

    return newheads - oldheads + 1
1678 1677
1679 1678 def update(self, node, allow=False, force=False, choose=None,
1680 1679 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1681 1680 pl = self.dirstate.parents()
1682 1681 if not force and pl[1] != nullid:
1683 1682 raise util.Abort(_("outstanding uncommitted merges"))
1684 1683
1685 1684 err = False
1686 1685
1687 1686 p1, p2 = pl[0], node
1688 1687 pa = self.changelog.ancestor(p1, p2)
1689 1688 m1n = self.changelog.read(p1)[0]
1690 1689 m2n = self.changelog.read(p2)[0]
1691 1690 man = self.manifest.ancestor(m1n, m2n)
1692 1691 m1 = self.manifest.read(m1n)
1693 1692 mf1 = self.manifest.readflags(m1n)
1694 1693 m2 = self.manifest.read(m2n).copy()
1695 1694 mf2 = self.manifest.readflags(m2n)
1696 1695 ma = self.manifest.read(man)
1697 1696 mfa = self.manifest.readflags(man)
1698 1697
1699 1698 modified, added, removed, deleted, unknown = self.changes()
1700 1699
1701 1700 # is this a jump, or a merge? i.e. is there a linear path
1702 1701 # from p1 to p2?
1703 1702 linear_path = (pa == p1 or pa == p2)
1704 1703
1705 1704 if allow and linear_path:
1706 1705 raise util.Abort(_("there is nothing to merge, just use "
1707 1706 "'hg update' or look at 'hg heads'"))
1708 1707 if allow and not forcemerge:
1709 1708 if modified or added or removed:
1710 1709 raise util.Abort(_("outstanding uncommitted changes"))
1711 1710
1712 1711 if not forcemerge and not force:
1713 1712 for f in unknown:
1714 1713 if f in m2:
1715 1714 t1 = self.wread(f)
1716 1715 t2 = self.file(f).read(m2[f])
1717 1716 if cmp(t1, t2) != 0:
1718 1717 raise util.Abort(_("'%s' already exists in the working"
1719 1718 " dir and differs from remote") % f)
1720 1719
1721 1720 # resolve the manifest to determine which files
1722 1721 # we care about merging
1723 1722 self.ui.note(_("resolving manifests\n"))
1724 1723 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1725 1724 (force, allow, moddirstate, linear_path))
1726 1725 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1727 1726 (short(man), short(m1n), short(m2n)))
1728 1727
1729 1728 merge = {}
1730 1729 get = {}
1731 1730 remove = []
1732 1731
1733 1732 # construct a working dir manifest
1734 1733 mw = m1.copy()
1735 1734 mfw = mf1.copy()
1736 1735 umap = dict.fromkeys(unknown)
1737 1736
1738 1737 for f in added + modified + unknown:
1739 1738 mw[f] = ""
1740 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1739 mfw.set(f, util.is_exec(self.wjoin(f), mfw.execf(f)))
1741 1740
1742 1741 if moddirstate and not wlock:
1743 1742 wlock = self.wlock()
1744 1743
1745 1744 for f in deleted + removed:
1746 1745 if f in mw:
1747 1746 del mw[f]
1748 1747
1749 1748 # If we're jumping between revisions (as opposed to merging),
1750 1749 # and if neither the working directory nor the target rev has
1751 1750 # the file, then we need to remove it from the dirstate, to
1752 1751 # prevent the dirstate from listing the file when it is no
1753 1752 # longer in the manifest.
1754 1753 if moddirstate and linear_path and f not in m2:
1755 1754 self.dirstate.forget((f,))
1756 1755
1757 1756 # Compare manifests
1758 1757 for f, n in mw.iteritems():
1759 1758 if choose and not choose(f):
1760 1759 continue
1761 1760 if f in m2:
1762 1761 s = 0
1763 1762
1764 1763 # is the wfile new since m1, and match m2?
1765 1764 if f not in m1:
1766 1765 t1 = self.wread(f)
1767 1766 t2 = self.file(f).read(m2[f])
1768 1767 if cmp(t1, t2) == 0:
1769 1768 n = m2[f]
1770 1769 del t1, t2
1771 1770
1772 1771 # are files different?
1773 1772 if n != m2[f]:
1774 1773 a = ma.get(f, nullid)
1775 1774 # are both different from the ancestor?
1776 1775 if n != a and m2[f] != a:
1777 1776 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1778 1777 # merge executable bits
1779 1778 # "if we changed or they changed, change in merge"
1780 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1779 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
1781 1780 mode = ((a^b) | (a^c)) ^ a
1782 1781 merge[f] = (m1.get(f, nullid), m2[f], mode)
1783 1782 s = 1
1784 1783 # are we clobbering?
1785 1784 # is remote's version newer?
1786 1785 # or are we going back in time?
1787 1786 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1788 1787 self.ui.debug(_(" remote %s is newer, get\n") % f)
1789 1788 get[f] = m2[f]
1790 1789 s = 1
1791 1790 elif f in umap or f in added:
1792 1791 # this unknown file is the same as the checkout
1793 1792 # we need to reset the dirstate if the file was added
1794 1793 get[f] = m2[f]
1795 1794
1796 1795 if not s and mfw[f] != mf2[f]:
1797 1796 if force:
1798 1797 self.ui.debug(_(" updating permissions for %s\n") % f)
1799 util.set_exec(self.wjoin(f), mf2[f])
1798 util.set_exec(self.wjoin(f), mf2.execf(f))
1800 1799 else:
1801 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1800 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
1802 1801 mode = ((a^b) | (a^c)) ^ a
1803 1802 if mode != b:
1804 1803 self.ui.debug(_(" updating permissions for %s\n")
1805 1804 % f)
1806 1805 util.set_exec(self.wjoin(f), mode)
1807 1806 del m2[f]
1808 1807 elif f in ma:
1809 1808 if n != ma[f]:
1810 1809 r = _("d")
1811 1810 if not force and (linear_path or allow):
1812 1811 r = self.ui.prompt(
1813 1812 (_(" local changed %s which remote deleted\n") % f) +
1814 1813 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1815 1814 if r == _("d"):
1816 1815 remove.append(f)
1817 1816 else:
1818 1817 self.ui.debug(_("other deleted %s\n") % f)
1819 1818 remove.append(f) # other deleted it
1820 1819 else:
1821 1820 # file is created on branch or in working directory
1822 1821 if force and f not in umap:
1823 1822 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1824 1823 remove.append(f)
1825 1824 elif n == m1.get(f, nullid): # same as parent
1826 1825 if p2 == pa: # going backwards?
1827 1826 self.ui.debug(_("remote deleted %s\n") % f)
1828 1827 remove.append(f)
1829 1828 else:
1830 1829 self.ui.debug(_("local modified %s, keeping\n") % f)
1831 1830 else:
1832 1831 self.ui.debug(_("working dir created %s, keeping\n") % f)
1833 1832
1834 1833 for f, n in m2.iteritems():
1835 1834 if choose and not choose(f):
1836 1835 continue
1837 1836 if f[0] == "/":
1838 1837 continue
1839 1838 if f in ma and n != ma[f]:
1840 1839 r = _("k")
1841 1840 if not force and (linear_path or allow):
1842 1841 r = self.ui.prompt(
1843 1842 (_("remote changed %s which local deleted\n") % f) +
1844 1843 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1845 1844 if r == _("k"):
1846 1845 get[f] = n
1847 1846 elif f not in ma:
1848 1847 self.ui.debug(_("remote created %s\n") % f)
1849 1848 get[f] = n
1850 1849 else:
1851 1850 if force or p2 == pa: # going backwards?
1852 1851 self.ui.debug(_("local deleted %s, recreating\n") % f)
1853 1852 get[f] = n
1854 1853 else:
1855 1854 self.ui.debug(_("local deleted %s\n") % f)
1856 1855
1857 1856 del mw, m1, m2, ma
1858 1857
1859 1858 if force:
1860 1859 for f in merge:
1861 1860 get[f] = merge[f][1]
1862 1861 merge = {}
1863 1862
1864 1863 if linear_path or force:
1865 1864 # we don't need to do any magic, just jump to the new rev
1866 1865 branch_merge = False
1867 1866 p1, p2 = p2, nullid
1868 1867 else:
1869 1868 if not allow:
1870 1869 self.ui.status(_("this update spans a branch"
1871 1870 " affecting the following files:\n"))
1872 1871 fl = merge.keys() + get.keys()
1873 1872 fl.sort()
1874 1873 for f in fl:
1875 1874 cf = ""
1876 1875 if f in merge:
1877 1876 cf = _(" (resolve)")
1878 1877 self.ui.status(" %s%s\n" % (f, cf))
1879 1878 self.ui.warn(_("aborting update spanning branches!\n"))
1880 1879 self.ui.status(_("(use 'hg merge' to merge across branches"
1881 1880 " or 'hg update -C' to lose changes)\n"))
1882 1881 return 1
1883 1882 branch_merge = True
1884 1883
1885 1884 xp1 = hex(p1)
1886 1885 xp2 = hex(p2)
1887 1886 if p2 == nullid: xxp2 = ''
1888 1887 else: xxp2 = xp2
1889 1888
1890 1889 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1891 1890
1892 1891 # get the files we don't need to change
1893 1892 files = get.keys()
1894 1893 files.sort()
1895 1894 for f in files:
1896 1895 if f[0] == "/":
1897 1896 continue
1898 1897 self.ui.note(_("getting %s\n") % f)
1899 1898 t = self.file(f).read(get[f])
1900 1899 self.wwrite(f, t)
1901 util.set_exec(self.wjoin(f), mf2[f])
1900 util.set_exec(self.wjoin(f), mf2.execf(f))
1902 1901 if moddirstate:
1903 1902 if branch_merge:
1904 1903 self.dirstate.update([f], 'n', st_mtime=-1)
1905 1904 else:
1906 1905 self.dirstate.update([f], 'n')
1907 1906
1908 1907 # merge the tricky bits
1909 1908 failedmerge = []
1910 1909 files = merge.keys()
1911 1910 files.sort()
1912 1911 for f in files:
1913 1912 self.ui.status(_("merging %s\n") % f)
1914 1913 my, other, flag = merge[f]
1915 1914 ret = self.merge3(f, my, other, xp1, xp2)
1916 1915 if ret:
1917 1916 err = True
1918 1917 failedmerge.append(f)
1919 1918 util.set_exec(self.wjoin(f), flag)
1920 1919 if moddirstate:
1921 1920 if branch_merge:
1922 1921 # We've done a branch merge, mark this file as merged
1923 1922 # so that we properly record the merger later
1924 1923 self.dirstate.update([f], 'm')
1925 1924 else:
1926 1925 # We've update-merged a locally modified file, so
1927 1926 # we set the dirstate to emulate a normal checkout
1928 1927 # of that file some time in the past. Thus our
1929 1928 # merge will appear as a normal local file
1930 1929 # modification.
1931 1930 f_len = len(self.file(f).read(other))
1932 1931 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1933 1932
1934 1933 remove.sort()
1935 1934 for f in remove:
1936 1935 self.ui.note(_("removing %s\n") % f)
1937 1936 util.audit_path(f)
1938 1937 try:
1939 1938 util.unlink(self.wjoin(f))
1940 1939 except OSError, inst:
1941 1940 if inst.errno != errno.ENOENT:
1942 1941 self.ui.warn(_("update failed to remove %s: %s!\n") %
1943 1942 (f, inst.strerror))
1944 1943 if moddirstate:
1945 1944 if branch_merge:
1946 1945 self.dirstate.update(remove, 'r')
1947 1946 else:
1948 1947 self.dirstate.forget(remove)
1949 1948
1950 1949 if moddirstate:
1951 1950 self.dirstate.setparents(p1, p2)
1952 1951
1953 1952 if show_stats:
1954 1953 stats = ((len(get), _("updated")),
1955 1954 (len(merge) - len(failedmerge), _("merged")),
1956 1955 (len(remove), _("removed")),
1957 1956 (len(failedmerge), _("unresolved")))
1958 1957 note = ", ".join([_("%d files %s") % s for s in stats])
1959 1958 self.ui.status("%s\n" % note)
1960 1959 if moddirstate:
1961 1960 if branch_merge:
1962 1961 if failedmerge:
1963 1962 self.ui.status(_("There are unresolved merges,"
1964 1963 " you can redo the full merge using:\n"
1965 1964 " hg update -C %s\n"
1966 1965 " hg merge %s\n"
1967 1966 % (self.changelog.rev(p1),
1968 1967 self.changelog.rev(p2))))
1969 1968 else:
1970 1969 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1971 1970 elif failedmerge:
1972 1971 self.ui.status(_("There are unresolved merges with"
1973 1972 " locally modified files.\n"))
1974 1973
1975 1974 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1976 1975 return err
1977 1976
    def merge3(self, fn, my, other, p1, p2):
        """perform a 3-way merge in the working directory

        fn is the repository-relative path of the file to merge.
        my and other are the two filelog nodes being merged; p1 and p2
        are the (hex) changeset ids exported to the merge tool's
        environment.  The merge result is left in the working-directory
        copy of fn.  Returns the exit status of the external merge
        command (0 means success).
        """

        def temp(prefix, node):
            # write revision `node` of fn to a temp file so the external
            # merge tool can read it; returns the temp file's path
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp(prefix=pre)
            f = os.fdopen(fd, "wb")
            self.wwrite(fn, fl.read(node), f)
            f.close()
            return name

        fl = self.file(fn)
        # common ancestor of the two revisions serves as the merge base
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)        # "my" side: the working directory file
        b = temp("base", base)    # ancestor version
        c = temp("other", other)  # remote version

        self.ui.note(_("resolving %s\n") % fn)
        self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
                              (fn, short(my), short(other), short(base)))

        # pick the merge tool: $HGMERGE, then the [ui] merge setting,
        # then the bundled "hgmerge" script
        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
                        environ={'HG_FILE': fn,
                                 'HG_MY_NODE': p1,
                                 'HG_OTHER_NODE': p2,
                                 'HG_FILE_MY_NODE': hex(my),
                                 'HG_FILE_OTHER_NODE': hex(other),
                                 'HG_FILE_BASE_NODE': hex(base)})
        if r:
            self.ui.warn(_("merging %s failed!\n") % fn)

        # remove the temporary base/other copies; the merged content
        # stays in the working directory file
        os.unlink(b)
        os.unlink(c)
        return r
2014 2013
2015 2014 def verify(self):
2016 2015 filelinkrevs = {}
2017 2016 filenodes = {}
2018 2017 changesets = revisions = files = 0
2019 2018 errors = [0]
2020 2019 warnings = [0]
2021 2020 neededmanifests = {}
2022 2021
2023 2022 def err(msg):
2024 2023 self.ui.warn(msg + "\n")
2025 2024 errors[0] += 1
2026 2025
2027 2026 def warn(msg):
2028 2027 self.ui.warn(msg + "\n")
2029 2028 warnings[0] += 1
2030 2029
2031 2030 def checksize(obj, name):
2032 2031 d = obj.checksize()
2033 2032 if d[0]:
2034 2033 err(_("%s data length off by %d bytes") % (name, d[0]))
2035 2034 if d[1]:
2036 2035 err(_("%s index contains %d extra bytes") % (name, d[1]))
2037 2036
2038 2037 def checkversion(obj, name):
2039 2038 if obj.version != revlog.REVLOGV0:
2040 2039 if not revlogv1:
2041 2040 warn(_("warning: `%s' uses revlog format 1") % name)
2042 2041 elif revlogv1:
2043 2042 warn(_("warning: `%s' uses revlog format 0") % name)
2044 2043
2045 2044 revlogv1 = self.revlogversion != revlog.REVLOGV0
2046 2045 if self.ui.verbose or revlogv1 != self.revlogv1:
2047 2046 self.ui.status(_("repository uses revlog format %d\n") %
2048 2047 (revlogv1 and 1 or 0))
2049 2048
2050 2049 seen = {}
2051 2050 self.ui.status(_("checking changesets\n"))
2052 2051 checksize(self.changelog, "changelog")
2053 2052
2054 2053 for i in range(self.changelog.count()):
2055 2054 changesets += 1
2056 2055 n = self.changelog.node(i)
2057 2056 l = self.changelog.linkrev(n)
2058 2057 if l != i:
2059 2058 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2060 2059 if n in seen:
2061 2060 err(_("duplicate changeset at revision %d") % i)
2062 2061 seen[n] = 1
2063 2062
2064 2063 for p in self.changelog.parents(n):
2065 2064 if p not in self.changelog.nodemap:
2066 2065 err(_("changeset %s has unknown parent %s") %
2067 2066 (short(n), short(p)))
2068 2067 try:
2069 2068 changes = self.changelog.read(n)
2070 2069 except KeyboardInterrupt:
2071 2070 self.ui.warn(_("interrupted"))
2072 2071 raise
2073 2072 except Exception, inst:
2074 2073 err(_("unpacking changeset %s: %s") % (short(n), inst))
2075 2074 continue
2076 2075
2077 2076 neededmanifests[changes[0]] = n
2078 2077
2079 2078 for f in changes[3]:
2080 2079 filelinkrevs.setdefault(f, []).append(i)
2081 2080
2082 2081 seen = {}
2083 2082 self.ui.status(_("checking manifests\n"))
2084 2083 checkversion(self.manifest, "manifest")
2085 2084 checksize(self.manifest, "manifest")
2086 2085
2087 2086 for i in range(self.manifest.count()):
2088 2087 n = self.manifest.node(i)
2089 2088 l = self.manifest.linkrev(n)
2090 2089
2091 2090 if l < 0 or l >= self.changelog.count():
2092 2091 err(_("bad manifest link (%d) at revision %d") % (l, i))
2093 2092
2094 2093 if n in neededmanifests:
2095 2094 del neededmanifests[n]
2096 2095
2097 2096 if n in seen:
2098 2097 err(_("duplicate manifest at revision %d") % i)
2099 2098
2100 2099 seen[n] = 1
2101 2100
2102 2101 for p in self.manifest.parents(n):
2103 2102 if p not in self.manifest.nodemap:
2104 2103 err(_("manifest %s has unknown parent %s") %
2105 2104 (short(n), short(p)))
2106 2105
2107 2106 try:
2108 2107 delta = mdiff.patchtext(self.manifest.delta(n))
2109 2108 except KeyboardInterrupt:
2110 2109 self.ui.warn(_("interrupted"))
2111 2110 raise
2112 2111 except Exception, inst:
2113 2112 err(_("unpacking manifest %s: %s") % (short(n), inst))
2114 2113 continue
2115 2114
2116 2115 try:
2117 2116 ff = [ l.split('\0') for l in delta.splitlines() ]
2118 2117 for f, fn in ff:
2119 2118 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2120 2119 except (ValueError, TypeError), inst:
2121 2120 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2122 2121
2123 2122 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2124 2123
2125 2124 for m, c in neededmanifests.items():
2126 2125 err(_("Changeset %s refers to unknown manifest %s") %
2127 2126 (short(m), short(c)))
2128 2127 del neededmanifests
2129 2128
2130 2129 for f in filenodes:
2131 2130 if f not in filelinkrevs:
2132 2131 err(_("file %s in manifest but not in changesets") % f)
2133 2132
2134 2133 for f in filelinkrevs:
2135 2134 if f not in filenodes:
2136 2135 err(_("file %s in changeset but not in manifest") % f)
2137 2136
2138 2137 self.ui.status(_("checking files\n"))
2139 2138 ff = filenodes.keys()
2140 2139 ff.sort()
2141 2140 for f in ff:
2142 2141 if f == "/dev/null":
2143 2142 continue
2144 2143 files += 1
2145 2144 if not f:
2146 2145 err(_("file without name in manifest %s") % short(n))
2147 2146 continue
2148 2147 fl = self.file(f)
2149 2148 checkversion(fl, f)
2150 2149 checksize(fl, f)
2151 2150
2152 2151 nodes = {nullid: 1}
2153 2152 seen = {}
2154 2153 for i in range(fl.count()):
2155 2154 revisions += 1
2156 2155 n = fl.node(i)
2157 2156
2158 2157 if n in seen:
2159 2158 err(_("%s: duplicate revision %d") % (f, i))
2160 2159 if n not in filenodes[f]:
2161 2160 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2162 2161 else:
2163 2162 del filenodes[f][n]
2164 2163
2165 2164 flr = fl.linkrev(n)
2166 2165 if flr not in filelinkrevs.get(f, []):
2167 2166 err(_("%s:%s points to unexpected changeset %d")
2168 2167 % (f, short(n), flr))
2169 2168 else:
2170 2169 filelinkrevs[f].remove(flr)
2171 2170
2172 2171 # verify contents
2173 2172 try:
2174 2173 t = fl.read(n)
2175 2174 except KeyboardInterrupt:
2176 2175 self.ui.warn(_("interrupted"))
2177 2176 raise
2178 2177 except Exception, inst:
2179 2178 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2180 2179
2181 2180 # verify parents
2182 2181 (p1, p2) = fl.parents(n)
2183 2182 if p1 not in nodes:
2184 2183 err(_("file %s:%s unknown parent 1 %s") %
2185 2184 (f, short(n), short(p1)))
2186 2185 if p2 not in nodes:
2187 2186 err(_("file %s:%s unknown parent 2 %s") %
2188 2187 (f, short(n), short(p1)))
2189 2188 nodes[n] = 1
2190 2189
2191 2190 # cross-check
2192 2191 for node in filenodes[f]:
2193 2192 err(_("node %s in manifests not in %s") % (hex(node), f))
2194 2193
2195 2194 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2196 2195 (files, changesets, revisions))
2197 2196
2198 2197 if warnings[0]:
2199 2198 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2200 2199 if errors[0]:
2201 2200 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2202 2201 return 1
2203 2202
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callback that promotes the transaction journal files
    under `base` to their undo names (journal -> undo,
    journal.dirstate -> undo.dirstate)."""
    root = base
    def renamefiles():
        # rename order matters: the journal itself first, then dirstate
        for src, dst in (("journal", "undo"),
                         ("journal.dirstate", "undo.dirstate")):
            util.rename(os.path.join(root, src), os.path.join(root, dst))
    return renamefiles
2212 2211
@@ -1,202 +1,202 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "array bisect struct")
12 12
class manifestflags(dict):
    """Per-file flag map for one manifest revision.

    Maps file name -> True when the file is executable.  Symlinks are
    not supported yet, so linkf() always answers False.
    """
    def __init__(self, mapping=None):
        # use None as the default instead of a shared mutable {} dict
        # (the classic mutable-default pitfall); dict.__init__ copies
        # the mapping, so callers keep ownership of theirs
        if mapping is None:
            mapping = {}
        dict.__init__(self, mapping)
    def execf(self, f):
        "test for executable in manifest flags"
        return self.get(f, False)
    def linkf(self, f):
        "test for symlink in manifest flags"
        return False
    def set(self, f, execf=False, linkf=False):
        # linkf is accepted for interface symmetry but ignored: only
        # the executable bit is stored
        self[f] = execf
    def copy(self):
        "return an independent manifestflags with the same contents"
        return manifestflags(dict.copy(self))
26 26
class manifest(revlog):
    """revlog holding one manifest per revision.

    A manifest revision's text is a sorted sequence of lines of the
    form "filename\\0<40 hex chars of filenode><optional 'x' flag>\\n".
    The most recently read revision is cached both parsed
    (self.mapcache) and as raw text (self.listcache) so that add()
    can build the next revision incrementally.
    """
    def __init__(self, opener, defversion=REVLOGV0):
        # (node, {file: filenode}, manifestflags) of the last read revision
        self.mapcache = None
        # raw text of the last read/built revision, as a char array
        self.listcache = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
                        defversion)

    def read(self, node):
        """parse revision `node` into a {filename: filenode} map,
        caching (node, map, flags) in self.mapcache"""
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = manifestflags()
        self.listcache = array.array('c', text)
        lines = text.splitlines(1)
        for l in lines:
            # "name\0<40 hex chars><flag chars>\n"
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            # anything between the hash and the newline is the flag field
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        """return the manifestflags map for revision `node`"""
        if node == nullid: return manifestflags() # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            # populate mapcache as a side effect
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        """return a text diff between two raw manifest texts"""
        return mdiff.textdiff(str(a), str(b))

    def _search(self, m, s, lo=0, hi=None):
        '''return a tuple (start, end) that says where to find s within m.

        If the string is found m[start:end] are the line containing
        that string.  If start == end the string was not found and
        they indicate the proper sorted insertion point.  This was
        taken from bisect_left, and modified to find line start/end as
        it goes along.

        m should be a buffer or a string
        s is a string'''
        def advance(i, c):
            # scan forward from i to the next occurrence of character c
            while i < lenm and m[i] != c:
                i += 1
            return i
        lenm = len(m)
        if not hi:
            hi = lenm
        while lo < hi:
            mid = (lo + hi) // 2
            # back up to the start of the line containing mid
            start = mid
            while start > 0 and m[start-1] != '\n':
                start -= 1
            # the filename ends at the NUL separator
            end = advance(start, '\0')
            if m[start:end] < s:
                # we know that after the null there are 40 bytes of sha1
                # this translates to the bisect lo = mid + 1
                lo = advance(end + 40, '\n') + 1
            else:
                # this translates to the bisect hi = mid
                hi = start
        end = advance(lo, '\0')
        found = m[lo:end]
        if cmp(s, found) == 0:
            # we know that after the null there are 40 bytes of sha1
            end = advance(end + 40, '\n')
            return (lo, end+1)
        else:
            return (lo, lo)

    def find(self, node, f):
        '''look up entry for a single file efficiently.
        return (node, flag) pair if found, (None, None) if not.'''
        if self.mapcache and node == self.mapcache[0]:
            return self.mapcache[1].get(f), self.mapcache[2].get(f)
        # bisect directly in the raw text instead of parsing it all
        text = self.revision(node)
        start, end = self._search(text, f)
        if start == end:
            return None, None
        l = text[start:end]
        f, n = l.split('\0')
        return bin(n[:40]), n[40:-1] == 'x'

    def add(self, map, flags, transaction, link, p1=None, p2=None,
            changed=None):
        """add a manifest revision built from `map` and `flags`.

        When `changed` is supplied as (changed-files, removed-files)
        lists and the cached text belongs to p1, the new text is built
        by patching self.listcache in place; otherwise it is rebuilt
        from scratch.  Returns the new manifest node.
        """
        # apply the changes collected during the bisect loop to our addlist
        # return a delta suitable for addrevision
        def addlistdelta(addlist, x):
            # start from the bottom up
            # so changes to the offsets don't mess things up.
            i = len(x)
            while i > 0:
                i -= 1
                start = x[i][0]
                end = x[i][1]
                if x[i][2]:
                    addlist[start:end] = array.array('c', x[i][2])
                else:
                    del addlist[start:end]
            # each delta chunk is (start, end, replacement-length, data)
            return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
                            for d in x ])

        # if we're using the listcache, make sure it is valid and
        # parented by the same node we're diffing against
        if not changed or not self.listcache or not p1 or \
               self.mapcache[0] != p1:
            # slow path: serialize the whole map in sorted order
            files = map.keys()
            files.sort()

            # if this is changed to support newlines in filenames,
            # be sure to check the templates/ dir again (especially *-raw.tmpl)
            text = ["%s\000%s%s\n" %
                            (f, hex(map[f]), flags[f] and "x" or '')
                    for f in files]
            self.listcache = array.array('c', "".join(text))
            cachedelta = None
        else:
            # fast path: patch the cached p1 text line by line
            addlist = self.listcache

            # combine the changed lists into one list for sorting
            # (second element: 0 = add/update, 1 = remove)
            work = [[x, 0] for x in changed[0]]
            work[len(work):] = [[x, 1] for x in changed[1]]
            work.sort()

            delta = []
            dstart = None
            dend = None
            dline = [""]
            start = 0
            # zero copy representation of addlist as a buffer
            addbuf = buffer(addlist)

            # start with a readonly loop that finds the offset of
            # each line and creates the deltas
            for w in work:
                f = w[0]
                # bs will either be the index of the item or the insert point
                start, end = self._search(addbuf, f, start)
                if w[1] == 0:
                    l = "%s\000%s%s\n" % (f, hex(map[f]),
                                          flags[f] and "x" or '')
                else:
                    l = ""
                if start == end and w[1] == 1:
                    # item we want to delete was not found, error out
                    raise AssertionError(
                            _("failed to remove %s from manifest\n") % f)
                if dstart != None and dstart <= start and dend >= start:
                    # this change touches/adjoins the current delta: extend it
                    if dend < end:
                        dend = end
                    if l:
                        dline.append(l)
                else:
                    # flush the current delta and start a new one
                    if dstart != None:
                        delta.append([dstart, dend, "".join(dline)])
                    dstart = start
                    dend = end
                    dline = [l]

            if dstart != None:
                delta.append([dstart, dend, "".join(dline)])
            # apply the delta to the addlist, and get a delta for addrevision
            cachedelta = addlistdelta(addlist, delta)

            # the delta is only valid if we've been processing the tip revision
            if self.mapcache[0] != self.tip():
                cachedelta = None
            self.listcache = addlist

        n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
                             p2, cachedelta)
        self.mapcache = (n, map, flags)

        return n
General Comments 0
You need to be logged in to leave comments. Login now