Merge manifest refactor work
Matt Mackall
r2836:e78cad1f merge default
@@ -1,174 +1,174 @@
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import *
9 9 from i18n import gettext as _
10 10 from node import *
11 11 demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
12 12
13 13 def tidyprefix(dest, prefix, suffixes):
14 14 '''choose prefix to use for names in archive. make sure prefix is
15 15 safe for consumers.'''
16 16
17 17 if prefix:
18 18 prefix = prefix.replace('\\', '/')
19 19 else:
20 20 if not isinstance(dest, str):
21 21 raise ValueError('dest must be string if no prefix')
22 22 prefix = os.path.basename(dest)
23 23 lower = prefix.lower()
24 24 for sfx in suffixes:
25 25 if lower.endswith(sfx):
26 26 prefix = prefix[:-len(sfx)]
27 27 break
28 28 lpfx = os.path.normpath(util.localpath(prefix))
29 29 prefix = util.pconvert(lpfx)
30 30 if not prefix.endswith('/'):
31 31 prefix += '/'
32 32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 33 raise util.Abort(_('archive prefix contains illegal components'))
34 34 return prefix
35 35
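For reference, a minimal sketch of how the prefix rules above play out, assuming a Mercurial checkout of this vintage is importable as the mercurial package (paths and archive names are illustrative):

    from mercurial import archival, util

    # no explicit prefix: basename of dest minus a recognized suffix, plus '/'
    print archival.tidyprefix('dist/proj-1.0.tar.gz', None,
                              ['.tar', '.tar.bz2', '.tar.gz', '.tgz'])
    # -> 'proj-1.0/'

    # prefixes that escape the archive root are refused
    try:
        archival.tidyprefix('out.zip', '../escape', ['.zip'])
    except util.Abort:
        print 'illegal prefix refused'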
36 36 class tarit:
37 37 '''write archive to tar file or stream. can write uncompressed,
38 38 or compress with gzip or bzip2.'''
39 39
40 40 def __init__(self, dest, prefix, mtime, kind=''):
41 41 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
42 42 '.tgz', '.tbz2'])
43 43 self.mtime = mtime
44 44 if isinstance(dest, str):
45 45 self.z = tarfile.open(dest, mode='w:'+kind)
46 46 else:
47 47 self.z = tarfile.open(mode='w|'+kind, fileobj=dest)
48 48
49 49 def addfile(self, name, mode, data):
50 50 i = tarfile.TarInfo(self.prefix + name)
51 51 i.mtime = self.mtime
52 52 i.size = len(data)
53 53 i.mode = mode
54 54 self.z.addfile(i, cStringIO.StringIO(data))
55 55
56 56 def done(self):
57 57 self.z.close()
58 58
59 59 class tellable:
60 60 '''provide tell method for zipfile.ZipFile when writing to http
61 61 response file object.'''
62 62
63 63 def __init__(self, fp):
64 64 self.fp = fp
65 65 self.offset = 0
66 66
67 67 def __getattr__(self, key):
68 68 return getattr(self.fp, key)
69 69
70 70 def write(self, s):
71 71 self.fp.write(s)
72 72 self.offset += len(s)
73 73
74 74 def tell(self):
75 75 return self.offset
76 76
77 77 class zipit:
78 78 '''write archive to zip file or stream. can write uncompressed,
79 79 or compressed with deflate.'''
80 80
81 81 def __init__(self, dest, prefix, mtime, compress=True):
82 82 self.prefix = tidyprefix(dest, prefix, ('.zip',))
83 83 if not isinstance(dest, str):
84 84 try:
85 85 dest.tell()
86 86 except (AttributeError, IOError):
87 87 dest = tellable(dest)
88 88 self.z = zipfile.ZipFile(dest, 'w',
89 89 compress and zipfile.ZIP_DEFLATED or
90 90 zipfile.ZIP_STORED)
91 91 self.date_time = time.gmtime(mtime)[:6]
92 92
93 93 def addfile(self, name, mode, data):
94 94 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
95 95 i.compress_type = self.z.compression
96 96 i.flag_bits = 0x08
97 97 # unzip will not honor unix file modes unless file creator is
98 98 # set to unix (id 3).
99 99 i.create_system = 3
100 100 i.external_attr = (mode | stat.S_IFREG) << 16L
101 101 self.z.writestr(i, data)
102 102
103 103 def done(self):
104 104 self.z.close()
105 105
106 106 class fileit:
107 107 '''write archive as files in directory.'''
108 108
109 109 def __init__(self, name, prefix, mtime):
110 110 if prefix:
111 111 raise util.Abort(_('cannot give prefix when archiving to files'))
112 112 self.basedir = name
113 113 self.dirs = {}
114 114 self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
115 115 getattr(os, 'O_BINARY', 0) |
116 116 getattr(os, 'O_NOFOLLOW', 0))
117 117
118 118 def addfile(self, name, mode, data):
119 119 destfile = os.path.join(self.basedir, name)
120 120 destdir = os.path.dirname(destfile)
121 121 if destdir not in self.dirs:
122 122 if not os.path.isdir(destdir):
123 123 os.makedirs(destdir)
124 124 self.dirs[destdir] = 1
125 125 os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)
126 126
127 127 def done(self):
128 128 pass
129 129
130 130 archivers = {
131 131 'files': fileit,
132 132 'tar': tarit,
133 133 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
134 134 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
135 135 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
136 136 'zip': zipit,
137 137 }
138 138
139 139 def archive(repo, dest, node, kind, decode=True, matchfn=None,
140 140 prefix=None, mtime=None):
141 141 '''create archive of repo as it was at node.
142 142
143 143 dest can be name of directory, name of archive file, or file
144 144 object to write archive to.
145 145
146 146 kind is type of archive to create.
147 147
148 148 decode tells whether to put files through decode filters from
149 149 hgrc.
150 150
151 151 matchfn is function to filter names of files to write to archive.
152 152
153 153 prefix is name of path to put before every archive member.'''
154 154
155 155 def write(name, mode, data):
156 156 if matchfn and not matchfn(name): return
157 157 if decode:
158 158 fp = cStringIO.StringIO()
159 159 repo.wwrite(name, data, fp)
160 160 data = fp.getvalue()
161 161 archiver.addfile(name, mode, data)
162 162
163 163 change = repo.changelog.read(node)
164 164 mn = change[0]
165 165 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
166 166 mf = repo.manifest.read(mn).items()
167 167 mff = repo.manifest.readflags(mn)
168 168 mf.sort()
169 169 write('.hg_archival.txt', 0644,
170 170 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
171 171 for filename, filenode in mf:
172 write(filename, mff[filename] and 0755 or 0644,
172 write(filename, mff.execf(filename) and 0755 or 0644,
173 173 repo.file(filename).read(filenode))
174 174 archiver.done()
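Taken together, a short usage sketch for this module; it assumes the 2006-era constructors ui.ui() and hg.repository(), and the output file name is illustrative:

    from mercurial import ui, hg, archival

    u = ui.ui()
    repo = hg.repository(u, '.')
    node = repo.lookup('tip')
    # 'tgz' selects the gzip-compressed tarit from the archivers table above;
    # with no prefix given, tidyprefix derives 'proj-1.0/' from the file name
    archival.archive(repo, 'proj-1.0.tar.gz', node, 'tgz')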
@@ -1,3686 +1,3687 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 16 demandload(globals(), "hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted, unknown = repo.changes()
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def filterfiles(filters, files):
29 29 l = [x for x in files if x in filters]
30 30
31 31 for t in filters:
32 32 if t and t[-1] != "/":
33 33 t += "/"
34 34 l += [x for x in files if x.startswith(t)]
35 35 return l
36 36
37 37 def relpath(repo, args):
38 38 cwd = repo.getcwd()
39 39 if cwd:
40 40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 41 return args
42 42
43 43 def logmessage(opts):
44 44 """ get the log message according to -m and -l option """
45 45 message = opts['message']
46 46 logfile = opts['logfile']
47 47
48 48 if message and logfile:
49 49 raise util.Abort(_('options --message and --logfile are mutually '
50 50 'exclusive'))
51 51 if not message and logfile:
52 52 try:
53 53 if logfile == '-':
54 54 message = sys.stdin.read()
55 55 else:
56 56 message = open(logfile).read()
57 57 except IOError, inst:
58 58 raise util.Abort(_("can't read commit message '%s': %s") %
59 59 (logfile, inst.strerror))
60 60 return message
61 61
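A quick illustration of the precedence above (logmessage is an internal helper of this era, reached through the commands module; the log file name is hypothetical):

    from mercurial import commands

    # -m alone is returned as-is; -m together with -l aborts;
    # -l alone reads the named file, or stdin when the name is '-'
    print commands.logmessage({'message': 'fix build', 'logfile': ''})
    print commands.logmessage({'message': '', 'logfile': 'commit-msg.txt'})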
62 62 def matchpats(repo, pats=[], opts={}, head=''):
63 63 cwd = repo.getcwd()
64 64 if not pats and cwd:
65 65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
66 66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
67 67 cwd = ''
68 68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
69 69 opts.get('exclude'), head)
70 70
71 71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
72 72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
73 73 exact = dict(zip(files, files))
74 74 def walk():
75 75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
76 76 badmatch=badmatch):
77 77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
78 78 return files, matchfn, walk()
79 79
80 80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
81 81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
82 82 for r in results:
83 83 yield r
84 84
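A sketch of consuming the walker above (internal API of this era; opts carries only the keys matchpats reads):

    from mercurial import ui, hg, commands

    u = ui.ui()
    repo = hg.repository(u, '.')
    opts = {'include': [], 'exclude': []}
    for src, abs_, rel, exact in commands.walk(repo, ['.'], opts):
        # src tags where the name came from, abs_ is the repo-relative name,
        # rel is relative to the cwd, exact says the pattern named it directly
        print src, rel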
85 85 def walkchangerevs(ui, repo, pats, opts):
86 86 '''Iterate over files and the revs they changed in.
87 87
88 88 Callers most commonly need to iterate backwards over the history
89 89 they are interested in. Doing so has awful (quadratic-looking)
90 90 performance, so we use iterators in a "windowed" way.
91 91
92 92 We walk a window of revisions in the desired order. Within the
93 93 window, we first walk forwards to gather data, then in the desired
94 94 order (usually backwards) to display it.
95 95
96 96 This function returns an (iterator, getchange, matchfn) tuple. The
97 97 getchange function returns the changelog entry for a numeric
98 98 revision. The iterator yields 3-tuples. They will be of one of
99 99 the following forms:
100 100
101 101 "window", incrementing, lastrev: stepping through a window,
102 102 positive if walking forwards through revs, last rev in the
103 103 sequence iterated over - use to reset state for the current window
104 104
105 105 "add", rev, fns: out-of-order traversal of the given file names
106 106 fns, which changed during revision rev - use to gather data for
107 107 possible display
108 108
109 109 "iter", rev, None: in-order traversal of the revs earlier iterated
110 110 over with "add" - use to display data'''
111 111
112 112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
113 113 if start < end:
114 114 while start < end:
115 115 yield start, min(windowsize, end-start)
116 116 start += windowsize
117 117 if windowsize < sizelimit:
118 118 windowsize *= 2
119 119 else:
120 120 while start > end:
121 121 yield start, min(windowsize, start-end-1)
122 122 start -= windowsize
123 123 if windowsize < sizelimit:
124 124 windowsize *= 2
125 125
126 126
127 127 files, matchfn, anypats = matchpats(repo, pats, opts)
128 128 follow = opts.get('follow') or opts.get('follow_first')
129 129
130 130 if repo.changelog.count() == 0:
131 131 return [], False, matchfn
132 132
133 133 if follow:
134 134 p = repo.dirstate.parents()[0]
135 135 if p == nullid:
136 136 ui.warn(_('No working directory revision; defaulting to tip\n'))
137 137 start = 'tip'
138 138 else:
139 139 start = repo.changelog.rev(p)
140 140 defrange = '%s:0' % start
141 141 else:
142 142 defrange = 'tip:0'
143 143 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
144 144 wanted = {}
145 145 slowpath = anypats
146 146 fncache = {}
147 147
148 148 chcache = {}
149 149 def getchange(rev):
150 150 ch = chcache.get(rev)
151 151 if ch is None:
152 152 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
153 153 return ch
154 154
155 155 if not slowpath and not files:
156 156 # No files, no patterns. Display all revs.
157 157 wanted = dict(zip(revs, revs))
158 158 copies = []
159 159 if not slowpath:
160 160 # Only files, no patterns. Check the history of each file.
161 161 def filerevgen(filelog, node):
162 162 cl_count = repo.changelog.count()
163 163 if node is None:
164 164 last = filelog.count() - 1
165 165 else:
166 166 last = filelog.rev(node)
167 167 for i, window in increasing_windows(last, -1):
168 168 revs = []
169 169 for j in xrange(i - window, i + 1):
170 170 n = filelog.node(j)
171 171 revs.append((filelog.linkrev(n),
172 172 follow and filelog.renamed(n)))
173 173 revs.reverse()
174 174 for rev in revs:
175 175 # only yield rev for which we have the changelog, it can
176 176 # happen while doing "hg log" during a pull or commit
177 177 if rev[0] < cl_count:
178 178 yield rev
179 179 def iterfiles():
180 180 for filename in files:
181 181 yield filename, None
182 182 for filename_node in copies:
183 183 yield filename_node
184 184 minrev, maxrev = min(revs), max(revs)
185 185 for file_, node in iterfiles():
186 186 filelog = repo.file(file_)
187 187 # A zero count may be a directory or deleted file, so
188 188 # try to find matching entries on the slow path.
189 189 if filelog.count() == 0:
190 190 slowpath = True
191 191 break
192 192 for rev, copied in filerevgen(filelog, node):
193 193 if rev <= maxrev:
194 194 if rev < minrev:
195 195 break
196 196 fncache.setdefault(rev, [])
197 197 fncache[rev].append(file_)
198 198 wanted[rev] = 1
199 199 if follow and copied:
200 200 copies.append(copied)
201 201 if slowpath:
202 202 if follow:
203 203 raise util.Abort(_('can only follow copies/renames for explicit '
204 204 'file names'))
205 205
206 206 # The slow path checks files modified in every changeset.
207 207 def changerevgen():
208 208 for i, window in increasing_windows(repo.changelog.count()-1, -1):
209 209 for j in xrange(i - window, i + 1):
210 210 yield j, getchange(j)[3]
211 211
212 212 for rev, changefiles in changerevgen():
213 213 matches = filter(matchfn, changefiles)
214 214 if matches:
215 215 fncache[rev] = matches
216 216 wanted[rev] = 1
217 217
218 218 def iterate():
219 219 class followfilter:
220 220 def __init__(self, onlyfirst=False):
221 221 self.startrev = -1
222 222 self.roots = []
223 223 self.onlyfirst = onlyfirst
224 224
225 225 def match(self, rev):
226 226 def realparents(rev):
227 227 if self.onlyfirst:
228 228 return repo.changelog.parentrevs(rev)[0:1]
229 229 else:
230 230 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
231 231
232 232 if self.startrev == -1:
233 233 self.startrev = rev
234 234 return True
235 235
236 236 if rev > self.startrev:
237 237 # forward: all descendants
238 238 if not self.roots:
239 239 self.roots.append(self.startrev)
240 240 for parent in realparents(rev):
241 241 if parent in self.roots:
242 242 self.roots.append(rev)
243 243 return True
244 244 else:
245 245 # backwards: all parents
246 246 if not self.roots:
247 247 self.roots.extend(realparents(self.startrev))
248 248 if rev in self.roots:
249 249 self.roots.remove(rev)
250 250 self.roots.extend(realparents(rev))
251 251 return True
252 252
253 253 return False
254 254
255 255 if follow and not files:
256 256 ff = followfilter(onlyfirst=opts.get('follow_first'))
257 257 def want(rev):
258 258 if rev not in wanted:
259 259 return False
260 260 return ff.match(rev)
261 261 else:
262 262 def want(rev):
263 263 return rev in wanted
264 264
265 265 for i, window in increasing_windows(0, len(revs)):
266 266 yield 'window', revs[0] < revs[-1], revs[-1]
267 267 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
268 268 srevs = list(nrevs)
269 269 srevs.sort()
270 270 for rev in srevs:
271 271 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
272 272 yield 'add', rev, fns
273 273 for rev in nrevs:
274 274 yield 'iter', rev, None
275 275 return iterate(), getchange, matchfn
276 276
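A sketch of a consumer of the three-tuple protocol described in the docstring above (internal API of this era; opts holds just the keys walkchangerevs reads):

    from mercurial import ui, hg, commands

    u = ui.ui()
    repo = hg.repository(u, '.')
    opts = {'rev': [], 'include': [], 'exclude': [],
            'follow': False, 'follow_first': False}
    changeiter, getchange, matchfn = commands.walkchangerevs(u, repo, [], opts)

    fns_by_rev = {}
    for kind, value, extra in changeiter:
        if kind == 'window':        # new window: reset per-window state
            fns_by_rev.clear()
        elif kind == 'add':         # out of order: gather files touched by rev value
            fns_by_rev[value] = extra
        elif kind == 'iter':        # in order: display rev value
            # getchange(value) returns the full changelog entry for the rev
            print value, ' '.join(fns_by_rev.get(value, []))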
277 277 revrangesep = ':'
278 278
279 279 def revfix(repo, val, defval):
280 280 '''turn user-level id of changeset into rev number.
281 281 user-level id can be tag, changeset, rev number, or negative rev
282 282 number relative to number of revs (-1 is tip, etc).'''
283 283 if not val:
284 284 return defval
285 285 try:
286 286 num = int(val)
287 287 if str(num) != val:
288 288 raise ValueError
289 289 if num < 0:
290 290 num += repo.changelog.count()
291 291 if num < 0:
292 292 num = 0
293 293 elif num >= repo.changelog.count():
294 294 raise ValueError
295 295 except ValueError:
296 296 try:
297 297 num = repo.changelog.rev(repo.lookup(val))
298 298 except KeyError:
299 299 raise util.Abort(_('invalid revision identifier %s'), val)
300 300 return num
301 301
302 302 def revpair(ui, repo, revs):
303 303 '''return pair of nodes, given list of revisions. second item can
304 304 be None, meaning use working dir.'''
305 305 if not revs:
306 306 return repo.dirstate.parents()[0], None
307 307 end = None
308 308 if len(revs) == 1:
309 309 start = revs[0]
310 310 if revrangesep in start:
311 311 start, end = start.split(revrangesep, 1)
312 312 start = revfix(repo, start, 0)
313 313 end = revfix(repo, end, repo.changelog.count() - 1)
314 314 else:
315 315 start = revfix(repo, start, None)
316 316 elif len(revs) == 2:
317 317 if revrangesep in revs[0] or revrangesep in revs[1]:
318 318 raise util.Abort(_('too many revisions specified'))
319 319 start = revfix(repo, revs[0], None)
320 320 end = revfix(repo, revs[1], None)
321 321 else:
322 322 raise util.Abort(_('too many revisions specified'))
323 323 if end is not None: end = repo.lookup(str(end))
324 324 return repo.lookup(str(start)), end
325 325
326 326 def revrange(ui, repo, revs):
327 327 """Yield revision as strings from a list of revision specifications."""
328 328 seen = {}
329 329 for spec in revs:
330 330 if revrangesep in spec:
331 331 start, end = spec.split(revrangesep, 1)
332 332 start = revfix(repo, start, 0)
333 333 end = revfix(repo, end, repo.changelog.count() - 1)
334 334 step = start > end and -1 or 1
335 335 for rev in xrange(start, end+step, step):
336 336 if rev in seen:
337 337 continue
338 338 seen[rev] = 1
339 339 yield str(rev)
340 340 else:
341 341 rev = revfix(repo, spec, None)
342 342 if rev in seen:
343 343 continue
344 344 seen[rev] = 1
345 345 yield str(rev)
346 346
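A sketch of both helpers above in use (internal API; the specific revision numbers are illustrative and assume the repository has that many changesets):

    from mercurial import ui, hg, commands

    u = ui.ui()
    repo = hg.repository(u, '.')

    # revpair: a single 'a:b' spec gives two nodes; an empty list means
    # (first working-directory parent, None), i.e. diff against the working dir
    start, end = commands.revpair(u, repo, ['2:5'])
    start, end = commands.revpair(u, repo, [])

    # revrange: expands specs to revision numbers as strings; '3:1' walks
    # backwards, and revisions already seen in earlier specs are suppressed
    print list(commands.revrange(u, repo, ['3:1', '2']))   # -> ['3', '2', '1']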
347 347 def make_filename(repo, pat, node,
348 348 total=None, seqno=None, revwidth=None, pathname=None):
349 349 node_expander = {
350 350 'H': lambda: hex(node),
351 351 'R': lambda: str(repo.changelog.rev(node)),
352 352 'h': lambda: short(node),
353 353 }
354 354 expander = {
355 355 '%': lambda: '%',
356 356 'b': lambda: os.path.basename(repo.root),
357 357 }
358 358
359 359 try:
360 360 if node:
361 361 expander.update(node_expander)
362 362 if node and revwidth is not None:
363 363 expander['r'] = (lambda:
364 364 str(repo.changelog.rev(node)).zfill(revwidth))
365 365 if total is not None:
366 366 expander['N'] = lambda: str(total)
367 367 if seqno is not None:
368 368 expander['n'] = lambda: str(seqno)
369 369 if total is not None and seqno is not None:
370 370 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
371 371 if pathname is not None:
372 372 expander['s'] = lambda: os.path.basename(pathname)
373 373 expander['d'] = lambda: os.path.dirname(pathname) or '.'
374 374 expander['p'] = lambda: pathname
375 375
376 376 newname = []
377 377 patlen = len(pat)
378 378 i = 0
379 379 while i < patlen:
380 380 c = pat[i]
381 381 if c == '%':
382 382 i += 1
383 383 c = pat[i]
384 384 c = expander[c]()
385 385 newname.append(c)
386 386 i += 1
387 387 return ''.join(newname)
388 388 except KeyError, inst:
389 389 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
390 390 inst.args[0])
391 391
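A sketch of the format escapes handled above (internal helper; the repository name in the output comment is hypothetical):

    from mercurial import ui, hg, commands

    u = ui.ui()
    repo = hg.repository(u, '.')
    node = repo.lookup('tip')

    # %b = basename of repo root, %R = rev number, %h = short hex, %n = seqno
    # zero-padded to the width of total, %% = a literal '%'
    print commands.make_filename(repo, '%b-r%R-%h.patch', node,
                                 total=10, seqno=3)
    # -> e.g. 'myrepo-r42-0123456789ab.patch'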
392 392 def make_file(repo, pat, node=None,
393 393 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
394 394 if not pat or pat == '-':
395 395 return 'w' in mode and sys.stdout or sys.stdin
396 396 if hasattr(pat, 'write') and 'w' in mode:
397 397 return pat
398 398 if hasattr(pat, 'read') and 'r' in mode:
399 399 return pat
400 400 return open(make_filename(repo, pat, node, total, seqno, revwidth,
401 401 pathname),
402 402 mode)
403 403
404 404 def write_bundle(cg, filename=None, compress=True):
405 405 """Write a bundle file and return its filename.
406 406
407 407 Existing files will not be overwritten.
408 408 If no filename is specified, a temporary file is created.
409 409 bz2 compression can be turned off.
410 410 The bundle file will be deleted in case of errors.
411 411 """
412 412 class nocompress(object):
413 413 def compress(self, x):
414 414 return x
415 415 def flush(self):
416 416 return ""
417 417
418 418 fh = None
419 419 cleanup = None
420 420 try:
421 421 if filename:
422 422 if os.path.exists(filename):
423 423 raise util.Abort(_("file '%s' already exists"), filename)
424 424 fh = open(filename, "wb")
425 425 else:
426 426 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
427 427 fh = os.fdopen(fd, "wb")
428 428 cleanup = filename
429 429
430 430 if compress:
431 431 fh.write("HG10")
432 432 z = bz2.BZ2Compressor(9)
433 433 else:
434 434 fh.write("HG10UN")
435 435 z = nocompress()
436 436 # parse the changegroup data, otherwise we will block
437 437 # in case of sshrepo because we don't know the end of the stream
438 438
439 439 # an empty chunkiter is the end of the changegroup
440 440 empty = False
441 441 while not empty:
442 442 empty = True
443 443 for chunk in changegroup.chunkiter(cg):
444 444 empty = False
445 445 fh.write(z.compress(changegroup.genchunk(chunk)))
446 446 fh.write(z.compress(changegroup.closechunk()))
447 447 fh.write(z.flush())
448 448 cleanup = None
449 449 return filename
450 450 finally:
451 451 if fh is not None:
452 452 fh.close()
453 453 if cleanup is not None:
454 454 os.unlink(cleanup)
455 455
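As a sketch, telling the two on-disk forms produced above apart afterwards (the file name is hypothetical):

    # write_bundle() emits either "HG10" + bz2 stream or "HG10UN" + raw chunks
    fh = open('changes.hg', 'rb')
    header = fh.read(6)
    if header == 'HG10UN':
        print 'uncompressed bundle'
    elif header.startswith('HG10'):
        print 'bz2-compressed bundle'   # bytes 5-6 already belong to the bz2 data
    else:
        print 'not a bundle produced by write_bundle'
    fh.close()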
456 456 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
457 457 changes=None, text=False, opts={}):
458 458 if not node1:
459 459 node1 = repo.dirstate.parents()[0]
460 460 # reading the data for node1 early allows it to play nicely
461 461 # with repo.changes and the revlog cache.
462 462 change = repo.changelog.read(node1)
463 463 mmap = repo.manifest.read(change[0])
464 464 date1 = util.datestr(change[2])
465 465
466 466 if not changes:
467 467 changes = repo.changes(node1, node2, files, match=match)
468 468 modified, added, removed, deleted, unknown = changes
469 469 if files:
470 470 modified, added, removed = map(lambda x: filterfiles(files, x),
471 471 (modified, added, removed))
472 472
473 473 if not modified and not added and not removed:
474 474 return
475 475
476 476 if node2:
477 477 change = repo.changelog.read(node2)
478 478 mmap2 = repo.manifest.read(change[0])
479 479 _date2 = util.datestr(change[2])
480 480 def date2(f):
481 481 return _date2
482 482 def read(f):
483 483 return repo.file(f).read(mmap2[f])
484 484 else:
485 485 tz = util.makedate()[1]
486 486 _date2 = util.datestr()
487 487 def date2(f):
488 488 try:
489 489 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
490 490 except OSError, err:
491 491 if err.errno != errno.ENOENT: raise
492 492 return _date2
493 493 def read(f):
494 494 return repo.wread(f)
495 495
496 496 if ui.quiet:
497 497 r = None
498 498 else:
499 499 hexfunc = ui.verbose and hex or short
500 500 r = [hexfunc(node) for node in [node1, node2] if node]
501 501
502 502 diffopts = ui.diffopts()
503 503 showfunc = opts.get('show_function') or diffopts['showfunc']
504 504 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
505 505 ignorewsamount = opts.get('ignore_space_change') or \
506 506 diffopts['ignorewsamount']
507 507 ignoreblanklines = opts.get('ignore_blank_lines') or \
508 508 diffopts['ignoreblanklines']
509 509
510 510 all = modified + added + removed
511 511 all.sort()
512 512 for f in all:
513 513 to = None
514 514 tn = None
515 515 if f in mmap:
516 516 to = repo.file(f).read(mmap[f])
517 517 if f not in removed:
518 518 tn = read(f)
519 519 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
520 520 showfunc=showfunc, ignorews=ignorews,
521 521 ignorewsamount=ignorewsamount,
522 522 ignoreblanklines=ignoreblanklines))
523 523
524 524 def trimuser(ui, name, rev, revcache):
525 525 """trim the name of the user who committed a change"""
526 526 user = revcache.get(rev)
527 527 if user is None:
528 528 user = revcache[rev] = ui.shortuser(name)
529 529 return user
530 530
531 531 class changeset_printer(object):
532 532 '''show changeset information when templating not requested.'''
533 533
534 534 def __init__(self, ui, repo):
535 535 self.ui = ui
536 536 self.repo = repo
537 537
538 538 def show(self, rev=0, changenode=None, brinfo=None):
539 539 '''show a single changeset or file revision'''
540 540 log = self.repo.changelog
541 541 if changenode is None:
542 542 changenode = log.node(rev)
543 543 elif not rev:
544 544 rev = log.rev(changenode)
545 545
546 546 if self.ui.quiet:
547 547 self.ui.write("%d:%s\n" % (rev, short(changenode)))
548 548 return
549 549
550 550 changes = log.read(changenode)
551 551 date = util.datestr(changes[2])
552 552
553 553 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
554 554 for p in log.parents(changenode)
555 555 if self.ui.debugflag or p != nullid]
556 556 if (not self.ui.debugflag and len(parents) == 1 and
557 557 parents[0][0] == rev-1):
558 558 parents = []
559 559
560 560 if self.ui.verbose:
561 561 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
562 562 else:
563 563 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
564 564
565 565 for tag in self.repo.nodetags(changenode):
566 566 self.ui.status(_("tag: %s\n") % tag)
567 567 for parent in parents:
568 568 self.ui.write(_("parent: %d:%s\n") % parent)
569 569
570 570 if brinfo and changenode in brinfo:
571 571 br = brinfo[changenode]
572 572 self.ui.write(_("branch: %s\n") % " ".join(br))
573 573
574 574 self.ui.debug(_("manifest: %d:%s\n") %
575 575 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
576 576 self.ui.status(_("user: %s\n") % changes[1])
577 577 self.ui.status(_("date: %s\n") % date)
578 578
579 579 if self.ui.debugflag:
580 580 files = self.repo.changes(log.parents(changenode)[0], changenode)
581 581 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
582 582 files):
583 583 if value:
584 584 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
585 585 else:
586 586 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
587 587
588 588 description = changes[4].strip()
589 589 if description:
590 590 if self.ui.verbose:
591 591 self.ui.status(_("description:\n"))
592 592 self.ui.status(description)
593 593 self.ui.status("\n\n")
594 594 else:
595 595 self.ui.status(_("summary: %s\n") %
596 596 description.splitlines()[0])
597 597 self.ui.status("\n")
598 598
599 599 def show_changeset(ui, repo, opts):
600 600 '''show one changeset. uses template or regular display. caller
601 601 can pass in 'style' and 'template' options in opts.'''
602 602
603 603 tmpl = opts.get('template')
604 604 if tmpl:
605 605 tmpl = templater.parsestring(tmpl, quoted=False)
606 606 else:
607 607 tmpl = ui.config('ui', 'logtemplate')
608 608 if tmpl: tmpl = templater.parsestring(tmpl)
609 609 mapfile = opts.get('style') or ui.config('ui', 'style')
610 610 if tmpl or mapfile:
611 611 if mapfile:
612 612 if not os.path.isfile(mapfile):
613 613 mapname = templater.templatepath('map-cmdline.' + mapfile)
614 614 if not mapname: mapname = templater.templatepath(mapfile)
615 615 if mapname: mapfile = mapname
616 616 try:
617 617 t = templater.changeset_templater(ui, repo, mapfile)
618 618 except SyntaxError, inst:
619 619 raise util.Abort(inst.args[0])
620 620 if tmpl: t.use_template(tmpl)
621 621 return t
622 622 return changeset_printer(ui, repo)
623 623
624 624 def setremoteconfig(ui, opts):
625 625 "copy remote options to ui tree"
626 626 if opts.get('ssh'):
627 627 ui.setconfig("ui", "ssh", opts['ssh'])
628 628 if opts.get('remotecmd'):
629 629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
630 630
631 631 def show_version(ui):
632 632 """output version and copyright information"""
633 633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
634 634 % version.get_version())
635 635 ui.status(_(
636 636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
637 637 "This is free software; see the source for copying conditions. "
638 638 "There is NO\nwarranty; "
639 639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
640 640 ))
641 641
642 642 def help_(ui, name=None, with_version=False):
643 643 """show help for a command, extension, or list of commands
644 644
645 645 With no arguments, print a list of commands and short help.
646 646
647 647 Given a command name, print help for that command.
648 648
649 649 Given an extension name, print help for that extension, and the
650 650 commands it provides."""
651 651 option_lists = []
652 652
653 653 def helpcmd(name):
654 654 if with_version:
655 655 show_version(ui)
656 656 ui.write('\n')
657 657 aliases, i = findcmd(name)
658 658 # synopsis
659 659 ui.write("%s\n\n" % i[2])
660 660
661 661 # description
662 662 doc = i[0].__doc__
663 663 if not doc:
664 664 doc = _("(No help text available)")
665 665 if ui.quiet:
666 666 doc = doc.splitlines(0)[0]
667 667 ui.write("%s\n" % doc.rstrip())
668 668
669 669 if not ui.quiet:
670 670 # aliases
671 671 if len(aliases) > 1:
672 672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
673 673
674 674 # options
675 675 if i[1]:
676 676 option_lists.append(("options", i[1]))
677 677
678 678 def helplist(select=None):
679 679 h = {}
680 680 cmds = {}
681 681 for c, e in table.items():
682 682 f = c.split("|", 1)[0]
683 683 if select and not select(f):
684 684 continue
685 685 if name == "shortlist" and not f.startswith("^"):
686 686 continue
687 687 f = f.lstrip("^")
688 688 if not ui.debugflag and f.startswith("debug"):
689 689 continue
690 690 doc = e[0].__doc__
691 691 if not doc:
692 692 doc = _("(No help text available)")
693 693 h[f] = doc.splitlines(0)[0].rstrip()
694 694 cmds[f] = c.lstrip("^")
695 695
696 696 fns = h.keys()
697 697 fns.sort()
698 698 m = max(map(len, fns))
699 699 for f in fns:
700 700 if ui.verbose:
701 701 commands = cmds[f].replace("|",", ")
702 702 ui.write(" %s:\n %s\n"%(commands, h[f]))
703 703 else:
704 704 ui.write(' %-*s %s\n' % (m, f, h[f]))
705 705
706 706 def helpext(name):
707 707 try:
708 708 mod = findext(name)
709 709 except KeyError:
710 710 raise UnknownCommand(name)
711 711
712 712 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
713 713 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
714 714 for d in doc[1:]:
715 715 ui.write(d, '\n')
716 716
717 717 ui.status('\n')
718 718 if ui.verbose:
719 719 ui.status(_('list of commands:\n\n'))
720 720 else:
721 721 ui.status(_('list of commands (use "hg help -v %s" '
722 722 'to show aliases and global options):\n\n') % name)
723 723
724 724 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
725 725 helplist(modcmds.has_key)
726 726
727 727 if name and name != 'shortlist':
728 728 try:
729 729 helpcmd(name)
730 730 except UnknownCommand:
731 731 helpext(name)
732 732
733 733 else:
734 734 # program name
735 735 if ui.verbose or with_version:
736 736 show_version(ui)
737 737 else:
738 738 ui.status(_("Mercurial Distributed SCM\n"))
739 739 ui.status('\n')
740 740
741 741 # list of commands
742 742 if name == "shortlist":
743 743 ui.status(_('basic commands (use "hg help" '
744 744 'for the full list or option "-v" for details):\n\n'))
745 745 elif ui.verbose:
746 746 ui.status(_('list of commands:\n\n'))
747 747 else:
748 748 ui.status(_('list of commands (use "hg help -v" '
749 749 'to show aliases and global options):\n\n'))
750 750
751 751 helplist()
752 752
753 753 # global options
754 754 if ui.verbose:
755 755 option_lists.append(("global options", globalopts))
756 756
757 757 # list all option lists
758 758 opt_output = []
759 759 for title, options in option_lists:
760 760 opt_output.append(("\n%s:\n" % title, None))
761 761 for shortopt, longopt, default, desc in options:
762 762 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
763 763 longopt and " --%s" % longopt),
764 764 "%s%s" % (desc,
765 765 default
766 766 and _(" (default: %s)") % default
767 767 or "")))
768 768
769 769 if opt_output:
770 770 opts_len = max([len(line[0]) for line in opt_output if line[1]])
771 771 for first, second in opt_output:
772 772 if second:
773 773 ui.write(" %-*s %s\n" % (opts_len, first, second))
774 774 else:
775 775 ui.write("%s\n" % first)
776 776
777 777 # Commands start here, listed alphabetically
778 778
779 779 def add(ui, repo, *pats, **opts):
780 780 """add the specified files on the next commit
781 781
782 782 Schedule files to be version controlled and added to the repository.
783 783
784 784 The files will be added to the repository at the next commit.
785 785
786 786 If no names are given, add all files in the repository.
787 787 """
788 788
789 789 names = []
790 790 for src, abs, rel, exact in walk(repo, pats, opts):
791 791 if exact:
792 792 if ui.verbose:
793 793 ui.status(_('adding %s\n') % rel)
794 794 names.append(abs)
795 795 elif repo.dirstate.state(abs) == '?':
796 796 ui.status(_('adding %s\n') % rel)
797 797 names.append(abs)
798 798 if not opts.get('dry_run'):
799 799 repo.add(names)
800 800
801 801 def addremove(ui, repo, *pats, **opts):
802 802 """add all new files, delete all missing files (DEPRECATED)
803 803
804 804 (DEPRECATED)
805 805 Add all new files and remove all missing files from the repository.
806 806
807 807 New files are ignored if they match any of the patterns in .hgignore. As
808 808 with add, these changes take effect at the next commit.
809 809
810 810 This command is now deprecated and will be removed in a future
811 811 release. Please use add and remove --after instead.
812 812 """
813 813 ui.warn(_('(the addremove command is deprecated; use add and remove '
814 814 '--after instead)\n'))
815 815 return addremove_lock(ui, repo, pats, opts)
816 816
817 817 def addremove_lock(ui, repo, pats, opts, wlock=None):
818 818 add, remove = [], []
819 819 for src, abs, rel, exact in walk(repo, pats, opts):
820 820 if src == 'f' and repo.dirstate.state(abs) == '?':
821 821 add.append(abs)
822 822 if ui.verbose or not exact:
823 823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
824 824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
825 825 remove.append(abs)
826 826 if ui.verbose or not exact:
827 827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
828 828 if not opts.get('dry_run'):
829 829 repo.add(add, wlock=wlock)
830 830 repo.remove(remove, wlock=wlock)
831 831
832 832 def annotate(ui, repo, *pats, **opts):
833 833 """show changeset information per file line
834 834
835 835 List changes in files, showing the revision id responsible for each line
836 836
837 837 This command is useful to discover who did a change or when a change took
838 838 place.
839 839
840 840 Without the -a option, annotate will avoid processing files it
841 841 detects as binary. With -a, annotate will generate an annotation
842 842 anyway, probably with undesirable results.
843 843 """
844 844 def getnode(rev):
845 845 return short(repo.changelog.node(rev))
846 846
847 847 ucache = {}
848 848 def getname(rev):
849 849 try:
850 850 return ucache[rev]
851 851 except:
852 852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
853 853 ucache[rev] = u
854 854 return u
855 855
856 856 dcache = {}
857 857 def getdate(rev):
858 858 datestr = dcache.get(rev)
859 859 if datestr is None:
860 860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
861 861 return datestr
862 862
863 863 if not pats:
864 864 raise util.Abort(_('at least one file name or pattern required'))
865 865
866 866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 867 ['date', getdate]]
868 868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 869 opts['number'] = 1
870 870
871 871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
872 872
873 873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
874 874 fctx = ctx.filectx(abs)
875 875 if not opts['text'] and util.binary(fctx.data()):
876 876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
877 877 continue
878 878
879 879 lines = fctx.annotate()
880 880 pieces = []
881 881
882 882 for o, f in opmap:
883 883 if opts[o]:
884 884 l = [f(n) for n, dummy in lines]
885 885 if l:
886 886 m = max(map(len, l))
887 887 pieces.append(["%*s" % (m, x) for x in l])
888 888
889 889 if pieces:
890 890 for p, l in zip(zip(*pieces), lines):
891 891 ui.write("%s: %s" % (" ".join(p), l[1]))
892 892
893 893 def archive(ui, repo, dest, **opts):
894 894 '''create unversioned archive of a repository revision
895 895
896 896 By default, the revision used is the parent of the working
897 897 directory; use "-r" to specify a different revision.
898 898
899 899 To specify the type of archive to create, use "-t". Valid
900 900 types are:
901 901
902 902 "files" (default): a directory full of files
903 903 "tar": tar archive, uncompressed
904 904 "tbz2": tar archive, compressed using bzip2
905 905 "tgz": tar archive, compressed using gzip
906 906 "uzip": zip archive, uncompressed
907 907 "zip": zip archive, compressed using deflate
908 908
909 909 The exact name of the destination archive or directory is given
910 910 using a format string; see "hg help export" for details.
911 911
912 912 Each member added to an archive file has a directory prefix
913 913 prepended. Use "-p" to specify a format string for the prefix.
914 914 The default is the basename of the archive, with suffixes removed.
915 915 '''
916 916
917 917 if opts['rev']:
918 918 node = repo.lookup(opts['rev'])
919 919 else:
920 920 node, p2 = repo.dirstate.parents()
921 921 if p2 != nullid:
922 922 raise util.Abort(_('uncommitted merge - please provide a '
923 923 'specific revision'))
924 924
925 925 dest = make_filename(repo, dest, node)
926 926 if os.path.realpath(dest) == repo.root:
927 927 raise util.Abort(_('repository root cannot be destination'))
928 928 dummy, matchfn, dummy = matchpats(repo, [], opts)
929 929 kind = opts.get('type') or 'files'
930 930 prefix = opts['prefix']
931 931 if dest == '-':
932 932 if kind == 'files':
933 933 raise util.Abort(_('cannot archive plain files to stdout'))
934 934 dest = sys.stdout
935 935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
936 936 prefix = make_filename(repo, prefix, node)
937 937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
938 938 matchfn, prefix)
939 939
940 940 def backout(ui, repo, rev, **opts):
941 941 '''reverse effect of earlier changeset
942 942
943 943 Commit the backed out changes as a new changeset. The new
944 944 changeset is a child of the backed out changeset.
945 945
946 946 If you back out a changeset other than the tip, a new head is
947 947 created. This head is the parent of the working directory. If
948 948 you back out an old changeset, your working directory will appear
949 949 old after the backout. You should merge the backout changeset
950 950 with another head.
951 951
952 952 The --merge option remembers the parent of the working directory
953 953 before starting the backout, then merges the new head with that
954 954 changeset afterwards. This saves you from doing the merge by
955 955 hand. The result of this merge is not committed, as for a normal
956 956 merge.'''
957 957
958 958 bail_if_changed(repo)
959 959 op1, op2 = repo.dirstate.parents()
960 960 if op2 != nullid:
961 961 raise util.Abort(_('outstanding uncommitted merge'))
962 962 node = repo.lookup(rev)
963 963 p1, p2 = repo.changelog.parents(node)
964 964 if p1 == nullid:
965 965 raise util.Abort(_('cannot back out a change with no parents'))
966 966 if p2 != nullid:
967 967 if not opts['parent']:
968 968 raise util.Abort(_('cannot back out a merge changeset without '
969 969 '--parent'))
970 970 p = repo.lookup(opts['parent'])
971 971 if p not in (p1, p2):
972 972 raise util.Abort(_('%s is not a parent of %s' %
973 973 (short(p), short(node))))
974 974 parent = p
975 975 else:
976 976 if opts['parent']:
977 977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
978 978 parent = p1
979 979 hg.clean(repo, node, show_stats=False)
980 980 revert_opts = opts.copy()
981 981 revert_opts['rev'] = hex(parent)
982 982 revert(ui, repo, **revert_opts)
983 983 commit_opts = opts.copy()
984 984 commit_opts['addremove'] = False
985 985 if not commit_opts['message'] and not commit_opts['logfile']:
986 986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
987 987 commit_opts['force_editor'] = True
988 988 commit(ui, repo, **commit_opts)
989 989 def nice(node):
990 990 return '%d:%s' % (repo.changelog.rev(node), short(node))
991 991 ui.status(_('changeset %s backs out changeset %s\n') %
992 992 (nice(repo.changelog.tip()), nice(node)))
993 993 if op1 != node:
994 994 if opts['merge']:
995 995 ui.status(_('merging with changeset %s\n') % nice(op1))
996 996 n = _lookup(repo, hex(op1))
997 997 hg.merge(repo, n)
998 998 else:
999 999 ui.status(_('the backout changeset is a new head - '
1000 1000 'do not forget to merge\n'))
1001 1001 ui.status(_('(use "backout --merge" '
1002 1002 'if you want to auto-merge)\n'))
1003 1003
1004 1004 def bundle(ui, repo, fname, dest=None, **opts):
1005 1005 """create a changegroup file
1006 1006
1007 1007 Generate a compressed changegroup file collecting all changesets
1008 1008 not found in the other repository.
1009 1009
1010 1010 This file can then be transferred using conventional means and
1011 1011 applied to another repository with the unbundle command. This is
1012 1012 useful when native push and pull are not available or when
1013 1013 exporting an entire repository is undesirable. The standard file
1014 1014 extension is ".hg".
1015 1015
1016 1016 Unlike import/export, this exactly preserves all changeset
1017 1017 contents including permissions, rename data, and revision history.
1018 1018 """
1019 1019 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1020 1020 other = hg.repository(ui, dest)
1021 1021 o = repo.findoutgoing(other, force=opts['force'])
1022 1022 cg = repo.changegroup(o, 'bundle')
1023 1023 write_bundle(cg, fname)
1024 1024
1025 1025 def cat(ui, repo, file1, *pats, **opts):
1026 1026 """output the latest or given revisions of files
1027 1027
1028 1028 Print the specified files as they were at the given revision.
1029 1029 If no revision is given then the tip is used.
1030 1030
1031 1031 Output may be to a file, in which case the name of the file is
1032 1032 given using a format string. The formatting rules are the same as
1033 1033 for the export command, with the following additions:
1034 1034
1035 1035 %s basename of file being printed
1036 1036 %d dirname of file being printed, or '.' if in repo root
1037 1037 %p root-relative path name of file being printed
1038 1038 """
1039 1039 ctx = repo.changectx(opts['rev'] or "-1")
1040 1040 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1041 1041 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1042 1042 fp.write(ctx.filectx(abs).data())
1043 1043
1044 1044 def clone(ui, source, dest=None, **opts):
1045 1045 """make a copy of an existing repository
1046 1046
1047 1047 Create a copy of an existing repository in a new directory.
1048 1048
1049 1049 If no destination directory name is specified, it defaults to the
1050 1050 basename of the source.
1051 1051
1052 1052 The location of the source is added to the new repository's
1053 1053 .hg/hgrc file, as the default to be used for future pulls.
1054 1054
1055 1055 For efficiency, hardlinks are used for cloning whenever the source
1056 1056 and destination are on the same filesystem (note this applies only
1057 1057 to the repository data, not to the checked out files). Some
1058 1058 filesystems, such as AFS, implement hardlinking incorrectly, but
1059 1059 do not report errors. In these cases, use the --pull option to
1060 1060 avoid hardlinking.
1061 1061
1062 1062 You can safely clone repositories and checked out files using full
1063 1063 hardlinks with
1064 1064
1065 1065 $ cp -al REPO REPOCLONE
1066 1066
1067 1067 which is the fastest way to clone. However, the operation is not
1068 1068 atomic (making sure REPO is not modified during the operation is
1069 1069 up to you) and you have to make sure your editor breaks hardlinks
1070 1070 (Emacs and most Linux Kernel tools do so).
1071 1071
1072 1072 If you use the -r option to clone up to a specific revision, no
1073 1073 subsequent revisions will be present in the cloned repository.
1074 1074 This option implies --pull, even on local repositories.
1075 1075
1076 1076 See pull for valid source format details.
1077 1077
1078 1078 It is possible to specify an ssh:// URL as the destination, but no
1079 1079 .hg/hgrc will be created on the remote side. Look at the help text
1080 1080 for the pull command for important details about ssh:// URLs.
1081 1081 """
1082 1082 setremoteconfig(ui, opts)
1083 1083 hg.clone(ui, ui.expandpath(source), dest,
1084 1084 pull=opts['pull'],
1085 1085 stream=opts['uncompressed'],
1086 1086 rev=opts['rev'],
1087 1087 update=not opts['noupdate'])
1088 1088
1089 1089 def commit(ui, repo, *pats, **opts):
1090 1090 """commit the specified files or all outstanding changes
1091 1091
1092 1092 Commit changes to the given files into the repository.
1093 1093
1094 1094 If a list of files is omitted, all changes reported by "hg status"
1095 1095 will be committed.
1096 1096
1097 1097 If no commit message is specified, the editor configured in your hgrc
1098 1098 or in the EDITOR environment variable is started to enter a message.
1099 1099 """
1100 1100 message = logmessage(opts)
1101 1101
1102 1102 if opts['addremove']:
1103 1103 addremove_lock(ui, repo, pats, opts)
1104 1104 fns, match, anypats = matchpats(repo, pats, opts)
1105 1105 if pats:
1106 1106 modified, added, removed, deleted, unknown = (
1107 1107 repo.changes(files=fns, match=match))
1108 1108 files = modified + added + removed
1109 1109 else:
1110 1110 files = []
1111 1111 try:
1112 1112 repo.commit(files, message, opts['user'], opts['date'], match,
1113 1113 force_editor=opts.get('force_editor'))
1114 1114 except ValueError, inst:
1115 1115 raise util.Abort(str(inst))
1116 1116
1117 1117 def docopy(ui, repo, pats, opts, wlock):
1118 1118 # called with the repo lock held
1119 1119 cwd = repo.getcwd()
1120 1120 errors = 0
1121 1121 copied = []
1122 1122 targets = {}
1123 1123
1124 1124 def okaytocopy(abs, rel, exact):
1125 1125 reasons = {'?': _('is not managed'),
1126 1126 'a': _('has been marked for add'),
1127 1127 'r': _('has been marked for remove')}
1128 1128 state = repo.dirstate.state(abs)
1129 1129 reason = reasons.get(state)
1130 1130 if reason:
1131 1131 if state == 'a':
1132 1132 origsrc = repo.dirstate.copied(abs)
1133 1133 if origsrc is not None:
1134 1134 return origsrc
1135 1135 if exact:
1136 1136 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1137 1137 else:
1138 1138 return abs
1139 1139
1140 1140 def copy(origsrc, abssrc, relsrc, target, exact):
1141 1141 abstarget = util.canonpath(repo.root, cwd, target)
1142 1142 reltarget = util.pathto(cwd, abstarget)
1143 1143 prevsrc = targets.get(abstarget)
1144 1144 if prevsrc is not None:
1145 1145 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1146 1146 (reltarget, abssrc, prevsrc))
1147 1147 return
1148 1148 if (not opts['after'] and os.path.exists(reltarget) or
1149 1149 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1150 1150 if not opts['force']:
1151 1151 ui.warn(_('%s: not overwriting - file exists\n') %
1152 1152 reltarget)
1153 1153 return
1154 1154 if not opts['after'] and not opts.get('dry_run'):
1155 1155 os.unlink(reltarget)
1156 1156 if opts['after']:
1157 1157 if not os.path.exists(reltarget):
1158 1158 return
1159 1159 else:
1160 1160 targetdir = os.path.dirname(reltarget) or '.'
1161 1161 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1162 1162 os.makedirs(targetdir)
1163 1163 try:
1164 1164 restore = repo.dirstate.state(abstarget) == 'r'
1165 1165 if restore and not opts.get('dry_run'):
1166 1166 repo.undelete([abstarget], wlock)
1167 1167 try:
1168 1168 if not opts.get('dry_run'):
1169 1169 shutil.copyfile(relsrc, reltarget)
1170 1170 shutil.copymode(relsrc, reltarget)
1171 1171 restore = False
1172 1172 finally:
1173 1173 if restore:
1174 1174 repo.remove([abstarget], wlock)
1175 1175 except shutil.Error, inst:
1176 1176 raise util.Abort(str(inst))
1177 1177 except IOError, inst:
1178 1178 if inst.errno == errno.ENOENT:
1179 1179 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1180 1180 else:
1181 1181 ui.warn(_('%s: cannot copy - %s\n') %
1182 1182 (relsrc, inst.strerror))
1183 1183 errors += 1
1184 1184 return
1185 1185 if ui.verbose or not exact:
1186 1186 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1187 1187 targets[abstarget] = abssrc
1188 1188 if abstarget != origsrc and not opts.get('dry_run'):
1189 1189 repo.copy(origsrc, abstarget, wlock)
1190 1190 copied.append((abssrc, relsrc, exact))
1191 1191
1192 1192 def targetpathfn(pat, dest, srcs):
1193 1193 if os.path.isdir(pat):
1194 1194 abspfx = util.canonpath(repo.root, cwd, pat)
1195 1195 if destdirexists:
1196 1196 striplen = len(os.path.split(abspfx)[0])
1197 1197 else:
1198 1198 striplen = len(abspfx)
1199 1199 if striplen:
1200 1200 striplen += len(os.sep)
1201 1201 res = lambda p: os.path.join(dest, p[striplen:])
1202 1202 elif destdirexists:
1203 1203 res = lambda p: os.path.join(dest, os.path.basename(p))
1204 1204 else:
1205 1205 res = lambda p: dest
1206 1206 return res
1207 1207
1208 1208 def targetpathafterfn(pat, dest, srcs):
1209 1209 if util.patkind(pat, None)[0]:
1210 1210 # a mercurial pattern
1211 1211 res = lambda p: os.path.join(dest, os.path.basename(p))
1212 1212 else:
1213 1213 abspfx = util.canonpath(repo.root, cwd, pat)
1214 1214 if len(abspfx) < len(srcs[0][0]):
1215 1215 # A directory. Either the target path contains the last
1216 1216 # component of the source path or it does not.
1217 1217 def evalpath(striplen):
1218 1218 score = 0
1219 1219 for s in srcs:
1220 1220 t = os.path.join(dest, s[0][striplen:])
1221 1221 if os.path.exists(t):
1222 1222 score += 1
1223 1223 return score
1224 1224
1225 1225 striplen = len(abspfx)
1226 1226 if striplen:
1227 1227 striplen += len(os.sep)
1228 1228 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1229 1229 score = evalpath(striplen)
1230 1230 striplen1 = len(os.path.split(abspfx)[0])
1231 1231 if striplen1:
1232 1232 striplen1 += len(os.sep)
1233 1233 if evalpath(striplen1) > score:
1234 1234 striplen = striplen1
1235 1235 res = lambda p: os.path.join(dest, p[striplen:])
1236 1236 else:
1237 1237 # a file
1238 1238 if destdirexists:
1239 1239 res = lambda p: os.path.join(dest, os.path.basename(p))
1240 1240 else:
1241 1241 res = lambda p: dest
1242 1242 return res
1243 1243
1244 1244
1245 1245 pats = list(pats)
1246 1246 if not pats:
1247 1247 raise util.Abort(_('no source or destination specified'))
1248 1248 if len(pats) == 1:
1249 1249 raise util.Abort(_('no destination specified'))
1250 1250 dest = pats.pop()
1251 1251 destdirexists = os.path.isdir(dest)
1252 1252 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1253 1253 raise util.Abort(_('with multiple sources, destination must be an '
1254 1254 'existing directory'))
1255 1255 if opts['after']:
1256 1256 tfn = targetpathafterfn
1257 1257 else:
1258 1258 tfn = targetpathfn
1259 1259 copylist = []
1260 1260 for pat in pats:
1261 1261 srcs = []
1262 1262 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1263 1263 origsrc = okaytocopy(abssrc, relsrc, exact)
1264 1264 if origsrc:
1265 1265 srcs.append((origsrc, abssrc, relsrc, exact))
1266 1266 if not srcs:
1267 1267 continue
1268 1268 copylist.append((tfn(pat, dest, srcs), srcs))
1269 1269 if not copylist:
1270 1270 raise util.Abort(_('no files to copy'))
1271 1271
1272 1272 for targetpath, srcs in copylist:
1273 1273 for origsrc, abssrc, relsrc, exact in srcs:
1274 1274 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1275 1275
1276 1276 if errors:
1277 1277 ui.warn(_('(consider using --after)\n'))
1278 1278 return errors, copied
1279 1279
1280 1280 def copy(ui, repo, *pats, **opts):
1281 1281 """mark files as copied for the next commit
1282 1282
1283 1283 Mark dest as having copies of source files. If dest is a
1284 1284 directory, copies are put in that directory. If dest is a file,
1285 1285 there can only be one source.
1286 1286
1287 1287 By default, this command copies the contents of files as they
1288 1288 stand in the working directory. If invoked with --after, the
1289 1289 operation is recorded, but no copying is performed.
1290 1290
1291 1291 This command takes effect in the next commit.
1292 1292
1293 1293 NOTE: This command should be treated as experimental. While it
1294 1294 should properly record copied files, this information is not yet
1295 1295 fully used by merge, nor fully reported by log.
1296 1296 """
1297 1297 wlock = repo.wlock(0)
1298 1298 errs, copied = docopy(ui, repo, pats, opts, wlock)
1299 1299 return errs
1300 1300
1301 1301 def debugancestor(ui, index, rev1, rev2):
1302 1302 """find the ancestor revision of two revisions in a given index"""
1303 1303 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1304 1304 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1305 1305 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1306 1306
1307 1307 def debugcomplete(ui, cmd='', **opts):
1308 1308 """returns the completion list associated with the given command"""
1309 1309
1310 1310 if opts['options']:
1311 1311 options = []
1312 1312 otables = [globalopts]
1313 1313 if cmd:
1314 1314 aliases, entry = findcmd(cmd)
1315 1315 otables.append(entry[1])
1316 1316 for t in otables:
1317 1317 for o in t:
1318 1318 if o[0]:
1319 1319 options.append('-%s' % o[0])
1320 1320 options.append('--%s' % o[1])
1321 1321 ui.write("%s\n" % "\n".join(options))
1322 1322 return
1323 1323
1324 1324 clist = findpossible(cmd).keys()
1325 1325 clist.sort()
1326 1326 ui.write("%s\n" % "\n".join(clist))
1327 1327
1328 1328 def debugrebuildstate(ui, repo, rev=None):
1329 1329 """rebuild the dirstate as it would look like for the given revision"""
1330 1330 if not rev:
1331 1331 rev = repo.changelog.tip()
1332 1332 else:
1333 1333 rev = repo.lookup(rev)
1334 1334 change = repo.changelog.read(rev)
1335 1335 n = change[0]
1336 1336 files = repo.manifest.readflags(n)
1337 1337 wlock = repo.wlock()
1338 1338 repo.dirstate.rebuild(rev, files.iteritems())
1339 1339
1340 1340 def debugcheckstate(ui, repo):
1341 1341 """validate the correctness of the current dirstate"""
1342 1342 parent1, parent2 = repo.dirstate.parents()
1343 1343 repo.dirstate.read()
1344 1344 dc = repo.dirstate.map
1345 1345 keys = dc.keys()
1346 1346 keys.sort()
1347 1347 m1n = repo.changelog.read(parent1)[0]
1348 1348 m2n = repo.changelog.read(parent2)[0]
1349 1349 m1 = repo.manifest.read(m1n)
1350 1350 m2 = repo.manifest.read(m2n)
1351 1351 errors = 0
1352 1352 for f in dc:
1353 1353 state = repo.dirstate.state(f)
1354 1354 if state in "nr" and f not in m1:
1355 1355 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1356 1356 errors += 1
1357 1357 if state in "a" and f in m1:
1358 1358 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1359 1359 errors += 1
1360 1360 if state in "m" and f not in m1 and f not in m2:
1361 1361 ui.warn(_("%s in state %s, but not in either manifest\n") %
1362 1362 (f, state))
1363 1363 errors += 1
1364 1364 for f in m1:
1365 1365 state = repo.dirstate.state(f)
1366 1366 if state not in "nrm":
1367 1367 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1368 1368 errors += 1
1369 1369 if errors:
1370 1370 error = _(".hg/dirstate inconsistent with current parent's manifest")
1371 1371 raise util.Abort(error)
1372 1372
1373 1373 def debugconfig(ui, repo, *values):
1374 1374 """show combined config settings from all hgrc files
1375 1375
1376 1376 With no args, print names and values of all config items.
1377 1377
1378 1378 With one arg of the form section.name, print just the value of
1379 1379 that config item.
1380 1380
1381 1381 With multiple args, print names and values of all config items
1382 1382 with matching section names."""
1383 1383
1384 1384 if values:
1385 1385 if len([v for v in values if '.' in v]) > 1:
1386 1386 raise util.Abort(_('only one config item permitted'))
1387 1387 for section, name, value in ui.walkconfig():
1388 1388 sectname = section + '.' + name
1389 1389 if values:
1390 1390 for v in values:
1391 1391 if v == section:
1392 1392 ui.write('%s=%s\n' % (sectname, value))
1393 1393 elif v == sectname:
1394 1394 ui.write(value, '\n')
1395 1395 else:
1396 1396 ui.write('%s=%s\n' % (sectname, value))
1397 1397
1398 1398 def debugsetparents(ui, repo, rev1, rev2=None):
1399 1399 """manually set the parents of the current working directory
1400 1400
1401 1401 This is useful for writing repository conversion tools, but should
1402 1402 be used with care.
1403 1403 """
1404 1404
1405 1405 if not rev2:
1406 1406 rev2 = hex(nullid)
1407 1407
1408 1408 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1409 1409
1410 1410 def debugstate(ui, repo):
1411 1411 """show the contents of the current dirstate"""
1412 1412 repo.dirstate.read()
1413 1413 dc = repo.dirstate.map
1414 1414 keys = dc.keys()
1415 1415 keys.sort()
1416 1416 for file_ in keys:
1417 1417 ui.write("%c %3o %10d %s %s\n"
1418 1418 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1419 1419 time.strftime("%x %X",
1420 1420 time.localtime(dc[file_][3])), file_))
1421 1421 for f in repo.dirstate.copies:
1422 1422 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1423 1423
1424 1424 def debugdata(ui, file_, rev):
1425 1425     """dump the contents of a data file revision"""
1426 1426 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1427 1427 file_[:-2] + ".i", file_, 0)
1428 1428 try:
1429 1429 ui.write(r.revision(r.lookup(rev)))
1430 1430 except KeyError:
1431 1431         raise util.Abort(_('invalid revision identifier %s') % rev)
1432 1432
1433 1433 def debugindex(ui, file_):
1434 1434 """dump the contents of an index file"""
1435 1435 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1436 1436 ui.write(" rev offset length base linkrev" +
1437 1437 " nodeid p1 p2\n")
1438 1438 for i in range(r.count()):
1439 1439 node = r.node(i)
1440 1440 pp = r.parents(node)
1441 1441 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1442 1442 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1443 1443 short(node), short(pp[0]), short(pp[1])))
1444 1444
1445 1445 def debugindexdot(ui, file_):
1446 1446 """dump an index DAG as a .dot file"""
1447 1447 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1448 1448 ui.write("digraph G {\n")
1449 1449 for i in range(r.count()):
1450 1450 node = r.node(i)
1451 1451 pp = r.parents(node)
1452 1452 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1453 1453 if pp[1] != nullid:
1454 1454 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1455 1455 ui.write("}\n")
1456 1456
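debugindexdot walks the revlog and prints one Graphviz edge per parent. Essentially the same rendering for a plain {rev: (p1, p2)} map, with -1 standing in for nullid and null parents skipped for both slots (the example map is made up):

def dag_to_dot(parentmap):
    '''Render a {rev: (p1rev, p2rev)} DAG as Graphviz source; -1 means null.'''
    lines = ["digraph G {"]
    for rev in sorted(parentmap):
        for p in parentmap[rev]:
            if p != -1:
                lines.append("\t%d -> %d" % (p, rev))
    lines.append("}")
    return "\n".join(lines)

print(dag_to_dot({0: (-1, -1), 1: (0, -1), 2: (0, 1)}))
# digraph G {  0 -> 1  0 -> 2  1 -> 2  }
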
1457 1457 def debugrename(ui, repo, file, rev=None):
1458 1458 """dump rename information"""
1459 1459 r = repo.file(relpath(repo, [file])[0])
1460 1460 if rev:
1461 1461 try:
1462 1462 # assume all revision numbers are for changesets
1463 1463 n = repo.lookup(rev)
1464 1464 change = repo.changelog.read(n)
1465 1465 m = repo.manifest.read(change[0])
1466 1466 n = m[relpath(repo, [file])[0]]
1467 1467 except (hg.RepoError, KeyError):
1468 1468 n = r.lookup(rev)
1469 1469 else:
1470 1470 n = r.tip()
1471 1471 m = r.renamed(n)
1472 1472 if m:
1473 1473 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1474 1474 else:
1475 1475 ui.write(_("not renamed\n"))
1476 1476
1477 1477 def debugwalk(ui, repo, *pats, **opts):
1478 1478 """show how files match on given patterns"""
1479 1479 items = list(walk(repo, pats, opts))
1480 1480 if not items:
1481 1481 return
1482 1482 fmt = '%%s %%-%ds %%-%ds %%s' % (
1483 1483 max([len(abs) for (src, abs, rel, exact) in items]),
1484 1484 max([len(rel) for (src, abs, rel, exact) in items]))
1485 1485 for src, abs, rel, exact in items:
1486 1486 line = fmt % (src, abs, rel, exact and 'exact' or '')
1487 1487 ui.write("%s\n" % line.rstrip())
1488 1488
1489 1489 def diff(ui, repo, *pats, **opts):
1490 1490 """diff repository (or selected files)
1491 1491
1492 1492 Show differences between revisions for the specified files.
1493 1493
1494 1494 Differences between files are shown using the unified diff format.
1495 1495
1496 1496 When two revision arguments are given, then changes are shown
1497 1497 between those revisions. If only one revision is specified then
1498 1498 that revision is compared to the working directory, and, when no
1499 1499 revisions are specified, the working directory files are compared
1500 1500 to its parent.
1501 1501
1502 1502 Without the -a option, diff will avoid generating diffs of files
1503 1503 it detects as binary. With -a, diff will generate a diff anyway,
1504 1504 probably with undesirable results.
1505 1505 """
1506 1506 node1, node2 = revpair(ui, repo, opts['rev'])
1507 1507
1508 1508 fns, matchfn, anypats = matchpats(repo, pats, opts)
1509 1509
1510 1510 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1511 1511 text=opts['text'], opts=opts)
1512 1512
1513 1513 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1514 1514 node = repo.lookup(changeset)
1515 1515 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1516 1516 if opts['switch_parent']:
1517 1517 parents.reverse()
1518 1518 prev = (parents and parents[0]) or nullid
1519 1519 change = repo.changelog.read(node)
1520 1520
1521 1521 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1522 1522 revwidth=revwidth)
1523 1523 if fp != sys.stdout:
1524 1524 ui.note("%s\n" % fp.name)
1525 1525
1526 1526 fp.write("# HG changeset patch\n")
1527 1527 fp.write("# User %s\n" % change[1])
1528 1528 fp.write("# Date %d %d\n" % change[2])
1529 1529 fp.write("# Node ID %s\n" % hex(node))
1530 1530 fp.write("# Parent %s\n" % hex(prev))
1531 1531 if len(parents) > 1:
1532 1532 fp.write("# Parent %s\n" % hex(parents[1]))
1533 1533 fp.write(change[4].rstrip())
1534 1534 fp.write("\n\n")
1535 1535
1536 1536 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1537 1537 if fp != sys.stdout:
1538 1538 fp.close()
1539 1539
1540 1540 def export(ui, repo, *changesets, **opts):
1541 1541 """dump the header and diffs for one or more changesets
1542 1542
1543 1543 Print the changeset header and diffs for one or more revisions.
1544 1544
1545 1545 The information shown in the changeset header is: author,
1546 1546 changeset hash, parent and commit comment.
1547 1547
1548 1548 Output may be to a file, in which case the name of the file is
1549 1549 given using a format string. The formatting rules are as follows:
1550 1550
1551 1551 %% literal "%" character
1552 1552     %H changeset hash (40 hexadecimal digits)
1553 1553 %N number of patches being generated
1554 1554 %R changeset revision number
1555 1555 %b basename of the exporting repository
1556 1556     %h short-form changeset hash (12 hexadecimal digits)
1557 1557 %n zero-padded sequence number, starting at 1
1558 1558 %r zero-padded changeset revision number
1559 1559
1560 1560 Without the -a option, export will avoid generating diffs of files
1561 1561 it detects as binary. With -a, export will generate a diff anyway,
1562 1562 probably with undesirable results.
1563 1563
1564 1564 With the --switch-parent option, the diff will be against the second
1565 1565 parent. It can be useful to review a merge.
1566 1566 """
1567 1567 if not changesets:
1568 1568 raise util.Abort(_("export requires at least one changeset"))
1569 1569 seqno = 0
1570 1570 revs = list(revrange(ui, repo, changesets))
1571 1571 total = len(revs)
1572 1572 revwidth = max(map(len, revs))
1573 1573 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1574 1574 ui.note(msg)
1575 1575 for cset in revs:
1576 1576 seqno += 1
1577 1577 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1578 1578
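doexport turns the format string documented above into an output file name via make_file (defined earlier in this module, not shown in this hunk). A hedged illustration of how those codes could be expanded; the expand helper and its argument list are made up for the example:

def expand(fmt, node_hex, rev, seqno, total, revwidth, repobase):
    '''Expand the export name format codes described in the help text.'''
    subs = {
        '%': '%',
        'H': node_hex,                            # full 40-digit hash
        'N': str(total),                          # number of patches
        'R': str(rev),                            # revision number
        'b': repobase,                            # basename of the repository
        'h': node_hex[:12],                       # short-form hash
        'n': '%0*d' % (len(str(total)), seqno),   # zero-padded sequence number
        'r': '%0*d' % (revwidth, rev),            # zero-padded revision number
    }
    out, i = [], 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] in subs:
            out.append(subs[fmt[i + 1]])
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

assert expand('%b-%r.patch', 'f' * 40, 7, 1, 1, 4, 'repo') == 'repo-0007.patch'
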
1579 1579 def forget(ui, repo, *pats, **opts):
1580 1580 """don't add the specified files on the next commit (DEPRECATED)
1581 1581
1582 1582 (DEPRECATED)
1583 1583 Undo an 'hg add' scheduled for the next commit.
1584 1584
1585 1585 This command is now deprecated and will be removed in a future
1586 1586 release. Please use revert instead.
1587 1587 """
1588 1588 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1589 1589 forget = []
1590 1590 for src, abs, rel, exact in walk(repo, pats, opts):
1591 1591 if repo.dirstate.state(abs) == 'a':
1592 1592 forget.append(abs)
1593 1593 if ui.verbose or not exact:
1594 1594 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1595 1595 repo.forget(forget)
1596 1596
1597 1597 def grep(ui, repo, pattern, *pats, **opts):
1598 1598 """search for a pattern in specified files and revisions
1599 1599
1600 1600 Search revisions of files for a regular expression.
1601 1601
1602 1602 This command behaves differently than Unix grep. It only accepts
1603 1603 Python/Perl regexps. It searches repository history, not the
1604 1604 working directory. It always prints the revision number in which
1605 1605 a match appears.
1606 1606
1607 1607 By default, grep only prints output for the first revision of a
1608 1608 file in which it finds a match. To get it to print every revision
1609 1609 that contains a change in match status ("-" for a match that
1610 1610 becomes a non-match, or "+" for a non-match that becomes a match),
1611 1611 use the --all flag.
1612 1612 """
1613 1613 reflags = 0
1614 1614 if opts['ignore_case']:
1615 1615 reflags |= re.I
1616 1616 regexp = re.compile(pattern, reflags)
1617 1617 sep, eol = ':', '\n'
1618 1618 if opts['print0']:
1619 1619 sep = eol = '\0'
1620 1620
1621 1621 fcache = {}
1622 1622 def getfile(fn):
1623 1623 if fn not in fcache:
1624 1624 fcache[fn] = repo.file(fn)
1625 1625 return fcache[fn]
1626 1626
1627 1627 def matchlines(body):
1628 1628 begin = 0
1629 1629 linenum = 0
1630 1630 while True:
1631 1631 match = regexp.search(body, begin)
1632 1632 if not match:
1633 1633 break
1634 1634 mstart, mend = match.span()
1635 1635 linenum += body.count('\n', begin, mstart) + 1
1636 1636 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1637 1637 lend = body.find('\n', mend)
1638 1638 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1639 1639 begin = lend + 1
1640 1640
1641 1641 class linestate(object):
1642 1642 def __init__(self, line, linenum, colstart, colend):
1643 1643 self.line = line
1644 1644 self.linenum = linenum
1645 1645 self.colstart = colstart
1646 1646 self.colend = colend
1647 1647 def __eq__(self, other):
1648 1648 return self.line == other.line
1649 1649 def __hash__(self):
1650 1650 return hash(self.line)
1651 1651
1652 1652 matches = {}
1653 1653 def grepbody(fn, rev, body):
1654 1654 matches[rev].setdefault(fn, {})
1655 1655 m = matches[rev][fn]
1656 1656 for lnum, cstart, cend, line in matchlines(body):
1657 1657 s = linestate(line, lnum, cstart, cend)
1658 1658 m[s] = s
1659 1659
1660 1660 # FIXME: prev isn't used, why ?
1661 1661 prev = {}
1662 1662 ucache = {}
1663 1663 def display(fn, rev, states, prevstates):
1664 1664 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1665 1665 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1666 1666 counts = {'-': 0, '+': 0}
1667 1667 filerevmatches = {}
1668 1668 for l in diff:
1669 1669 if incrementing or not opts['all']:
1670 1670 change = ((l in prevstates) and '-') or '+'
1671 1671 r = rev
1672 1672 else:
1673 1673 change = ((l in states) and '-') or '+'
1674 1674 r = prev[fn]
1675 1675 cols = [fn, str(rev)]
1676 1676 if opts['line_number']:
1677 1677 cols.append(str(l.linenum))
1678 1678 if opts['all']:
1679 1679 cols.append(change)
1680 1680 if opts['user']:
1681 1681 cols.append(trimuser(ui, getchange(rev)[1], rev,
1682 1682 ucache))
1683 1683 if opts['files_with_matches']:
1684 1684 c = (fn, rev)
1685 1685 if c in filerevmatches:
1686 1686 continue
1687 1687 filerevmatches[c] = 1
1688 1688 else:
1689 1689 cols.append(l.line)
1690 1690 ui.write(sep.join(cols), eol)
1691 1691 counts[change] += 1
1692 1692 return counts['+'], counts['-']
1693 1693
1694 1694 fstate = {}
1695 1695 skip = {}
1696 1696 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1697 1697 count = 0
1698 1698 incrementing = False
1699 1699 for st, rev, fns in changeiter:
1700 1700 if st == 'window':
1701 1701 incrementing = rev
1702 1702 matches.clear()
1703 1703 elif st == 'add':
1704 1704 change = repo.changelog.read(repo.lookup(str(rev)))
1705 1705 mf = repo.manifest.read(change[0])
1706 1706 matches[rev] = {}
1707 1707 for fn in fns:
1708 1708 if fn in skip:
1709 1709 continue
1710 1710 fstate.setdefault(fn, {})
1711 1711 try:
1712 1712 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1713 1713 except KeyError:
1714 1714 pass
1715 1715 elif st == 'iter':
1716 1716 states = matches[rev].items()
1717 1717 states.sort()
1718 1718 for fn, m in states:
1719 1719 if fn in skip:
1720 1720 continue
1721 1721 if incrementing or not opts['all'] or fstate[fn]:
1722 1722 pos, neg = display(fn, rev, m, fstate[fn])
1723 1723 count += pos + neg
1724 1724 if pos and not opts['all']:
1725 1725 skip[fn] = True
1726 1726 fstate[fn] = m
1727 1727 prev[fn] = rev
1728 1728
1729 1729 if not incrementing:
1730 1730 fstate = fstate.items()
1731 1731 fstate.sort()
1732 1732 for fn, state in fstate:
1733 1733 if fn in skip:
1734 1734 continue
1735 1735 display(fn, rev, {}, state)
1736 1736 return (count == 0 and 1) or 0
1737 1737
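matchlines above scans a file body once, yielding the line number, column span and text of every regexp hit; linestate objects then let the symmetric_difference in display() pick out lines whose match status changed between revisions. The same scanning loop as a standalone helper:

import re

def matchlines(regexp, body):
    '''Yield (linenum, colstart, colend, line) for every match in body.'''
    begin = 0
    linenum = 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1

for hit in matchlines(re.compile(r'spam'), "spam\neggs and spam\nham\n"):
    print("%d:%d-%d: %s" % hit)
# 1:0-4: spam
# 2:9-13: eggs and spam
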
1738 1738 def heads(ui, repo, **opts):
1739 1739 """show current repository heads
1740 1740
1741 1741 Show all repository head changesets.
1742 1742
1743 1743     Repository "heads" are changesets that don't have child
1744 1744 changesets. They are where development generally takes place and
1745 1745 are the usual targets for update and merge operations.
1746 1746 """
1747 1747 if opts['rev']:
1748 1748 heads = repo.heads(repo.lookup(opts['rev']))
1749 1749 else:
1750 1750 heads = repo.heads()
1751 1751 br = None
1752 1752 if opts['branches']:
1753 1753 br = repo.branchlookup(heads)
1754 1754 displayer = show_changeset(ui, repo, opts)
1755 1755 for n in heads:
1756 1756 displayer.show(changenode=n, brinfo=br)
1757 1757
1758 1758 def identify(ui, repo):
1759 1759 """print information about the working copy
1760 1760
1761 1761 Print a short summary of the current state of the repo.
1762 1762
1763 1763 This summary identifies the repository state using one or two parent
1764 1764 hash identifiers, followed by a "+" if there are uncommitted changes
1765 1765 in the working directory, followed by a list of tags for this revision.
1766 1766 """
1767 1767 parents = [p for p in repo.dirstate.parents() if p != nullid]
1768 1768 if not parents:
1769 1769 ui.write(_("unknown\n"))
1770 1770 return
1771 1771
1772 1772 hexfunc = ui.verbose and hex or short
1773 1773 modified, added, removed, deleted, unknown = repo.changes()
1774 1774 output = ["%s%s" %
1775 1775 ('+'.join([hexfunc(parent) for parent in parents]),
1776 1776 (modified or added or removed or deleted) and "+" or "")]
1777 1777
1778 1778 if not ui.quiet:
1779 1779 # multiple tags for a single parent separated by '/'
1780 1780 parenttags = ['/'.join(tags)
1781 1781 for tags in map(repo.nodetags, parents) if tags]
1782 1782 # tags for multiple parents separated by ' + '
1783 1783 if parenttags:
1784 1784 output.append(' + '.join(parenttags))
1785 1785
1786 1786 ui.write("%s\n" % ' '.join(output))
1787 1787
1788 1788 def import_(ui, repo, patch1, *patches, **opts):
1789 1789 """import an ordered set of patches
1790 1790
1791 1791 Import a list of patches and commit them individually.
1792 1792
1793 1793 If there are outstanding changes in the working directory, import
1794 1794 will abort unless given the -f flag.
1795 1795
1796 1796 You can import a patch straight from a mail message. Even patches
1797 1797 as attachments work (body part must be type text/plain or
1798 1798 text/x-patch to be used). From and Subject headers of email
1799 1799 message are used as default committer and commit message. All
1800 1800 text/plain body parts before first diff are added to commit
1801 1801 message.
1802 1802
1803 1803 If imported patch was generated by hg export, user and description
1804 1804 from patch override values from message headers and body. Values
1805 1805 given on command line with -m and -u override these.
1806 1806
1807 1807 To read a patch from standard input, use patch name "-".
1808 1808 """
1809 1809 patches = (patch1,) + patches
1810 1810
1811 1811 if not opts['force']:
1812 1812 bail_if_changed(repo)
1813 1813
1814 1814 d = opts["base"]
1815 1815 strip = opts["strip"]
1816 1816
1817 1817 mailre = re.compile(r'(?:From |[\w-]+:)')
1818 1818
1819 1819 # attempt to detect the start of a patch
1820 1820 # (this heuristic is borrowed from quilt)
1821 1821 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1822 1822 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1823 1823 '(---|\*\*\*)[ \t])', re.MULTILINE)
1824 1824
1825 1825 for patch in patches:
1826 1826 pf = os.path.join(d, patch)
1827 1827
1828 1828 message = None
1829 1829 user = None
1830 1830 date = None
1831 1831 hgpatch = False
1832 1832
1833 1833 p = email.Parser.Parser()
1834 1834 if pf == '-':
1835 1835 msg = p.parse(sys.stdin)
1836 1836 ui.status(_("applying patch from stdin\n"))
1837 1837 else:
1838 1838 msg = p.parse(file(pf))
1839 1839 ui.status(_("applying %s\n") % patch)
1840 1840
1841 1841 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1842 1842 tmpfp = os.fdopen(fd, 'w')
1843 1843 try:
1844 1844 message = msg['Subject']
1845 1845 if message:
1846 1846 message = message.replace('\n\t', ' ')
1847 1847 ui.debug('Subject: %s\n' % message)
1848 1848 user = msg['From']
1849 1849 if user:
1850 1850 ui.debug('From: %s\n' % user)
1851 1851 diffs_seen = 0
1852 1852 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1853 1853 for part in msg.walk():
1854 1854 content_type = part.get_content_type()
1855 1855 ui.debug('Content-Type: %s\n' % content_type)
1856 1856 if content_type not in ok_types:
1857 1857 continue
1858 1858 payload = part.get_payload(decode=True)
1859 1859 m = diffre.search(payload)
1860 1860 if m:
1861 1861 ui.debug(_('found patch at byte %d\n') % m.start(0))
1862 1862 diffs_seen += 1
1863 1863 hgpatch = False
1864 1864 fp = cStringIO.StringIO()
1865 1865 if message:
1866 1866 fp.write(message)
1867 1867 fp.write('\n')
1868 1868 for line in payload[:m.start(0)].splitlines():
1869 1869 if line.startswith('# HG changeset patch'):
1870 1870 ui.debug(_('patch generated by hg export\n'))
1871 1871 hgpatch = True
1872 1872 # drop earlier commit message content
1873 1873 fp.seek(0)
1874 1874 fp.truncate()
1875 1875 elif hgpatch:
1876 1876 if line.startswith('# User '):
1877 1877 user = line[7:]
1878 1878 ui.debug('From: %s\n' % user)
1879 1879 elif line.startswith("# Date "):
1880 1880 date = line[7:]
1881 1881 if not line.startswith('# '):
1882 1882 fp.write(line)
1883 1883 fp.write('\n')
1884 1884 message = fp.getvalue()
1885 1885 if tmpfp:
1886 1886 tmpfp.write(payload)
1887 1887 if not payload.endswith('\n'):
1888 1888 tmpfp.write('\n')
1889 1889 elif not diffs_seen and message and content_type == 'text/plain':
1890 1890 message += '\n' + payload
1891 1891
1892 1892 if opts['message']:
1893 1893 # pickup the cmdline msg
1894 1894 message = opts['message']
1895 1895 elif message:
1896 1896 # pickup the patch msg
1897 1897 message = message.strip()
1898 1898 else:
1899 1899 # launch the editor
1900 1900 message = None
1901 1901 ui.debug(_('message:\n%s\n') % message)
1902 1902
1903 1903 tmpfp.close()
1904 1904 if not diffs_seen:
1905 1905 raise util.Abort(_('no diffs found'))
1906 1906
1907 1907 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1908 1908 if len(files) > 0:
1909 1909 cfiles = files
1910 1910 cwd = repo.getcwd()
1911 1911 if cwd:
1912 1912 cfiles = [util.pathto(cwd, f) for f in files]
1913 1913 addremove_lock(ui, repo, cfiles, {})
1914 1914 repo.commit(files, message, user, date)
1915 1915 finally:
1916 1916 os.unlink(tmpname)
1917 1917
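import walks the MIME parts of the message, keeps only text/plain, text/x-diff and text/x-patch bodies, and treats the first part matching the quilt-style diffre as the patch; plain-text material before that point feeds the commit message. A trimmed-down sketch of just the extraction step (it returns text instead of applying anything, and skips the '# HG changeset patch' header handling):

import email.Parser
import re

diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                    r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                    r'(---|\*\*\*)[ \t])', re.MULTILINE)

def extract(fp):
    '''Return (message, user, patch text) from a mail-like file object.'''
    msg = email.Parser.Parser().parse(fp)
    message = msg['Subject'] or ''
    user = msg['From']
    ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
    for part in msg.walk():
        if part.get_content_type() not in ok_types:
            continue
        payload = part.get_payload(decode=True)
        m = diffre.search(payload)
        if m:
            # body text before the first diff joins the commit message
            message = (message + '\n' + payload[:m.start(0)]).strip()
            return message, user, payload[m.start(0):]
    return message, user, None
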
1918 1918 def incoming(ui, repo, source="default", **opts):
1919 1919 """show new changesets found in source
1920 1920
1921 1921 Show new changesets found in the specified path/URL or the default
1922 1922 pull location. These are the changesets that would be pulled if a pull
1923 1923 was requested.
1924 1924
1925 1925     For remote repositories, using --bundle avoids downloading the changesets
1926 1926     twice if incoming is followed by a pull.
1927 1927
1928 1928 See pull for valid source format details.
1929 1929 """
1930 1930 source = ui.expandpath(source)
1931 1931 setremoteconfig(ui, opts)
1932 1932
1933 1933 other = hg.repository(ui, source)
1934 1934 incoming = repo.findincoming(other, force=opts["force"])
1935 1935 if not incoming:
1936 1936 ui.status(_("no changes found\n"))
1937 1937 return
1938 1938
1939 1939 cleanup = None
1940 1940 try:
1941 1941 fname = opts["bundle"]
1942 1942 if fname or not other.local():
1943 1943 # create a bundle (uncompressed if other repo is not local)
1944 1944 cg = other.changegroup(incoming, "incoming")
1945 1945 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1946 1946 # keep written bundle?
1947 1947 if opts["bundle"]:
1948 1948 cleanup = None
1949 1949 if not other.local():
1950 1950 # use the created uncompressed bundlerepo
1951 1951 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1952 1952
1953 1953 revs = None
1954 1954 if opts['rev']:
1955 1955 revs = [other.lookup(rev) for rev in opts['rev']]
1956 1956 o = other.changelog.nodesbetween(incoming, revs)[0]
1957 1957 if opts['newest_first']:
1958 1958 o.reverse()
1959 1959 displayer = show_changeset(ui, other, opts)
1960 1960 for n in o:
1961 1961 parents = [p for p in other.changelog.parents(n) if p != nullid]
1962 1962 if opts['no_merges'] and len(parents) == 2:
1963 1963 continue
1964 1964 displayer.show(changenode=n)
1965 1965 if opts['patch']:
1966 1966 prev = (parents and parents[0]) or nullid
1967 1967 dodiff(ui, ui, other, prev, n)
1968 1968 ui.write("\n")
1969 1969 finally:
1970 1970 if hasattr(other, 'close'):
1971 1971 other.close()
1972 1972 if cleanup:
1973 1973 os.unlink(cleanup)
1974 1974
1975 1975 def init(ui, dest=".", **opts):
1976 1976 """create a new repository in the given directory
1977 1977
1978 1978 Initialize a new repository in the given directory. If the given
1979 1979 directory does not exist, it is created.
1980 1980
1981 1981 If no directory is given, the current directory is used.
1982 1982
1983 1983 It is possible to specify an ssh:// URL as the destination.
1984 1984 Look at the help text for the pull command for important details
1985 1985 about ssh:// URLs.
1986 1986 """
1987 1987 setremoteconfig(ui, opts)
1988 1988 hg.repository(ui, dest, create=1)
1989 1989
1990 1990 def locate(ui, repo, *pats, **opts):
1991 1991 """locate files matching specific patterns
1992 1992
1993 1993 Print all files under Mercurial control whose names match the
1994 1994 given patterns.
1995 1995
1996 1996 This command searches the current directory and its
1997 1997 subdirectories. To search an entire repository, move to the root
1998 1998 of the repository.
1999 1999
2000 2000 If no patterns are given to match, this command prints all file
2001 2001 names.
2002 2002
2003 2003 If you want to feed the output of this command into the "xargs"
2004 2004 command, use the "-0" option to both this command and "xargs".
2005 2005 This will avoid the problem of "xargs" treating single filenames
2006 2006 that contain white space as multiple filenames.
2007 2007 """
2008 2008 end = opts['print0'] and '\0' or '\n'
2009 2009 rev = opts['rev']
2010 2010 if rev:
2011 2011 node = repo.lookup(rev)
2012 2012 else:
2013 2013 node = None
2014 2014
2015 2015 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2016 2016 head='(?:.*/|)'):
2017 2017 if not node and repo.dirstate.state(abs) == '?':
2018 2018 continue
2019 2019 if opts['fullpath']:
2020 2020 ui.write(os.path.join(repo.root, abs), end)
2021 2021 else:
2022 2022 ui.write(((pats and rel) or abs), end)
2023 2023
2024 2024 def log(ui, repo, *pats, **opts):
2025 2025 """show revision history of entire repository or files
2026 2026
2027 2027 Print the revision history of the specified files or the entire
2028 2028 project.
2029 2029
2030 2030 File history is shown without following rename or copy history of
2031 2031 files. Use -f/--follow with a file name to follow history across
2032 2032 renames and copies. --follow without a file name will only show
2033 2033 ancestors or descendants of the starting revision. --follow-first
2034 2034 only follows the first parent of merge revisions.
2035 2035
2036 2036 If no revision range is specified, the default is tip:0 unless
2037 2037 --follow is set, in which case the working directory parent is
2038 2038 used as the starting revision.
2039 2039
2040 2040 By default this command outputs: changeset id and hash, tags,
2041 2041 non-trivial parents, user, date and time, and a summary for each
2042 2042 commit. When the -v/--verbose switch is used, the list of changed
2043 2043 files and full commit message is shown.
2044 2044 """
2045 2045 class dui(object):
2046 2046 # Implement and delegate some ui protocol. Save hunks of
2047 2047 # output for later display in the desired order.
2048 2048 def __init__(self, ui):
2049 2049 self.ui = ui
2050 2050 self.hunk = {}
2051 2051 self.header = {}
2052 2052 def bump(self, rev):
2053 2053 self.rev = rev
2054 2054 self.hunk[rev] = []
2055 2055 self.header[rev] = []
2056 2056 def note(self, *args):
2057 2057 if self.verbose:
2058 2058 self.write(*args)
2059 2059 def status(self, *args):
2060 2060 if not self.quiet:
2061 2061 self.write(*args)
2062 2062 def write(self, *args):
2063 2063 self.hunk[self.rev].append(args)
2064 2064 def write_header(self, *args):
2065 2065 self.header[self.rev].append(args)
2066 2066 def debug(self, *args):
2067 2067 if self.debugflag:
2068 2068 self.write(*args)
2069 2069 def __getattr__(self, key):
2070 2070 return getattr(self.ui, key)
2071 2071
2072 2072 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2073 2073
2074 2074 if opts['limit']:
2075 2075 try:
2076 2076 limit = int(opts['limit'])
2077 2077 except ValueError:
2078 2078 raise util.Abort(_('limit must be a positive integer'))
2079 2079 if limit <= 0: raise util.Abort(_('limit must be positive'))
2080 2080 else:
2081 2081 limit = sys.maxint
2082 2082 count = 0
2083 2083
2084 2084 displayer = show_changeset(ui, repo, opts)
2085 2085 for st, rev, fns in changeiter:
2086 2086 if st == 'window':
2087 2087 du = dui(ui)
2088 2088 displayer.ui = du
2089 2089 elif st == 'add':
2090 2090 du.bump(rev)
2091 2091 changenode = repo.changelog.node(rev)
2092 2092 parents = [p for p in repo.changelog.parents(changenode)
2093 2093 if p != nullid]
2094 2094 if opts['no_merges'] and len(parents) == 2:
2095 2095 continue
2096 2096 if opts['only_merges'] and len(parents) != 2:
2097 2097 continue
2098 2098
2099 2099 if opts['keyword']:
2100 2100 changes = getchange(rev)
2101 2101 miss = 0
2102 2102 for k in [kw.lower() for kw in opts['keyword']]:
2103 2103 if not (k in changes[1].lower() or
2104 2104 k in changes[4].lower() or
2105 2105 k in " ".join(changes[3][:20]).lower()):
2106 2106 miss = 1
2107 2107 break
2108 2108 if miss:
2109 2109 continue
2110 2110
2111 2111 br = None
2112 2112 if opts['branches']:
2113 2113 br = repo.branchlookup([repo.changelog.node(rev)])
2114 2114
2115 2115 displayer.show(rev, brinfo=br)
2116 2116 if opts['patch']:
2117 2117 prev = (parents and parents[0]) or nullid
2118 2118 dodiff(du, du, repo, prev, changenode, match=matchfn)
2119 2119 du.write("\n\n")
2120 2120 elif st == 'iter':
2121 2121 if count == limit: break
2122 2122 if du.header[rev]:
2123 2123 for args in du.header[rev]:
2124 2124 ui.write_header(*args)
2125 2125 if du.hunk[rev]:
2126 2126 count += 1
2127 2127 for args in du.hunk[rev]:
2128 2128 ui.write(*args)
2129 2129
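The nested dui class buffers everything show_changeset writes, keyed by revision, so that output produced while a window is being filled can be replayed in order at 'iter' time. The same delegation pattern in isolation (the real class also routes note/status/debug through the ui verbosity flags and keeps a separate header store):

class bufferedui(object):
    '''Collect write() calls per revision and replay them later.'''
    def __init__(self, ui):
        self.ui = ui
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):
        self.hunk[self.rev].append(args)
    def flush(self, rev):
        for args in self.hunk.pop(rev, []):
            self.ui.write(*args)
    def __getattr__(self, key):
        # anything not overridden falls through to the wrapped ui
        return getattr(self.ui, key)
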
2130 2130 def manifest(ui, repo, rev=None):
2131 2131 """output the latest or given revision of the project manifest
2132 2132
2133 2133 Print a list of version controlled files for the given revision.
2134 2134
2135 2135 The manifest is the list of files being version controlled. If no revision
2136 2136 is given then the tip is used.
2137 2137 """
2138 2138 if rev:
2139 2139 try:
2140 2140 # assume all revision numbers are for changesets
2141 2141 n = repo.lookup(rev)
2142 2142 change = repo.changelog.read(n)
2143 2143 n = change[0]
2144 2144 except hg.RepoError:
2145 2145 n = repo.manifest.lookup(rev)
2146 2146 else:
2147 2147 n = repo.manifest.tip()
2148 2148 m = repo.manifest.read(n)
2149 2149 mf = repo.manifest.readflags(n)
2150 2150 files = m.keys()
2151 2151 files.sort()
2152 2152
2153 2153 for f in files:
2154 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2154 ui.write("%40s %3s %s\n" % (hex(m[f]),
2155 mf.execf(f) and "755" or "644", f))
2155 2156
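The hunk above belongs to the manifest-refactor merge: the exec flag is now asked for with mf.execf(f) instead of indexing the flags object, and the answer is printed as a Unix mode. A small illustrative rendering of that rule; the flag map below is hypothetical, not the manifest API:

def modestr(execf):
    '''Render an executable flag the way 'hg manifest' prints it.'''
    return execf and "755" or "644"

flags = {'hg': True, 'README': False}      # hypothetical exec-flag map
for f in sorted(flags):
    print("%3s %s" % (modestr(flags[f]), f))
# README gets 644, hg gets 755
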
2156 2157 def merge(ui, repo, node=None, force=None, branch=None):
2157 2158 """Merge working directory with another revision
2158 2159
2159 2160 Merge the contents of the current working directory and the
2160 2161 requested revision. Files that changed between either parent are
2161 2162 marked as changed for the next commit and a commit must be
2162 2163 performed before any further updates are allowed.
2163 2164 """
2164 2165
2165 2166 node = _lookup(repo, node, branch)
2166 2167 return hg.merge(repo, node, force=force)
2167 2168
2168 2169 def outgoing(ui, repo, dest=None, **opts):
2169 2170 """show changesets not found in destination
2170 2171
2171 2172 Show changesets not found in the specified destination repository or
2172 2173 the default push location. These are the changesets that would be pushed
2173 2174 if a push was requested.
2174 2175
2175 2176 See pull for valid destination format details.
2176 2177 """
2177 2178 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2178 2179 setremoteconfig(ui, opts)
2179 2180 revs = None
2180 2181 if opts['rev']:
2181 2182 revs = [repo.lookup(rev) for rev in opts['rev']]
2182 2183
2183 2184 other = hg.repository(ui, dest)
2184 2185 o = repo.findoutgoing(other, force=opts['force'])
2185 2186 if not o:
2186 2187 ui.status(_("no changes found\n"))
2187 2188 return
2188 2189 o = repo.changelog.nodesbetween(o, revs)[0]
2189 2190 if opts['newest_first']:
2190 2191 o.reverse()
2191 2192 displayer = show_changeset(ui, repo, opts)
2192 2193 for n in o:
2193 2194 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2194 2195 if opts['no_merges'] and len(parents) == 2:
2195 2196 continue
2196 2197 displayer.show(changenode=n)
2197 2198 if opts['patch']:
2198 2199 prev = (parents and parents[0]) or nullid
2199 2200 dodiff(ui, ui, repo, prev, n)
2200 2201 ui.write("\n")
2201 2202
2202 2203 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2203 2204 """show the parents of the working dir or revision
2204 2205
2205 2206 Print the working directory's parent revisions.
2206 2207 """
2207 2208 # legacy
2208 2209 if file_ and not rev:
2209 2210 try:
2210 2211 rev = repo.lookup(file_)
2211 2212 file_ = None
2212 2213 except hg.RepoError:
2213 2214 pass
2214 2215 else:
2215 2216 ui.warn(_("'hg parent REV' is deprecated, "
2216 2217                     "please use 'hg parents -r REV' instead\n"))
2217 2218
2218 2219 if rev:
2219 2220 if file_:
2220 2221 ctx = repo.filectx(file_, changeid=rev)
2221 2222 else:
2222 2223 ctx = repo.changectx(rev)
2223 2224 p = [cp.node() for cp in ctx.parents()]
2224 2225 else:
2225 2226 p = repo.dirstate.parents()
2226 2227
2227 2228 br = None
2228 2229 if branches is not None:
2229 2230 br = repo.branchlookup(p)
2230 2231 displayer = show_changeset(ui, repo, opts)
2231 2232 for n in p:
2232 2233 if n != nullid:
2233 2234 displayer.show(changenode=n, brinfo=br)
2234 2235
2235 2236 def paths(ui, repo, search=None):
2236 2237 """show definition of symbolic path names
2237 2238
2238 2239 Show definition of symbolic path name NAME. If no name is given, show
2239 2240 definition of available names.
2240 2241
2241 2242 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2242 2243 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2243 2244 """
2244 2245 if search:
2245 2246 for name, path in ui.configitems("paths"):
2246 2247 if name == search:
2247 2248 ui.write("%s\n" % path)
2248 2249 return
2249 2250 ui.warn(_("not found!\n"))
2250 2251 return 1
2251 2252 else:
2252 2253 for name, path in ui.configitems("paths"):
2253 2254 ui.write("%s = %s\n" % (name, path))
2254 2255
2255 2256 def postincoming(ui, repo, modheads, optupdate):
2256 2257 if modheads == 0:
2257 2258 return
2258 2259 if optupdate:
2259 2260 if modheads == 1:
2260 2261 return hg.update(repo, repo.changelog.tip()) # update
2261 2262 else:
2262 2263 ui.status(_("not updating, since new heads added\n"))
2263 2264 if modheads > 1:
2264 2265 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2265 2266 else:
2266 2267 ui.status(_("(run 'hg update' to get a working copy)\n"))
2267 2268
2268 2269 def pull(ui, repo, source="default", **opts):
2269 2270 """pull changes from the specified source
2270 2271
2271 2272 Pull changes from a remote repository to a local one.
2272 2273
2273 2274 This finds all changes from the repository at the specified path
2274 2275 or URL and adds them to the local repository. By default, this
2275 2276 does not update the copy of the project in the working directory.
2276 2277
2277 2278 Valid URLs are of the form:
2278 2279
2279 2280 local/filesystem/path
2280 2281 http://[user@]host[:port]/[path]
2281 2282 https://[user@]host[:port]/[path]
2282 2283 ssh://[user@]host[:port]/[path]
2283 2284
2284 2285 Some notes about using SSH with Mercurial:
2285 2286 - SSH requires an accessible shell account on the destination machine
2286 2287     and a copy of hg in the remote path or specified with remotecmd.
2287 2288 - path is relative to the remote user's home directory by default.
2288 2289 Use an extra slash at the start of a path to specify an absolute path:
2289 2290 ssh://example.com//tmp/repository
2290 2291 - Mercurial doesn't use its own compression via SSH; the right thing
2291 2292 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2292 2293 Host *.mylocalnetwork.example.com
2293 2294 Compression off
2294 2295 Host *
2295 2296 Compression on
2296 2297 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2297 2298 with the --ssh command line option.
2298 2299 """
2299 2300 source = ui.expandpath(source)
2300 2301 setremoteconfig(ui, opts)
2301 2302
2302 2303 other = hg.repository(ui, source)
2303 2304 ui.status(_('pulling from %s\n') % (source))
2304 2305 revs = None
2305 2306 if opts['rev'] and not other.local():
2306 2307 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2307 2308 elif opts['rev']:
2308 2309 revs = [other.lookup(rev) for rev in opts['rev']]
2309 2310 modheads = repo.pull(other, heads=revs, force=opts['force'])
2310 2311 return postincoming(ui, repo, modheads, opts['update'])
2311 2312
2312 2313 def push(ui, repo, dest=None, **opts):
2313 2314 """push changes to the specified destination
2314 2315
2315 2316 Push changes from the local repository to the given destination.
2316 2317
2317 2318 This is the symmetrical operation for pull. It helps to move
2318 2319 changes from the current repository to a different one. If the
2319 2320 destination is local this is identical to a pull in that directory
2320 2321 from the current one.
2321 2322
2322 2323 By default, push will refuse to run if it detects the result would
2323 2324     increase the number of remote heads. This generally indicates that
2324 2325 the client has forgotten to sync and merge before pushing.
2325 2326
2326 2327 Valid URLs are of the form:
2327 2328
2328 2329 local/filesystem/path
2329 2330 ssh://[user@]host[:port]/[path]
2330 2331
2331 2332 Look at the help text for the pull command for important details
2332 2333 about ssh:// URLs.
2333 2334
2334 2335 Pushing to http:// and https:// URLs is possible, too, if this
2335 2336 feature is enabled on the remote Mercurial server.
2336 2337 """
2337 2338 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2338 2339 setremoteconfig(ui, opts)
2339 2340
2340 2341 other = hg.repository(ui, dest)
2341 2342 ui.status('pushing to %s\n' % (dest))
2342 2343 revs = None
2343 2344 if opts['rev']:
2344 2345 revs = [repo.lookup(rev) for rev in opts['rev']]
2345 2346 r = repo.push(other, opts['force'], revs=revs)
2346 2347 return r == 0
2347 2348
2348 2349 def rawcommit(ui, repo, *flist, **rc):
2349 2350 """raw commit interface (DEPRECATED)
2350 2351
2351 2352 (DEPRECATED)
2352 2353 Lowlevel commit, for use in helper scripts.
2353 2354
2354 2355 This command is not intended to be used by normal users, as it is
2355 2356 primarily useful for importing from other SCMs.
2356 2357
2357 2358 This command is now deprecated and will be removed in a future
2358 2359 release, please use debugsetparents and commit instead.
2359 2360 """
2360 2361
2361 2362 ui.warn(_("(the rawcommit command is deprecated)\n"))
2362 2363
2363 2364 message = rc['message']
2364 2365 if not message and rc['logfile']:
2365 2366 try:
2366 2367 message = open(rc['logfile']).read()
2367 2368 except IOError:
2368 2369 pass
2369 2370 if not message and not rc['logfile']:
2370 2371 raise util.Abort(_("missing commit message"))
2371 2372
2372 2373 files = relpath(repo, list(flist))
2373 2374 if rc['files']:
2374 2375 files += open(rc['files']).read().splitlines()
2375 2376
2376 2377 rc['parent'] = map(repo.lookup, rc['parent'])
2377 2378
2378 2379 try:
2379 2380 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2380 2381 except ValueError, inst:
2381 2382 raise util.Abort(str(inst))
2382 2383
2383 2384 def recover(ui, repo):
2384 2385 """roll back an interrupted transaction
2385 2386
2386 2387 Recover from an interrupted commit or pull.
2387 2388
2388 2389 This command tries to fix the repository status after an interrupted
2389 2390 operation. It should only be necessary when Mercurial suggests it.
2390 2391 """
2391 2392 if repo.recover():
2392 2393 return hg.verify(repo)
2393 2394 return 1
2394 2395
2395 2396 def remove(ui, repo, *pats, **opts):
2396 2397 """remove the specified files on the next commit
2397 2398
2398 2399 Schedule the indicated files for removal from the repository.
2399 2400
2400 2401 This command schedules the files to be removed at the next commit.
2401 2402 This only removes files from the current branch, not from the
2402 2403 entire project history. If the files still exist in the working
2403 2404 directory, they will be deleted from it. If invoked with --after,
2404 2405 files that have been manually deleted are marked as removed.
2405 2406
2406 2407 Modified files and added files are not removed by default. To
2407 2408 remove them, use the -f/--force option.
2408 2409 """
2409 2410 names = []
2410 2411 if not opts['after'] and not pats:
2411 2412 raise util.Abort(_('no files specified'))
2412 2413 files, matchfn, anypats = matchpats(repo, pats, opts)
2413 2414 exact = dict.fromkeys(files)
2414 2415 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2415 2416 modified, added, removed, deleted, unknown = mardu
2416 2417 remove, forget = [], []
2417 2418 for src, abs, rel, exact in walk(repo, pats, opts):
2418 2419 reason = None
2419 2420 if abs not in deleted and opts['after']:
2420 2421 reason = _('is still present')
2421 2422 elif abs in modified and not opts['force']:
2422 2423 reason = _('is modified (use -f to force removal)')
2423 2424 elif abs in added:
2424 2425 if opts['force']:
2425 2426 forget.append(abs)
2426 2427 continue
2427 2428 reason = _('has been marked for add (use -f to force removal)')
2428 2429 elif abs in unknown:
2429 2430 reason = _('is not managed')
2430 2431 elif abs in removed:
2431 2432 continue
2432 2433 if reason:
2433 2434 if exact:
2434 2435 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2435 2436 else:
2436 2437 if ui.verbose or not exact:
2437 2438 ui.status(_('removing %s\n') % rel)
2438 2439 remove.append(abs)
2439 2440 repo.forget(forget)
2440 2441 repo.remove(remove, unlink=not opts['after'])
2441 2442
2442 2443 def rename(ui, repo, *pats, **opts):
2443 2444 """rename files; equivalent of copy + remove
2444 2445
2445 2446 Mark dest as copies of sources; mark sources for deletion. If
2446 2447 dest is a directory, copies are put in that directory. If dest is
2447 2448 a file, there can only be one source.
2448 2449
2449 2450 By default, this command copies the contents of files as they
2450 2451 stand in the working directory. If invoked with --after, the
2451 2452 operation is recorded, but no copying is performed.
2452 2453
2453 2454 This command takes effect in the next commit.
2454 2455
2455 2456 NOTE: This command should be treated as experimental. While it
2456 2457     should properly record renamed files, this information is not yet
2457 2458 fully used by merge, nor fully reported by log.
2458 2459 """
2459 2460 wlock = repo.wlock(0)
2460 2461 errs, copied = docopy(ui, repo, pats, opts, wlock)
2461 2462 names = []
2462 2463 for abs, rel, exact in copied:
2463 2464 if ui.verbose or not exact:
2464 2465 ui.status(_('removing %s\n') % rel)
2465 2466 names.append(abs)
2466 2467 if not opts.get('dry_run'):
2467 2468 repo.remove(names, True, wlock)
2468 2469 return errs
2469 2470
2470 2471 def revert(ui, repo, *pats, **opts):
2471 2472 """revert files or dirs to their states as of some revision
2472 2473
2473 2474 With no revision specified, revert the named files or directories
2474 2475 to the contents they had in the parent of the working directory.
2475 2476 This restores the contents of the affected files to an unmodified
2476 2477 state. If the working directory has two parents, you must
2477 2478 explicitly specify the revision to revert to.
2478 2479
2479 2480 Modified files are saved with a .orig suffix before reverting.
2480 2481 To disable these backups, use --no-backup.
2481 2482
2482 2483 Using the -r option, revert the given files or directories to
2483 2484     their contents as of a specific revision. This can be helpful to "roll
2484 2485 back" some or all of a change that should not have been committed.
2485 2486
2486 2487 Revert modifies the working directory. It does not commit any
2487 2488 changes, or change the parent of the working directory. If you
2488 2489 revert to a revision other than the parent of the working
2489 2490 directory, the reverted files will thus appear modified
2490 2491 afterwards.
2491 2492
2492 2493 If a file has been deleted, it is recreated. If the executable
2493 2494 mode of a file was changed, it is reset.
2494 2495
2495 2496 If names are given, all files matching the names are reverted.
2496 2497
2497 2498 If no arguments are given, all files in the repository are reverted.
2498 2499 """
2499 2500 parent, p2 = repo.dirstate.parents()
2500 2501 if opts['rev']:
2501 2502 node = repo.lookup(opts['rev'])
2502 2503 elif p2 != nullid:
2503 2504 raise util.Abort(_('working dir has two parents; '
2504 2505 'you must specify the revision to revert to'))
2505 2506 else:
2506 2507 node = parent
2507 2508 mf = repo.manifest.read(repo.changelog.read(node)[0])
2508 2509 if node == parent:
2509 2510 pmf = mf
2510 2511 else:
2511 2512 pmf = None
2512 2513
2513 2514 wlock = repo.wlock()
2514 2515
2515 2516 # need all matching names in dirstate and manifest of target rev,
2516 2517 # so have to walk both. do not print errors if files exist in one
2517 2518 # but not other.
2518 2519
2519 2520 names = {}
2520 2521 target_only = {}
2521 2522
2522 2523 # walk dirstate.
2523 2524
2524 2525 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2525 2526 names[abs] = (rel, exact)
2526 2527 if src == 'b':
2527 2528 target_only[abs] = True
2528 2529
2529 2530 # walk target manifest.
2530 2531
2531 2532 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2532 2533 badmatch=names.has_key):
2533 2534 if abs in names: continue
2534 2535 names[abs] = (rel, exact)
2535 2536 target_only[abs] = True
2536 2537
2537 2538 changes = repo.changes(match=names.has_key, wlock=wlock)
2538 2539 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2539 2540
2540 2541 revert = ([], _('reverting %s\n'))
2541 2542 add = ([], _('adding %s\n'))
2542 2543 remove = ([], _('removing %s\n'))
2543 2544 forget = ([], _('forgetting %s\n'))
2544 2545 undelete = ([], _('undeleting %s\n'))
2545 2546 update = {}
2546 2547
2547 2548 disptable = (
2548 2549 # dispatch table:
2549 2550 # file state
2550 2551 # action if in target manifest
2551 2552 # action if not in target manifest
2552 2553 # make backup if in target manifest
2553 2554 # make backup if not in target manifest
2554 2555 (modified, revert, remove, True, True),
2555 2556 (added, revert, forget, True, False),
2556 2557 (removed, undelete, None, False, False),
2557 2558 (deleted, revert, remove, False, False),
2558 2559 (unknown, add, None, True, False),
2559 2560 (target_only, add, None, False, False),
2560 2561 )
2561 2562
2562 2563 entries = names.items()
2563 2564 entries.sort()
2564 2565
2565 2566 for abs, (rel, exact) in entries:
2566 2567 mfentry = mf.get(abs)
2567 2568 def handle(xlist, dobackup):
2568 2569 xlist[0].append(abs)
2569 2570 update[abs] = 1
2570 2571 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2571 2572 bakname = "%s.orig" % rel
2572 2573 ui.note(_('saving current version of %s as %s\n') %
2573 2574 (rel, bakname))
2574 2575 if not opts.get('dry_run'):
2575 2576 shutil.copyfile(rel, bakname)
2576 2577 shutil.copymode(rel, bakname)
2577 2578 if ui.verbose or not exact:
2578 2579 ui.status(xlist[1] % rel)
2579 2580 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2580 2581 if abs not in table: continue
2581 2582 # file has changed in dirstate
2582 2583 if mfentry:
2583 2584 handle(hitlist, backuphit)
2584 2585 elif misslist is not None:
2585 2586 handle(misslist, backupmiss)
2586 2587 else:
2587 2588                 if exact: ui.warn(_('file not managed: %s\n') % rel)
2588 2589 break
2589 2590 else:
2590 2591 # file has not changed in dirstate
2591 2592 if node == parent:
2592 2593                 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2593 2594 continue
2594 2595 if pmf is None:
2595 2596 # only need parent manifest in this unlikely case,
2596 2597 # so do not read by default
2597 2598 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2598 2599 if abs in pmf:
2599 2600 if mfentry:
2600 2601 # if version of file is same in parent and target
2601 2602 # manifests, do nothing
2602 2603 if pmf[abs] != mfentry:
2603 2604 handle(revert, False)
2604 2605 else:
2605 2606 handle(remove, False)
2606 2607
2607 2608 if not opts.get('dry_run'):
2608 2609 repo.dirstate.forget(forget[0])
2609 2610 r = hg.revert(repo, node, update.has_key, wlock)
2610 2611 repo.dirstate.update(add[0], 'a')
2611 2612 repo.dirstate.update(undelete[0], 'n')
2612 2613 repo.dirstate.update(remove[0], 'r')
2613 2614 return r
2614 2615
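The dispatch table above decides, for every file, which list it joins and whether a .orig backup is made, based on the file's working-directory state and on whether the target manifest contains it. The same decision rule as a standalone function; the state names follow the tuple comments in the table and nothing here touches a repository:

# (action if in target manifest, action if not, backup if in, backup if not)
DISPATCH = {
    'modified':    ('revert',   'remove', True,  True),
    'added':       ('revert',   'forget', True,  False),
    'removed':     ('undelete', None,     False, False),
    'deleted':     ('revert',   'remove', False, False),
    'unknown':     ('add',      None,     True,  False),
    'target_only': ('add',      None,     False, False),
}

def decide(state, in_target_manifest):
    '''Return (action, make_backup) for a file; action None means skip.'''
    hit, miss, backuphit, backupmiss = DISPATCH[state]
    if in_target_manifest:
        return hit, backuphit
    return miss, backupmiss

assert decide('modified', True) == ('revert', True)
assert decide('added', False) == ('forget', False)
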
2615 2616 def rollback(ui, repo):
2616 2617 """roll back the last transaction in this repository
2617 2618
2618 2619 Roll back the last transaction in this repository, restoring the
2619 2620 project to its state prior to the transaction.
2620 2621
2621 2622 Transactions are used to encapsulate the effects of all commands
2622 2623 that create new changesets or propagate existing changesets into a
2623 2624 repository. For example, the following commands are transactional,
2624 2625 and their effects can be rolled back:
2625 2626
2626 2627 commit
2627 2628 import
2628 2629 pull
2629 2630 push (with this repository as destination)
2630 2631 unbundle
2631 2632
2632 2633 This command should be used with care. There is only one level of
2633 2634 rollback, and there is no way to undo a rollback.
2634 2635
2635 2636 This command is not intended for use on public repositories. Once
2636 2637 changes are visible for pull by other users, rolling a transaction
2637 2638 back locally is ineffective (someone else may already have pulled
2638 2639 the changes). Furthermore, a race is possible with readers of the
2639 2640 repository; for example an in-progress pull from the repository
2640 2641 may fail if a rollback is performed.
2641 2642 """
2642 2643 repo.rollback()
2643 2644
2644 2645 def root(ui, repo):
2645 2646 """print the root (top) of the current working dir
2646 2647
2647 2648 Print the root directory of the current repository.
2648 2649 """
2649 2650 ui.write(repo.root + "\n")
2650 2651
2651 2652 def serve(ui, repo, **opts):
2652 2653 """export the repository via HTTP
2653 2654
2654 2655 Start a local HTTP repository browser and pull server.
2655 2656
2656 2657 By default, the server logs accesses to stdout and errors to
2657 2658 stderr. Use the "-A" and "-E" options to log to files.
2658 2659 """
2659 2660
2660 2661 if opts["stdio"]:
2661 2662 if repo is None:
2662 2663 raise hg.RepoError(_('no repo found'))
2663 2664 s = sshserver.sshserver(ui, repo)
2664 2665 s.serve_forever()
2665 2666
2666 2667 optlist = ("name templates style address port ipv6"
2667 2668 " accesslog errorlog webdir_conf")
2668 2669 for o in optlist.split():
2669 2670 if opts[o]:
2670 2671 ui.setconfig("web", o, opts[o])
2671 2672
2672 2673 if repo is None and not ui.config("web", "webdir_conf"):
2673 2674 raise hg.RepoError(_('no repo found'))
2674 2675
2675 2676 if opts['daemon'] and not opts['daemon_pipefds']:
2676 2677 rfd, wfd = os.pipe()
2677 2678 args = sys.argv[:]
2678 2679 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2679 2680 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2680 2681 args[0], args)
2681 2682 os.close(wfd)
2682 2683 os.read(rfd, 1)
2683 2684 os._exit(0)
2684 2685
2685 2686 try:
2686 2687 httpd = hgweb.server.create_server(ui, repo)
2687 2688 except socket.error, inst:
2688 2689 raise util.Abort(_('cannot start server: ') + inst.args[1])
2689 2690
2690 2691 if ui.verbose:
2691 2692 addr, port = httpd.socket.getsockname()
2692 2693 if addr == '0.0.0.0':
2693 2694 addr = socket.gethostname()
2694 2695 else:
2695 2696 try:
2696 2697 addr = socket.gethostbyaddr(addr)[0]
2697 2698 except socket.error:
2698 2699 pass
2699 2700 if port != 80:
2700 2701 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2701 2702 else:
2702 2703 ui.status(_('listening at http://%s/\n') % addr)
2703 2704
2704 2705 if opts['pid_file']:
2705 2706 fp = open(opts['pid_file'], 'w')
2706 2707 fp.write(str(os.getpid()) + '\n')
2707 2708 fp.close()
2708 2709
2709 2710 if opts['daemon_pipefds']:
2710 2711 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2711 2712 os.close(rfd)
2712 2713 os.write(wfd, 'y')
2713 2714 os.close(wfd)
2714 2715 sys.stdout.flush()
2715 2716 sys.stderr.flush()
2716 2717 fd = os.open(util.nulldev, os.O_RDWR)
2717 2718 if fd != 0: os.dup2(fd, 0)
2718 2719 if fd != 1: os.dup2(fd, 1)
2719 2720 if fd != 2: os.dup2(fd, 2)
2720 2721 if fd not in (0, 1, 2): os.close(fd)
2721 2722
2722 2723 httpd.serve_forever()
2723 2724
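Daemon mode works through a pipe handshake: the parent re-spawns hg with --daemon-pipefds, blocks on a one-byte read and exits once the child has come up and written 'y'; the child then points stdio at the null device. A minimal sketch of just the detach step (util.nulldev is Mercurial's spelling of os.devnull):

import os, sys

def detach_stdio(nulldev=os.devnull):
    '''Redirect file descriptors 0-2 to the null device, as the child does.'''
    sys.stdout.flush()
    sys.stderr.flush()
    fd = os.open(nulldev, os.O_RDWR)
    for target in (0, 1, 2):
        if fd != target:
            os.dup2(fd, target)
    if fd not in (0, 1, 2):
        os.close(fd)
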
2724 2725 def status(ui, repo, *pats, **opts):
2725 2726 """show changed files in the working directory
2726 2727
2727 2728 Show status of files in the repository. If names are given, only
2728 2729     files that match are shown. Files that are clean or ignored are
2729 2730 not listed unless -c (clean), -i (ignored) or -A is given.
2730 2731
2731 2732 The codes used to show the status of files are:
2732 2733 M = modified
2733 2734 A = added
2734 2735 R = removed
2735 2736 C = clean
2736 2737 ! = deleted, but still tracked
2737 2738 ? = not tracked
2738 2739 I = ignored (not shown by default)
2739 2740       = the previously added file was copied from here
2740 2741 """
2741 2742
2742 2743 all = opts['all']
2743 2744
2744 2745 files, matchfn, anypats = matchpats(repo, pats, opts)
2745 2746 cwd = (pats and repo.getcwd()) or ''
2746 2747 modified, added, removed, deleted, unknown, ignored, clean = [
2747 2748 [util.pathto(cwd, x) for x in n]
2748 2749 for n in repo.status(files=files, match=matchfn,
2749 2750 list_ignored=all or opts['ignored'],
2750 2751 list_clean=all or opts['clean'])]
2751 2752
2752 2753 changetypes = (('modified', 'M', modified),
2753 2754 ('added', 'A', added),
2754 2755 ('removed', 'R', removed),
2755 2756 ('deleted', '!', deleted),
2756 2757 ('unknown', '?', unknown),
2757 2758 ('ignored', 'I', ignored))
2758 2759
2759 2760 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2760 2761
2761 2762 end = opts['print0'] and '\0' or '\n'
2762 2763
2763 2764 for opt, char, changes in ([ct for ct in explicit_changetypes
2764 2765 if all or opts[ct[0]]]
2765 2766 or changetypes):
2766 2767 if opts['no_status']:
2767 2768 format = "%%s%s" % end
2768 2769 else:
2769 2770 format = "%s %%s%s" % (char, end)
2770 2771
2771 2772 for f in changes:
2772 2773 ui.write(format % f)
2773 2774 if ((all or opts.get('copies')) and not opts.get('no_status')
2774 2775 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2775 2776 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2776 2777
2777 2778 def tag(ui, repo, name, rev_=None, **opts):
2778 2779 """add a tag for the current tip or a given revision
2779 2780
2780 2781 Name a particular revision using <name>.
2781 2782
2782 2783 Tags are used to name particular revisions of the repository and are
2783 2784     very useful to compare different revisions, to go back to significant
2784 2785 earlier versions or to mark branch points as releases, etc.
2785 2786
2786 2787 If no revision is given, the parent of the working directory is used.
2787 2788
2788 2789 To facilitate version control, distribution, and merging of tags,
2789 2790 they are stored as a file named ".hgtags" which is managed
2790 2791 similarly to other project files and can be hand-edited if
2791 2792 necessary. The file '.hg/localtags' is used for local tags (not
2792 2793 shared among repositories).
2793 2794 """
2794 2795 if name in ['tip', '.']:
2795 2796 raise util.Abort(_("the name '%s' is reserved") % name)
2796 2797 if rev_ is not None:
2797 2798 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2798 2799 "please use 'hg tag [-r REV] NAME' instead\n"))
2799 2800 if opts['rev']:
2800 2801 raise util.Abort(_("use only one form to specify the revision"))
2801 2802 if opts['rev']:
2802 2803 rev_ = opts['rev']
2803 2804 if rev_:
2804 2805 r = hex(repo.lookup(rev_))
2805 2806 else:
2806 2807 p1, p2 = repo.dirstate.parents()
2807 2808 if p1 == nullid:
2808 2809 raise util.Abort(_('no revision to tag'))
2809 2810 if p2 != nullid:
2810 2811 raise util.Abort(_('outstanding uncommitted merges'))
2811 2812 r = hex(p1)
2812 2813
2813 2814 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2814 2815 opts['date'])
2815 2816
2816 2817 def tags(ui, repo):
2817 2818 """list repository tags
2818 2819
2819 2820 List the repository tags.
2820 2821
2821 2822 This lists both regular and local tags.
2822 2823 """
2823 2824
2824 2825 l = repo.tagslist()
2825 2826 l.reverse()
2826 2827 for t, n in l:
2827 2828 try:
2828 2829 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2829 2830 except KeyError:
2830 2831 r = " ?:?"
2831 2832 if ui.quiet:
2832 2833 ui.write("%s\n" % t)
2833 2834 else:
2834 2835 ui.write("%-30s %s\n" % (t, r))
2835 2836
2836 2837 def tip(ui, repo, **opts):
2837 2838 """show the tip revision
2838 2839
2839 2840 Show the tip revision.
2840 2841 """
2841 2842 n = repo.changelog.tip()
2842 2843 br = None
2843 2844 if opts['branches']:
2844 2845 br = repo.branchlookup([n])
2845 2846 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2846 2847 if opts['patch']:
2847 2848 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2848 2849
2849 2850 def unbundle(ui, repo, fname, **opts):
2850 2851 """apply a changegroup file
2851 2852
2852 2853 Apply a compressed changegroup file generated by the bundle
2853 2854 command.
2854 2855 """
2855 2856 f = urllib.urlopen(fname)
2856 2857
2857 2858 header = f.read(6)
2858 2859 if not header.startswith("HG"):
2859 2860 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2860 2861 elif not header.startswith("HG10"):
2861 2862 raise util.Abort(_("%s: unknown bundle version") % fname)
2862 2863 elif header == "HG10BZ":
2863 2864 def generator(f):
2864 2865 zd = bz2.BZ2Decompressor()
2865 2866 zd.decompress("BZ")
2866 2867 for chunk in f:
2867 2868 yield zd.decompress(chunk)
2868 2869 elif header == "HG10UN":
2869 2870 def generator(f):
2870 2871 for chunk in f:
2871 2872 yield chunk
2872 2873 else:
2873 2874 raise util.Abort(_("%s: unknown bundle compression type")
2874 2875 % fname)
2875 2876 gen = generator(util.filechunkiter(f, 4096))
2876 2877 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2877 2878 'bundle:' + fname)
2878 2879 return postincoming(ui, repo, modheads, opts['update'])
2879 2880
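# Editor's sketch (illustrative, not part of the original changeset): the
# header dispatch in unbundle() above amounts to picking a chunk decompressor
# from the 6-byte magic.  For "HG10BZ" the first two bytes of the bzip2
# magic ("BZ") were consumed as part of the header, so they are fed back to
# the decompressor before the remaining chunks.  Names below are hypothetical.
def _exampledecompressor(header):
    '''return a chunk -> data function for a 6-byte bundle header'''
    import bz2
    if header == "HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress("BZ")
        return zd.decompress
    if header == "HG10UN":
        return lambda chunk: chunk
    raise ValueError("unsupported bundle header: %r" % header)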
2880 2881 def undo(ui, repo):
2881 2882 """undo the last commit or pull (DEPRECATED)
2882 2883
2883 2884 (DEPRECATED)
2884 2885 This command is now deprecated and will be removed in a future
2885 2886 release. Please use the rollback command instead. For usage
2886 2887 instructions, see the rollback command.
2887 2888 """
2888 2889 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2889 2890 repo.rollback()
2890 2891
2891 2892 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2892 2893 branch=None):
2893 2894 """update or merge working directory
2894 2895
2895 2896 Update the working directory to the specified revision.
2896 2897
2897 2898 If there are no outstanding changes in the working directory and
2898 2899 there is a linear relationship between the current version and the
2899 2900 requested version, the result is the requested version.
2900 2901
2901 2902 To merge the working directory with another revision, use the
2902 2903 merge command.
2903 2904
2904 2905 By default, update will refuse to run if doing so would require
2905 2906 merging or discarding local changes.
2906 2907 """
2907 2908 node = _lookup(repo, node, branch)
2908 2909 if merge:
2909 2910 ui.warn(_('(the -m/--merge option is deprecated; '
2910 2911 'use the merge command instead)\n'))
2911 2912 return hg.merge(repo, node, force=force)
2912 2913 elif clean:
2913 2914 return hg.clean(repo, node)
2914 2915 else:
2915 2916 return hg.update(repo, node)
2916 2917
2917 2918 def _lookup(repo, node, branch=None):
2918 2919 if branch:
2919 2920 br = repo.branchlookup(branch=branch)
2920 2921 found = []
2921 2922 for x in br:
2922 2923 if branch in br[x]:
2923 2924 found.append(x)
2924 2925 if len(found) > 1:
2925 2926 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2926 2927 for x in found:
2927 2928 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2928 2929 raise util.Abort("")
2929 2930 if len(found) == 1:
2930 2931 node = found[0]
2931 2932 repo.ui.warn(_("Using head %s for branch %s\n")
2932 2933 % (short(node), branch))
2933 2934 else:
2934 2935 raise util.Abort(_("branch %s not found") % branch)
2935 2936 else:
2936 2937 node = node and repo.lookup(node) or repo.changelog.tip()
2937 2938 return node
2938 2939
2939 2940 def verify(ui, repo):
2940 2941 """verify the integrity of the repository
2941 2942
2942 2943 Verify the integrity of the current repository.
2943 2944
2944 2945 This will perform an extensive check of the repository's
2945 2946 integrity, validating the hashes and checksums of each entry in
2946 2947 the changelog, manifest, and tracked files, as well as the
2947 2948 integrity of their crosslinks and indices.
2948 2949 """
2949 2950 return hg.verify(repo)
2950 2951
2951 2952 # Command options and aliases are listed here, alphabetically
2952 2953
2953 2954 table = {
2954 2955 "^add":
2955 2956 (add,
2956 2957 [('I', 'include', [], _('include names matching the given patterns')),
2957 2958 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2958 2959 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2959 2960 _('hg add [OPTION]... [FILE]...')),
2960 2961 "debugaddremove|addremove":
2961 2962 (addremove,
2962 2963 [('I', 'include', [], _('include names matching the given patterns')),
2963 2964 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2964 2965 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2965 2966 _('hg addremove [OPTION]... [FILE]...')),
2966 2967 "^annotate":
2967 2968 (annotate,
2968 2969 [('r', 'rev', '', _('annotate the specified revision')),
2969 2970 ('a', 'text', None, _('treat all files as text')),
2970 2971 ('u', 'user', None, _('list the author')),
2971 2972 ('d', 'date', None, _('list the date')),
2972 2973 ('n', 'number', None, _('list the revision number (default)')),
2973 2974 ('c', 'changeset', None, _('list the changeset')),
2974 2975 ('I', 'include', [], _('include names matching the given patterns')),
2975 2976 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2976 2977 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2977 2978 "archive":
2978 2979 (archive,
2979 2980 [('', 'no-decode', None, _('do not pass files through decoders')),
2980 2981 ('p', 'prefix', '', _('directory prefix for files in archive')),
2981 2982 ('r', 'rev', '', _('revision to distribute')),
2982 2983 ('t', 'type', '', _('type of distribution to create')),
2983 2984 ('I', 'include', [], _('include names matching the given patterns')),
2984 2985 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2985 2986 _('hg archive [OPTION]... DEST')),
2986 2987 "backout":
2987 2988 (backout,
2988 2989 [('', 'merge', None,
2989 2990 _('merge with old dirstate parent after backout')),
2990 2991 ('m', 'message', '', _('use <text> as commit message')),
2991 2992 ('l', 'logfile', '', _('read commit message from <file>')),
2992 2993 ('d', 'date', '', _('record datecode as commit date')),
2993 2994 ('', 'parent', '', _('parent to choose when backing out merge')),
2994 2995 ('u', 'user', '', _('record user as committer')),
2995 2996 ('I', 'include', [], _('include names matching the given patterns')),
2996 2997 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2997 2998 _('hg backout [OPTION]... REV')),
2998 2999 "bundle":
2999 3000 (bundle,
3000 3001 [('f', 'force', None,
3001 3002 _('run even when remote repository is unrelated'))],
3002 3003 _('hg bundle FILE DEST')),
3003 3004 "cat":
3004 3005 (cat,
3005 3006 [('o', 'output', '', _('print output to file with formatted name')),
3006 3007 ('r', 'rev', '', _('print the given revision')),
3007 3008 ('I', 'include', [], _('include names matching the given patterns')),
3008 3009 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3009 3010 _('hg cat [OPTION]... FILE...')),
3010 3011 "^clone":
3011 3012 (clone,
3012 3013 [('U', 'noupdate', None, _('do not update the new working directory')),
3013 3014 ('r', 'rev', [],
3014 3015 _('a changeset you would like to have after cloning')),
3015 3016 ('', 'pull', None, _('use pull protocol to copy metadata')),
3016 3017 ('', 'uncompressed', None,
3017 3018 _('use uncompressed transfer (fast over LAN)')),
3018 3019 ('e', 'ssh', '', _('specify ssh command to use')),
3019 3020 ('', 'remotecmd', '',
3020 3021 _('specify hg command to run on the remote side'))],
3021 3022 _('hg clone [OPTION]... SOURCE [DEST]')),
3022 3023 "^commit|ci":
3023 3024 (commit,
3024 3025 [('A', 'addremove', None,
3025 3026 _('mark new/missing files as added/removed before committing')),
3026 3027 ('m', 'message', '', _('use <text> as commit message')),
3027 3028 ('l', 'logfile', '', _('read the commit message from <file>')),
3028 3029 ('d', 'date', '', _('record datecode as commit date')),
3029 3030 ('u', 'user', '', _('record user as committer')),
3030 3031 ('I', 'include', [], _('include names matching the given patterns')),
3031 3032 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3032 3033 _('hg commit [OPTION]... [FILE]...')),
3033 3034 "copy|cp":
3034 3035 (copy,
3035 3036 [('A', 'after', None, _('record a copy that has already occurred')),
3036 3037 ('f', 'force', None,
3037 3038 _('forcibly copy over an existing managed file')),
3038 3039 ('I', 'include', [], _('include names matching the given patterns')),
3039 3040 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3040 3041 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3041 3042 _('hg copy [OPTION]... [SOURCE]... DEST')),
3042 3043 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3043 3044 "debugcomplete":
3044 3045 (debugcomplete,
3045 3046 [('o', 'options', None, _('show the command options'))],
3046 3047 _('debugcomplete [-o] CMD')),
3047 3048 "debugrebuildstate":
3048 3049 (debugrebuildstate,
3049 3050 [('r', 'rev', '', _('revision to rebuild to'))],
3050 3051 _('debugrebuildstate [-r REV] [REV]')),
3051 3052 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3052 3053 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3053 3054 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3054 3055 "debugstate": (debugstate, [], _('debugstate')),
3055 3056 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3056 3057 "debugindex": (debugindex, [], _('debugindex FILE')),
3057 3058 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3058 3059 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3059 3060 "debugwalk":
3060 3061 (debugwalk,
3061 3062 [('I', 'include', [], _('include names matching the given patterns')),
3062 3063 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3063 3064 _('debugwalk [OPTION]... [FILE]...')),
3064 3065 "^diff":
3065 3066 (diff,
3066 3067 [('r', 'rev', [], _('revision')),
3067 3068 ('a', 'text', None, _('treat all files as text')),
3068 3069 ('p', 'show-function', None,
3069 3070 _('show which function each change is in')),
3070 3071 ('w', 'ignore-all-space', None,
3071 3072 _('ignore white space when comparing lines')),
3072 3073 ('b', 'ignore-space-change', None,
3073 3074 _('ignore changes in the amount of white space')),
3074 3075 ('B', 'ignore-blank-lines', None,
3075 3076 _('ignore changes whose lines are all blank')),
3076 3077 ('I', 'include', [], _('include names matching the given patterns')),
3077 3078 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3078 3079 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3079 3080 "^export":
3080 3081 (export,
3081 3082 [('o', 'output', '', _('print output to file with formatted name')),
3082 3083 ('a', 'text', None, _('treat all files as text')),
3083 3084 ('', 'switch-parent', None, _('diff against the second parent'))],
3084 3085 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3085 3086 "debugforget|forget":
3086 3087 (forget,
3087 3088 [('I', 'include', [], _('include names matching the given patterns')),
3088 3089 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3089 3090 _('hg forget [OPTION]... FILE...')),
3090 3091 "grep":
3091 3092 (grep,
3092 3093 [('0', 'print0', None, _('end fields with NUL')),
3093 3094 ('', 'all', None, _('print all revisions that match')),
3094 3095 ('i', 'ignore-case', None, _('ignore case when matching')),
3095 3096 ('l', 'files-with-matches', None,
3096 3097 _('print only filenames and revs that match')),
3097 3098 ('n', 'line-number', None, _('print matching line numbers')),
3098 3099 ('r', 'rev', [], _('search in given revision range')),
3099 3100 ('u', 'user', None, _('print user who committed change')),
3100 3101 ('I', 'include', [], _('include names matching the given patterns')),
3101 3102 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3102 3103 _('hg grep [OPTION]... PATTERN [FILE]...')),
3103 3104 "heads":
3104 3105 (heads,
3105 3106 [('b', 'branches', None, _('show branches')),
3106 3107 ('', 'style', '', _('display using template map file')),
3107 3108 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3108 3109 ('', 'template', '', _('display with template'))],
3109 3110 _('hg heads [-b] [-r REV]')),
3110 3111 "help": (help_, [], _('hg help [COMMAND]')),
3111 3112 "identify|id": (identify, [], _('hg identify')),
3112 3113 "import|patch":
3113 3114 (import_,
3114 3115 [('p', 'strip', 1,
3115 3116 _('directory strip option for patch. This has the same\n'
3116 3117 'meaning as the corresponding patch option')),
3117 3118 ('m', 'message', '', _('use <text> as commit message')),
3118 3119 ('b', 'base', '', _('base path')),
3119 3120 ('f', 'force', None,
3120 3121 _('skip check for outstanding uncommitted changes'))],
3121 3122 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3122 3123 "incoming|in": (incoming,
3123 3124 [('M', 'no-merges', None, _('do not show merges')),
3124 3125 ('f', 'force', None,
3125 3126 _('run even when remote repository is unrelated')),
3126 3127 ('', 'style', '', _('display using template map file')),
3127 3128 ('n', 'newest-first', None, _('show newest record first')),
3128 3129 ('', 'bundle', '', _('file to store the bundles into')),
3129 3130 ('p', 'patch', None, _('show patch')),
3130 3131 ('r', 'rev', [], _('a specific revision you would like to pull')),
3131 3132 ('', 'template', '', _('display with template')),
3132 3133 ('e', 'ssh', '', _('specify ssh command to use')),
3133 3134 ('', 'remotecmd', '',
3134 3135 _('specify hg command to run on the remote side'))],
3135 3136 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3136 3137 ' [--bundle FILENAME] [SOURCE]')),
3137 3138 "^init":
3138 3139 (init,
3139 3140 [('e', 'ssh', '', _('specify ssh command to use')),
3140 3141 ('', 'remotecmd', '',
3141 3142 _('specify hg command to run on the remote side'))],
3142 3143 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3143 3144 "locate":
3144 3145 (locate,
3145 3146 [('r', 'rev', '', _('search the repository as it stood at rev')),
3146 3147 ('0', 'print0', None,
3147 3148 _('end filenames with NUL, for use with xargs')),
3148 3149 ('f', 'fullpath', None,
3149 3150 _('print complete paths from the filesystem root')),
3150 3151 ('I', 'include', [], _('include names matching the given patterns')),
3151 3152 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3152 3153 _('hg locate [OPTION]... [PATTERN]...')),
3153 3154 "^log|history":
3154 3155 (log,
3155 3156 [('b', 'branches', None, _('show branches')),
3156 3157 ('f', 'follow', None,
3157 3158 _('follow changeset history, or file history across copies and renames')),
3158 3159 ('', 'follow-first', None,
3159 3160 _('only follow the first parent of merge changesets')),
3160 3161 ('k', 'keyword', [], _('search for a keyword')),
3161 3162 ('l', 'limit', '', _('limit number of changes displayed')),
3162 3163 ('r', 'rev', [], _('show the specified revision or range')),
3163 3164 ('M', 'no-merges', None, _('do not show merges')),
3164 3165 ('', 'style', '', _('display using template map file')),
3165 3166 ('m', 'only-merges', None, _('show only merges')),
3166 3167 ('p', 'patch', None, _('show patch')),
3167 3168 ('', 'template', '', _('display with template')),
3168 3169 ('I', 'include', [], _('include names matching the given patterns')),
3169 3170 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3170 3171 _('hg log [OPTION]... [FILE]')),
3171 3172 "manifest": (manifest, [], _('hg manifest [REV]')),
3172 3173 "merge":
3173 3174 (merge,
3174 3175 [('b', 'branch', '', _('merge with head of a specific branch')),
3175 3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3176 3177 _('hg merge [-b TAG] [-f] [REV]')),
3177 3178 "outgoing|out": (outgoing,
3178 3179 [('M', 'no-merges', None, _('do not show merges')),
3179 3180 ('f', 'force', None,
3180 3181 _('run even when remote repository is unrelated')),
3181 3182 ('p', 'patch', None, _('show patch')),
3182 3183 ('', 'style', '', _('display using template map file')),
3183 3184 ('r', 'rev', [], _('a specific revision you would like to push')),
3184 3185 ('n', 'newest-first', None, _('show newest record first')),
3185 3186 ('', 'template', '', _('display with template')),
3186 3187 ('e', 'ssh', '', _('specify ssh command to use')),
3187 3188 ('', 'remotecmd', '',
3188 3189 _('specify hg command to run on the remote side'))],
3189 3190 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3190 3191 "^parents":
3191 3192 (parents,
3192 3193 [('b', 'branches', None, _('show branches')),
3193 3194 ('r', 'rev', '', _('show parents from the specified rev')),
3194 3195 ('', 'style', '', _('display using template map file')),
3195 3196 ('', 'template', '', _('display with template'))],
3196 3197 _('hg parents [-b] [-r REV] [FILE]')),
3197 3198 "paths": (paths, [], _('hg paths [NAME]')),
3198 3199 "^pull":
3199 3200 (pull,
3200 3201 [('u', 'update', None,
3201 3202 _('update the working directory to tip after pull')),
3202 3203 ('e', 'ssh', '', _('specify ssh command to use')),
3203 3204 ('f', 'force', None,
3204 3205 _('run even when remote repository is unrelated')),
3205 3206 ('r', 'rev', [], _('a specific revision you would like to pull')),
3206 3207 ('', 'remotecmd', '',
3207 3208 _('specify hg command to run on the remote side'))],
3208 3209 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3209 3210 "^push":
3210 3211 (push,
3211 3212 [('f', 'force', None, _('force push')),
3212 3213 ('e', 'ssh', '', _('specify ssh command to use')),
3213 3214 ('r', 'rev', [], _('a specific revision you would like to push')),
3214 3215 ('', 'remotecmd', '',
3215 3216 _('specify hg command to run on the remote side'))],
3216 3217 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3217 3218 "debugrawcommit|rawcommit":
3218 3219 (rawcommit,
3219 3220 [('p', 'parent', [], _('parent')),
3220 3221 ('d', 'date', '', _('date code')),
3221 3222 ('u', 'user', '', _('user')),
3222 3223 ('F', 'files', '', _('file list')),
3223 3224 ('m', 'message', '', _('commit message')),
3224 3225 ('l', 'logfile', '', _('commit message file'))],
3225 3226 _('hg debugrawcommit [OPTION]... [FILE]...')),
3226 3227 "recover": (recover, [], _('hg recover')),
3227 3228 "^remove|rm":
3228 3229 (remove,
3229 3230 [('A', 'after', None, _('record remove that has already occurred')),
3230 3231 ('f', 'force', None, _('remove file even if modified')),
3231 3232 ('I', 'include', [], _('include names matching the given patterns')),
3232 3233 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3233 3234 _('hg remove [OPTION]... FILE...')),
3234 3235 "rename|mv":
3235 3236 (rename,
3236 3237 [('A', 'after', None, _('record a rename that has already occurred')),
3237 3238 ('f', 'force', None,
3238 3239 _('forcibly copy over an existing managed file')),
3239 3240 ('I', 'include', [], _('include names matching the given patterns')),
3240 3241 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3241 3242 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3242 3243 _('hg rename [OPTION]... SOURCE... DEST')),
3243 3244 "^revert":
3244 3245 (revert,
3245 3246 [('r', 'rev', '', _('revision to revert to')),
3246 3247 ('', 'no-backup', None, _('do not save backup copies of files')),
3247 3248 ('I', 'include', [], _('include names matching given patterns')),
3248 3249 ('X', 'exclude', [], _('exclude names matching given patterns')),
3249 3250 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3250 3251 _('hg revert [-r REV] [NAME]...')),
3251 3252 "rollback": (rollback, [], _('hg rollback')),
3252 3253 "root": (root, [], _('hg root')),
3253 3254 "^serve":
3254 3255 (serve,
3255 3256 [('A', 'accesslog', '', _('name of access log file to write to')),
3256 3257 ('d', 'daemon', None, _('run server in background')),
3257 3258 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3258 3259 ('E', 'errorlog', '', _('name of error log file to write to')),
3259 3260 ('p', 'port', 0, _('port to use (default: 8000)')),
3260 3261 ('a', 'address', '', _('address to use')),
3261 3262 ('n', 'name', '',
3262 3263 _('name to show in web pages (default: working dir)')),
3263 3264 ('', 'webdir-conf', '', _('name of the webdir config file'
3264 3265 ' (serve more than one repo)')),
3265 3266 ('', 'pid-file', '', _('name of file to write process ID to')),
3266 3267 ('', 'stdio', None, _('for remote clients')),
3267 3268 ('t', 'templates', '', _('web templates to use')),
3268 3269 ('', 'style', '', _('template style to use')),
3269 3270 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3270 3271 _('hg serve [OPTION]...')),
3271 3272 "^status|st":
3272 3273 (status,
3273 3274 [('A', 'all', None, _('show status of all files')),
3274 3275 ('m', 'modified', None, _('show only modified files')),
3275 3276 ('a', 'added', None, _('show only added files')),
3276 3277 ('r', 'removed', None, _('show only removed files')),
3277 3278 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3278 3279 ('c', 'clean', None, _('show only files without changes')),
3279 3280 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3280 3281 ('i', 'ignored', None, _('show ignored files')),
3281 3282 ('n', 'no-status', None, _('hide status prefix')),
3282 3283 ('C', 'copies', None, _('show source of copied files')),
3283 3284 ('0', 'print0', None,
3284 3285 _('end filenames with NUL, for use with xargs')),
3285 3286 ('I', 'include', [], _('include names matching the given patterns')),
3286 3287 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3287 3288 _('hg status [OPTION]... [FILE]...')),
3288 3289 "tag":
3289 3290 (tag,
3290 3291 [('l', 'local', None, _('make the tag local')),
3291 3292 ('m', 'message', '', _('message for tag commit log entry')),
3292 3293 ('d', 'date', '', _('record datecode as commit date')),
3293 3294 ('u', 'user', '', _('record user as committer')),
3294 3295 ('r', 'rev', '', _('revision to tag'))],
3295 3296 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3296 3297 "tags": (tags, [], _('hg tags')),
3297 3298 "tip":
3298 3299 (tip,
3299 3300 [('b', 'branches', None, _('show branches')),
3300 3301 ('', 'style', '', _('display using template map file')),
3301 3302 ('p', 'patch', None, _('show patch')),
3302 3303 ('', 'template', '', _('display with template'))],
3303 3304 _('hg tip [-b] [-p]')),
3304 3305 "unbundle":
3305 3306 (unbundle,
3306 3307 [('u', 'update', None,
3307 3308 _('update the working directory to tip after unbundle'))],
3308 3309 _('hg unbundle [-u] FILE')),
3309 3310 "debugundo|undo": (undo, [], _('hg undo')),
3310 3311 "^update|up|checkout|co":
3311 3312 (update,
3312 3313 [('b', 'branch', '', _('checkout the head of a specific branch')),
3313 3314 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3314 3315 ('C', 'clean', None, _('overwrite locally modified files')),
3315 3316 ('f', 'force', None, _('force a merge with outstanding changes'))],
3316 3317 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3317 3318 "verify": (verify, [], _('hg verify')),
3318 3319 "version": (show_version, [], _('hg version')),
3319 3320 }
3320 3321
3321 3322 globalopts = [
3322 3323 ('R', 'repository', '',
3323 3324 _('repository root directory or symbolic path name')),
3324 3325 ('', 'cwd', '', _('change working directory')),
3325 3326 ('y', 'noninteractive', None,
3326 3327 _('do not prompt, assume \'yes\' for any required answers')),
3327 3328 ('q', 'quiet', None, _('suppress output')),
3328 3329 ('v', 'verbose', None, _('enable additional output')),
3329 3330 ('', 'config', [], _('set/override config option')),
3330 3331 ('', 'debug', None, _('enable debugging output')),
3331 3332 ('', 'debugger', None, _('start debugger')),
3332 3333 ('', 'lsprof', None, _('print improved command execution profile')),
3333 3334 ('', 'traceback', None, _('print traceback on exception')),
3334 3335 ('', 'time', None, _('time how long the command takes')),
3335 3336 ('', 'profile', None, _('print command execution profile')),
3336 3337 ('', 'version', None, _('output version information and exit')),
3337 3338 ('h', 'help', None, _('display help and exit')),
3338 3339 ]
3339 3340
3340 3341 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3341 3342 " debugindex debugindexdot")
3342 3343 optionalrepo = ("paths serve debugconfig")
3343 3344
3344 3345 def findpossible(cmd):
3345 3346 """
3346 3347 Return cmd -> (aliases, command table entry)
3347 3348 for each matching command.
3348 3349 Return debug commands (or their aliases) only if no normal command matches.
3349 3350 """
3350 3351 choice = {}
3351 3352 debugchoice = {}
3352 3353 for e in table.keys():
3353 3354 aliases = e.lstrip("^").split("|")
3354 3355 found = None
3355 3356 if cmd in aliases:
3356 3357 found = cmd
3357 3358 else:
3358 3359 for a in aliases:
3359 3360 if a.startswith(cmd):
3360 3361 found = a
3361 3362 break
3362 3363 if found is not None:
3363 3364 if aliases[0].startswith("debug"):
3364 3365 debugchoice[found] = (aliases, table[e])
3365 3366 else:
3366 3367 choice[found] = (aliases, table[e])
3367 3368
3368 3369 if not choice and debugchoice:
3369 3370 choice = debugchoice
3370 3371
3371 3372 return choice
3372 3373
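# Editor's note (not part of the original source): findpossible() implements
# hg's unambiguous-prefix matching.  As a hypothetical illustration, "st"
# resolves to the "^status|st" entry through its exact alias, "he" matches
# both "heads" and "help" (findcmd() below turns that into AmbiguousCommand),
# and debug commands are offered only when no regular command matches.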
3373 3374 def findcmd(cmd):
3374 3375 """Return (aliases, command table entry) for command string."""
3375 3376 choice = findpossible(cmd)
3376 3377
3377 3378 if choice.has_key(cmd):
3378 3379 return choice[cmd]
3379 3380
3380 3381 if len(choice) > 1:
3381 3382 clist = choice.keys()
3382 3383 clist.sort()
3383 3384 raise AmbiguousCommand(cmd, clist)
3384 3385
3385 3386 if choice:
3386 3387 return choice.values()[0]
3387 3388
3388 3389 raise UnknownCommand(cmd)
3389 3390
3390 3391 def catchterm(*args):
3391 3392 raise util.SignalInterrupt
3392 3393
3393 3394 def run():
3394 3395 sys.exit(dispatch(sys.argv[1:]))
3395 3396
3396 3397 class ParseError(Exception):
3397 3398 """Exception raised on errors in parsing the command line."""
3398 3399
3399 3400 def parse(ui, args):
3400 3401 options = {}
3401 3402 cmdoptions = {}
3402 3403
3403 3404 try:
3404 3405 args = fancyopts.fancyopts(args, globalopts, options)
3405 3406 except fancyopts.getopt.GetoptError, inst:
3406 3407 raise ParseError(None, inst)
3407 3408
3408 3409 if args:
3409 3410 cmd, args = args[0], args[1:]
3410 3411 aliases, i = findcmd(cmd)
3411 3412 cmd = aliases[0]
3412 3413 defaults = ui.config("defaults", cmd)
3413 3414 if defaults:
3414 3415 args = defaults.split() + args
3415 3416 c = list(i[1])
3416 3417 else:
3417 3418 cmd = None
3418 3419 c = []
3419 3420
3420 3421 # combine global options into local
3421 3422 for o in globalopts:
3422 3423 c.append((o[0], o[1], options[o[1]], o[3]))
3423 3424
3424 3425 try:
3425 3426 args = fancyopts.fancyopts(args, c, cmdoptions)
3426 3427 except fancyopts.getopt.GetoptError, inst:
3427 3428 raise ParseError(cmd, inst)
3428 3429
3429 3430 # separate global options back out
3430 3431 for o in globalopts:
3431 3432 n = o[1]
3432 3433 options[n] = cmdoptions[n]
3433 3434 del cmdoptions[n]
3434 3435
3435 3436 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3436 3437
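# Editor's note (not part of the original source): parse() runs fancyopts
# twice, first over the global options and then over the per-command table
# with the globals folded back in.  The [defaults] lookup above means a
# hypothetical hgrc section such as
#
#     [defaults]
#     log = -v -l 10
#
# makes a bare "hg log" behave like "hg log -v -l 10".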
3437 3438 external = {}
3438 3439
3439 3440 def findext(name):
3440 3441 '''return module with given extension name'''
3441 3442 try:
3442 3443 return sys.modules[external[name]]
3443 3444 except KeyError:
3444 3445 for k, v in external.iteritems():
3445 3446 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3446 3447 return sys.modules[v]
3447 3448 raise KeyError(name)
3448 3449
3449 3450 def dispatch(args):
3450 3451 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3451 3452 num = getattr(signal, name, None)
3452 3453 if num: signal.signal(num, catchterm)
3453 3454
3454 3455 try:
3455 3456 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3456 3457 except util.Abort, inst:
3457 3458 sys.stderr.write(_("abort: %s\n") % inst)
3458 3459 return -1
3459 3460
3460 3461 for ext_name, load_from_name in u.extensions():
3461 3462 try:
3462 3463 if load_from_name:
3463 3464 # the module will be loaded in sys.modules
3464 3465 # choose a unique name so that it doesn't
3465 3466 # conflict with other modules
3466 3467 module_name = "hgext_%s" % ext_name.replace('.', '_')
3467 3468 mod = imp.load_source(module_name, load_from_name)
3468 3469 else:
3469 3470 def importh(name):
3470 3471 mod = __import__(name)
3471 3472 components = name.split('.')
3472 3473 for comp in components[1:]:
3473 3474 mod = getattr(mod, comp)
3474 3475 return mod
3475 3476 try:
3476 3477 mod = importh("hgext.%s" % ext_name)
3477 3478 except ImportError:
3478 3479 mod = importh(ext_name)
3479 3480 external[ext_name] = mod.__name__
3480 3481 except (util.SignalInterrupt, KeyboardInterrupt):
3481 3482 raise
3482 3483 except Exception, inst:
3483 3484 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3484 3485 if u.print_exc():
3485 3486 return 1
3486 3487
3487 3488 for name in external.itervalues():
3488 3489 mod = sys.modules[name]
3489 3490 uisetup = getattr(mod, 'uisetup', None)
3490 3491 if uisetup:
3491 3492 uisetup(u)
3492 3493 cmdtable = getattr(mod, 'cmdtable', {})
3493 3494 for t in cmdtable:
3494 3495 if t in table:
3495 3496 u.warn(_("module %s overrides %s\n") % (name, t))
3496 3497 table.update(cmdtable)
3497 3498
3498 3499 try:
3499 3500 cmd, func, args, options, cmdoptions = parse(u, args)
3500 3501 if options["time"]:
3501 3502 def get_times():
3502 3503 t = os.times()
3503 3504 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3504 3505 t = (t[0], t[1], t[2], t[3], time.clock())
3505 3506 return t
3506 3507 s = get_times()
3507 3508 def print_time():
3508 3509 t = get_times()
3509 3510 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3510 3511 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3511 3512 atexit.register(print_time)
3512 3513
3513 3514 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3514 3515 not options["noninteractive"], options["traceback"],
3515 3516 options["config"])
3516 3517
3517 3518 # enter the debugger before command execution
3518 3519 if options['debugger']:
3519 3520 pdb.set_trace()
3520 3521
3521 3522 try:
3522 3523 if options['cwd']:
3523 3524 try:
3524 3525 os.chdir(options['cwd'])
3525 3526 except OSError, inst:
3526 3527 raise util.Abort('%s: %s' %
3527 3528 (options['cwd'], inst.strerror))
3528 3529
3529 3530 path = u.expandpath(options["repository"]) or ""
3530 3531 repo = path and hg.repository(u, path=path) or None
3531 3532
3532 3533 if options['help']:
3533 3534 return help_(u, cmd, options['version'])
3534 3535 elif options['version']:
3535 3536 return show_version(u)
3536 3537 elif not cmd:
3537 3538 return help_(u, 'shortlist')
3538 3539
3539 3540 if cmd not in norepo.split():
3540 3541 try:
3541 3542 if not repo:
3542 3543 repo = hg.repository(u, path=path)
3543 3544 u = repo.ui
3544 3545 for name in external.itervalues():
3545 3546 mod = sys.modules[name]
3546 3547 if hasattr(mod, 'reposetup'):
3547 3548 mod.reposetup(u, repo)
3548 3549 except hg.RepoError:
3549 3550 if cmd not in optionalrepo.split():
3550 3551 raise
3551 3552 d = lambda: func(u, repo, *args, **cmdoptions)
3552 3553 else:
3553 3554 d = lambda: func(u, *args, **cmdoptions)
3554 3555
3555 3556 # reupdate the options, repo/.hg/hgrc may have changed them
3556 3557 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3557 3558 not options["noninteractive"], options["traceback"],
3558 3559 options["config"])
3559 3560
3560 3561 try:
3561 3562 if options['profile']:
3562 3563 import hotshot, hotshot.stats
3563 3564 prof = hotshot.Profile("hg.prof")
3564 3565 try:
3565 3566 try:
3566 3567 return prof.runcall(d)
3567 3568 except:
3568 3569 try:
3569 3570 u.warn(_('exception raised - generating '
3570 3571 'profile anyway\n'))
3571 3572 except:
3572 3573 pass
3573 3574 raise
3574 3575 finally:
3575 3576 prof.close()
3576 3577 stats = hotshot.stats.load("hg.prof")
3577 3578 stats.strip_dirs()
3578 3579 stats.sort_stats('time', 'calls')
3579 3580 stats.print_stats(40)
3580 3581 elif options['lsprof']:
3581 3582 try:
3582 3583 from mercurial import lsprof
3583 3584 except ImportError:
3584 3585 raise util.Abort(_(
3585 3586 'lsprof not available - install from '
3586 3587 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3587 3588 p = lsprof.Profiler()
3588 3589 p.enable(subcalls=True)
3589 3590 try:
3590 3591 return d()
3591 3592 finally:
3592 3593 p.disable()
3593 3594 stats = lsprof.Stats(p.getstats())
3594 3595 stats.sort()
3595 3596 stats.pprint(top=10, file=sys.stderr, climit=5)
3596 3597 else:
3597 3598 return d()
3598 3599 finally:
3599 3600 u.flush()
3600 3601 except:
3601 3602 # enter the debugger when we hit an exception
3602 3603 if options['debugger']:
3603 3604 pdb.post_mortem(sys.exc_info()[2])
3604 3605 u.print_exc()
3605 3606 raise
3606 3607 except ParseError, inst:
3607 3608 if inst.args[0]:
3608 3609 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3609 3610 help_(u, inst.args[0])
3610 3611 else:
3611 3612 u.warn(_("hg: %s\n") % inst.args[1])
3612 3613 help_(u, 'shortlist')
3613 3614 except AmbiguousCommand, inst:
3614 3615 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3615 3616 (inst.args[0], " ".join(inst.args[1])))
3616 3617 except UnknownCommand, inst:
3617 3618 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3618 3619 help_(u, 'shortlist')
3619 3620 except hg.RepoError, inst:
3620 3621 u.warn(_("abort: %s!\n") % inst)
3621 3622 except lock.LockHeld, inst:
3622 3623 if inst.errno == errno.ETIMEDOUT:
3623 3624 reason = _('timed out waiting for lock held by %s') % inst.locker
3624 3625 else:
3625 3626 reason = _('lock held by %s') % inst.locker
3626 3627 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3627 3628 except lock.LockUnavailable, inst:
3628 3629 u.warn(_("abort: could not lock %s: %s\n") %
3629 3630 (inst.desc or inst.filename, inst.strerror))
3630 3631 except revlog.RevlogError, inst:
3631 3632 u.warn(_("abort: "), inst, "!\n")
3632 3633 except util.SignalInterrupt:
3633 3634 u.warn(_("killed!\n"))
3634 3635 except KeyboardInterrupt:
3635 3636 try:
3636 3637 u.warn(_("interrupted!\n"))
3637 3638 except IOError, inst:
3638 3639 if inst.errno == errno.EPIPE:
3639 3640 if u.debugflag:
3640 3641 u.warn(_("\nbroken pipe\n"))
3641 3642 else:
3642 3643 raise
3643 3644 except IOError, inst:
3644 3645 if hasattr(inst, "code"):
3645 3646 u.warn(_("abort: %s\n") % inst)
3646 3647 elif hasattr(inst, "reason"):
3647 3648 u.warn(_("abort: error: %s\n") % inst.reason[1])
3648 3649 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3649 3650 if u.debugflag:
3650 3651 u.warn(_("broken pipe\n"))
3651 3652 elif getattr(inst, "strerror", None):
3652 3653 if getattr(inst, "filename", None):
3653 3654 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3654 3655 else:
3655 3656 u.warn(_("abort: %s\n") % inst.strerror)
3656 3657 else:
3657 3658 raise
3658 3659 except OSError, inst:
3659 3660 if hasattr(inst, "filename"):
3660 3661 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3661 3662 else:
3662 3663 u.warn(_("abort: %s\n") % inst.strerror)
3663 3664 except util.Abort, inst:
3664 3665 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3665 3666 except TypeError, inst:
3666 3667 # was this an argument error?
3667 3668 tb = traceback.extract_tb(sys.exc_info()[2])
3668 3669 if len(tb) > 2: # no, the error is from inside the command
3669 3670 raise
3670 3671 u.debug(inst, "\n")
3671 3672 u.warn(_("%s: invalid arguments\n") % cmd)
3672 3673 help_(u, cmd)
3673 3674 except SystemExit, inst:
3674 3675 # Commands shouldn't sys.exit directly, but give a return code.
3675 3676 # Just in case, catch it here and pass the exit code back to the caller.
3676 3677 return inst.code
3677 3678 except:
3678 3679 u.warn(_("** unknown exception encountered, details follow\n"))
3679 3680 u.warn(_("** report bug details to "
3680 3681 "http://www.selenic.com/mercurial/bts\n"))
3681 3682 u.warn(_("** or mercurial@selenic.com\n"))
3682 3683 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3683 3684 % version.get_version())
3684 3685 raise
3685 3686
3686 3687 return -1
@@ -1,491 +1,491 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "struct os time bisect stat util re errno")
14 14
15 15 class dirstate(object):
16 16 format = ">cllll"
17 17
18 18 def __init__(self, opener, ui, root):
19 19 self.opener = opener
20 20 self.root = root
21 21 self.dirty = 0
22 22 self.ui = ui
23 23 self.map = None
24 24 self.pl = None
25 25 self.copies = {}
26 26 self.ignorefunc = None
27 27 self.blockignore = False
28 28
29 29 def wjoin(self, f):
30 30 return os.path.join(self.root, f)
31 31
32 32 def getcwd(self):
33 33 cwd = os.getcwd()
34 34 if cwd == self.root: return ''
35 35 return cwd[len(self.root) + 1:]
36 36
37 37 def hgignore(self):
38 38 '''return the contents of .hgignore files as a list of patterns.
39 39
40 40 the files parsed for patterns include:
41 41 .hgignore in the repository root
42 42 any additional files specified in the [ui] section of ~/.hgrc
43 43
44 44 trailing white space is dropped.
45 45 the escape character is backslash.
46 46 comments start with #.
47 47 empty lines are skipped.
48 48
49 49 lines can be of the following formats:
50 50
51 51 syntax: regexp # defaults following lines to non-rooted regexps
52 52 syntax: glob # defaults following lines to non-rooted globs
53 53 re:pattern # non-rooted regular expression
54 54 glob:pattern # non-rooted glob
55 55 pattern # pattern of the current default type'''
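# Editor's note (not part of the original source): a hypothetical .hgignore
# using the formats listed in the docstring above could read
#
#     syntax: glob
#     *.pyc
#     build/
#     re:~$
#
# i.e. the "syntax: glob" line switches the default for the two patterns
# that follow it, while the last line is an explicit non-rooted regexp.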
56 56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
57 57 def parselines(fp):
58 58 for line in fp:
59 59 escape = False
60 60 for i in xrange(len(line)):
61 61 if escape: escape = False
62 62 elif line[i] == '\\': escape = True
63 63 elif line[i] == '#': break
64 64 line = line[:i].rstrip()
65 65 if line: yield line
66 66 repoignore = self.wjoin('.hgignore')
67 67 files = [repoignore]
68 68 files.extend(self.ui.hgignorefiles())
69 69 pats = {}
70 70 for f in files:
71 71 try:
72 72 pats[f] = []
73 73 fp = open(f)
74 74 syntax = 'relre:'
75 75 for line in parselines(fp):
76 76 if line.startswith('syntax:'):
77 77 s = line[7:].strip()
78 78 try:
79 79 syntax = syntaxes[s]
80 80 except KeyError:
81 81 self.ui.warn(_("%s: ignoring invalid "
82 82 "syntax '%s'\n") % (f, s))
83 83 continue
84 84 pat = syntax + line
85 85 for s in syntaxes.values():
86 86 if line.startswith(s):
87 87 pat = line
88 88 break
89 89 pats[f].append(pat)
90 90 except IOError, inst:
91 91 if f != repoignore:
92 92 self.ui.warn(_("skipping unreadable ignore file"
93 93 " '%s': %s\n") % (f, inst.strerror))
94 94 return pats
95 95
96 96 def ignore(self, fn):
97 97 '''default match function used by dirstate and
98 98 localrepository. this honours the repository .hgignore file
99 99 and any other files specified in the [ui] section of .hgrc.'''
100 100 if self.blockignore:
101 101 return False
102 102 if not self.ignorefunc:
103 103 ignore = self.hgignore()
104 104 allpats = []
105 105 [allpats.extend(patlist) for patlist in ignore.values()]
106 106 if allpats:
107 107 try:
108 108 files, self.ignorefunc, anypats = (
109 109 util.matcher(self.root, inc=allpats, src='.hgignore'))
110 110 except util.Abort:
111 111 # Re-raise an exception where the src is the right file
112 112 for f, patlist in ignore.items():
113 113 files, self.ignorefunc, anypats = (
114 114 util.matcher(self.root, inc=patlist, src=f))
115 115 else:
116 116 self.ignorefunc = util.never
117 117 return self.ignorefunc(fn)
118 118
119 119 def __del__(self):
120 120 if self.dirty:
121 121 self.write()
122 122
123 123 def __getitem__(self, key):
124 124 try:
125 125 return self.map[key]
126 126 except TypeError:
127 127 self.lazyread()
128 128 return self[key]
129 129
130 130 def __contains__(self, key):
131 131 self.lazyread()
132 132 return key in self.map
133 133
134 134 def parents(self):
135 135 self.lazyread()
136 136 return self.pl
137 137
138 138 def markdirty(self):
139 139 if not self.dirty:
140 140 self.dirty = 1
141 141
142 142 def setparents(self, p1, p2=nullid):
143 143 self.lazyread()
144 144 self.markdirty()
145 145 self.pl = p1, p2
146 146
147 147 def state(self, key):
148 148 try:
149 149 return self[key][0]
150 150 except KeyError:
151 151 return "?"
152 152
153 153 def lazyread(self):
154 154 if self.map is None:
155 155 self.read()
156 156
157 157 def parse(self, st):
158 158 self.pl = [st[:20], st[20: 40]]
159 159
160 160 # deref fields so they will be local in loop
161 161 map = self.map
162 162 copies = self.copies
163 163 format = self.format
164 164 unpack = struct.unpack
165 165
166 166 pos = 40
167 167 e_size = struct.calcsize(format)
168 168
169 169 while pos < len(st):
170 170 newpos = pos + e_size
171 171 e = unpack(format, st[pos:newpos])
172 172 l = e[4]
173 173 pos = newpos
174 174 newpos = pos + l
175 175 f = st[pos:newpos]
176 176 if '\0' in f:
177 177 f, c = f.split('\0')
178 178 copies[f] = c
179 179 map[f] = e[:4]
180 180 pos = newpos
181 181
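# Editor's note (not part of the original source): parse() above and write()
# below define the on-disk dirstate layout: the first 40 bytes hold the two
# parent hashes, then each tracked file contributes
# struct.pack(">cllll", state, mode, size, mtime, len(filename))
# (17 bytes, per struct.calcsize(">cllll")) followed by the filename itself,
# optionally suffixed with "\0<copy source>" for copied files.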
182 182 def read(self):
183 183 self.map = {}
184 184 self.pl = [nullid, nullid]
185 185 try:
186 186 st = self.opener("dirstate").read()
187 187 if st:
188 188 self.parse(st)
189 189 except IOError, err:
190 190 if err.errno != errno.ENOENT: raise
191 191
192 192 def copy(self, source, dest):
193 193 self.lazyread()
194 194 self.markdirty()
195 195 self.copies[dest] = source
196 196
197 197 def copied(self, file):
198 198 return self.copies.get(file, None)
199 199
200 200 def update(self, files, state, **kw):
201 201 ''' current states:
202 202 n normal
203 203 m needs merging
204 204 r marked for removal
205 205 a marked for addition'''
206 206
207 207 if not files: return
208 208 self.lazyread()
209 209 self.markdirty()
210 210 for f in files:
211 211 if state == "r":
212 212 self.map[f] = ('r', 0, 0, 0)
213 213 else:
214 214 s = os.lstat(self.wjoin(f))
215 215 st_size = kw.get('st_size', s.st_size)
216 216 st_mtime = kw.get('st_mtime', s.st_mtime)
217 217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
218 218 if self.copies.has_key(f):
219 219 del self.copies[f]
220 220
221 221 def forget(self, files):
222 222 if not files: return
223 223 self.lazyread()
224 224 self.markdirty()
225 225 for f in files:
226 226 try:
227 227 del self.map[f]
228 228 except KeyError:
229 229 self.ui.warn(_("not in dirstate: %s!\n") % f)
230 230 pass
231 231
232 232 def clear(self):
233 233 self.map = {}
234 234 self.copies = {}
235 235 self.markdirty()
236 236
237 237 def rebuild(self, parent, files):
238 238 self.clear()
239 239 umask = os.umask(0)
240 240 os.umask(umask)
241 for f, mode in files:
242 if mode:
241 for f in files:
242 if files.execf(f):
243 243 self.map[f] = ('n', ~umask, -1, 0)
244 244 else:
245 245 self.map[f] = ('n', ~umask & 0666, -1, 0)
246 246 self.pl = (parent, nullid)
247 247 self.markdirty()
248 248
249 249 def write(self):
250 250 if not self.dirty:
251 251 return
252 252 st = self.opener("dirstate", "w", atomic=True)
253 253 st.write("".join(self.pl))
254 254 for f, e in self.map.items():
255 255 c = self.copied(f)
256 256 if c:
257 257 f = f + "\0" + c
258 258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
259 259 st.write(e + f)
260 260 self.dirty = 0
261 261
262 262 def filterfiles(self, files):
263 263 ret = {}
264 264 unknown = []
265 265
266 266 for x in files:
267 267 if x == '.':
268 268 return self.map.copy()
269 269 if x not in self.map:
270 270 unknown.append(x)
271 271 else:
272 272 ret[x] = self.map[x]
273 273
274 274 if not unknown:
275 275 return ret
276 276
277 277 b = self.map.keys()
278 278 b.sort()
279 279 blen = len(b)
280 280
281 281 for x in unknown:
282 282 bs = bisect.bisect(b, "%s%s" % (x, '/'))
283 283 while bs < blen:
284 284 s = b[bs]
285 285 if len(s) > len(x) and s.startswith(x):
286 286 ret[s] = self.map[s]
287 287 else:
288 288 break
289 289 bs += 1
290 290 return ret
291 291
292 292 def supported_type(self, f, st, verbose=False):
293 293 if stat.S_ISREG(st.st_mode):
294 294 return True
295 295 if verbose:
296 296 kind = 'unknown'
297 297 if stat.S_ISCHR(st.st_mode): kind = _('character device')
298 298 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
299 299 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
300 300 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
301 301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
302 302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
303 303 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
304 304 util.pathto(self.getcwd(), f),
305 305 kind))
306 306 return False
307 307
308 308 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
309 309 badmatch=None):
310 310 self.lazyread()
311 311
312 312 # walk all files by default
313 313 if not files:
314 314 files = [self.root]
315 315 if not dc:
316 316 dc = self.map.copy()
317 317 elif not dc:
318 318 dc = self.filterfiles(files)
319 319
320 320 def statmatch(file_, stat):
321 321 file_ = util.pconvert(file_)
322 322 if not ignored and file_ not in dc and self.ignore(file_):
323 323 return False
324 324 return match(file_)
325 325
326 326 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
327 327 badmatch=badmatch)
328 328
329 329 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
330 330 # filter out the stat
331 331 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
332 332 yield src, f
333 333
334 334 # walk recursively through the directory tree, finding all files
335 335 # matched by the statmatch function
336 336 #
337 337 # results are yielded in a tuple (src, filename, st), where src
338 338 # is one of:
339 339 # 'f' the file was found in the directory tree
340 340 # 'm' the file was only in the dirstate and not in the tree
341 341 # and st is the stat result if the file was found in the directory.
342 342 #
343 343 # dc is an optional arg for the current dirstate. dc is not modified
344 344 # directly by this function, but might be modified by your statmatch call.
345 345 #
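# For illustration (editor's note, not in the original source): walking a
# repo with a tracked file "a.c" present on disk and a tracked file "b.c"
# that has been deleted would, assuming both pass statmatch, yield roughly
# ('f', 'a.c', <lstat result>) for the first and ('m', 'b.c', None) for the
# second.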
346 346 def walkhelper(self, files, statmatch, dc, badmatch=None):
347 347 # self.root may end with a path separator when self.root == '/'
348 348 common_prefix_len = len(self.root)
349 349 if not self.root.endswith('/'):
350 350 common_prefix_len += 1
351 351 # recursion free walker, faster than os.walk.
352 352 def findfiles(s):
353 353 work = [s]
354 354 while work:
355 355 top = work.pop()
356 356 names = os.listdir(top)
357 357 names.sort()
358 358 # nd is the path of this directory relative to the repository root
359 359 nd = util.normpath(top[common_prefix_len:])
360 360 if nd == '.':
361 361 nd = ''
362 362 else:
363 363 # do not recurse into a repo contained in this
364 364 # one. use bisect to find the .hg directory so the check
365 365 # stays fast in big directories.
366 366 hg = bisect.bisect_left(names, '.hg')
367 367 if hg < len(names) and names[hg] == '.hg':
368 368 if os.path.isdir(os.path.join(top, '.hg')):
369 369 continue
370 370 for f in names:
371 371 np = util.pconvert(os.path.join(nd, f))
372 372 if seen(np):
373 373 continue
374 374 p = os.path.join(top, f)
375 375 # don't trip over symlinks
376 376 st = os.lstat(p)
377 377 if stat.S_ISDIR(st.st_mode):
378 378 ds = os.path.join(nd, f +'/')
379 379 if statmatch(ds, st):
380 380 work.append(p)
381 381 if statmatch(np, st) and np in dc:
382 382 yield 'm', np, st
383 383 elif statmatch(np, st):
384 384 if self.supported_type(np, st):
385 385 yield 'f', np, st
386 386 elif np in dc:
387 387 yield 'm', np, st
388 388
389 389 known = {'.hg': 1}
390 390 def seen(fn):
391 391 if fn in known: return True
392 392 known[fn] = 1
393 393
394 394 # step one, find all files that match our criteria
395 395 files.sort()
396 396 for ff in util.unique(files):
397 397 f = self.wjoin(ff)
398 398 try:
399 399 st = os.lstat(f)
400 400 except OSError, inst:
401 401 nf = util.normpath(ff)
402 402 found = False
403 403 for fn in dc:
404 404 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
405 405 found = True
406 406 break
407 407 if not found:
408 408 if inst.errno != errno.ENOENT or not badmatch:
409 409 self.ui.warn('%s: %s\n' % (
410 410 util.pathto(self.getcwd(), ff),
411 411 inst.strerror))
412 412 elif badmatch and badmatch(ff) and statmatch(ff, None):
413 413 yield 'b', ff, None
414 414 continue
415 415 if stat.S_ISDIR(st.st_mode):
416 416 cmp1 = (lambda x, y: cmp(x[1], y[1]))
417 417 sorted_ = list(findfiles(f))
418 418 sorted_.sort(cmp1)
419 419 for e in sorted_:
420 420 yield e
421 421 else:
422 422 ff = util.normpath(ff)
423 423 if seen(ff):
424 424 continue
425 425 self.blockignore = True
426 426 if statmatch(ff, st):
427 427 if self.supported_type(ff, st, verbose=True):
428 428 yield 'f', ff, st
429 429 elif ff in dc:
430 430 yield 'm', ff, st
431 431 self.blockignore = False
432 432
433 433 # step two run through anything left in the dc hash and yield
434 434 # if we haven't already seen it
435 435 ks = dc.keys()
436 436 ks.sort()
437 437 for k in ks:
438 438 if not seen(k) and (statmatch(k, None)):
439 439 yield 'm', k, None
440 440
441 441 def status(self, files=None, match=util.always, list_ignored=False,
442 442 list_clean=False):
443 443 lookup, modified, added, unknown, ignored = [], [], [], [], []
444 444 removed, deleted, clean = [], [], []
445 445
446 446 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
447 447 try:
448 448 type_, mode, size, time = self[fn]
449 449 except KeyError:
450 450 if list_ignored and self.ignore(fn):
451 451 ignored.append(fn)
452 452 else:
453 453 unknown.append(fn)
454 454 continue
455 455 if src == 'm':
456 456 nonexistent = True
457 457 if not st:
458 458 try:
459 459 st = os.lstat(self.wjoin(fn))
460 460 except OSError, inst:
461 461 if inst.errno != errno.ENOENT:
462 462 raise
463 463 st = None
464 464 # We need to re-check that it is a valid file
465 465 if st and self.supported_type(fn, st):
466 466 nonexistent = False
467 467 # XXX: what to do with files no longer present in the fs
468 468 # that are not marked as removed in the dirstate?
469 469 if nonexistent and type_ in "nm":
470 470 deleted.append(fn)
471 471 continue
472 472 # check the common case first
473 473 if type_ == 'n':
474 474 if not st:
475 475 st = os.lstat(self.wjoin(fn))
476 476 if size >= 0 and (size != st.st_size
477 477 or (mode ^ st.st_mode) & 0100):
478 478 modified.append(fn)
479 479 elif time != st.st_mtime:
480 480 lookup.append(fn)
481 481 elif list_clean:
482 482 clean.append(fn)
483 483 elif type_ == 'm':
484 484 modified.append(fn)
485 485 elif type_ == 'a':
486 486 added.append(fn)
487 487 elif type_ == 'r':
488 488 removed.append(fn)
489 489
490 490 return (lookup, modified, added, removed, deleted, unknown, ignored,
491 491 clean)
@@ -1,1765 +1,1764 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ()
19 19
20 20 def __del__(self):
21 21 self.transhandle = None
22 22 def __init__(self, parentui, path=None, create=0):
23 23 repo.repository.__init__(self)
24 24 if not path:
25 25 p = os.getcwd()
26 26 while not os.path.isdir(os.path.join(p, ".hg")):
27 27 oldp = p
28 28 p = os.path.dirname(p)
29 29 if p == oldp:
30 30 raise repo.RepoError(_("no repo found"))
31 31 path = p
32 32 self.path = os.path.join(path, ".hg")
33 33
34 34 if not create and not os.path.isdir(self.path):
35 35 raise repo.RepoError(_("repository %s not found") % path)
36 36
37 37 self.root = os.path.abspath(path)
38 38 self.origroot = path
39 39 self.ui = ui.ui(parentui=parentui)
40 40 self.opener = util.opener(self.path)
41 41 self.wopener = util.opener(self.root)
42 42
43 43 try:
44 44 self.ui.readconfig(self.join("hgrc"), self.root)
45 45 except IOError:
46 46 pass
47 47
48 48 v = self.ui.revlogopts
49 49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 51 fl = v.get('flags', None)
52 52 flags = 0
53 53 if fl != None:
54 54 for x in fl.split():
55 55 flags |= revlog.flagstr(x)
56 56 elif self.revlogv1:
57 57 flags = revlog.REVLOG_DEFAULT_FLAGS
58 58
59 59 v = self.revlogversion | flags
60 60 self.manifest = manifest.manifest(self.opener, v)
61 61 self.changelog = changelog.changelog(self.opener, v)
62 62
63 63 # the changelog might not have the inline index flag
64 64 # on. If the format of the changelog is the same as found in
65 65 # .hgrc, apply any flags found in the .hgrc as well.
66 66 # Otherwise, just use the version from the changelog.
67 67 v = self.changelog.version
68 68 if v == self.revlogversion:
69 69 v |= flags
70 70 self.revlogversion = v
71 71
72 72 self.tagscache = None
73 73 self.nodetagscache = None
74 74 self.encodepats = None
75 75 self.decodepats = None
76 76 self.transhandle = None
77 77
78 78 if create:
79 79 if not os.path.exists(path):
80 80 os.mkdir(path)
81 81 os.mkdir(self.path)
82 82 os.mkdir(self.join("data"))
83 83
84 84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85 85
86 86 def url(self):
87 87 return 'file:' + self.root
88 88
89 89 def hook(self, name, throw=False, **args):
90 90 def callhook(hname, funcname):
91 91 '''call python hook. hook is callable object, looked up as
92 92 name in python module. if callable returns "true", hook
93 93 fails, else passes. if hook raises exception, treated as
94 94 hook failure. exception propagates if throw is "true".
95 95
96 96 reason for "true" meaning "hook failed" is so that
97 97 unmodified commands (e.g. mercurial.commands.update) can
98 98 be run as hooks without wrappers to convert return values.'''
99 99
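# Editor's note (not part of the original source): callhook() is reached for
# hook values carrying a "python:" prefix (see the dispatch loop at the end
# of hook()).  A hypothetical hgrc entry
#
#     [hooks]
#     pretag.check = python:mymodule.checktag
#
# imports "mymodule", resolves "checktag", and calls it with ui=, repo=,
# hooktype="pretag" and the extra keyword args; a true return value counts
# as failure, as the docstring above explains.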
100 100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 101 d = funcname.rfind('.')
102 102 if d == -1:
103 103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 104 % (hname, funcname))
105 105 modname = funcname[:d]
106 106 try:
107 107 obj = __import__(modname)
108 108 except ImportError:
109 109 try:
110 110 # extensions are loaded with hgext_ prefix
111 111 obj = __import__("hgext_%s" % modname)
112 112 except ImportError:
113 113 raise util.Abort(_('%s hook is invalid '
114 114 '(import of "%s" failed)') %
115 115 (hname, modname))
116 116 try:
117 117 for p in funcname.split('.')[1:]:
118 118 obj = getattr(obj, p)
119 119 except AttributeError, err:
120 120 raise util.Abort(_('%s hook is invalid '
121 121 '("%s" is not defined)') %
122 122 (hname, funcname))
123 123 if not callable(obj):
124 124 raise util.Abort(_('%s hook is invalid '
125 125 '("%s" is not callable)') %
126 126 (hname, funcname))
127 127 try:
128 128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 129 except (KeyboardInterrupt, util.SignalInterrupt):
130 130 raise
131 131 except Exception, exc:
132 132 if isinstance(exc, util.Abort):
133 133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 134 (hname, exc.args[0] % exc.args[1:]))
135 135 else:
136 136 self.ui.warn(_('error: %s hook raised an exception: '
137 137 '%s\n') % (hname, exc))
138 138 if throw:
139 139 raise
140 140 self.ui.print_exc()
141 141 return True
142 142 if r:
143 143 if throw:
144 144 raise util.Abort(_('%s hook failed') % hname)
145 145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 146 return r
147 147
148 148 def runhook(name, cmd):
149 149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 151 r = util.system(cmd, environ=env, cwd=self.root)
152 152 if r:
153 153 desc, r = util.explain_exit(r)
154 154 if throw:
155 155 raise util.Abort(_('%s hook %s') % (name, desc))
156 156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 157 return r
158 158
159 159 r = False
160 160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 161 if hname.split(".", 1)[0] == name and cmd]
162 162 hooks.sort()
163 163 for hname, cmd in hooks:
164 164 if cmd.startswith('python:'):
165 165 r = callhook(hname, cmd[7:].strip()) or r
166 166 else:
167 167 r = runhook(hname, cmd) or r
168 168 return r
169 169
170 170 tag_disallowed = ':\r\n'
171 171
172 172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 173 '''tag a revision with a symbolic name.
174 174
175 175 if local is True, the tag is stored in a per-repository file.
176 176 otherwise, it is stored in the .hgtags file, and a new
177 177 changeset is committed with the change.
178 178
179 179 keyword arguments:
180 180
181 181 local: whether to store tag in non-version-controlled file
182 182 (default False)
183 183
184 184 message: commit message to use if committing
185 185
186 186 user: name of user to use if committing
187 187
188 188 date: date tuple to use if committing'''
189 189
190 190 for c in self.tag_disallowed:
191 191 if c in name:
192 192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193 193
194 194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195 195
196 196 if local:
197 197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 198 self.hook('tag', node=node, tag=name, local=local)
199 199 return
200 200
201 201 for x in self.changes():
202 202 if '.hgtags' in x:
203 203 raise util.Abort(_('working copy of .hgtags is changed '
204 204 '(please commit .hgtags manually)'))
205 205
206 206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 207 if self.dirstate.state('.hgtags') == '?':
208 208 self.add(['.hgtags'])
209 209
210 210 if not message:
211 211 message = _('Added tag %s for changeset %s') % (name, node)
212 212
213 213 self.commit(['.hgtags'], message, user, date)
214 214 self.hook('tag', node=node, tag=name, local=local)
215 215
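A small usage sketch of tag(), assuming a repository opened through the usual hg.repository() entry point; the tag names are made up. Note that node is passed as a 40-character hex string, which is what ends up in .hgtags or .hg/localtags.

    from mercurial import ui, hg
    from mercurial.node import hex

    u = ui.ui()
    repo = hg.repository(u, '.')
    tipnode = hex(repo.changelog.tip())
    repo.tag('nightly', tipnode, local=True)              # .hg/localtags, no commit
    repo.tag('v0.1', tipnode, message='Added tag v0.1')   # writes and commits .hgtags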
216 216 def tags(self):
217 217 '''return a mapping of tag to node'''
218 218 if not self.tagscache:
219 219 self.tagscache = {}
220 220
221 221 def parsetag(line, context):
222 222 if not line:
223 223 return
224 224 s = line.split(" ", 1)
225 225 if len(s) != 2:
226 226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 227 return
228 228 node, key = s
229 229 key = key.strip()
230 230 try:
231 231 bin_n = bin(node)
232 232 except TypeError:
233 233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 234 (context, node))
235 235 return
236 236 if bin_n not in self.changelog.nodemap:
237 237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 238 (context, key))
239 239 return
240 240 self.tagscache[key] = bin_n
241 241
242 242 # read the tags file from each head, ending with the tip,
243 243 # and add each tag found to the map, with "newer" ones
244 244 # taking precedence
245 245 heads = self.heads()
246 246 heads.reverse()
247 247 fl = self.file(".hgtags")
248 248 for node in heads:
249 249 change = self.changelog.read(node)
250 250 rev = self.changelog.rev(node)
251 251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 252 if fn is None: continue
253 253 count = 0
254 254 for l in fl.read(fn).splitlines():
255 255 count += 1
256 256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 257 (rev, short(node), count))
258 258 try:
259 259 f = self.opener("localtags")
260 260 count = 0
261 261 for l in f:
262 262 count += 1
263 263 parsetag(l, _("localtags, line %d") % count)
264 264 except IOError:
265 265 pass
266 266
267 267 self.tagscache['tip'] = self.changelog.tip()
268 268
269 269 return self.tagscache
270 270
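Both .hgtags and .hg/localtags, as parsed by parsetag() above, are plain text files with one "<40-hex changeset node> <tag name>" entry per line. A tiny sketch of that format (the hashes are fabricated):

    sample = ("3a4f2e9c0b1d8e7f6a5b4c3d2e1f0a9b8c7d6e5f v0.1\n"
              "0123456789abcdef0123456789abcdef01234567 v0.2\n")
    for line in sample.splitlines():
        node, name = line.split(" ", 1)      # same split parsetag() uses
        print name.strip(), "->", node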
271 271 def tagslist(self):
272 272 '''return a list of tags ordered by revision'''
273 273 l = []
274 274 for t, n in self.tags().items():
275 275 try:
276 276 r = self.changelog.rev(n)
277 277 except:
278 278 r = -2 # sort to the beginning of the list if unknown
279 279 l.append((r, t, n))
280 280 l.sort()
281 281 return [(t, n) for r, t, n in l]
282 282
283 283 def nodetags(self, node):
284 284 '''return the tags associated with a node'''
285 285 if not self.nodetagscache:
286 286 self.nodetagscache = {}
287 287 for t, n in self.tags().items():
288 288 self.nodetagscache.setdefault(n, []).append(t)
289 289 return self.nodetagscache.get(node, [])
290 290
291 291 def lookup(self, key):
292 292 try:
293 293 return self.tags()[key]
294 294 except KeyError:
295 295 if key == '.':
296 296 key = self.dirstate.parents()[0]
297 297 if key == nullid:
298 298 raise repo.RepoError(_("no revision checked out"))
299 299 try:
300 300 return self.changelog.lookup(key)
301 301 except:
302 302 raise repo.RepoError(_("unknown revision '%s'") % key)
303 303
304 304 def dev(self):
305 305 return os.lstat(self.path).st_dev
306 306
307 307 def local(self):
308 308 return True
309 309
310 310 def join(self, f):
311 311 return os.path.join(self.path, f)
312 312
313 313 def wjoin(self, f):
314 314 return os.path.join(self.root, f)
315 315
316 316 def file(self, f):
317 317 if f[0] == '/':
318 318 f = f[1:]
319 319 return filelog.filelog(self.opener, f, self.revlogversion)
320 320
321 321 def changectx(self, changeid):
322 322 return context.changectx(self, changeid)
323 323
324 324 def filectx(self, path, changeid=None, fileid=None):
325 325 """changeid can be a changeset revision, node, or tag.
326 326 fileid can be a file revision or node."""
327 327 return context.filectx(self, path, changeid, fileid)
328 328
329 329 def getcwd(self):
330 330 return self.dirstate.getcwd()
331 331
332 332 def wfile(self, f, mode='r'):
333 333 return self.wopener(f, mode)
334 334
335 335 def wread(self, filename):
336 336 if self.encodepats == None:
337 337 l = []
338 338 for pat, cmd in self.ui.configitems("encode"):
339 339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 340 l.append((mf, cmd))
341 341 self.encodepats = l
342 342
343 343 data = self.wopener(filename, 'r').read()
344 344
345 345 for mf, cmd in self.encodepats:
346 346 if mf(filename):
347 347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 348 data = util.filter(data, cmd)
349 349 break
350 350
351 351 return data
352 352
353 353 def wwrite(self, filename, data, fd=None):
354 354 if self.decodepats == None:
355 355 l = []
356 356 for pat, cmd in self.ui.configitems("decode"):
357 357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 358 l.append((mf, cmd))
359 359 self.decodepats = l
360 360
361 361 for mf, cmd in self.decodepats:
362 362 if mf(filename):
363 363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 364 data = util.filter(data, cmd)
365 365 break
366 366
367 367 if fd:
368 368 return fd.write(data)
369 369 return self.wopener(filename, 'w').write(data)
370 370
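wread() and wwrite() above pipe file data through the first command whose pattern matches, taken from the [encode] and [decode] hgrc sections respectively. The standalone sketch below only illustrates that pattern-to-command selection; fnmatch stands in for util.matcher, and the patterns and commands are hypothetical.

    import fnmatch

    def pick_filter(filename, patterns):
        # patterns: (glob, shell command) pairs, e.g. from ui.configitems("encode")
        for pat, cmd in patterns:
            if fnmatch.fnmatch(filename, pat):
                return cmd        # wread()/wwrite() pipe the data through this command
        return None

    print pick_filter('docs/notes.txt', [('*.gz', 'gunzip'), ('*.txt', "tr -d '\\r'")])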
371 371 def transaction(self):
372 372 tr = self.transhandle
373 373 if tr != None and tr.running():
374 374 return tr.nest()
375 375
376 376 # save dirstate for rollback
377 377 try:
378 378 ds = self.opener("dirstate").read()
379 379 except IOError:
380 380 ds = ""
381 381 self.opener("journal.dirstate", "w").write(ds)
382 382
383 383 tr = transaction.transaction(self.ui.warn, self.opener,
384 384 self.join("journal"),
385 385 aftertrans(self.path))
386 386 self.transhandle = tr
387 387 return tr
388 388
389 389 def recover(self):
390 390 l = self.lock()
391 391 if os.path.exists(self.join("journal")):
392 392 self.ui.status(_("rolling back interrupted transaction\n"))
393 393 transaction.rollback(self.opener, self.join("journal"))
394 394 self.reload()
395 395 return True
396 396 else:
397 397 self.ui.warn(_("no interrupted transaction available\n"))
398 398 return False
399 399
400 400 def rollback(self, wlock=None):
401 401 if not wlock:
402 402 wlock = self.wlock()
403 403 l = self.lock()
404 404 if os.path.exists(self.join("undo")):
405 405 self.ui.status(_("rolling back last transaction\n"))
406 406 transaction.rollback(self.opener, self.join("undo"))
407 407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
408 408 self.reload()
409 409 self.wreload()
410 410 else:
411 411 self.ui.warn(_("no rollback information available\n"))
412 412
413 413 def wreload(self):
414 414 self.dirstate.read()
415 415
416 416 def reload(self):
417 417 self.changelog.load()
418 418 self.manifest.load()
419 419 self.tagscache = None
420 420 self.nodetagscache = None
421 421
422 422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
423 423 desc=None):
424 424 try:
425 425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
426 426 except lock.LockHeld, inst:
427 427 if not wait:
428 428 raise
429 429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
430 430 (desc, inst.args[0]))
431 431 # default to 600 seconds timeout
432 432 l = lock.lock(self.join(lockname),
433 433 int(self.ui.config("ui", "timeout") or 600),
434 434 releasefn, desc=desc)
435 435 if acquirefn:
436 436 acquirefn()
437 437 return l
438 438
439 439 def lock(self, wait=1):
440 440 return self.do_lock("lock", wait, acquirefn=self.reload,
441 441 desc=_('repository %s') % self.origroot)
442 442
443 443 def wlock(self, wait=1):
444 444 return self.do_lock("wlock", wait, self.dirstate.write,
445 445 self.wreload,
446 446 desc=_('working directory of %s') % self.origroot)
447 447
448 448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
449 449 "determine whether a new filenode is needed"
450 450 fp1 = manifest1.get(filename, nullid)
451 451 fp2 = manifest2.get(filename, nullid)
452 452
453 453 if fp2 != nullid:
454 454 # is one parent an ancestor of the other?
455 455 fpa = filelog.ancestor(fp1, fp2)
456 456 if fpa == fp1:
457 457 fp1, fp2 = fp2, nullid
458 458 elif fpa == fp2:
459 459 fp2 = nullid
460 460
461 461 # is the file unmodified from the parent? report existing entry
462 462 if fp2 == nullid and text == filelog.read(fp1):
463 463 return (fp1, None, None)
464 464
465 465 return (None, fp1, fp2)
466 466
467 467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
468 468 orig_parent = self.dirstate.parents()[0] or nullid
469 469 p1 = p1 or self.dirstate.parents()[0] or nullid
470 470 p2 = p2 or self.dirstate.parents()[1] or nullid
471 471 c1 = self.changelog.read(p1)
472 472 c2 = self.changelog.read(p2)
473 473 m1 = self.manifest.read(c1[0])
474 474 mf1 = self.manifest.readflags(c1[0])
475 475 m2 = self.manifest.read(c2[0])
476 476 changed = []
477 477
478 478 if orig_parent == p1:
479 479 update_dirstate = 1
480 480 else:
481 481 update_dirstate = 0
482 482
483 483 if not wlock:
484 484 wlock = self.wlock()
485 485 l = self.lock()
486 486 tr = self.transaction()
487 487 mm = m1.copy()
488 488 mfm = mf1.copy()
489 489 linkrev = self.changelog.count()
490 490 for f in files:
491 491 try:
492 492 t = self.wread(f)
493 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
493 mfm.set(f, util.is_exec(self.wjoin(f), mfm.execf(f)))
494 494 r = self.file(f)
495 mfm[f] = tm
496 495
497 496 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
498 497 if entry:
499 498 mm[f] = entry
500 499 continue
501 500
502 501 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
503 502 changed.append(f)
504 503 if update_dirstate:
505 504 self.dirstate.update([f], "n")
506 505 except IOError:
507 506 try:
508 507 del mm[f]
509 508 del mfm[f]
510 509 if update_dirstate:
511 510 self.dirstate.forget([f])
512 511 except:
513 512 # deleted from p2?
514 513 pass
515 514
516 515 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
517 516 user = user or self.ui.username()
518 517 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
519 518 tr.close()
520 519 if update_dirstate:
521 520 self.dirstate.setparents(n, nullid)
522 521
523 522 def commit(self, files=None, text="", user=None, date=None,
524 523 match=util.always, force=False, lock=None, wlock=None,
525 524 force_editor=False):
526 525 commit = []
527 526 remove = []
528 527 changed = []
529 528
530 529 if files:
531 530 for f in files:
532 531 s = self.dirstate.state(f)
533 532 if s in 'nmai':
534 533 commit.append(f)
535 534 elif s == 'r':
536 535 remove.append(f)
537 536 else:
538 537 self.ui.warn(_("%s not tracked!\n") % f)
539 538 else:
540 539 modified, added, removed, deleted, unknown = self.changes(match=match)
541 540 commit = modified + added
542 541 remove = removed
543 542
544 543 p1, p2 = self.dirstate.parents()
545 544 c1 = self.changelog.read(p1)
546 545 c2 = self.changelog.read(p2)
547 546 m1 = self.manifest.read(c1[0])
548 547 mf1 = self.manifest.readflags(c1[0])
549 548 m2 = self.manifest.read(c2[0])
550 549
551 550 if not commit and not remove and not force and p2 == nullid:
552 551 self.ui.status(_("nothing changed\n"))
553 552 return None
554 553
555 554 xp1 = hex(p1)
556 555 if p2 == nullid: xp2 = ''
557 556 else: xp2 = hex(p2)
558 557
559 558 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
560 559
561 560 if not wlock:
562 561 wlock = self.wlock()
563 562 if not lock:
564 563 lock = self.lock()
565 564 tr = self.transaction()
566 565
567 566 # check in files
568 567 new = {}
569 568 linkrev = self.changelog.count()
570 569 commit.sort()
571 570 for f in commit:
572 571 self.ui.note(f + "\n")
573 572 try:
574 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
573 mf1.set(f, util.is_exec(self.wjoin(f), mf1.execf(f)))
575 574 t = self.wread(f)
576 575 except IOError:
577 576 self.ui.warn(_("trouble committing %s!\n") % f)
578 577 raise
579 578
580 579 r = self.file(f)
581 580
582 581 meta = {}
583 582 cp = self.dirstate.copied(f)
584 583 if cp:
585 584 meta["copy"] = cp
586 585 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
587 586 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
588 587 fp1, fp2 = nullid, nullid
589 588 else:
590 589 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
591 590 if entry:
592 591 new[f] = entry
593 592 continue
594 593
595 594 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
596 595 # remember what we've added so that we can later calculate
597 596 # the files to pull from a set of changesets
598 597 changed.append(f)
599 598
600 599 # update manifest
601 600 m1 = m1.copy()
602 601 m1.update(new)
603 602 for f in remove:
604 603 if f in m1:
605 604 del m1[f]
606 605 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
607 606 (new, remove))
608 607
609 608 # add changeset
610 609 new = new.keys()
611 610 new.sort()
612 611
613 612 user = user or self.ui.username()
614 613 if not text or force_editor:
615 614 edittext = []
616 615 if text:
617 616 edittext.append(text)
618 617 edittext.append("")
619 618 if p2 != nullid:
620 619 edittext.append("HG: branch merge")
621 620 edittext.extend(["HG: changed %s" % f for f in changed])
622 621 edittext.extend(["HG: removed %s" % f for f in remove])
623 622 if not changed and not remove:
624 623 edittext.append("HG: no files changed")
625 624 edittext.append("")
626 625 # run editor in the repository root
627 626 olddir = os.getcwd()
628 627 os.chdir(self.root)
629 628 text = self.ui.edit("\n".join(edittext), user)
630 629 os.chdir(olddir)
631 630
632 631 lines = [line.rstrip() for line in text.rstrip().splitlines()]
633 632 while lines and not lines[0]:
634 633 del lines[0]
635 634 if not lines:
636 635 return None
637 636 text = '\n'.join(lines)
638 637 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
639 638 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
640 639 parent2=xp2)
641 640 tr.close()
642 641
643 642 self.dirstate.setparents(n)
644 643 self.dirstate.update(new, "n")
645 644 self.dirstate.forget(remove)
646 645
647 646 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
648 647 return n
649 648
650 649 def walk(self, node=None, files=[], match=util.always, badmatch=None):
651 650 if node:
652 651 fdict = dict.fromkeys(files)
653 652 for fn in self.manifest.read(self.changelog.read(node)[0]):
654 653 fdict.pop(fn, None)
655 654 if match(fn):
656 655 yield 'm', fn
657 656 for fn in fdict:
658 657 if badmatch and badmatch(fn):
659 658 if match(fn):
660 659 yield 'b', fn
661 660 else:
662 661 self.ui.warn(_('%s: No such file in rev %s\n') % (
663 662 util.pathto(self.getcwd(), fn), short(node)))
664 663 else:
665 664 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
666 665 yield src, fn
667 666
668 667 def status(self, node1=None, node2=None, files=[], match=util.always,
669 668 wlock=None, list_ignored=False, list_clean=False):
670 669 """return status of files between two nodes or node and working directory
671 670
672 671 If node1 is None, use the first dirstate parent instead.
673 672 If node2 is None, compare node1 with working directory.
674 673 """
675 674
676 675 def fcmp(fn, mf):
677 676 t1 = self.wread(fn)
678 677 t2 = self.file(fn).read(mf.get(fn, nullid))
679 678 return cmp(t1, t2)
680 679
681 680 def mfmatches(node):
682 681 change = self.changelog.read(node)
683 682 mf = dict(self.manifest.read(change[0]))
684 683 for fn in mf.keys():
685 684 if not match(fn):
686 685 del mf[fn]
687 686 return mf
688 687
689 688 modified, added, removed, deleted, unknown = [], [], [], [], []
690 689 ignored, clean = [], []
691 690
692 691 compareworking = False
693 692 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
694 693 compareworking = True
695 694
696 695 if not compareworking:
697 696 # read the manifest from node1 before the manifest from node2,
698 697 # so that we'll hit the manifest cache if we're going through
699 698 # all the revisions in parent->child order.
700 699 mf1 = mfmatches(node1)
701 700
702 701 # are we comparing the working directory?
703 702 if not node2:
704 703 if not wlock:
705 704 try:
706 705 wlock = self.wlock(wait=0)
707 706 except lock.LockException:
708 707 wlock = None
709 708 (lookup, modified, added, removed, deleted, unknown,
710 709 ignored, clean) = self.dirstate.status(files, match,
711 710 list_ignored, list_clean)
712 711
713 712 # are we comparing working dir against its parent?
714 713 if compareworking:
715 714 if lookup:
716 715 # do a full compare of any files that might have changed
717 716 mf2 = mfmatches(self.dirstate.parents()[0])
718 717 for f in lookup:
719 718 if fcmp(f, mf2):
720 719 modified.append(f)
721 720 elif wlock is not None:
722 721 self.dirstate.update([f], "n")
723 722 else:
724 723 # we are comparing working dir against non-parent
725 724 # generate a pseudo-manifest for the working dir
726 725 mf2 = mfmatches(self.dirstate.parents()[0])
727 726 for f in lookup + modified + added:
728 727 mf2[f] = ""
729 728 for f in removed:
730 729 if f in mf2:
731 730 del mf2[f]
732 731 else:
733 732 # we are comparing two revisions
734 733 mf2 = mfmatches(node2)
735 734
736 735 if not compareworking:
737 736 # flush lists from dirstate before comparing manifests
738 737 modified, added, clean = [], [], []
739 738
740 739 # make sure to sort the files so we talk to the disk in a
741 740 # reasonable order
742 741 mf2keys = mf2.keys()
743 742 mf2keys.sort()
744 743 for fn in mf2keys:
745 744 if mf1.has_key(fn):
746 745 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
747 746 modified.append(fn)
748 747 elif list_clean:
749 748 clean.append(fn)
750 749 del mf1[fn]
751 750 else:
752 751 added.append(fn)
753 752
754 753 removed = mf1.keys()
755 754
756 755 # sort and return results:
757 756 for l in modified, added, removed, deleted, unknown, ignored, clean:
758 757 l.sort()
759 758 return (modified, added, removed, deleted, unknown, ignored, clean)
760 759
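A usage sketch of status(), assuming repo is a localrepository as in the earlier examples. It always returns the seven lists in this order; ignored and clean stay empty unless the corresponding list_* flags are passed.

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(list_ignored=True, list_clean=True)
    for prefix, names in zip('MARD?IC', (modified, added, removed, deleted,
                                         unknown, ignored, clean)):
        for f in names:
            print prefix, f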
761 760 def changes(self, node1=None, node2=None, files=[], match=util.always,
762 761 wlock=None, list_ignored=False, list_clean=False):
763 762 '''DEPRECATED - use status instead'''
764 763 marduit = self.status(node1, node2, files, match, wlock,
765 764 list_ignored, list_clean)
766 765 if list_ignored:
767 766 return marduit[:-1]
768 767 else:
769 768 return marduit[:-2]
770 769
771 770 def add(self, list, wlock=None):
772 771 if not wlock:
773 772 wlock = self.wlock()
774 773 for f in list:
775 774 p = self.wjoin(f)
776 775 if not os.path.exists(p):
777 776 self.ui.warn(_("%s does not exist!\n") % f)
778 777 elif not os.path.isfile(p):
779 778 self.ui.warn(_("%s not added: only files supported currently\n")
780 779 % f)
781 780 elif self.dirstate.state(f) in 'an':
782 781 self.ui.warn(_("%s already tracked!\n") % f)
783 782 else:
784 783 self.dirstate.update([f], "a")
785 784
786 785 def forget(self, list, wlock=None):
787 786 if not wlock:
788 787 wlock = self.wlock()
789 788 for f in list:
790 789 if self.dirstate.state(f) not in 'ai':
791 790 self.ui.warn(_("%s not added!\n") % f)
792 791 else:
793 792 self.dirstate.forget([f])
794 793
795 794 def remove(self, list, unlink=False, wlock=None):
796 795 if unlink:
797 796 for f in list:
798 797 try:
799 798 util.unlink(self.wjoin(f))
800 799 except OSError, inst:
801 800 if inst.errno != errno.ENOENT:
802 801 raise
803 802 if not wlock:
804 803 wlock = self.wlock()
805 804 for f in list:
806 805 p = self.wjoin(f)
807 806 if os.path.exists(p):
808 807 self.ui.warn(_("%s still exists!\n") % f)
809 808 elif self.dirstate.state(f) == 'a':
810 809 self.dirstate.forget([f])
811 810 elif f not in self.dirstate:
812 811 self.ui.warn(_("%s not tracked!\n") % f)
813 812 else:
814 813 self.dirstate.update([f], "r")
815 814
816 815 def undelete(self, list, wlock=None):
817 816 p = self.dirstate.parents()[0]
818 817 mn = self.changelog.read(p)[0]
819 818 mf = self.manifest.readflags(mn)
820 819 m = self.manifest.read(mn)
821 820 if not wlock:
822 821 wlock = self.wlock()
823 822 for f in list:
824 823 if self.dirstate.state(f) not in "r":
825 824 self.ui.warn("%s not removed!\n" % f)
826 825 else:
827 826 t = self.file(f).read(m[f])
828 827 self.wwrite(f, t)
829 util.set_exec(self.wjoin(f), mf[f])
828 util.set_exec(self.wjoin(f), mf.execf(f))
830 829 self.dirstate.update([f], "n")
831 830
832 831 def copy(self, source, dest, wlock=None):
833 832 p = self.wjoin(dest)
834 833 if not os.path.exists(p):
835 834 self.ui.warn(_("%s does not exist!\n") % dest)
836 835 elif not os.path.isfile(p):
837 836 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
838 837 else:
839 838 if not wlock:
840 839 wlock = self.wlock()
841 840 if self.dirstate.state(dest) == '?':
842 841 self.dirstate.update([dest], "a")
843 842 self.dirstate.copy(source, dest)
844 843
845 844 def heads(self, start=None):
846 845 heads = self.changelog.heads(start)
847 846 # sort the output in rev descending order
848 847 heads = [(-self.changelog.rev(h), h) for h in heads]
849 848 heads.sort()
850 849 return [n for (r, n) in heads]
851 850
852 851 # branchlookup returns a dict giving a list of branches for
853 852 # each head. A branch is defined as the tag of a node or
854 853 # the branch of the node's parents. If a node has multiple
855 854 # branch tags, tags are eliminated if they are visible from other
856 855 # branch tags.
857 856 #
858 857 # So, for this graph: a->b->c->d->e
859 858 # \ /
860 859 # aa -----/
861 860 # a has tag 2.6.12
862 861 # d has tag 2.6.13
863 862 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
864 863 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
865 864 # from the list.
866 865 #
867 866 # It is possible that more than one head will have the same branch tag.
868 867 # callers need to check the result for multiple heads under the same
869 868 # branch tag if that is a problem for them (ie checkout of a specific
870 869 # branch).
871 870 #
872 871 # passing in a specific branch will limit the depth of the search
873 872 # through the parents. It won't limit the branches returned in the
874 873 # result though.
875 874 def branchlookup(self, heads=None, branch=None):
876 875 if not heads:
877 876 heads = self.heads()
878 877 headt = [ h for h in heads ]
879 878 chlog = self.changelog
880 879 branches = {}
881 880 merges = []
882 881 seenmerge = {}
883 882
884 883 # traverse the tree once for each head, recording in the branches
885 884 # dict which tags are visible from this head. The branches
886 885 # dict also records which tags are visible from each tag
887 886 # while we traverse.
888 887 while headt or merges:
889 888 if merges:
890 889 n, found = merges.pop()
891 890 visit = [n]
892 891 else:
893 892 h = headt.pop()
894 893 visit = [h]
895 894 found = [h]
896 895 seen = {}
897 896 while visit:
898 897 n = visit.pop()
899 898 if n in seen:
900 899 continue
901 900 pp = chlog.parents(n)
902 901 tags = self.nodetags(n)
903 902 if tags:
904 903 for x in tags:
905 904 if x == 'tip':
906 905 continue
907 906 for f in found:
908 907 branches.setdefault(f, {})[n] = 1
909 908 branches.setdefault(n, {})[n] = 1
910 909 break
911 910 if n not in found:
912 911 found.append(n)
913 912 if branch in tags:
914 913 continue
915 914 seen[n] = 1
916 915 if pp[1] != nullid and n not in seenmerge:
917 916 merges.append((pp[1], [x for x in found]))
918 917 seenmerge[n] = 1
919 918 if pp[0] != nullid:
920 919 visit.append(pp[0])
921 920 # traverse the branches dict, eliminating branch tags from each
922 921 # head that are visible from another branch tag for that head.
923 922 out = {}
924 923 viscache = {}
925 924 for h in heads:
926 925 def visible(node):
927 926 if node in viscache:
928 927 return viscache[node]
929 928 ret = {}
930 929 visit = [node]
931 930 while visit:
932 931 x = visit.pop()
933 932 if x in viscache:
934 933 ret.update(viscache[x])
935 934 elif x not in ret:
936 935 ret[x] = 1
937 936 if x in branches:
938 937 visit[len(visit):] = branches[x].keys()
939 938 viscache[node] = ret
940 939 return ret
941 940 if h not in branches:
942 941 continue
943 942 # O(n^2), but somewhat limited. This only searches the
944 943 # tags visible from a specific head, not all the tags in the
945 944 # whole repo.
946 945 for b in branches[h]:
947 946 vis = False
948 947 for bb in branches[h].keys():
949 948 if b != bb:
950 949 if b in visible(bb):
951 950 vis = True
952 951 break
953 952 if not vis:
954 953 l = out.setdefault(h, [])
955 954 l[len(l):] = self.nodetags(b)
956 955 return out
957 956
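The result of branchlookup() is a dict mapping each head node to the branch tag names still visible from it after the elimination described in the comment above; for the example graph there, head e ends up with ['2.6.13'] only, since 2.6.12 is reachable from 2.6.13. A minimal consumer sketch (repo as in the earlier examples):

    from mercurial.node import short

    for head, names in repo.branchlookup().items():
        print short(head), ' '.join(names)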
958 957 def branches(self, nodes):
959 958 if not nodes:
960 959 nodes = [self.changelog.tip()]
961 960 b = []
962 961 for n in nodes:
963 962 t = n
964 963 while 1:
965 964 p = self.changelog.parents(n)
966 965 if p[1] != nullid or p[0] == nullid:
967 966 b.append((t, n, p[0], p[1]))
968 967 break
969 968 n = p[0]
970 969 return b
971 970
972 971 def between(self, pairs):
973 972 r = []
974 973
975 974 for top, bottom in pairs:
976 975 n, l, i = top, [], 0
977 976 f = 1
978 977
979 978 while n != bottom:
980 979 p = self.changelog.parents(n)[0]
981 980 if i == f:
982 981 l.append(n)
983 982 f = f * 2
984 983 n = p
985 984 i += 1
986 985
987 986 r.append(l)
988 987
989 988 return r
990 989
991 990 def findincoming(self, remote, base=None, heads=None, force=False):
992 991 """Return list of roots of the subsets of missing nodes from remote
993 992
994 993 If base dict is specified, assume that these nodes and their parents
995 994 exist on the remote side and that no child of a node of base exists
996 995 in both remote and self.
997 996 Furthermore, base will be updated to include the nodes that exist
998 997 in both self and remote but whose children do not exist in both.
999 998 If a list of heads is specified, return only nodes which are heads
1000 999 or ancestors of these heads.
1001 1000
1002 1001 All the ancestors of base are in self and in remote.
1003 1002 All the descendants of the list returned are missing in self.
1004 1003 (and so we know that the rest of the nodes are missing in remote, see
1005 1004 outgoing)
1006 1005 """
1007 1006 m = self.changelog.nodemap
1008 1007 search = []
1009 1008 fetch = {}
1010 1009 seen = {}
1011 1010 seenbranch = {}
1012 1011 if base == None:
1013 1012 base = {}
1014 1013
1015 1014 if not heads:
1016 1015 heads = remote.heads()
1017 1016
1018 1017 if self.changelog.tip() == nullid:
1019 1018 base[nullid] = 1
1020 1019 if heads != [nullid]:
1021 1020 return [nullid]
1022 1021 return []
1023 1022
1024 1023 # assume we're closer to the tip than the root
1025 1024 # and start by examining the heads
1026 1025 self.ui.status(_("searching for changes\n"))
1027 1026
1028 1027 unknown = []
1029 1028 for h in heads:
1030 1029 if h not in m:
1031 1030 unknown.append(h)
1032 1031 else:
1033 1032 base[h] = 1
1034 1033
1035 1034 if not unknown:
1036 1035 return []
1037 1036
1038 1037 req = dict.fromkeys(unknown)
1039 1038 reqcnt = 0
1040 1039
1041 1040 # search through remote branches
1042 1041 # a 'branch' here is a linear segment of history, with four parts:
1043 1042 # head, root, first parent, second parent
1044 1043 # (a branch always has two parents (or none) by definition)
1045 1044 unknown = remote.branches(unknown)
1046 1045 while unknown:
1047 1046 r = []
1048 1047 while unknown:
1049 1048 n = unknown.pop(0)
1050 1049 if n[0] in seen:
1051 1050 continue
1052 1051
1053 1052 self.ui.debug(_("examining %s:%s\n")
1054 1053 % (short(n[0]), short(n[1])))
1055 1054 if n[0] == nullid: # found the end of the branch
1056 1055 pass
1057 1056 elif n in seenbranch:
1058 1057 self.ui.debug(_("branch already found\n"))
1059 1058 continue
1060 1059 elif n[1] and n[1] in m: # do we know the base?
1061 1060 self.ui.debug(_("found incomplete branch %s:%s\n")
1062 1061 % (short(n[0]), short(n[1])))
1063 1062 search.append(n) # schedule branch range for scanning
1064 1063 seenbranch[n] = 1
1065 1064 else:
1066 1065 if n[1] not in seen and n[1] not in fetch:
1067 1066 if n[2] in m and n[3] in m:
1068 1067 self.ui.debug(_("found new changeset %s\n") %
1069 1068 short(n[1]))
1070 1069 fetch[n[1]] = 1 # earliest unknown
1071 1070 for p in n[2:4]:
1072 1071 if p in m:
1073 1072 base[p] = 1 # latest known
1074 1073
1075 1074 for p in n[2:4]:
1076 1075 if p not in req and p not in m:
1077 1076 r.append(p)
1078 1077 req[p] = 1
1079 1078 seen[n[0]] = 1
1080 1079
1081 1080 if r:
1082 1081 reqcnt += 1
1083 1082 self.ui.debug(_("request %d: %s\n") %
1084 1083 (reqcnt, " ".join(map(short, r))))
1085 1084 for p in range(0, len(r), 10):
1086 1085 for b in remote.branches(r[p:p+10]):
1087 1086 self.ui.debug(_("received %s:%s\n") %
1088 1087 (short(b[0]), short(b[1])))
1089 1088 unknown.append(b)
1090 1089
1091 1090 # do binary search on the branches we found
1092 1091 while search:
1093 1092 n = search.pop(0)
1094 1093 reqcnt += 1
1095 1094 l = remote.between([(n[0], n[1])])[0]
1096 1095 l.append(n[1])
1097 1096 p = n[0]
1098 1097 f = 1
1099 1098 for i in l:
1100 1099 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1101 1100 if i in m:
1102 1101 if f <= 2:
1103 1102 self.ui.debug(_("found new branch changeset %s\n") %
1104 1103 short(p))
1105 1104 fetch[p] = 1
1106 1105 base[i] = 1
1107 1106 else:
1108 1107 self.ui.debug(_("narrowed branch search to %s:%s\n")
1109 1108 % (short(p), short(i)))
1110 1109 search.append((p, i))
1111 1110 break
1112 1111 p, f = i, f * 2
1113 1112
1114 1113 # sanity check our fetch list
1115 1114 for f in fetch.keys():
1116 1115 if f in m:
1117 1116 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1118 1117
1119 1118 if base.keys() == [nullid]:
1120 1119 if force:
1121 1120 self.ui.warn(_("warning: repository is unrelated\n"))
1122 1121 else:
1123 1122 raise util.Abort(_("repository is unrelated"))
1124 1123
1125 1124 self.ui.note(_("found new changesets starting at ") +
1126 1125 " ".join([short(f) for f in fetch]) + "\n")
1127 1126
1128 1127 self.ui.debug(_("%d total queries\n") % reqcnt)
1129 1128
1130 1129 return fetch.keys()
1131 1130
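A usage sketch mirroring how pull() below drives findincoming(); the peer path is hypothetical. After the call, fetch holds the roots of the changesets this repository is missing, and base has been filled in with nodes common to both sides, as the docstring above describes.

    from mercurial import ui, hg

    u = ui.ui()
    repo = hg.repository(u, '.')
    other = hg.repository(u, '/path/to/peer')   # hypothetical peer repository
    base = {}
    fetch = repo.findincoming(other, base=base)
    if not fetch:
        print "nothing incoming"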
1132 1131 def findoutgoing(self, remote, base=None, heads=None, force=False):
1133 1132 """Return list of nodes that are roots of subsets not in remote
1134 1133
1135 1134 If base dict is specified, assume that these nodes and their parents
1136 1135 exist on the remote side.
1137 1136 If a list of heads is specified, return only nodes which are heads
1138 1137 or ancestors of these heads, and return a second element which
1139 1138 contains all remote heads which get new children.
1140 1139 """
1141 1140 if base == None:
1142 1141 base = {}
1143 1142 self.findincoming(remote, base, heads, force=force)
1144 1143
1145 1144 self.ui.debug(_("common changesets up to ")
1146 1145 + " ".join(map(short, base.keys())) + "\n")
1147 1146
1148 1147 remain = dict.fromkeys(self.changelog.nodemap)
1149 1148
1150 1149 # prune everything remote has from the tree
1151 1150 del remain[nullid]
1152 1151 remove = base.keys()
1153 1152 while remove:
1154 1153 n = remove.pop(0)
1155 1154 if n in remain:
1156 1155 del remain[n]
1157 1156 for p in self.changelog.parents(n):
1158 1157 remove.append(p)
1159 1158
1160 1159 # find every node whose parents have been pruned
1161 1160 subset = []
1162 1161 # find every remote head that will get new children
1163 1162 updated_heads = {}
1164 1163 for n in remain:
1165 1164 p1, p2 = self.changelog.parents(n)
1166 1165 if p1 not in remain and p2 not in remain:
1167 1166 subset.append(n)
1168 1167 if heads:
1169 1168 if p1 in heads:
1170 1169 updated_heads[p1] = True
1171 1170 if p2 in heads:
1172 1171 updated_heads[p2] = True
1173 1172
1174 1173 # this is the set of all roots we have to push
1175 1174 if heads:
1176 1175 return subset, updated_heads.keys()
1177 1176 else:
1178 1177 return subset
1179 1178
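The mirror-image sketch for findoutgoing(), matching what prepush() does below: the roots it returns feed straight into changegroup(). repo and other are the objects from the previous sketch.

    out = repo.findoutgoing(other)           # roots of what the peer is missing
    if out:
        cg = repo.changegroup(out, 'push')   # changegroup covering those roots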
1180 1179 def pull(self, remote, heads=None, force=False, lock=None):
1181 1180 mylock = False
1182 1181 if not lock:
1183 1182 lock = self.lock()
1184 1183 mylock = True
1185 1184
1186 1185 try:
1187 1186 fetch = self.findincoming(remote, force=force)
1188 1187 if fetch == [nullid]:
1189 1188 self.ui.status(_("requesting all changes\n"))
1190 1189
1191 1190 if not fetch:
1192 1191 self.ui.status(_("no changes found\n"))
1193 1192 return 0
1194 1193
1195 1194 if heads is None:
1196 1195 cg = remote.changegroup(fetch, 'pull')
1197 1196 else:
1198 1197 cg = remote.changegroupsubset(fetch, heads, 'pull')
1199 1198 return self.addchangegroup(cg, 'pull', remote.url())
1200 1199 finally:
1201 1200 if mylock:
1202 1201 lock.release()
1203 1202
1204 1203 def push(self, remote, force=False, revs=None):
1205 1204 # there are two ways to push to remote repo:
1206 1205 #
1207 1206 # addchangegroup assumes local user can lock remote
1208 1207 # repo (local filesystem, old ssh servers).
1209 1208 #
1210 1209 # unbundle assumes local user cannot lock remote repo (new ssh
1211 1210 # servers, http servers).
1212 1211
1213 1212 if remote.capable('unbundle'):
1214 1213 return self.push_unbundle(remote, force, revs)
1215 1214 return self.push_addchangegroup(remote, force, revs)
1216 1215
1217 1216 def prepush(self, remote, force, revs):
1218 1217 base = {}
1219 1218 remote_heads = remote.heads()
1220 1219 inc = self.findincoming(remote, base, remote_heads, force=force)
1221 1220 if not force and inc:
1222 1221 self.ui.warn(_("abort: unsynced remote changes!\n"))
1223 1222 self.ui.status(_("(did you forget to sync?"
1224 1223 " use push -f to force)\n"))
1225 1224 return None, 1
1226 1225
1227 1226 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1228 1227 if revs is not None:
1229 1228 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1230 1229 else:
1231 1230 bases, heads = update, self.changelog.heads()
1232 1231
1233 1232 if not bases:
1234 1233 self.ui.status(_("no changes found\n"))
1235 1234 return None, 1
1236 1235 elif not force:
1237 1236 # FIXME we don't properly detect creation of new heads
1238 1237 # in the push -r case, assume the user knows what he's doing
1239 1238 if not revs and len(remote_heads) < len(heads) \
1240 1239 and remote_heads != [nullid]:
1241 1240 self.ui.warn(_("abort: push creates new remote branches!\n"))
1242 1241 self.ui.status(_("(did you forget to merge?"
1243 1242 " use push -f to force)\n"))
1244 1243 return None, 1
1245 1244
1246 1245 if revs is None:
1247 1246 cg = self.changegroup(update, 'push')
1248 1247 else:
1249 1248 cg = self.changegroupsubset(update, revs, 'push')
1250 1249 return cg, remote_heads
1251 1250
1252 1251 def push_addchangegroup(self, remote, force, revs):
1253 1252 lock = remote.lock()
1254 1253
1255 1254 ret = self.prepush(remote, force, revs)
1256 1255 if ret[0] is not None:
1257 1256 cg, remote_heads = ret
1258 1257 return remote.addchangegroup(cg, 'push', self.url())
1259 1258 return ret[1]
1260 1259
1261 1260 def push_unbundle(self, remote, force, revs):
1262 1261 # local repo finds heads on server, finds out what revs it
1263 1262 # must push. once revs transferred, if server finds it has
1264 1263 # different heads (someone else won commit/push race), server
1265 1264 # aborts.
1266 1265
1267 1266 ret = self.prepush(remote, force, revs)
1268 1267 if ret[0] is not None:
1269 1268 cg, remote_heads = ret
1270 1269 if force: remote_heads = ['force']
1271 1270 return remote.unbundle(cg, remote_heads, 'push')
1272 1271 return ret[1]
1273 1272
1274 1273 def changegroupsubset(self, bases, heads, source):
1275 1274 """This function generates a changegroup consisting of all the nodes
1276 1275 that are descendants of any of the bases, and ancestors of any of
1277 1276 the heads.
1278 1277
1279 1278 It is fairly complex as determining which filenodes and which
1280 1279 manifest nodes need to be included for the changeset to be complete
1281 1280 is non-trivial.
1282 1281
1283 1282 Another wrinkle is doing the reverse, figuring out which changeset in
1284 1283 the changegroup a particular filenode or manifestnode belongs to."""
1285 1284
1286 1285 self.hook('preoutgoing', throw=True, source=source)
1287 1286
1288 1287 # Set up some initial variables
1289 1288 # Make it easy to refer to self.changelog
1290 1289 cl = self.changelog
1291 1290 # msng is short for missing - compute the list of changesets in this
1292 1291 # changegroup.
1293 1292 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1294 1293 # Some bases may turn out to be superfluous, and some heads may be
1295 1294 # too. nodesbetween will return the minimal set of bases and heads
1296 1295 # necessary to re-create the changegroup.
1297 1296
1298 1297 # Known heads are the list of heads that it is assumed the recipient
1299 1298 # of this changegroup will know about.
1300 1299 knownheads = {}
1301 1300 # We assume that all parents of bases are known heads.
1302 1301 for n in bases:
1303 1302 for p in cl.parents(n):
1304 1303 if p != nullid:
1305 1304 knownheads[p] = 1
1306 1305 knownheads = knownheads.keys()
1307 1306 if knownheads:
1308 1307 # Now that we know what heads are known, we can compute which
1309 1308 # changesets are known. The recipient must know about all
1310 1309 # changesets required to reach the known heads from the null
1311 1310 # changeset.
1312 1311 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1313 1312 junk = None
1314 1313 # Transform the list into an ersatz set.
1315 1314 has_cl_set = dict.fromkeys(has_cl_set)
1316 1315 else:
1317 1316 # If there were no known heads, the recipient cannot be assumed to
1318 1317 # know about any changesets.
1319 1318 has_cl_set = {}
1320 1319
1321 1320 # Make it easy to refer to self.manifest
1322 1321 mnfst = self.manifest
1323 1322 # We don't know which manifests are missing yet
1324 1323 msng_mnfst_set = {}
1325 1324 # Nor do we know which filenodes are missing.
1326 1325 msng_filenode_set = {}
1327 1326
1328 1327 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1329 1328 junk = None
1330 1329
1331 1330 # A changeset always belongs to itself, so the changenode lookup
1332 1331 # function for a changenode is identity.
1333 1332 def identity(x):
1334 1333 return x
1335 1334
1336 1335 # A function generating function. Sets up an environment for the
1337 1336 # inner function.
1338 1337 def cmp_by_rev_func(revlog):
1339 1338 # Compare two nodes by their revision number in the environment's
1340 1339 # revision history. Since the revision number both represents the
1341 1340 # most efficient order to read the nodes in, and represents a
1342 1341 # topological sorting of the nodes, this function is often useful.
1343 1342 def cmp_by_rev(a, b):
1344 1343 return cmp(revlog.rev(a), revlog.rev(b))
1345 1344 return cmp_by_rev
1346 1345
1347 1346 # If we determine that a particular file or manifest node must be a
1348 1347 # node that the recipient of the changegroup will already have, we can
1349 1348 # also assume the recipient will have all the parents. This function
1350 1349 # prunes them from the set of missing nodes.
1351 1350 def prune_parents(revlog, hasset, msngset):
1352 1351 haslst = hasset.keys()
1353 1352 haslst.sort(cmp_by_rev_func(revlog))
1354 1353 for node in haslst:
1355 1354 parentlst = [p for p in revlog.parents(node) if p != nullid]
1356 1355 while parentlst:
1357 1356 n = parentlst.pop()
1358 1357 if n not in hasset:
1359 1358 hasset[n] = 1
1360 1359 p = [p for p in revlog.parents(n) if p != nullid]
1361 1360 parentlst.extend(p)
1362 1361 for n in hasset:
1363 1362 msngset.pop(n, None)
1364 1363
1365 1364 # This is a function generating function used to set up an environment
1366 1365 # for the inner function to execute in.
1367 1366 def manifest_and_file_collector(changedfileset):
1368 1367 # This is an information gathering function that gathers
1369 1368 # information from each changeset node that goes out as part of
1370 1369 # the changegroup. The information gathered is a list of which
1371 1370 # manifest nodes are potentially required (the recipient may
1372 1371 # already have them) and total list of all files which were
1373 1372 # changed in any changeset in the changegroup.
1374 1373 #
1375 1374 # We also remember the first changenode we saw each manifest
1376 1375 # referenced by, so we can later determine which changenode 'owns'
1377 1376 # the manifest.
1378 1377 def collect_manifests_and_files(clnode):
1379 1378 c = cl.read(clnode)
1380 1379 for f in c[3]:
1381 1380 # This is to make sure we only have one instance of each
1382 1381 # filename string for each filename.
1383 1382 changedfileset.setdefault(f, f)
1384 1383 msng_mnfst_set.setdefault(c[0], clnode)
1385 1384 return collect_manifests_and_files
1386 1385
1387 1386 # Figure out which manifest nodes (of the ones we think might be part
1388 1387 # of the changegroup) the recipient must know about and remove them
1389 1388 # from the changegroup.
1390 1389 def prune_manifests():
1391 1390 has_mnfst_set = {}
1392 1391 for n in msng_mnfst_set:
1393 1392 # If a 'missing' manifest thinks it belongs to a changenode
1394 1393 # the recipient is assumed to have, obviously the recipient
1395 1394 # must have that manifest.
1396 1395 linknode = cl.node(mnfst.linkrev(n))
1397 1396 if linknode in has_cl_set:
1398 1397 has_mnfst_set[n] = 1
1399 1398 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1400 1399
1401 1400 # Use the information collected in collect_manifests_and_files to say
1402 1401 # which changenode any manifestnode belongs to.
1403 1402 def lookup_manifest_link(mnfstnode):
1404 1403 return msng_mnfst_set[mnfstnode]
1405 1404
1406 1405 # A function generating function that sets up the initial environment
1407 1406 # the inner function.
1408 1407 # for the inner function.
1409 1408 next_rev = [0]
1410 1409 # This gathers information from each manifestnode included in the
1411 1410 # changegroup about which filenodes the manifest node references
1412 1411 # so we can include those in the changegroup too.
1413 1412 #
1414 1413 # It also remembers which changenode each filenode belongs to. It
1415 1414 # does this by assuming that a filenode belongs to the changenode that
1416 1415 # the first manifest referencing it belongs to.
1417 1416 def collect_msng_filenodes(mnfstnode):
1418 1417 r = mnfst.rev(mnfstnode)
1419 1418 if r == next_rev[0]:
1420 1419 # If the last rev we looked at was the one just previous,
1421 1420 # we only need to see a diff.
1422 1421 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1423 1422 # For each line in the delta
1424 1423 for dline in delta.splitlines():
1425 1424 # get the filename and filenode for that line
1426 1425 f, fnode = dline.split('\0')
1427 1426 fnode = bin(fnode[:40])
1428 1427 f = changedfiles.get(f, None)
1429 1428 # And if the file is in the list of files we care
1430 1429 # about.
1431 1430 if f is not None:
1432 1431 # Get the changenode this manifest belongs to
1433 1432 clnode = msng_mnfst_set[mnfstnode]
1434 1433 # Create the set of filenodes for the file if
1435 1434 # there isn't one already.
1436 1435 ndset = msng_filenode_set.setdefault(f, {})
1437 1436 # And set the filenode's changelog node to the
1438 1437 # manifest's if it hasn't been set already.
1439 1438 ndset.setdefault(fnode, clnode)
1440 1439 else:
1441 1440 # Otherwise we need a full manifest.
1442 1441 m = mnfst.read(mnfstnode)
1443 1442 # For every file in we care about.
1444 1443 for f in changedfiles:
1445 1444 fnode = m.get(f, None)
1446 1445 # If it's in the manifest
1447 1446 if fnode is not None:
1448 1447 # See comments above.
1449 1448 clnode = msng_mnfst_set[mnfstnode]
1450 1449 ndset = msng_filenode_set.setdefault(f, {})
1451 1450 ndset.setdefault(fnode, clnode)
1452 1451 # Remember the revision we hope to see next.
1453 1452 next_rev[0] = r + 1
1454 1453 return collect_msng_filenodes
1455 1454
1456 1455 # We have a list of filenodes we think we need for a file; let's remove
1457 1456 # all those we know the recipient must have.
1458 1457 def prune_filenodes(f, filerevlog):
1459 1458 msngset = msng_filenode_set[f]
1460 1459 hasset = {}
1461 1460 # If a 'missing' filenode thinks it belongs to a changenode we
1462 1461 # assume the recipient must have, then the recipient must have
1463 1462 # that filenode.
1464 1463 for n in msngset:
1465 1464 clnode = cl.node(filerevlog.linkrev(n))
1466 1465 if clnode in has_cl_set:
1467 1466 hasset[n] = 1
1468 1467 prune_parents(filerevlog, hasset, msngset)
1469 1468
1470 1469 # A function generating function that sets up a context for the
1471 1470 # inner function.
1472 1471 def lookup_filenode_link_func(fname):
1473 1472 msngset = msng_filenode_set[fname]
1474 1473 # Lookup the changenode the filenode belongs to.
1475 1474 def lookup_filenode_link(fnode):
1476 1475 return msngset[fnode]
1477 1476 return lookup_filenode_link
1478 1477
1479 1478 # Now that we have all these utility functions to help out and
1480 1479 # logically divide up the task, generate the group.
1481 1480 def gengroup():
1482 1481 # The set of changed files starts empty.
1483 1482 changedfiles = {}
1484 1483 # Create a changenode group generator that will call our functions
1485 1484 # back to lookup the owning changenode and collect information.
1486 1485 group = cl.group(msng_cl_lst, identity,
1487 1486 manifest_and_file_collector(changedfiles))
1488 1487 for chnk in group:
1489 1488 yield chnk
1490 1489
1491 1490 # The list of manifests has been collected by the generator
1492 1491 # calling our functions back.
1493 1492 prune_manifests()
1494 1493 msng_mnfst_lst = msng_mnfst_set.keys()
1495 1494 # Sort the manifestnodes by revision number.
1496 1495 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1497 1496 # Create a generator for the manifestnodes that calls our lookup
1498 1497 # and data collection functions back.
1499 1498 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1500 1499 filenode_collector(changedfiles))
1501 1500 for chnk in group:
1502 1501 yield chnk
1503 1502
1504 1503 # These are no longer needed, dereference and toss the memory for
1505 1504 # them.
1506 1505 msng_mnfst_lst = None
1507 1506 msng_mnfst_set.clear()
1508 1507
1509 1508 changedfiles = changedfiles.keys()
1510 1509 changedfiles.sort()
1511 1510 # Go through all our files in order sorted by name.
1512 1511 for fname in changedfiles:
1513 1512 filerevlog = self.file(fname)
1514 1513 # Toss out the filenodes that the recipient isn't really
1515 1514 # missing.
1516 1515 if msng_filenode_set.has_key(fname):
1517 1516 prune_filenodes(fname, filerevlog)
1518 1517 msng_filenode_lst = msng_filenode_set[fname].keys()
1519 1518 else:
1520 1519 msng_filenode_lst = []
1521 1520 # If any filenodes are left, generate the group for them,
1522 1521 # otherwise don't bother.
1523 1522 if len(msng_filenode_lst) > 0:
1524 1523 yield changegroup.genchunk(fname)
1525 1524 # Sort the filenodes by their revision #
1526 1525 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1527 1526 # Create a group generator and only pass in a changenode
1528 1527 # lookup function as we need to collect no information
1529 1528 # from filenodes.
1530 1529 group = filerevlog.group(msng_filenode_lst,
1531 1530 lookup_filenode_link_func(fname))
1532 1531 for chnk in group:
1533 1532 yield chnk
1534 1533 if msng_filenode_set.has_key(fname):
1535 1534 # Don't need this anymore, toss it to free memory.
1536 1535 del msng_filenode_set[fname]
1537 1536 # Signal that no more groups are left.
1538 1537 yield changegroup.closechunk()
1539 1538
1540 1539 if msng_cl_lst:
1541 1540 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1542 1541
1543 1542 return util.chunkbuffer(gengroup())
1544 1543
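A brief sketch of how changegroupsubset() is consumed, as in prepush() and pull() above when specific revisions are requested. bases and heads stand for lists of binary changelog nodes and outfile for any writable file object; both are assumptions of this sketch. The return value is a util.chunkbuffer, i.e. a file-like stream.

    cg = repo.changegroupsubset(bases, heads, 'push')
    while True:
        chunk = cg.read(4096)     # stream the bundle data
        if not chunk:
            break
        outfile.write(chunk)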
1545 1544 def changegroup(self, basenodes, source):
1546 1545 """Generate a changegroup of all nodes that we have that a recipient
1547 1546 doesn't.
1548 1547
1549 1548 This is much easier than the previous function as we can assume that
1550 1549 the recipient has any changenode we aren't sending them."""
1551 1550
1552 1551 self.hook('preoutgoing', throw=True, source=source)
1553 1552
1554 1553 cl = self.changelog
1555 1554 nodes = cl.nodesbetween(basenodes, None)[0]
1556 1555 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1557 1556
1558 1557 def identity(x):
1559 1558 return x
1560 1559
1561 1560 def gennodelst(revlog):
1562 1561 for r in xrange(0, revlog.count()):
1563 1562 n = revlog.node(r)
1564 1563 if revlog.linkrev(n) in revset:
1565 1564 yield n
1566 1565
1567 1566 def changed_file_collector(changedfileset):
1568 1567 def collect_changed_files(clnode):
1569 1568 c = cl.read(clnode)
1570 1569 for fname in c[3]:
1571 1570 changedfileset[fname] = 1
1572 1571 return collect_changed_files
1573 1572
1574 1573 def lookuprevlink_func(revlog):
1575 1574 def lookuprevlink(n):
1576 1575 return cl.node(revlog.linkrev(n))
1577 1576 return lookuprevlink
1578 1577
1579 1578 def gengroup():
1580 1579 # construct a list of all changed files
1581 1580 changedfiles = {}
1582 1581
1583 1582 for chnk in cl.group(nodes, identity,
1584 1583 changed_file_collector(changedfiles)):
1585 1584 yield chnk
1586 1585 changedfiles = changedfiles.keys()
1587 1586 changedfiles.sort()
1588 1587
1589 1588 mnfst = self.manifest
1590 1589 nodeiter = gennodelst(mnfst)
1591 1590 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1592 1591 yield chnk
1593 1592
1594 1593 for fname in changedfiles:
1595 1594 filerevlog = self.file(fname)
1596 1595 nodeiter = gennodelst(filerevlog)
1597 1596 nodeiter = list(nodeiter)
1598 1597 if nodeiter:
1599 1598 yield changegroup.genchunk(fname)
1600 1599 lookup = lookuprevlink_func(filerevlog)
1601 1600 for chnk in filerevlog.group(nodeiter, lookup):
1602 1601 yield chnk
1603 1602
1604 1603 yield changegroup.closechunk()
1605 1604
1606 1605 if nodes:
1607 1606 self.hook('outgoing', node=hex(nodes[0]), source=source)
1608 1607
1609 1608 return util.chunkbuffer(gengroup())
1610 1609
1611 1610 def addchangegroup(self, source, srctype, url):
1612 1611 """add changegroup to repo.
1613 1612 returns number of heads modified or added + 1."""
1614 1613
1615 1614 def csmap(x):
1616 1615 self.ui.debug(_("add changeset %s\n") % short(x))
1617 1616 return cl.count()
1618 1617
1619 1618 def revmap(x):
1620 1619 return cl.rev(x)
1621 1620
1622 1621 if not source:
1623 1622 return 0
1624 1623
1625 1624 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1626 1625
1627 1626 changesets = files = revisions = 0
1628 1627
1629 1628 tr = self.transaction()
1630 1629
1631 1630 # write changelog data to temp files so concurrent readers will not see
1632 1631 # inconsistent view
1633 1632 cl = None
1634 1633 try:
1635 1634 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1636 1635
1637 1636 oldheads = len(cl.heads())
1638 1637
1639 1638 # pull off the changeset group
1640 1639 self.ui.status(_("adding changesets\n"))
1641 1640 cor = cl.count() - 1
1642 1641 chunkiter = changegroup.chunkiter(source)
1643 1642 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1644 1643 raise util.Abort(_("received changelog group is empty"))
1645 1644 cnr = cl.count() - 1
1646 1645 changesets = cnr - cor
1647 1646
1648 1647 # pull off the manifest group
1649 1648 self.ui.status(_("adding manifests\n"))
1650 1649 chunkiter = changegroup.chunkiter(source)
1651 1650 # no need to check for empty manifest group here:
1652 1651 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1653 1652 # no new manifest will be created and the manifest group will
1654 1653 # be empty during the pull
1655 1654 self.manifest.addgroup(chunkiter, revmap, tr)
1656 1655
1657 1656 # process the files
1658 1657 self.ui.status(_("adding file changes\n"))
1659 1658 while 1:
1660 1659 f = changegroup.getchunk(source)
1661 1660 if not f:
1662 1661 break
1663 1662 self.ui.debug(_("adding %s revisions\n") % f)
1664 1663 fl = self.file(f)
1665 1664 o = fl.count()
1666 1665 chunkiter = changegroup.chunkiter(source)
1667 1666 if fl.addgroup(chunkiter, revmap, tr) is None:
1668 1667 raise util.Abort(_("received file revlog group is empty"))
1669 1668 revisions += fl.count() - o
1670 1669 files += 1
1671 1670
1672 1671 cl.writedata()
1673 1672 finally:
1674 1673 if cl:
1675 1674 cl.cleanup()
1676 1675
1677 1676 # make changelog see real files again
1678 1677 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1679 1678 self.changelog.checkinlinesize(tr)
1680 1679
1681 1680 newheads = len(self.changelog.heads())
1682 1681 heads = ""
1683 1682 if oldheads and newheads != oldheads:
1684 1683 heads = _(" (%+d heads)") % (newheads - oldheads)
1685 1684
1686 1685 self.ui.status(_("added %d changesets"
1687 1686 " with %d changes to %d files%s\n")
1688 1687 % (changesets, revisions, files, heads))
1689 1688
1690 1689 if changesets > 0:
1691 1690 self.hook('pretxnchangegroup', throw=True,
1692 1691 node=hex(self.changelog.node(cor+1)), source=srctype,
1693 1692 url=url)
1694 1693
1695 1694 tr.close()
1696 1695
1697 1696 if changesets > 0:
1698 1697 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1699 1698 source=srctype, url=url)
1700 1699
1701 1700 for i in range(cor + 1, cnr + 1):
1702 1701 self.hook("incoming", node=hex(self.changelog.node(i)),
1703 1702 source=srctype, url=url)
1704 1703
1705 1704 return newheads - oldheads + 1
1706 1705
1707 1706
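For reference, a minimal Python 'incoming' hook receiving the keyword arguments addchangegroup() passes above (node, source, url); the module name is hypothetical and it would be enabled with an incoming.notify = python:... entry under [hooks].

    def hook(ui, repo, hooktype, node=None, source=None, url=None, **kwargs):
        ui.status("incoming %s from %s (source: %s)\n" % (node, url, source))
        return False      # false/None means success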
1708 1707 def stream_in(self, remote):
1709 1708 fp = remote.stream_out()
1710 1709 resp = int(fp.readline())
1711 1710 if resp != 0:
1712 1711 raise util.Abort(_('operation forbidden by server'))
1713 1712 self.ui.status(_('streaming all changes\n'))
1714 1713 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1715 1714 self.ui.status(_('%d files to transfer, %s of data\n') %
1716 1715 (total_files, util.bytecount(total_bytes)))
1717 1716 start = time.time()
1718 1717 for i in xrange(total_files):
1719 1718 name, size = fp.readline().split('\0', 1)
1720 1719 size = int(size)
1721 1720 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1722 1721 ofp = self.opener(name, 'w')
1723 1722 for chunk in util.filechunkiter(fp, limit=size):
1724 1723 ofp.write(chunk)
1725 1724 ofp.close()
1726 1725 elapsed = time.time() - start
1727 1726 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1728 1727 (util.bytecount(total_bytes), elapsed,
1729 1728 util.bytecount(total_bytes / elapsed)))
1730 1729 self.reload()
1731 1730 return len(self.heads()) + 1
1732 1731
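A standalone sketch of the stream layout stream_in() consumes, derived purely from the parsing code above rather than from a separate protocol spec: a status line, a "total_files total_bytes" line, then for each file a "name\0size" header followed by size raw bytes.

    def read_stream(fp):
        if int(fp.readline()):
            raise IOError('operation forbidden by server')
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for dummy in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            yield name, fp.read(int(size))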
1733 1732 def clone(self, remote, heads=[], stream=False):
1734 1733 '''clone remote repository.
1735 1734
1736 1735 keyword arguments:
1737 1736 heads: list of revs to clone (forces use of pull)
1738 1737 stream: use streaming clone if possible'''
1739 1738
1740 1739 # now, all clients that can request uncompressed clones can
1741 1740 # read repo formats supported by all servers that can serve
1742 1741 # them.
1743 1742
1744 1743 # if revlog format changes, client will have to check version
1745 1744 # and format flags on "stream" capability, and use
1746 1745 # uncompressed only if compatible.
1747 1746
1748 1747 if stream and not heads and remote.capable('stream'):
1749 1748 return self.stream_in(remote)
1750 1749 return self.pull(remote, heads)
1751 1750
1752 1751 # used to avoid circular references so destructors work
1753 1752 def aftertrans(base):
1754 1753 p = base
1755 1754 def a():
1756 1755 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1757 1756 util.rename(os.path.join(p, "journal.dirstate"),
1758 1757 os.path.join(p, "undo.dirstate"))
1759 1758 return a
1760 1759
1761 1760 def instance(ui, path, create):
1762 1761 return localrepository(ui, util.drop_scheme('file', path), create)
1763 1762
1764 1763 def islocal(path):
1765 1764 return True
@@ -1,188 +1,214 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "array bisect struct")
12 12
13 class manifestdict(dict):
14 def __init__(self, mapping={}):
15 dict.__init__(self, mapping)
16 def __getitem__(self, f):
17 return self.node(f)
18 def get(self, f, default=None):
19 try:
20 return dict.__getitem__(self, f)[:20]
21 except KeyError:
22 return default
23 def __setitem__(self, f, node):
24 fl = self.flags(f)
25 dict.__setitem__(self, f, node + fl)
26 def node(self, f):
27 return dict.__getitem__(self, f)[:20]
28 def flags(self, f):
29 return dict.get(self, f, "")[20:]
30 def execf(self, f):
31 "test for executable in manifest flags"
32 return "x" in self.flags(f)
33 def linkf(self, f):
34 "test for symlink in manifest flags"
35 return "l" in self.flags(f)
36 def rawset(self, f, node, flags):
37 dict.__setitem__(self, f, node + flags)
38 def set(self, f, execf=False, linkf=False):
39 n = dict.get(self, f, nullid)[:20]
40 fl = ""
41 if execf: fl = "x"
42 if linkf: fl = "l"
43 dict.__setitem__(self, f, n + fl)
44 def copy(self):
45 return manifestdict(dict.copy(self))
46
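Note: manifestdict stores, for each filename, the 20-byte node concatenated with an optional one-character flag ('x' executable, 'l' symlink), and node()/flags()/execf()/linkf() simply slice that value apart. A standalone sketch of the same packing convention (packeddict and fakenode are illustrative, not the class above):

    class packeddict(dict):
        # same packing convention: dict value = 20-byte node + optional flag char
        def node(self, f):
            return dict.__getitem__(self, f)[:20]
        def flags(self, f):
            return dict.get(self, f, "")[20:]
        def set(self, f, node, execf=False, linkf=False):
            fl = "x" if execf else ("l" if linkf else "")
            dict.__setitem__(self, f, node + fl)

    fakenode = "\x01" * 20            # stand-in for a real 20-byte nodeid
    m = packeddict()
    m.set("bin/hg", fakenode, execf=True)
    m.set("README", fakenode)
    print(len(m.node("bin/hg")), repr(m.flags("bin/hg")), repr(m.flags("README")))
    # 20 'x' ''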
13 47 class manifest(revlog):
14 48 def __init__(self, opener, defversion=REVLOGV0):
15 49 self.mapcache = None
16 50 self.listcache = None
17 51 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
18 52 defversion)
19 53
20 54 def read(self, node):
21 if node == nullid: return {} # don't upset local cache
55 if node == nullid: return manifestdict() # don't upset local cache
22 56 if self.mapcache and self.mapcache[0] == node:
23 57 return self.mapcache[1]
24 58 text = self.revision(node)
25 map = {}
26 flag = {}
27 59 self.listcache = array.array('c', text)
28 60 lines = text.splitlines(1)
61 mapping = manifestdict()
29 62 for l in lines:
30 63 (f, n) = l.split('\0')
31 map[f] = bin(n[:40])
32 flag[f] = (n[40:-1] == "x")
33 self.mapcache = (node, map, flag)
34 return map
64 mapping.rawset(f, bin(n[:40]), n[40:-1])
65 self.mapcache = (node, mapping)
66 return mapping
35 67
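Note: a manifest revision is plain text, one record per file of the form '<name>\0<40 hex chars of node><flags>\n'; read() above splits on the NUL and slices the 40-character hex node off the front, leaving any flag characters before the newline. A standalone parsing sketch, with binascii.unhexlify standing in for revlog's bin():

    from binascii import unhexlify   # stand-in for revlog's bin()

    def parse_manifest_text(text):
        entries = {}
        for line in text.splitlines(True):       # keepends, as read() does above
            f, n = line.split('\0')
            # 40 hex chars of nodeid, then any flag chars, then the newline
            entries[f] = (unhexlify(n[:40]), n[40:-1])
        return entries

    sample = "a.txt\0" + "ab" * 20 + "\n" + "run.sh\0" + "cd" * 20 + "x\n"
    for name, (node, flags) in sorted(parse_manifest_text(sample).items()):
        print(name, len(node), repr(flags))      # a.txt 20 ''   run.sh 20 'x'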
36 68 def readflags(self, node):
37 if node == nullid: return {} # don't upset local cache
38 if not self.mapcache or self.mapcache[0] != node:
39 self.read(node)
40 return self.mapcache[2]
69 return self.read(node)
41 70
42 71 def diff(self, a, b):
43 72 return mdiff.textdiff(str(a), str(b))
44 73
45 74 def _search(self, m, s, lo=0, hi=None):
46 75 '''return a tuple (start, end) that says where to find s within m.
47 76
48 77 If the string is found m[start:end] are the line containing
49 78 that string. If start == end the string was not found and
50 79 they indicate the proper sorted insertion point. This was
51 80 taken from bisect_left, and modified to find line start/end as
52 81 it goes along.
53 82
54 83 m should be a buffer or a string
55 84 s is a string'''
56 85 def advance(i, c):
57 86 while i < lenm and m[i] != c:
58 87 i += 1
59 88 return i
60 89 lenm = len(m)
61 90 if not hi:
62 91 hi = lenm
63 92 while lo < hi:
64 93 mid = (lo + hi) // 2
65 94 start = mid
66 95 while start > 0 and m[start-1] != '\n':
67 96 start -= 1
68 97 end = advance(start, '\0')
69 98 if m[start:end] < s:
70 99 # we know that after the null there are 40 bytes of sha1
71 100 # this translates to the bisect lo = mid + 1
72 101 lo = advance(end + 40, '\n') + 1
73 102 else:
74 103 # this translates to the bisect hi = mid
75 104 hi = start
76 105 end = advance(lo, '\0')
77 106 found = m[lo:end]
78 107 if cmp(s, found) == 0:
79 108 # we know that after the null there are 40 bytes of sha1
80 109 end = advance(end + 40, '\n')
81 110 return (lo, end+1)
82 111 else:
83 112 return (lo, lo)
84 113
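Note: _search is bisect_left adapted to a flat text buffer: each probe walks back to the previous newline to find the start of a record, compares the name up to the NUL, and uses the fixed 40-byte hex node to skip cheaply to the end of the line. A simplified standalone version under the same record layout (search_sorted_records and the sample data are illustrative only):

    def search_sorted_records(m, s):
        # m is a sorted blob of "<name>\0<40 hex>\n" records, s is a name;
        # returns (start, end) of the matching record, or (pos, pos) at the
        # insertion point when the name is absent
        lo, hi = 0, len(m)
        while lo < hi:
            mid = (lo + hi) // 2
            start = m.rfind('\n', 0, mid) + 1     # back up to the record start
            end = m.index('\0', start)            # the name ends at the NUL
            if m[start:end] < s:
                lo = m.index('\n', end + 41) + 1  # skip the 40 hex chars cheaply
            else:
                hi = start
        if lo < len(m):
            end = m.index('\0', lo)
            if m[lo:end] == s:
                return lo, m.index('\n', end + 41) + 1
        return lo, lo

    records = "".join("%s\0%s\n" % (name, "0" * 40)
                      for name in ["alpha", "beta", "gamma"])
    s, e = search_sorted_records(records, "beta")
    print(records[s:e].split('\0')[0])              # beta
    print(search_sorted_records(records, "delta"))  # (93, 93): insertion point before 'gamma'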
85 114 def find(self, node, f):
86 115 '''look up entry for a single file efficiently.
87 116 return (node, flag) pair if found, (None, None) if not.'''
88 117 if self.mapcache and node == self.mapcache[0]:
89 return self.mapcache[1].get(f), self.mapcache[2].get(f)
118 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
90 119 text = self.revision(node)
91 120 start, end = self._search(text, f)
92 121 if start == end:
93 122 return None, None
94 123 l = text[start:end]
95 124 f, n = l.split('\0')
96 125 return bin(n[:40]), n[40:-1] == 'x'
97 126
98 127 def add(self, map, flags, transaction, link, p1=None, p2=None,
99 128 changed=None):
100 129 # apply the changes collected during the bisect loop to our addlist
101 130 # return a delta suitable for addrevision
102 131 def addlistdelta(addlist, x):
103 132 # start from the bottom up
104 133 # so changes to the offsets don't mess things up.
105 134 i = len(x)
106 135 while i > 0:
107 136 i -= 1
108 137 start = x[i][0]
109 138 end = x[i][1]
110 139 if x[i][2]:
111 140 addlist[start:end] = array.array('c', x[i][2])
112 141 else:
113 142 del addlist[start:end]
114 143 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
115 144 for d in x ])
116 145
117 146 # if we're using the listcache, make sure it is valid and
118 147 # parented by the same node we're diffing against
119 148 if not changed or not self.listcache or not p1 or \
120 149 self.mapcache[0] != p1:
121 150 files = map.keys()
122 151 files.sort()
123 152
124 153 # if this is changed to support newlines in filenames,
125 154 # be sure to check the templates/ dir again (especially *-raw.tmpl)
126 text = ["%s\000%s%s\n" %
127 (f, hex(map[f]), flags[f] and "x" or '')
128 for f in files]
155 text = ["%s\000%s%s\n" % (f, hex(map[f]), flags.flags(f)) for f in files]
129 156 self.listcache = array.array('c', "".join(text))
130 157 cachedelta = None
131 158 else:
132 159 addlist = self.listcache
133 160
134 161 # combine the changed lists into one list for sorting
135 162 work = [[x, 0] for x in changed[0]]
136 163 work[len(work):] = [[x, 1] for x in changed[1]]
137 164 work.sort()
138 165
139 166 delta = []
140 167 dstart = None
141 168 dend = None
142 169 dline = [""]
143 170 start = 0
144 171 # zero copy representation of addlist as a buffer
145 172 addbuf = buffer(addlist)
146 173
147 174 # start with a readonly loop that finds the offset of
148 175 # each line and creates the deltas
149 176 for w in work:
150 177 f = w[0]
151 178 # bs will either be the index of the item or the insert point
152 179 start, end = self._search(addbuf, f, start)
153 180 if w[1] == 0:
154 l = "%s\000%s%s\n" % (f, hex(map[f]),
155 flags[f] and "x" or '')
181 l = "%s\000%s%s\n" % (f, hex(map[f]), flags.flags(f))
156 182 else:
157 183 l = ""
158 184 if start == end and w[1] == 1:
159 185 # item we want to delete was not found, error out
160 186 raise AssertionError(
161 187 _("failed to remove %s from manifest\n") % f)
162 188 if dstart != None and dstart <= start and dend >= start:
163 189 if dend < end:
164 190 dend = end
165 191 if l:
166 192 dline.append(l)
167 193 else:
168 194 if dstart != None:
169 195 delta.append([dstart, dend, "".join(dline)])
170 196 dstart = start
171 197 dend = end
172 198 dline = [l]
173 199
174 200 if dstart != None:
175 201 delta.append([dstart, dend, "".join(dline)])
176 202 # apply the delta to the addlist, and get a delta for addrevision
177 203 cachedelta = addlistdelta(addlist, delta)
178 204
179 205 # the delta is only valid if we've been processing the tip revision
180 206 if self.mapcache[0] != self.tip():
181 207 cachedelta = None
182 208 self.listcache = addlist
183 209
184 210 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
185 211 p2, cachedelta)
186 self.mapcache = (n, map, flags)
212 self.mapcache = (n, map)
187 213
188 214 return n
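Note: addlistdelta above applies (start, end, replacement) patches from the bottom up so earlier offsets stay valid, and serializes the same patches as a delta: for each patch a big-endian '>lll' header of start, end and replacement length, followed by the replacement bytes. A hedged sketch of that encoding plus a matching decoder (apply_delta is a simplified stand-in for Mercurial's patch code, not its actual implementation):

    import struct

    def encode_delta(patches):
        # patches: sorted, non-overlapping (start, end, data) spans against the old text
        return b"".join(struct.pack(">lll", s, e, len(d)) + d
                        for s, e, d in patches)

    def apply_delta(text, delta):
        out, last, pos = [], 0, 0
        while pos < len(delta):
            s, e, l = struct.unpack(">lll", delta[pos:pos + 12])
            data = delta[pos + 12:pos + 12 + l]
            out.append(text[last:s])
            out.append(data)
            last, pos = e, pos + 12 + l
        out.append(text[last:])
        return b"".join(out)

    old = b"a\x00hash1\nb\x00hash2\n"
    # replace the record for "b" (bytes 8..16) with a new one
    delta = encode_delta([(8, 16, b"b\x00hash3\n")])
    print(apply_delta(old, delta))      # a\x00hash1\nb\x00hash3\n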
@@ -1,337 +1,337 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "util os tempfile")
12 12
13 13 def merge3(repo, fn, my, other, p1, p2):
14 14 """perform a 3-way merge in the working directory"""
15 15
16 16 def temp(prefix, node):
17 17 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 18 (fd, name) = tempfile.mkstemp(prefix=pre)
19 19 f = os.fdopen(fd, "wb")
20 20 repo.wwrite(fn, fl.read(node), f)
21 21 f.close()
22 22 return name
23 23
24 24 fl = repo.file(fn)
25 25 base = fl.ancestor(my, other)
26 26 a = repo.wjoin(fn)
27 27 b = temp("base", base)
28 28 c = temp("other", other)
29 29
30 30 repo.ui.note(_("resolving %s\n") % fn)
31 31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 32 (fn, short(my), short(other), short(base)))
33 33
34 34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 35 or "hgmerge")
36 36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 37 environ={'HG_FILE': fn,
38 38 'HG_MY_NODE': p1,
39 39 'HG_OTHER_NODE': p2,
40 40 'HG_FILE_MY_NODE': hex(my),
41 41 'HG_FILE_OTHER_NODE': hex(other),
42 42 'HG_FILE_BASE_NODE': hex(base)})
43 43 if r:
44 44 repo.ui.warn(_("merging %s failed!\n") % fn)
45 45
46 46 os.unlink(b)
47 47 os.unlink(c)
48 48 return r
49 49
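Note: merge3 above writes the base and other revisions to temporary files and hands all three paths to an external merge command (HGMERGE, the ui.merge setting, or hgmerge), passing context in HG_* environment variables; a non-zero exit status marks the file unresolved. A rough modern sketch of the same pattern using subprocess, with 'diff3 -m' as an example tool choice and only HG_FILE shown:

    import os, subprocess, tempfile

    def write_temp(suffix, data):
        fd, name = tempfile.mkstemp(suffix=suffix)
        f = os.fdopen(fd, 'wb')
        f.write(data)
        f.close()
        return name

    def run_merge_tool(local_path, base_text, other_text, tool='diff3'):
        # write base/other versions to temp files and hand all three paths to
        # an external tool; 'diff3 -m' is only an example stand-in for hgmerge
        b = write_temp('.base', base_text)
        c = write_temp('.other', other_text)
        try:
            env = dict(os.environ, HG_FILE=os.path.basename(local_path))
            # diff3 -m prints the merged text and exits non-zero on conflicts,
            # which is what the caller treats as "unresolved"
            return subprocess.call([tool, '-m', local_path, b, c], env=env)
        finally:
            os.unlink(b)
            os.unlink(c)

    # usage (requires diff3 on PATH):
    #   local = write_temp('.local', b'local\n')
    #   print(run_merge_tool(local, b'base\n', b'other\n'))
    #   os.unlink(local)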
50 50 def update(repo, node, branchmerge=False, force=False, partial=None,
51 51 wlock=None, show_stats=True, remind=True):
52 52
53 53 overwrite = force and not branchmerge
54 54 forcemerge = force and branchmerge
55 55
56 56 if not wlock:
57 57 wlock = repo.wlock()
58 58
59 59 ### check phase
60 60
61 61 pl = repo.dirstate.parents()
62 62 if not overwrite and pl[1] != nullid:
63 63 raise util.Abort(_("outstanding uncommitted merges"))
64 64
65 65 p1, p2 = pl[0], node
66 66 pa = repo.changelog.ancestor(p1, p2)
67 67
68 68 # is there a linear path from p1 to p2?
69 69 linear_path = (pa == p1 or pa == p2)
70 70 if branchmerge and linear_path:
71 71 raise util.Abort(_("there is nothing to merge, just use "
72 72 "'hg update' or look at 'hg heads'"))
73 73
74 74 if not overwrite and not linear_path and not branchmerge:
75 75 raise util.Abort(_("update spans branches, use 'hg merge' "
76 76 "or 'hg update -C' to lose changes"))
77 77
78 78 modified, added, removed, deleted, unknown = repo.changes()
79 79 if branchmerge and not forcemerge:
80 80 if modified or added or removed:
81 81 raise util.Abort(_("outstanding uncommitted changes"))
82 82
83 83 m1n = repo.changelog.read(p1)[0]
84 84 m2n = repo.changelog.read(p2)[0]
85 85 man = repo.manifest.ancestor(m1n, m2n)
86 86 m1 = repo.manifest.read(m1n)
87 87 mf1 = repo.manifest.readflags(m1n)
88 88 m2 = repo.manifest.read(m2n).copy()
89 89 mf2 = repo.manifest.readflags(m2n)
90 90 ma = repo.manifest.read(man)
91 91 mfa = repo.manifest.readflags(man)
92 92
93 93 if not forcemerge and not overwrite:
94 94 for f in unknown:
95 95 if f in m2:
96 96 t1 = repo.wread(f)
97 97 t2 = repo.file(f).read(m2[f])
98 98 if cmp(t1, t2) != 0:
99 99 raise util.Abort(_("'%s' already exists in the working"
100 100 " dir and differs from remote") % f)
101 101
102 102 # resolve the manifest to determine which files
103 103 # we care about merging
104 104 repo.ui.note(_("resolving manifests\n"))
105 105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
106 106 (overwrite, branchmerge, partial and True or False, linear_path))
107 107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
108 108 (short(man), short(m1n), short(m2n)))
109 109
110 110 merge = {}
111 111 get = {}
112 112 remove = []
113 113
114 114 # construct a working dir manifest
115 115 mw = m1.copy()
116 116 mfw = mf1.copy()
117 117 umap = dict.fromkeys(unknown)
118 118
119 119 for f in added + modified + unknown:
120 120 mw[f] = ""
121 mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False))
121 mfw.set(f, util.is_exec(repo.wjoin(f), mfw.execf(f)))
122 122
123 123 for f in deleted + removed:
124 124 if f in mw:
125 125 del mw[f]
126 126
127 127 # If we're jumping between revisions (as opposed to merging),
128 128 # and if neither the working directory nor the target rev has
129 129 # the file, then we need to remove it from the dirstate, to
130 130 # prevent the dirstate from listing the file when it is no
131 131 # longer in the manifest.
132 132 if not partial and linear_path and f not in m2:
133 133 repo.dirstate.forget((f,))
134 134
135 135 # Compare manifests
136 136 for f, n in mw.iteritems():
137 137 if partial and not partial(f):
138 138 continue
139 139 if f in m2:
140 140 s = 0
141 141
142 142 # is the wfile new since m1, and match m2?
143 143 if f not in m1:
144 144 t1 = repo.wread(f)
145 145 t2 = repo.file(f).read(m2[f])
146 146 if cmp(t1, t2) == 0:
147 147 n = m2[f]
148 148 del t1, t2
149 149
150 150 # are files different?
151 151 if n != m2[f]:
152 152 a = ma.get(f, nullid)
153 153 # are both different from the ancestor?
154 154 if n != a and m2[f] != a:
155 155 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
156 156 # merge executable bits
157 157 # "if we changed or they changed, change in merge"
158 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
158 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
159 159 mode = ((a^b) | (a^c)) ^ a
160 160 merge[f] = (m1.get(f, nullid), m2[f], mode)
161 161 s = 1
162 162 # are we clobbering?
163 163 # is remote's version newer?
164 164 # or are we going back in time?
165 165 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
166 166 repo.ui.debug(_(" remote %s is newer, get\n") % f)
167 167 get[f] = m2[f]
168 168 s = 1
169 169 elif f in umap or f in added:
170 170 # this unknown file is the same as the checkout
171 171 # we need to reset the dirstate if the file was added
172 172 get[f] = m2[f]
173 173
174 if not s and mfw[f] != mf2[f]:
174 if not s and mfw.execf(f) != mf2.execf(f):
175 175 if overwrite:
176 176 repo.ui.debug(_(" updating permissions for %s\n") % f)
177 util.set_exec(repo.wjoin(f), mf2[f])
177 util.set_exec(repo.wjoin(f), mf2.execf(f))
178 178 else:
179 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
179 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
180 180 mode = ((a^b) | (a^c)) ^ a
181 181 if mode != b:
182 182 repo.ui.debug(_(" updating permissions for %s\n")
183 183 % f)
184 184 util.set_exec(repo.wjoin(f), mode)
185 185 del m2[f]
186 186 elif f in ma:
187 187 if n != ma[f]:
188 188 r = _("d")
189 189 if not overwrite and (linear_path or branchmerge):
190 190 r = repo.ui.prompt(
191 191 (_(" local changed %s which remote deleted\n") % f) +
192 192 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
193 193 if r == _("d"):
194 194 remove.append(f)
195 195 else:
196 196 repo.ui.debug(_("other deleted %s\n") % f)
197 197 remove.append(f) # other deleted it
198 198 else:
199 199 # file is created on branch or in working directory
200 200 if overwrite and f not in umap:
201 201 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
202 202 remove.append(f)
203 203 elif n == m1.get(f, nullid): # same as parent
204 204 if p2 == pa: # going backwards?
205 205 repo.ui.debug(_("remote deleted %s\n") % f)
206 206 remove.append(f)
207 207 else:
208 208 repo.ui.debug(_("local modified %s, keeping\n") % f)
209 209 else:
210 210 repo.ui.debug(_("working dir created %s, keeping\n") % f)
211 211
212 212 for f, n in m2.iteritems():
213 213 if partial and not partial(f):
214 214 continue
215 215 if f[0] == "/":
216 216 continue
217 217 if f in ma and n != ma[f]:
218 218 r = _("k")
219 219 if not overwrite and (linear_path or branchmerge):
220 220 r = repo.ui.prompt(
221 221 (_("remote changed %s which local deleted\n") % f) +
222 222 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
223 223 if r == _("k"):
224 224 get[f] = n
225 225 elif f not in ma:
226 226 repo.ui.debug(_("remote created %s\n") % f)
227 227 get[f] = n
228 228 else:
229 229 if overwrite or p2 == pa: # going backwards?
230 230 repo.ui.debug(_("local deleted %s, recreating\n") % f)
231 231 get[f] = n
232 232 else:
233 233 repo.ui.debug(_("local deleted %s\n") % f)
234 234
235 235 del mw, m1, m2, ma
236 236
237 237 if overwrite:
238 238 for f in merge:
239 239 get[f] = merge[f][1]
240 240 merge = {}
241 241
242 242 if linear_path or overwrite:
243 243 # we don't need to do any magic, just jump to the new rev
244 244 p1, p2 = p2, nullid
245 245
246 246 xp1 = hex(p1)
247 247 xp2 = hex(p2)
248 248 if p2 == nullid: xxp2 = ''
249 249 else: xxp2 = xp2
250 250
251 251 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
252 252
253 253 # get the files we don't need to change
254 254 files = get.keys()
255 255 files.sort()
256 256 for f in files:
257 257 if f[0] == "/":
258 258 continue
259 259 repo.ui.note(_("getting %s\n") % f)
260 260 t = repo.file(f).read(get[f])
261 261 repo.wwrite(f, t)
262 util.set_exec(repo.wjoin(f), mf2[f])
262 util.set_exec(repo.wjoin(f), mf2.execf(f))
263 263 if not partial:
264 264 if branchmerge:
265 265 repo.dirstate.update([f], 'n', st_mtime=-1)
266 266 else:
267 267 repo.dirstate.update([f], 'n')
268 268
269 269 # merge the tricky bits
270 270 unresolved = []
271 271 files = merge.keys()
272 272 files.sort()
273 273 for f in files:
274 274 repo.ui.status(_("merging %s\n") % f)
275 275 my, other, flag = merge[f]
276 276 ret = merge3(repo, f, my, other, xp1, xp2)
277 277 if ret:
278 278 unresolved.append(f)
279 279 util.set_exec(repo.wjoin(f), flag)
280 280 if not partial:
281 281 if branchmerge:
282 282 # We've done a branch merge, mark this file as merged
283 283 # so that we properly record the merger later
284 284 repo.dirstate.update([f], 'm')
285 285 else:
286 286 # We've update-merged a locally modified file, so
287 287 # we set the dirstate to emulate a normal checkout
288 288 # of that file some time in the past. Thus our
289 289 # merge will appear as a normal local file
290 290 # modification.
291 291 f_len = len(repo.file(f).read(other))
292 292 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
293 293
294 294 remove.sort()
295 295 for f in remove:
296 296 repo.ui.note(_("removing %s\n") % f)
297 297 util.audit_path(f)
298 298 try:
299 299 util.unlink(repo.wjoin(f))
300 300 except OSError, inst:
301 301 if inst.errno != errno.ENOENT:
302 302 repo.ui.warn(_("update failed to remove %s: %s!\n") %
303 303 (f, inst.strerror))
304 304 if not partial:
305 305 if branchmerge:
306 306 repo.dirstate.update(remove, 'r')
307 307 else:
308 308 repo.dirstate.forget(remove)
309 309
310 310 if not partial:
311 311 repo.dirstate.setparents(p1, p2)
312 312
313 313 if show_stats:
314 314 stats = ((len(get), _("updated")),
315 315 (len(merge) - len(unresolved), _("merged")),
316 316 (len(remove), _("removed")),
317 317 (len(unresolved), _("unresolved")))
318 318 note = ", ".join([_("%d files %s") % s for s in stats])
319 319 repo.ui.status("%s\n" % note)
320 320 if not partial:
321 321 if branchmerge:
322 322 if unresolved:
323 323 repo.ui.status(_("There are unresolved merges,"
324 324 " you can redo the full merge using:\n"
325 325 " hg update -C %s\n"
326 326 " hg merge %s\n"
327 327 % (repo.changelog.rev(p1),
328 328 repo.changelog.rev(p2))))
329 329 elif remind:
330 330 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
331 331 elif unresolved:
332 332 repo.ui.status(_("There are unresolved merges with"
333 333 " locally modified files.\n"))
334 334
335 335 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
336 336 return len(unresolved)
337 337
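Note: the permission-merge expression mode = ((a^b) | (a^c)) ^ a used twice in update() implements the comment "if we changed or they changed, change in merge": start from the ancestor's bit a and flip it whenever either side differs from a. An exhaustive check of that rule:

    # exhaustive check of the exec-bit merge rule used in update() above
    for a in (0, 1):
        for b in (0, 1):               # working-dir / local bit
            for c in (0, 1):           # remote bit
                mode = ((a ^ b) | (a ^ c)) ^ a
                expected = b if b != a else c   # "if we changed or they changed"
                assert mode == expected
                print(a, b, c, '->', mode)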