localrepo: add branchtip() method for faster single-branch lookups...
Brodie Rao
r16719:e7bf09ac default
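The localrepo.py hunk that actually adds branchtip() is not included in this changeset view, so the following is only a rough sketch inferred from the call sites below: they consult repo.branchmap() for branch membership and catch error.RepoLookupError when a branch is unknown. The helper's body, error wording, and the choice of head are assumptions, not the committed implementation; the point of the change is that branchtags() had to compute a name-to-tip mapping for every branch, whereas a branchtip() method can answer for a single branch.

    from mercurial import error
    from mercurial.i18n import _

    def branchtip(repo, branch):
        '''Sketch: return the tip node of `branch`, or raise RepoLookupError.'''
        heads = repo.branchmap().get(branch)   # branch name -> list of head nodes
        if not heads:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
        # Take the last head as the tip for simplicity; the real method may
        # prefer an open (non-closed) head, as branchtags() did.
        return heads[-1]

The fetch.py and bookmarks.py hunks below switch from branchtags().get(branch) / branchtags()[curbranch] to this call, trading a None check or KeyError for a RepoLookupError handler; the commands.py hunks only need branch-name membership, so they move to repo.branchmap() directly.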
@@ -1,153 +1,156 @@
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''pull, update and merge in one command (DEPRECATED)'''
9 9
10 10 from mercurial.i18n import _
11 11 from mercurial.node import nullid, short
12 12 from mercurial import commands, cmdutil, hg, util, error
13 13 from mercurial.lock import release
14 14
15 15 def fetch(ui, repo, source='default', **opts):
16 16 '''pull changes from a remote repository, merge new changes if needed.
17 17
18 18 This finds all changes from the repository at the specified path
19 19 or URL and adds them to the local repository.
20 20
21 21 If the pulled changes add a new branch head, the head is
22 22 automatically merged, and the result of the merge is committed.
23 23 Otherwise, the working directory is updated to include the new
24 24 changes.
25 25
26 26 When a merge is needed, the working directory is first updated to
27 27 the newly pulled changes. Local changes are then merged into the
28 28 pulled changes. To switch the merge order, use --switch-parent.
29 29
30 30 See :hg:`help dates` for a list of formats valid for -d/--date.
31 31
32 32 Returns 0 on success.
33 33 '''
34 34
35 35 date = opts.get('date')
36 36 if date:
37 37 opts['date'] = util.parsedate(date)
38 38
39 39 parent, p2 = repo.dirstate.parents()
40 40 branch = repo.dirstate.branch()
41 branchnode = repo.branchtags().get(branch)
41 try:
42 branchnode = repo.branchtip(branch)
43 except error.RepoLookupError:
44 branchnode = None
42 45 if parent != branchnode:
43 46 raise util.Abort(_('working dir not at branch tip '
44 47 '(use "hg update" to check out branch tip)'))
45 48
46 49 if p2 != nullid:
47 50 raise util.Abort(_('outstanding uncommitted merge'))
48 51
49 52 wlock = lock = None
50 53 try:
51 54 wlock = repo.wlock()
52 55 lock = repo.lock()
53 56 mod, add, rem, del_ = repo.status()[:4]
54 57
55 58 if mod or add or rem:
56 59 raise util.Abort(_('outstanding uncommitted changes'))
57 60 if del_:
58 61 raise util.Abort(_('working directory is missing some files'))
59 62 bheads = repo.branchheads(branch)
60 63 bheads = [head for head in bheads if len(repo[head].children()) == 0]
61 64 if len(bheads) > 1:
62 65 raise util.Abort(_('multiple heads in this branch '
63 66 '(use "hg heads ." and "hg merge" to merge)'))
64 67
65 68 other = hg.peer(repo, opts, ui.expandpath(source))
66 69 ui.status(_('pulling from %s\n') %
67 70 util.hidepassword(ui.expandpath(source)))
68 71 revs = None
69 72 if opts['rev']:
70 73 try:
71 74 revs = [other.lookup(rev) for rev in opts['rev']]
72 75 except error.CapabilityError:
73 76 err = _("Other repository doesn't support revision lookup, "
74 77 "so a rev cannot be specified.")
75 78 raise util.Abort(err)
76 79
77 80 # Are there any changes at all?
78 81 modheads = repo.pull(other, heads=revs)
79 82 if modheads == 0:
80 83 return 0
81 84
82 85 # Is this a simple fast-forward along the current branch?
83 86 newheads = repo.branchheads(branch)
84 87 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
85 88 if len(newheads) == 1 and len(newchildren):
86 89 if newchildren[0] != parent:
87 90 return hg.update(repo, newchildren[0])
88 91 else:
89 92 return 0
90 93
91 94 # Are there more than one additional branch heads?
92 95 newchildren = [n for n in newchildren if n != parent]
93 96 newparent = parent
94 97 if newchildren:
95 98 newparent = newchildren[0]
96 99 hg.clean(repo, newparent)
97 100 newheads = [n for n in newheads if n != newparent]
98 101 if len(newheads) > 1:
99 102 ui.status(_('not merging with %d other new branch heads '
100 103 '(use "hg heads ." and "hg merge" to merge them)\n') %
101 104 (len(newheads) - 1))
102 105 return 1
103 106
104 107 if not newheads:
105 108 return 0
106 109
107 110 # Otherwise, let's merge.
108 111 err = False
109 112 if newheads:
110 113 # By default, we consider the repository we're pulling
111 114 # *from* as authoritative, so we merge our changes into
112 115 # theirs.
113 116 if opts['switch_parent']:
114 117 firstparent, secondparent = newparent, newheads[0]
115 118 else:
116 119 firstparent, secondparent = newheads[0], newparent
117 120 ui.status(_('updating to %d:%s\n') %
118 121 (repo.changelog.rev(firstparent),
119 122 short(firstparent)))
120 123 hg.clean(repo, firstparent)
121 124 ui.status(_('merging with %d:%s\n') %
122 125 (repo.changelog.rev(secondparent), short(secondparent)))
123 126 err = hg.merge(repo, secondparent, remind=False)
124 127
125 128 if not err:
126 129 # we don't translate commit messages
127 130 message = (cmdutil.logmessage(ui, opts) or
128 131 ('Automated merge with %s' %
129 132 util.removeauth(other.url())))
130 133 editor = cmdutil.commiteditor
131 134 if opts.get('force_editor') or opts.get('edit'):
132 135 editor = cmdutil.commitforceeditor
133 136 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
134 137 ui.status(_('new changeset %d:%s merges remote changes '
135 138 'with local\n') % (repo.changelog.rev(n),
136 139 short(n)))
137 140
138 141 return err
139 142
140 143 finally:
141 144 release(lock, wlock)
142 145
143 146 cmdtable = {
144 147 'fetch':
145 148 (fetch,
146 149 [('r', 'rev', [],
147 150 _('a specific revision you would like to pull'), _('REV')),
148 151 ('e', 'edit', None, _('edit commit message')),
149 152 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
150 153 ('', 'switch-parent', None, _('switch parents when merging')),
151 154 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
152 155 _('hg fetch [SOURCE]')),
153 156 }
@@ -1,254 +1,254 @@
1 1 # Mercurial bookmark support code
2 2 #
3 3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from mercurial.i18n import _
9 9 from mercurial.node import hex
10 from mercurial import encoding, util
10 from mercurial import encoding, error, util
11 11 import errno, os
12 12
13 13 def valid(mark):
14 14 for c in (':', '\0', '\n', '\r'):
15 15 if c in mark:
16 16 return False
17 17 return True
18 18
19 19 def read(repo):
20 20 '''Parse .hg/bookmarks file and return a dictionary
21 21
22 22 Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
23 23 in the .hg/bookmarks file.
24 24 Read the file and return a (name=>nodeid) dictionary
25 25 '''
26 26 bookmarks = {}
27 27 try:
28 28 for line in repo.opener('bookmarks'):
29 29 line = line.strip()
30 30 if not line:
31 31 continue
32 32 if ' ' not in line:
33 33 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n') % line)
34 34 continue
35 35 sha, refspec = line.split(' ', 1)
36 36 refspec = encoding.tolocal(refspec)
37 37 try:
38 38 bookmarks[refspec] = repo.changelog.lookup(sha)
39 39 except LookupError:
40 40 pass
41 41 except IOError, inst:
42 42 if inst.errno != errno.ENOENT:
43 43 raise
44 44 return bookmarks
45 45
46 46 def readcurrent(repo):
47 47 '''Get the current bookmark
48 48
49 49 If we use gittish branches we have a current bookmark that
50 50 we are on. This function returns the name of the bookmark. It
51 51 is stored in .hg/bookmarks.current
52 52 '''
53 53 mark = None
54 54 try:
55 55 file = repo.opener('bookmarks.current')
56 56 except IOError, inst:
57 57 if inst.errno != errno.ENOENT:
58 58 raise
59 59 return None
60 60 try:
61 61 # No readline() in posixfile_nt, reading everything is cheap
62 62 mark = encoding.tolocal((file.readlines() or [''])[0])
63 63 if mark == '' or mark not in repo._bookmarks:
64 64 mark = None
65 65 finally:
66 66 file.close()
67 67 return mark
68 68
69 69 def write(repo):
70 70 '''Write bookmarks
71 71
72 72 Write the given bookmark => hash dictionary to the .hg/bookmarks file
73 73 in a format equal to those of localtags.
74 74
75 75 We also store a backup of the previous state in undo.bookmarks that
76 76 can be copied back on rollback.
77 77 '''
78 78 refs = repo._bookmarks
79 79
80 80 if repo._bookmarkcurrent not in refs:
81 81 setcurrent(repo, None)
82 82 for mark in refs.keys():
83 83 if not valid(mark):
84 84 raise util.Abort(_("bookmark '%s' contains illegal "
85 85 "character" % mark))
86 86
87 87 wlock = repo.wlock()
88 88 try:
89 89
90 90 file = repo.opener('bookmarks', 'w', atomictemp=True)
91 91 for refspec, node in refs.iteritems():
92 92 file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
93 93 file.close()
94 94
95 95 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
96 96 try:
97 97 os.utime(repo.sjoin('00changelog.i'), None)
98 98 except OSError:
99 99 pass
100 100
101 101 finally:
102 102 wlock.release()
103 103
104 104 def setcurrent(repo, mark):
105 105 '''Set the name of the bookmark that we are currently on
106 106
107 107 Set the name of the bookmark that we are on (hg update <bookmark>).
108 108 The name is recorded in .hg/bookmarks.current
109 109 '''
110 110 current = repo._bookmarkcurrent
111 111 if current == mark:
112 112 return
113 113
114 114 if mark not in repo._bookmarks:
115 115 mark = ''
116 116 if not valid(mark):
117 117 raise util.Abort(_("bookmark '%s' contains illegal "
118 118 "character" % mark))
119 119
120 120 wlock = repo.wlock()
121 121 try:
122 122 file = repo.opener('bookmarks.current', 'w', atomictemp=True)
123 123 file.write(encoding.fromlocal(mark))
124 124 file.close()
125 125 finally:
126 126 wlock.release()
127 127 repo._bookmarkcurrent = mark
128 128
129 129 def unsetcurrent(repo):
130 130 wlock = repo.wlock()
131 131 try:
132 132 try:
133 133 util.unlink(repo.join('bookmarks.current'))
134 134 repo._bookmarkcurrent = None
135 135 except OSError, inst:
136 136 if inst.errno != errno.ENOENT:
137 137 raise
138 138 finally:
139 139 wlock.release()
140 140
141 141 def updatecurrentbookmark(repo, oldnode, curbranch):
142 142 try:
143 return update(repo, oldnode, repo.branchtags()[curbranch])
144 except KeyError:
143 return update(repo, oldnode, repo.branchtip(curbranch))
144 except error.RepoLookupError:
145 145 if curbranch == "default": # no default branch!
146 146 return update(repo, oldnode, repo.lookup("tip"))
147 147 else:
148 148 raise util.Abort(_("branch %s not found") % curbranch)
149 149
150 150 def update(repo, parents, node):
151 151 marks = repo._bookmarks
152 152 update = False
153 153 cur = repo._bookmarkcurrent
154 154 if not cur:
155 155 return False
156 156
157 157 toupdate = [b for b in marks if b.split('@', 1)[0] == cur.split('@', 1)[0]]
158 158 for mark in toupdate:
159 159 if mark and marks[mark] in parents:
160 160 old = repo[marks[mark]]
161 161 new = repo[node]
162 162 if new in old.descendants() and mark == cur:
163 163 marks[cur] = new.node()
164 164 update = True
165 165 if mark != cur:
166 166 del marks[mark]
167 167 if update:
168 168 repo._writebookmarks(marks)
169 169 return update
170 170
171 171 def listbookmarks(repo):
172 172 # We may try to list bookmarks on a repo type that does not
173 173 # support it (e.g., statichttprepository).
174 174 marks = getattr(repo, '_bookmarks', {})
175 175
176 176 d = {}
177 177 for k, v in marks.iteritems():
178 178 # don't expose local divergent bookmarks
179 179 if '@' not in k or k.endswith('@'):
180 180 d[k] = hex(v)
181 181 return d
182 182
183 183 def pushbookmark(repo, key, old, new):
184 184 w = repo.wlock()
185 185 try:
186 186 marks = repo._bookmarks
187 187 if hex(marks.get(key, '')) != old:
188 188 return False
189 189 if new == '':
190 190 del marks[key]
191 191 else:
192 192 if new not in repo:
193 193 return False
194 194 marks[key] = repo[new].node()
195 195 write(repo)
196 196 return True
197 197 finally:
198 198 w.release()
199 199
200 200 def updatefromremote(ui, repo, remote, path):
201 201 ui.debug("checking for updated bookmarks\n")
202 202 rb = remote.listkeys('bookmarks')
203 203 changed = False
204 204 for k in rb.keys():
205 205 if k in repo._bookmarks:
206 206 nr, nl = rb[k], repo._bookmarks[k]
207 207 if nr in repo:
208 208 cr = repo[nr]
209 209 cl = repo[nl]
210 210 if cl.rev() >= cr.rev():
211 211 continue
212 212 if cr in cl.descendants():
213 213 repo._bookmarks[k] = cr.node()
214 214 changed = True
215 215 ui.status(_("updating bookmark %s\n") % k)
216 216 else:
217 217 # find a unique @ suffix
218 218 for x in range(1, 100):
219 219 n = '%s@%d' % (k, x)
220 220 if n not in repo._bookmarks:
221 221 break
222 222 # try to use an @pathalias suffix
223 223 # if an @pathalias already exists, we overwrite (update) it
224 224 for p, u in ui.configitems("paths"):
225 225 if path == u:
226 226 n = '%s@%s' % (k, p)
227 227
228 228 repo._bookmarks[n] = cr.node()
229 229 changed = True
230 230 ui.warn(_("divergent bookmark %s stored as %s\n") % (k, n))
231 231 elif rb[k] in repo:
232 232 # add remote bookmarks for changes we already have
233 233 repo._bookmarks[k] = repo[rb[k]].node()
234 234 changed = True
235 235 ui.status(_("adding remote bookmark %s\n") % k)
236 236
237 237 if changed:
238 238 write(repo)
239 239
240 240 def diff(ui, repo, remote):
241 241 ui.status(_("searching for changed bookmarks\n"))
242 242
243 243 lmarks = repo.listkeys('bookmarks')
244 244 rmarks = remote.listkeys('bookmarks')
245 245
246 246 diff = sorted(set(rmarks) - set(lmarks))
247 247 for k in diff:
248 248 mark = ui.debugflag and rmarks[k] or rmarks[k][:12]
249 249 ui.write(" %-25s %s\n" % (k, mark))
250 250
251 251 if len(diff) <= 0:
252 252 ui.status(_("no changed bookmarks found\n"))
253 253 return 1
254 254 return 0
@@ -1,5811 +1,5811 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, difflib, time, tempfile, errno
12 12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 13 import patch, help, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, hbisect
15 15 import sshserver, hgweb, hgweb.server, commandserver
16 16 import merge as mergemod
17 17 import minirst, revset, fileset
18 18 import dagparser, context, simplemerge
19 19 import random, setdiscovery, treediscovery, dagutil, pvec
20 20 import phases
21 21
22 22 table = {}
23 23
24 24 command = cmdutil.command(table)
25 25
26 26 # common command options
27 27
28 28 globalopts = [
29 29 ('R', 'repository', '',
30 30 _('repository root directory or name of overlay bundle file'),
31 31 _('REPO')),
32 32 ('', 'cwd', '',
33 33 _('change working directory'), _('DIR')),
34 34 ('y', 'noninteractive', None,
35 35 _('do not prompt, automatically pick the first choice for all prompts')),
36 36 ('q', 'quiet', None, _('suppress output')),
37 37 ('v', 'verbose', None, _('enable additional output')),
38 38 ('', 'config', [],
39 39 _('set/override config option (use \'section.name=value\')'),
40 40 _('CONFIG')),
41 41 ('', 'debug', None, _('enable debugging output')),
42 42 ('', 'debugger', None, _('start debugger')),
43 43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
44 44 _('ENCODE')),
45 45 ('', 'encodingmode', encoding.encodingmode,
46 46 _('set the charset encoding mode'), _('MODE')),
47 47 ('', 'traceback', None, _('always print a traceback on exception')),
48 48 ('', 'time', None, _('time how long the command takes')),
49 49 ('', 'profile', None, _('print command execution profile')),
50 50 ('', 'version', None, _('output version information and exit')),
51 51 ('h', 'help', None, _('display help and exit')),
52 52 ]
53 53
54 54 dryrunopts = [('n', 'dry-run', None,
55 55 _('do not perform actions, just print output'))]
56 56
57 57 remoteopts = [
58 58 ('e', 'ssh', '',
59 59 _('specify ssh command to use'), _('CMD')),
60 60 ('', 'remotecmd', '',
61 61 _('specify hg command to run on the remote side'), _('CMD')),
62 62 ('', 'insecure', None,
63 63 _('do not verify server certificate (ignoring web.cacerts config)')),
64 64 ]
65 65
66 66 walkopts = [
67 67 ('I', 'include', [],
68 68 _('include names matching the given patterns'), _('PATTERN')),
69 69 ('X', 'exclude', [],
70 70 _('exclude names matching the given patterns'), _('PATTERN')),
71 71 ]
72 72
73 73 commitopts = [
74 74 ('m', 'message', '',
75 75 _('use text as commit message'), _('TEXT')),
76 76 ('l', 'logfile', '',
77 77 _('read commit message from file'), _('FILE')),
78 78 ]
79 79
80 80 commitopts2 = [
81 81 ('d', 'date', '',
82 82 _('record the specified date as commit date'), _('DATE')),
83 83 ('u', 'user', '',
84 84 _('record the specified user as committer'), _('USER')),
85 85 ]
86 86
87 87 templateopts = [
88 88 ('', 'style', '',
89 89 _('display using template map file'), _('STYLE')),
90 90 ('', 'template', '',
91 91 _('display with template'), _('TEMPLATE')),
92 92 ]
93 93
94 94 logopts = [
95 95 ('p', 'patch', None, _('show patch')),
96 96 ('g', 'git', None, _('use git extended diff format')),
97 97 ('l', 'limit', '',
98 98 _('limit number of changes displayed'), _('NUM')),
99 99 ('M', 'no-merges', None, _('do not show merges')),
100 100 ('', 'stat', None, _('output diffstat-style summary of changes')),
101 101 ] + templateopts
102 102
103 103 diffopts = [
104 104 ('a', 'text', None, _('treat all files as text')),
105 105 ('g', 'git', None, _('use git extended diff format')),
106 106 ('', 'nodates', None, _('omit dates from diff headers'))
107 107 ]
108 108
109 109 diffwsopts = [
110 110 ('w', 'ignore-all-space', None,
111 111 _('ignore white space when comparing lines')),
112 112 ('b', 'ignore-space-change', None,
113 113 _('ignore changes in the amount of white space')),
114 114 ('B', 'ignore-blank-lines', None,
115 115 _('ignore changes whose lines are all blank')),
116 116 ]
117 117
118 118 diffopts2 = [
119 119 ('p', 'show-function', None, _('show which function each change is in')),
120 120 ('', 'reverse', None, _('produce a diff that undoes the changes')),
121 121 ] + diffwsopts + [
122 122 ('U', 'unified', '',
123 123 _('number of lines of context to show'), _('NUM')),
124 124 ('', 'stat', None, _('output diffstat-style summary of changes')),
125 125 ]
126 126
127 127 mergetoolopts = [
128 128 ('t', 'tool', '', _('specify merge tool')),
129 129 ]
130 130
131 131 similarityopts = [
132 132 ('s', 'similarity', '',
133 133 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
134 134 ]
135 135
136 136 subrepoopts = [
137 137 ('S', 'subrepos', None,
138 138 _('recurse into subrepositories'))
139 139 ]
140 140
141 141 # Commands start here, listed alphabetically
142 142
143 143 @command('^add',
144 144 walkopts + subrepoopts + dryrunopts,
145 145 _('[OPTION]... [FILE]...'))
146 146 def add(ui, repo, *pats, **opts):
147 147 """add the specified files on the next commit
148 148
149 149 Schedule files to be version controlled and added to the
150 150 repository.
151 151
152 152 The files will be added to the repository at the next commit. To
153 153 undo an add before that, see :hg:`forget`.
154 154
155 155 If no names are given, add all files to the repository.
156 156
157 157 .. container:: verbose
158 158
159 159 An example showing how new (unknown) files are added
160 160 automatically by :hg:`add`::
161 161
162 162 $ ls
163 163 foo.c
164 164 $ hg status
165 165 ? foo.c
166 166 $ hg add
167 167 adding foo.c
168 168 $ hg status
169 169 A foo.c
170 170
171 171 Returns 0 if all files are successfully added.
172 172 """
173 173
174 174 m = scmutil.match(repo[None], pats, opts)
175 175 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
176 176 opts.get('subrepos'), prefix="", explicitonly=False)
177 177 return rejected and 1 or 0
178 178
179 179 @command('addremove',
180 180 similarityopts + walkopts + dryrunopts,
181 181 _('[OPTION]... [FILE]...'))
182 182 def addremove(ui, repo, *pats, **opts):
183 183 """add all new files, delete all missing files
184 184
185 185 Add all new files and remove all missing files from the
186 186 repository.
187 187
188 188 New files are ignored if they match any of the patterns in
189 189 ``.hgignore``. As with add, these changes take effect at the next
190 190 commit.
191 191
192 192 Use the -s/--similarity option to detect renamed files. With a
193 193 parameter greater than 0, this compares every removed file with
194 194 every added file and records those similar enough as renames. This
195 195 option takes a percentage between 0 (disabled) and 100 (files must
196 196 be identical) as its parameter. Detecting renamed files this way
197 197 can be expensive. After using this option, :hg:`status -C` can be
198 198 used to check which files were identified as moved or renamed.
199 199 If this option is not specified, only renames of identical files
200 200 are detected.
201 201
202 202 Returns 0 if all files are successfully added.
203 203 """
204 204 try:
205 205 sim = float(opts.get('similarity') or 100)
206 206 except ValueError:
207 207 raise util.Abort(_('similarity must be a number'))
208 208 if sim < 0 or sim > 100:
209 209 raise util.Abort(_('similarity must be between 0 and 100'))
210 210 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
211 211
212 212 @command('^annotate|blame',
213 213 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
214 214 ('', 'follow', None,
215 215 _('follow copies/renames and list the filename (DEPRECATED)')),
216 216 ('', 'no-follow', None, _("don't follow copies and renames")),
217 217 ('a', 'text', None, _('treat all files as text')),
218 218 ('u', 'user', None, _('list the author (long with -v)')),
219 219 ('f', 'file', None, _('list the filename')),
220 220 ('d', 'date', None, _('list the date (short with -q)')),
221 221 ('n', 'number', None, _('list the revision number (default)')),
222 222 ('c', 'changeset', None, _('list the changeset')),
223 223 ('l', 'line-number', None, _('show line number at the first appearance'))
224 224 ] + diffwsopts + walkopts,
225 225 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
226 226 def annotate(ui, repo, *pats, **opts):
227 227 """show changeset information by line for each file
228 228
229 229 List changes in files, showing the revision id responsible for
230 230 each line
231 231
232 232 This command is useful for discovering when a change was made and
233 233 by whom.
234 234
235 235 Without the -a/--text option, annotate will avoid processing files
236 236 it detects as binary. With -a, annotate will annotate the file
237 237 anyway, although the results will probably be neither useful
238 238 nor desirable.
239 239
240 240 Returns 0 on success.
241 241 """
242 242 if opts.get('follow'):
243 243 # --follow is deprecated and now just an alias for -f/--file
244 244 # to mimic the behavior of Mercurial before version 1.5
245 245 opts['file'] = True
246 246
247 247 datefunc = ui.quiet and util.shortdate or util.datestr
248 248 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
249 249
250 250 if not pats:
251 251 raise util.Abort(_('at least one filename or pattern is required'))
252 252
253 253 hexfn = ui.debugflag and hex or short
254 254
255 255 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
256 256 ('number', ' ', lambda x: str(x[0].rev())),
257 257 ('changeset', ' ', lambda x: hexfn(x[0].node())),
258 258 ('date', ' ', getdate),
259 259 ('file', ' ', lambda x: x[0].path()),
260 260 ('line_number', ':', lambda x: str(x[1])),
261 261 ]
262 262
263 263 if (not opts.get('user') and not opts.get('changeset')
264 264 and not opts.get('date') and not opts.get('file')):
265 265 opts['number'] = True
266 266
267 267 linenumber = opts.get('line_number') is not None
268 268 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
269 269 raise util.Abort(_('at least one of -n/-c is required for -l'))
270 270
271 271 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
272 272 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
273 273
274 274 def bad(x, y):
275 275 raise util.Abort("%s: %s" % (x, y))
276 276
277 277 ctx = scmutil.revsingle(repo, opts.get('rev'))
278 278 m = scmutil.match(ctx, pats, opts)
279 279 m.bad = bad
280 280 follow = not opts.get('no_follow')
281 281 diffopts = patch.diffopts(ui, opts, section='annotate')
282 282 for abs in ctx.walk(m):
283 283 fctx = ctx[abs]
284 284 if not opts.get('text') and util.binary(fctx.data()):
285 285 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
286 286 continue
287 287
288 288 lines = fctx.annotate(follow=follow, linenumber=linenumber,
289 289 diffopts=diffopts)
290 290 pieces = []
291 291
292 292 for f, sep in funcmap:
293 293 l = [f(n) for n, dummy in lines]
294 294 if l:
295 295 sized = [(x, encoding.colwidth(x)) for x in l]
296 296 ml = max([w for x, w in sized])
297 297 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
298 298 for x, w in sized])
299 299
300 300 if pieces:
301 301 for p, l in zip(zip(*pieces), lines):
302 302 ui.write("%s: %s" % ("".join(p), l[1]))
303 303
304 304 if lines and not lines[-1][1].endswith('\n'):
305 305 ui.write('\n')
306 306
307 307 @command('archive',
308 308 [('', 'no-decode', None, _('do not pass files through decoders')),
309 309 ('p', 'prefix', '', _('directory prefix for files in archive'),
310 310 _('PREFIX')),
311 311 ('r', 'rev', '', _('revision to distribute'), _('REV')),
312 312 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
313 313 ] + subrepoopts + walkopts,
314 314 _('[OPTION]... DEST'))
315 315 def archive(ui, repo, dest, **opts):
316 316 '''create an unversioned archive of a repository revision
317 317
318 318 By default, the revision used is the parent of the working
319 319 directory; use -r/--rev to specify a different revision.
320 320
321 321 The archive type is automatically detected based on file
322 322 extension (or override using -t/--type).
323 323
324 324 .. container:: verbose
325 325
326 326 Examples:
327 327
328 328 - create a zip file containing the 1.0 release::
329 329
330 330 hg archive -r 1.0 project-1.0.zip
331 331
332 332 - create a tarball excluding .hg files::
333 333
334 334 hg archive project.tar.gz -X ".hg*"
335 335
336 336 Valid types are:
337 337
338 338 :``files``: a directory full of files (default)
339 339 :``tar``: tar archive, uncompressed
340 340 :``tbz2``: tar archive, compressed using bzip2
341 341 :``tgz``: tar archive, compressed using gzip
342 342 :``uzip``: zip archive, uncompressed
343 343 :``zip``: zip archive, compressed using deflate
344 344
345 345 The exact name of the destination archive or directory is given
346 346 using a format string; see :hg:`help export` for details.
347 347
348 348 Each member added to an archive file has a directory prefix
349 349 prepended. Use -p/--prefix to specify a format string for the
350 350 prefix. The default is the basename of the archive, with suffixes
351 351 removed.
352 352
353 353 Returns 0 on success.
354 354 '''
355 355
356 356 ctx = scmutil.revsingle(repo, opts.get('rev'))
357 357 if not ctx:
358 358 raise util.Abort(_('no working directory: please specify a revision'))
359 359 node = ctx.node()
360 360 dest = cmdutil.makefilename(repo, dest, node)
361 361 if os.path.realpath(dest) == repo.root:
362 362 raise util.Abort(_('repository root cannot be destination'))
363 363
364 364 kind = opts.get('type') or archival.guesskind(dest) or 'files'
365 365 prefix = opts.get('prefix')
366 366
367 367 if dest == '-':
368 368 if kind == 'files':
369 369 raise util.Abort(_('cannot archive plain files to stdout'))
370 370 dest = cmdutil.makefileobj(repo, dest)
371 371 if not prefix:
372 372 prefix = os.path.basename(repo.root) + '-%h'
373 373
374 374 prefix = cmdutil.makefilename(repo, prefix, node)
375 375 matchfn = scmutil.match(ctx, [], opts)
376 376 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
377 377 matchfn, prefix, subrepos=opts.get('subrepos'))
378 378
379 379 @command('backout',
380 380 [('', 'merge', None, _('merge with old dirstate parent after backout')),
381 381 ('', 'parent', '',
382 382 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
383 383 ('r', 'rev', '', _('revision to backout'), _('REV')),
384 384 ] + mergetoolopts + walkopts + commitopts + commitopts2,
385 385 _('[OPTION]... [-r] REV'))
386 386 def backout(ui, repo, node=None, rev=None, **opts):
387 387 '''reverse effect of earlier changeset
388 388
389 389 Prepare a new changeset with the effect of REV undone in the
390 390 current working directory.
391 391
392 392 If REV is the parent of the working directory, then this new changeset
393 393 is committed automatically. Otherwise, hg needs to merge the
394 394 changes and the merged result is left uncommitted.
395 395
396 396 .. note::
397 397 backout cannot be used to fix either an unwanted or
398 398 incorrect merge.
399 399
400 400 .. container:: verbose
401 401
402 402 By default, the pending changeset will have one parent,
403 403 maintaining a linear history. With --merge, the pending
404 404 changeset will instead have two parents: the old parent of the
405 405 working directory and a new child of REV that simply undoes REV.
406 406
407 407 Before version 1.7, the behavior without --merge was equivalent
408 408 to specifying --merge followed by :hg:`update --clean .` to
409 409 cancel the merge and leave the child of REV as a head to be
410 410 merged separately.
411 411
412 412 See :hg:`help dates` for a list of formats valid for -d/--date.
413 413
414 414 Returns 0 on success.
415 415 '''
416 416 if rev and node:
417 417 raise util.Abort(_("please specify just one revision"))
418 418
419 419 if not rev:
420 420 rev = node
421 421
422 422 if not rev:
423 423 raise util.Abort(_("please specify a revision to backout"))
424 424
425 425 date = opts.get('date')
426 426 if date:
427 427 opts['date'] = util.parsedate(date)
428 428
429 429 cmdutil.bailifchanged(repo)
430 430 node = scmutil.revsingle(repo, rev).node()
431 431
432 432 op1, op2 = repo.dirstate.parents()
433 433 a = repo.changelog.ancestor(op1, node)
434 434 if a != node:
435 435 raise util.Abort(_('cannot backout change on a different branch'))
436 436
437 437 p1, p2 = repo.changelog.parents(node)
438 438 if p1 == nullid:
439 439 raise util.Abort(_('cannot backout a change with no parents'))
440 440 if p2 != nullid:
441 441 if not opts.get('parent'):
442 442 raise util.Abort(_('cannot backout a merge changeset'))
443 443 p = repo.lookup(opts['parent'])
444 444 if p not in (p1, p2):
445 445 raise util.Abort(_('%s is not a parent of %s') %
446 446 (short(p), short(node)))
447 447 parent = p
448 448 else:
449 449 if opts.get('parent'):
450 450 raise util.Abort(_('cannot use --parent on non-merge changeset'))
451 451 parent = p1
452 452
453 453 # the backout should appear on the same branch
454 454 wlock = repo.wlock()
455 455 try:
456 456 branch = repo.dirstate.branch()
457 457 hg.clean(repo, node, show_stats=False)
458 458 repo.dirstate.setbranch(branch)
459 459 revert_opts = opts.copy()
460 460 revert_opts['date'] = None
461 461 revert_opts['all'] = True
462 462 revert_opts['rev'] = hex(parent)
463 463 revert_opts['no_backup'] = None
464 464 revert(ui, repo, **revert_opts)
465 465 if not opts.get('merge') and op1 != node:
466 466 try:
467 467 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
468 468 return hg.update(repo, op1)
469 469 finally:
470 470 ui.setconfig('ui', 'forcemerge', '')
471 471
472 472 commit_opts = opts.copy()
473 473 commit_opts['addremove'] = False
474 474 if not commit_opts['message'] and not commit_opts['logfile']:
475 475 # we don't translate commit messages
476 476 commit_opts['message'] = "Backed out changeset %s" % short(node)
477 477 commit_opts['force_editor'] = True
478 478 commit(ui, repo, **commit_opts)
479 479 def nice(node):
480 480 return '%d:%s' % (repo.changelog.rev(node), short(node))
481 481 ui.status(_('changeset %s backs out changeset %s\n') %
482 482 (nice(repo.changelog.tip()), nice(node)))
483 483 if opts.get('merge') and op1 != node:
484 484 hg.clean(repo, op1, show_stats=False)
485 485 ui.status(_('merging with changeset %s\n')
486 486 % nice(repo.changelog.tip()))
487 487 try:
488 488 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
489 489 return hg.merge(repo, hex(repo.changelog.tip()))
490 490 finally:
491 491 ui.setconfig('ui', 'forcemerge', '')
492 492 finally:
493 493 wlock.release()
494 494 return 0
495 495
496 496 @command('bisect',
497 497 [('r', 'reset', False, _('reset bisect state')),
498 498 ('g', 'good', False, _('mark changeset good')),
499 499 ('b', 'bad', False, _('mark changeset bad')),
500 500 ('s', 'skip', False, _('skip testing changeset')),
501 501 ('e', 'extend', False, _('extend the bisect range')),
502 502 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
503 503 ('U', 'noupdate', False, _('do not update to target'))],
504 504 _("[-gbsr] [-U] [-c CMD] [REV]"))
505 505 def bisect(ui, repo, rev=None, extra=None, command=None,
506 506 reset=None, good=None, bad=None, skip=None, extend=None,
507 507 noupdate=None):
508 508 """subdivision search of changesets
509 509
510 510 This command helps to find changesets which introduce problems. To
511 511 use, mark the earliest changeset you know exhibits the problem as
512 512 bad, then mark the latest changeset which is free from the problem
513 513 as good. Bisect will update your working directory to a revision
514 514 for testing (unless the -U/--noupdate option is specified). Once
515 515 you have performed tests, mark the working directory as good or
516 516 bad, and bisect will either update to another candidate changeset
517 517 or announce that it has found the bad revision.
518 518
519 519 As a shortcut, you can also use the revision argument to mark a
520 520 revision as good or bad without checking it out first.
521 521
522 522 If you supply a command, it will be used for automatic bisection.
523 523 The environment variable HG_NODE will contain the ID of the
524 524 changeset being tested. The exit status of the command will be
525 525 used to mark revisions as good or bad: status 0 means good, 125
526 526 means to skip the revision, 127 (command not found) will abort the
527 527 bisection, and any other non-zero exit status means the revision
528 528 is bad.
529 529
530 530 .. container:: verbose
531 531
532 532 Some examples:
533 533
534 534 - start a bisection with known bad revision 12, and good revision 34::
535 535
536 536 hg bisect --bad 34
537 537 hg bisect --good 12
538 538
539 539 - advance the current bisection by marking current revision as good or
540 540 bad::
541 541
542 542 hg bisect --good
543 543 hg bisect --bad
544 544
545 545 - mark the current revision, or a known revision, to be skipped (eg. if
546 546 that revision is not usable because of another issue)::
547 547
548 548 hg bisect --skip
549 549 hg bisect --skip 23
550 550
551 551 - forget the current bisection::
552 552
553 553 hg bisect --reset
554 554
555 555 - use 'make && make tests' to automatically find the first broken
556 556 revision::
557 557
558 558 hg bisect --reset
559 559 hg bisect --bad 34
560 560 hg bisect --good 12
561 561 hg bisect --command 'make && make tests'
562 562
563 563 - see all changesets whose states are already known in the current
564 564 bisection::
565 565
566 566 hg log -r "bisect(pruned)"
567 567
568 568 - see the changeset currently being bisected (especially useful
569 569 if running with -U/--noupdate)::
570 570
571 571 hg log -r "bisect(current)"
572 572
573 573 - see all changesets that took part in the current bisection::
574 574
575 575 hg log -r "bisect(range)"
576 576
577 577 - with the graphlog extension, you can even get a nice graph::
578 578
579 579 hg log --graph -r "bisect(range)"
580 580
581 581 See :hg:`help revsets` for more about the `bisect()` keyword.
582 582
583 583 Returns 0 on success.
584 584 """
585 585 def extendbisectrange(nodes, good):
586 586 # bisect is incomplete when it ends on a merge node and
587 587 # one of the parent was not checked.
588 588 parents = repo[nodes[0]].parents()
589 589 if len(parents) > 1:
590 590 side = good and state['bad'] or state['good']
591 591 num = len(set(i.node() for i in parents) & set(side))
592 592 if num == 1:
593 593 return parents[0].ancestor(parents[1])
594 594 return None
595 595
596 596 def print_result(nodes, good):
597 597 displayer = cmdutil.show_changeset(ui, repo, {})
598 598 if len(nodes) == 1:
599 599 # narrowed it down to a single revision
600 600 if good:
601 601 ui.write(_("The first good revision is:\n"))
602 602 else:
603 603 ui.write(_("The first bad revision is:\n"))
604 604 displayer.show(repo[nodes[0]])
605 605 extendnode = extendbisectrange(nodes, good)
606 606 if extendnode is not None:
607 607 ui.write(_('Not all ancestors of this changeset have been'
608 608 ' checked.\nUse bisect --extend to continue the '
609 609 'bisection from\nthe common ancestor, %s.\n')
610 610 % extendnode)
611 611 else:
612 612 # multiple possible revisions
613 613 if good:
614 614 ui.write(_("Due to skipped revisions, the first "
615 615 "good revision could be any of:\n"))
616 616 else:
617 617 ui.write(_("Due to skipped revisions, the first "
618 618 "bad revision could be any of:\n"))
619 619 for n in nodes:
620 620 displayer.show(repo[n])
621 621 displayer.close()
622 622
623 623 def check_state(state, interactive=True):
624 624 if not state['good'] or not state['bad']:
625 625 if (good or bad or skip or reset) and interactive:
626 626 return
627 627 if not state['good']:
628 628 raise util.Abort(_('cannot bisect (no known good revisions)'))
629 629 else:
630 630 raise util.Abort(_('cannot bisect (no known bad revisions)'))
631 631 return True
632 632
633 633 # backward compatibility
634 634 if rev in "good bad reset init".split():
635 635 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
636 636 cmd, rev, extra = rev, extra, None
637 637 if cmd == "good":
638 638 good = True
639 639 elif cmd == "bad":
640 640 bad = True
641 641 else:
642 642 reset = True
643 643 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
644 644 raise util.Abort(_('incompatible arguments'))
645 645
646 646 if reset:
647 647 p = repo.join("bisect.state")
648 648 if os.path.exists(p):
649 649 os.unlink(p)
650 650 return
651 651
652 652 state = hbisect.load_state(repo)
653 653
654 654 if command:
655 655 changesets = 1
656 656 try:
657 657 node = state['current'][0]
658 658 except LookupError:
659 659 if noupdate:
660 660 raise util.Abort(_('current bisect revision is unknown - '
661 661 'start a new bisect to fix'))
662 662 node, p2 = repo.dirstate.parents()
663 663 if p2 != nullid:
664 664 raise util.Abort(_('current bisect revision is a merge'))
665 665 try:
666 666 while changesets:
667 667 # update state
668 668 state['current'] = [node]
669 669 hbisect.save_state(repo, state)
670 670 status = util.system(command,
671 671 environ={'HG_NODE': hex(node)},
672 672 out=ui.fout)
673 673 if status == 125:
674 674 transition = "skip"
675 675 elif status == 0:
676 676 transition = "good"
677 677 # status < 0 means process was killed
678 678 elif status == 127:
679 679 raise util.Abort(_("failed to execute %s") % command)
680 680 elif status < 0:
681 681 raise util.Abort(_("%s killed") % command)
682 682 else:
683 683 transition = "bad"
684 684 ctx = scmutil.revsingle(repo, rev, node)
685 685 rev = None # clear for future iterations
686 686 state[transition].append(ctx.node())
687 687 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
688 688 check_state(state, interactive=False)
689 689 # bisect
690 690 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
691 691 # update to next check
692 692 node = nodes[0]
693 693 if not noupdate:
694 694 cmdutil.bailifchanged(repo)
695 695 hg.clean(repo, node, show_stats=False)
696 696 finally:
697 697 state['current'] = [node]
698 698 hbisect.save_state(repo, state)
699 699 print_result(nodes, good)
700 700 return
701 701
702 702 # update state
703 703
704 704 if rev:
705 705 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
706 706 else:
707 707 nodes = [repo.lookup('.')]
708 708
709 709 if good or bad or skip:
710 710 if good:
711 711 state['good'] += nodes
712 712 elif bad:
713 713 state['bad'] += nodes
714 714 elif skip:
715 715 state['skip'] += nodes
716 716 hbisect.save_state(repo, state)
717 717
718 718 if not check_state(state):
719 719 return
720 720
721 721 # actually bisect
722 722 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
723 723 if extend:
724 724 if not changesets:
725 725 extendnode = extendbisectrange(nodes, good)
726 726 if extendnode is not None:
727 727 ui.write(_("Extending search to changeset %d:%s\n"
728 728 % (extendnode.rev(), extendnode)))
729 729 state['current'] = [extendnode.node()]
730 730 hbisect.save_state(repo, state)
731 731 if noupdate:
732 732 return
733 733 cmdutil.bailifchanged(repo)
734 734 return hg.clean(repo, extendnode.node())
735 735 raise util.Abort(_("nothing to extend"))
736 736
737 737 if changesets == 0:
738 738 print_result(nodes, good)
739 739 else:
740 740 assert len(nodes) == 1 # only a single node can be tested next
741 741 node = nodes[0]
742 742 # compute the approximate number of remaining tests
743 743 tests, size = 0, 2
744 744 while size <= changesets:
745 745 tests, size = tests + 1, size * 2
746 746 rev = repo.changelog.rev(node)
747 747 ui.write(_("Testing changeset %d:%s "
748 748 "(%d changesets remaining, ~%d tests)\n")
749 749 % (rev, short(node), changesets, tests))
750 750 state['current'] = [node]
751 751 hbisect.save_state(repo, state)
752 752 if not noupdate:
753 753 cmdutil.bailifchanged(repo)
754 754 return hg.clean(repo, node)
755 755
756 756 @command('bookmarks',
757 757 [('f', 'force', False, _('force')),
758 758 ('r', 'rev', '', _('revision'), _('REV')),
759 759 ('d', 'delete', False, _('delete a given bookmark')),
760 760 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
761 761 ('i', 'inactive', False, _('mark a bookmark inactive'))],
762 762 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
763 763 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
764 764 rename=None, inactive=False):
765 765 '''track a line of development with movable markers
766 766
767 767 Bookmarks are pointers to certain commits that move when committing.
768 768 Bookmarks are local. They can be renamed, copied and deleted. It is
769 769 possible to use :hg:`merge NAME` to merge from a given bookmark, and
770 770 :hg:`update NAME` to update to a given bookmark.
771 771
772 772 You can use :hg:`bookmark NAME` to set a bookmark on the working
773 773 directory's parent revision with the given name. If you specify
774 774 a revision using -r REV (where REV may be an existing bookmark),
775 775 the bookmark is assigned to that revision.
776 776
777 777 Bookmarks can be pushed and pulled between repositories (see :hg:`help
778 778 push` and :hg:`help pull`). This requires both the local and remote
779 779 repositories to support bookmarks. For versions prior to 1.8, this means
780 780 the bookmarks extension must be enabled.
781 781
782 782 With -i/--inactive, the new bookmark will not be made the active
783 783 bookmark. If -r/--rev is given, the new bookmark will not be made
784 784 active even if -i/--inactive is not given. If no NAME is given, the
785 785 current active bookmark will be marked inactive.
786 786 '''
787 787 hexfn = ui.debugflag and hex or short
788 788 marks = repo._bookmarks
789 789 cur = repo.changectx('.').node()
790 790
791 791 if delete:
792 792 if mark is None:
793 793 raise util.Abort(_("bookmark name required"))
794 794 if mark not in marks:
795 795 raise util.Abort(_("bookmark '%s' does not exist") % mark)
796 796 if mark == repo._bookmarkcurrent:
797 797 bookmarks.setcurrent(repo, None)
798 798 del marks[mark]
799 799 bookmarks.write(repo)
800 800 return
801 801
802 802 if rename:
803 803 if rename not in marks:
804 804 raise util.Abort(_("bookmark '%s' does not exist") % rename)
805 805 if mark in marks and not force:
806 806 raise util.Abort(_("bookmark '%s' already exists "
807 807 "(use -f to force)") % mark)
808 808 if mark is None:
809 809 raise util.Abort(_("new bookmark name required"))
810 810 marks[mark] = marks[rename]
811 811 if repo._bookmarkcurrent == rename and not inactive:
812 812 bookmarks.setcurrent(repo, mark)
813 813 del marks[rename]
814 814 bookmarks.write(repo)
815 815 return
816 816
817 817 if mark is not None:
818 818 if "\n" in mark:
819 819 raise util.Abort(_("bookmark name cannot contain newlines"))
820 820 mark = mark.strip()
821 821 if not mark:
822 822 raise util.Abort(_("bookmark names cannot consist entirely of "
823 823 "whitespace"))
824 824 if inactive and mark == repo._bookmarkcurrent:
825 825 bookmarks.setcurrent(repo, None)
826 826 return
827 827 if mark in marks and not force:
828 828 raise util.Abort(_("bookmark '%s' already exists "
829 829 "(use -f to force)") % mark)
830 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
830 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
831 831 and not force):
832 832 raise util.Abort(
833 833 _("a bookmark cannot have the name of an existing branch"))
834 834 if rev:
835 835 marks[mark] = repo.lookup(rev)
836 836 else:
837 837 marks[mark] = cur
838 838 if not inactive and cur == marks[mark]:
839 839 bookmarks.setcurrent(repo, mark)
840 840 bookmarks.write(repo)
841 841 return
842 842
843 843 if mark is None:
844 844 if rev:
845 845 raise util.Abort(_("bookmark name required"))
846 846 if len(marks) == 0:
847 847 ui.status(_("no bookmarks set\n"))
848 848 else:
849 849 for bmark, n in sorted(marks.iteritems()):
850 850 current = repo._bookmarkcurrent
851 851 if bmark == current and n == cur:
852 852 prefix, label = '*', 'bookmarks.current'
853 853 else:
854 854 prefix, label = ' ', ''
855 855
856 856 if ui.quiet:
857 857 ui.write("%s\n" % bmark, label=label)
858 858 else:
859 859 ui.write(" %s %-25s %d:%s\n" % (
860 860 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
861 861 label=label)
862 862 return
863 863
864 864 @command('branch',
865 865 [('f', 'force', None,
866 866 _('set branch name even if it shadows an existing branch')),
867 867 ('C', 'clean', None, _('reset branch name to parent branch name'))],
868 868 _('[-fC] [NAME]'))
869 869 def branch(ui, repo, label=None, **opts):
870 870 """set or show the current branch name
871 871
872 872 .. note::
873 873 Branch names are permanent and global. Use :hg:`bookmark` to create a
874 874 light-weight bookmark instead. See :hg:`help glossary` for more
875 875 information about named branches and bookmarks.
876 876
877 877 With no argument, show the current branch name. With one argument,
878 878 set the working directory branch name (the branch will not exist
879 879 in the repository until the next commit). Standard practice
880 880 recommends that primary development take place on the 'default'
881 881 branch.
882 882
883 883 Unless -f/--force is specified, branch will not let you set a
884 884 branch name that already exists, even if it's inactive.
885 885
886 886 Use -C/--clean to reset the working directory branch to that of
887 887 the parent of the working directory, negating a previous branch
888 888 change.
889 889
890 890 Use the command :hg:`update` to switch to an existing branch. Use
891 891 :hg:`commit --close-branch` to mark this branch as closed.
892 892
893 893 Returns 0 on success.
894 894 """
895 895 if not opts.get('clean') and not label:
896 896 ui.write("%s\n" % repo.dirstate.branch())
897 897 return
898 898
899 899 wlock = repo.wlock()
900 900 try:
901 901 if opts.get('clean'):
902 902 label = repo[None].p1().branch()
903 903 repo.dirstate.setbranch(label)
904 904 ui.status(_('reset working directory to branch %s\n') % label)
905 905 elif label:
906 if not opts.get('force') and label in repo.branchtags():
906 if not opts.get('force') and label in repo.branchmap():
907 907 if label not in [p.branch() for p in repo.parents()]:
908 908 raise util.Abort(_('a branch of the same name already'
909 909 ' exists'),
910 910 # i18n: "it" refers to an existing branch
911 911 hint=_("use 'hg update' to switch to it"))
912 912 repo.dirstate.setbranch(label)
913 913 ui.status(_('marked working directory as branch %s\n') % label)
914 914 ui.status(_('(branches are permanent and global, '
915 915 'did you want a bookmark?)\n'))
916 916 finally:
917 917 wlock.release()
918 918
919 919 @command('branches',
920 920 [('a', 'active', False, _('show only branches that have unmerged heads')),
921 921 ('c', 'closed', False, _('show normal and closed branches'))],
922 922 _('[-ac]'))
923 923 def branches(ui, repo, active=False, closed=False):
924 924 """list repository named branches
925 925
926 926 List the repository's named branches, indicating which ones are
927 927 inactive. If -c/--closed is specified, also list branches which have
928 928 been marked closed (see :hg:`commit --close-branch`).
929 929
930 930 If -a/--active is specified, only show active branches. A branch
931 931 is considered active if it contains repository heads.
932 932
933 933 Use the command :hg:`update` to switch to an existing branch.
934 934
935 935 Returns 0.
936 936 """
937 937
938 938 hexfunc = ui.debugflag and hex or short
939 939 activebranches = [repo[n].branch() for n in repo.heads()]
940 940 def testactive(tag, node):
941 941 realhead = tag in activebranches
942 942 open = node in repo.branchheads(tag, closed=False)
943 943 return realhead and open
944 944 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
945 945 for tag, node in repo.branchtags().items()],
946 946 reverse=True)
947 947
948 948 for isactive, node, tag in branches:
949 949 if (not active) or isactive:
950 950 hn = repo.lookup(node)
951 951 if isactive:
952 952 label = 'branches.active'
953 953 notice = ''
954 954 elif hn not in repo.branchheads(tag, closed=False):
955 955 if not closed:
956 956 continue
957 957 label = 'branches.closed'
958 958 notice = _(' (closed)')
959 959 else:
960 960 label = 'branches.inactive'
961 961 notice = _(' (inactive)')
962 962 if tag == repo.dirstate.branch():
963 963 label = 'branches.current'
964 964 rev = str(node).rjust(31 - encoding.colwidth(tag))
965 965 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
966 966 tag = ui.label(tag, label)
967 967 if ui.quiet:
968 968 ui.write("%s\n" % tag)
969 969 else:
970 970 ui.write("%s %s%s\n" % (tag, rev, notice))
971 971
972 972 @command('bundle',
973 973 [('f', 'force', None, _('run even when the destination is unrelated')),
974 974 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
975 975 _('REV')),
976 976 ('b', 'branch', [], _('a specific branch you would like to bundle'),
977 977 _('BRANCH')),
978 978 ('', 'base', [],
979 979 _('a base changeset assumed to be available at the destination'),
980 980 _('REV')),
981 981 ('a', 'all', None, _('bundle all changesets in the repository')),
982 982 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
983 983 ] + remoteopts,
984 984 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
985 985 def bundle(ui, repo, fname, dest=None, **opts):
986 986 """create a changegroup file
987 987
988 988 Generate a compressed changegroup file collecting changesets not
989 989 known to be in another repository.
990 990
991 991 If you omit the destination repository, then hg assumes the
992 992 destination will have all the nodes you specify with --base
993 993 parameters. To create a bundle containing all changesets, use
994 994 -a/--all (or --base null).
995 995
996 996 You can change compression method with the -t/--type option.
997 997 The available compression methods are: none, bzip2, and
998 998 gzip (by default, bundles are compressed using bzip2).
999 999
1000 1000 The bundle file can then be transferred using conventional means
1001 1001 and applied to another repository with the unbundle or pull
1002 1002 command. This is useful when direct push and pull are not
1003 1003 available or when exporting an entire repository is undesirable.
1004 1004
1005 1005 Applying bundles preserves all changeset contents including
1006 1006 permissions, copy/rename information, and revision history.
1007 1007
1008 1008 Returns 0 on success, 1 if no changes found.
1009 1009 """
1010 1010 revs = None
1011 1011 if 'rev' in opts:
1012 1012 revs = scmutil.revrange(repo, opts['rev'])
1013 1013
1014 1014 bundletype = opts.get('type', 'bzip2').lower()
1015 1015 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1016 1016 bundletype = btypes.get(bundletype)
1017 1017 if bundletype not in changegroup.bundletypes:
1018 1018 raise util.Abort(_('unknown bundle type specified with --type'))
1019 1019
1020 1020 if opts.get('all'):
1021 1021 base = ['null']
1022 1022 else:
1023 1023 base = scmutil.revrange(repo, opts.get('base'))
1024 1024 if base:
1025 1025 if dest:
1026 1026 raise util.Abort(_("--base is incompatible with specifying "
1027 1027 "a destination"))
1028 1028 common = [repo.lookup(rev) for rev in base]
1029 1029 heads = revs and map(repo.lookup, revs) or revs
1030 1030 cg = repo.getbundle('bundle', heads=heads, common=common)
1031 1031 outgoing = None
1032 1032 else:
1033 1033 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1034 1034 dest, branches = hg.parseurl(dest, opts.get('branch'))
1035 1035 other = hg.peer(repo, opts, dest)
1036 1036 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
1037 1037 heads = revs and map(repo.lookup, revs) or revs
1038 1038 outgoing = discovery.findcommonoutgoing(repo, other,
1039 1039 onlyheads=heads,
1040 1040 force=opts.get('force'))
1041 1041 cg = repo.getlocalbundle('bundle', outgoing)
1042 1042 if not cg:
1043 1043 scmutil.nochangesfound(ui, outgoing and outgoing.excluded)
1044 1044 return 1
1045 1045
1046 1046 changegroup.writebundle(cg, fname, bundletype)
1047 1047
1048 1048 @command('cat',
1049 1049 [('o', 'output', '',
1050 1050 _('print output to file with formatted name'), _('FORMAT')),
1051 1051 ('r', 'rev', '', _('print the given revision'), _('REV')),
1052 1052 ('', 'decode', None, _('apply any matching decode filter')),
1053 1053 ] + walkopts,
1054 1054 _('[OPTION]... FILE...'))
1055 1055 def cat(ui, repo, file1, *pats, **opts):
1056 1056 """output the current or given revision of files
1057 1057
1058 1058 Print the specified files as they were at the given revision. If
1059 1059 no revision is given, the parent of the working directory is used,
1060 1060 or tip if no revision is checked out.
1061 1061
1062 1062 Output may be to a file, in which case the name of the file is
1063 1063 given using a format string. The formatting rules are the same as
1064 1064 for the export command, with the following additions:
1065 1065
1066 1066 :``%s``: basename of file being printed
1067 1067 :``%d``: dirname of file being printed, or '.' if in repository root
1068 1068 :``%p``: root-relative path name of file being printed
1069 1069
1070 1070 Returns 0 on success.
1071 1071 """
1072 1072 ctx = scmutil.revsingle(repo, opts.get('rev'))
1073 1073 err = 1
1074 1074 m = scmutil.match(ctx, (file1,) + pats, opts)
1075 1075 for abs in ctx.walk(m):
1076 1076 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1077 1077 pathname=abs)
1078 1078 data = ctx[abs].data()
1079 1079 if opts.get('decode'):
1080 1080 data = repo.wwritedata(abs, data)
1081 1081 fp.write(data)
1082 1082 fp.close()
1083 1083 err = 0
1084 1084 return err
1085 1085
1086 1086 @command('^clone',
1087 1087 [('U', 'noupdate', None,
1088 1088 _('the clone will include an empty working copy (only a repository)')),
1089 1089 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1090 1090 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1091 1091 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1092 1092 ('', 'pull', None, _('use pull protocol to copy metadata')),
1093 1093 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1094 1094 ] + remoteopts,
1095 1095 _('[OPTION]... SOURCE [DEST]'))
1096 1096 def clone(ui, source, dest=None, **opts):
1097 1097 """make a copy of an existing repository
1098 1098
1099 1099 Create a copy of an existing repository in a new directory.
1100 1100
1101 1101 If no destination directory name is specified, it defaults to the
1102 1102 basename of the source.
1103 1103
1104 1104 The location of the source is added to the new repository's
1105 1105 ``.hg/hgrc`` file, as the default to be used for future pulls.
1106 1106
1107 1107 Only local paths and ``ssh://`` URLs are supported as
1108 1108 destinations. For ``ssh://`` destinations, no working directory or
1109 1109 ``.hg/hgrc`` will be created on the remote side.
1110 1110
1111 1111     To pull only a subset of changesets, specify one or more revision
1112 1112 identifiers with -r/--rev or branches with -b/--branch. The
1113 1113 resulting clone will contain only the specified changesets and
1114 1114 their ancestors. These options (or 'clone src#rev dest') imply
1115 1115 --pull, even for local source repositories. Note that specifying a
1116 1116 tag will include the tagged changeset but not the changeset
1117 1117 containing the tag.
1118 1118
1119 1119 To check out a particular version, use -u/--update, or
1120 1120 -U/--noupdate to create a clone with no working directory.
1121 1121
1122 1122 .. container:: verbose
1123 1123
1124 1124 For efficiency, hardlinks are used for cloning whenever the
1125 1125 source and destination are on the same filesystem (note this
1126 1126 applies only to the repository data, not to the working
1127 1127 directory). Some filesystems, such as AFS, implement hardlinking
1128 1128 incorrectly, but do not report errors. In these cases, use the
1129 1129 --pull option to avoid hardlinking.
1130 1130
1131 1131 In some cases, you can clone repositories and the working
1132 1132 directory using full hardlinks with ::
1133 1133
1134 1134 $ cp -al REPO REPOCLONE
1135 1135
1136 1136 This is the fastest way to clone, but it is not always safe. The
1137 1137 operation is not atomic (making sure REPO is not modified during
1138 1138 the operation is up to you) and you have to make sure your
1139 1139       editor breaks hardlinks (Emacs and most Linux kernel tools do
1140 1140 so). Also, this is not compatible with certain extensions that
1141 1141 place their metadata under the .hg directory, such as mq.
1142 1142
1143 1143 Mercurial will update the working directory to the first applicable
1144 1144 revision from this list:
1145 1145
1146 1146 a) null if -U or the source repository has no changesets
1147 1147 b) if -u . and the source repository is local, the first parent of
1148 1148 the source repository's working directory
1149 1149 c) the changeset specified with -u (if a branch name, this means the
1150 1150 latest head of that branch)
1151 1151 d) the changeset specified with -r
1152 1152 e) the tipmost head specified with -b
1153 1153 f) the tipmost head specified with the url#branch source syntax
1154 1154 g) the tipmost head of the default branch
1155 1155 h) tip
1156 1156
1157 1157 Examples:
1158 1158
1159 1159 - clone a remote repository to a new directory named hg/::
1160 1160
1161 1161 hg clone http://selenic.com/hg
1162 1162
1163 1163 - create a lightweight local clone::
1164 1164
1165 1165 hg clone project/ project-feature/
1166 1166
1167 1167 - clone from an absolute path on an ssh server (note double-slash)::
1168 1168
1169 1169 hg clone ssh://user@server//home/projects/alpha/
1170 1170
1171 1171 - do a high-speed clone over a LAN while checking out a
1172 1172 specified version::
1173 1173
1174 1174 hg clone --uncompressed http://server/repo -u 1.5
1175 1175
1176 1176 - create a repository without changesets after a particular revision::
1177 1177
1178 1178 hg clone -r 04e544 experimental/ good/
1179 1179
1180 1180 - clone (and track) a particular named branch::
1181 1181
1182 1182 hg clone http://selenic.com/hg#stable
1183 1183
1184 1184 See :hg:`help urls` for details on specifying URLs.
1185 1185
1186 1186 Returns 0 on success.
1187 1187 """
1188 1188 if opts.get('noupdate') and opts.get('updaterev'):
1189 1189 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1190 1190
1191 1191 r = hg.clone(ui, opts, source, dest,
1192 1192 pull=opts.get('pull'),
1193 1193 stream=opts.get('uncompressed'),
1194 1194 rev=opts.get('rev'),
1195 1195 update=opts.get('updaterev') or not opts.get('noupdate'),
1196 1196 branch=opts.get('branch'))
1197 1197
1198 1198 return r is None
1199 1199
1200 1200 @command('^commit|ci',
1201 1201 [('A', 'addremove', None,
1202 1202 _('mark new/missing files as added/removed before committing')),
1203 1203 ('', 'close-branch', None,
1204 1204 _('mark a branch as closed, hiding it from the branch list')),
1205 1205 ('', 'amend', None, _('amend the parent of the working dir')),
1206 1206 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1207 1207 _('[OPTION]... [FILE]...'))
1208 1208 def commit(ui, repo, *pats, **opts):
1209 1209 """commit the specified files or all outstanding changes
1210 1210
1211 1211 Commit changes to the given files into the repository. Unlike a
1212 1212 centralized SCM, this operation is a local operation. See
1213 1213 :hg:`push` for a way to actively distribute your changes.
1214 1214
1215 1215 If a list of files is omitted, all changes reported by :hg:`status`
1216 1216 will be committed.
1217 1217
1218 1218 If you are committing the result of a merge, do not provide any
1219 1219 filenames or -I/-X filters.
1220 1220
1221 1221 If no commit message is specified, Mercurial starts your
1222 1222 configured editor where you can enter a message. In case your
1223 1223 commit fails, you will find a backup of your message in
1224 1224 ``.hg/last-message.txt``.
1225 1225
1226 1226 The --amend flag can be used to amend the parent of the
1227 1227 working directory with a new commit that contains the changes
1228 1228 in the parent in addition to those currently reported by :hg:`status`,
1229 1229 if there are any. The old commit is stored in a backup bundle in
1230 1230 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1231 1231 on how to restore it).
1232 1232
1233 1233 Message, user and date are taken from the amended commit unless
1234 1234 specified. When a message isn't specified on the command line,
1235 1235 the editor will open with the message of the amended commit.
1236 1236
1237 1237 It is not possible to amend public changesets (see :hg:`help phases`)
1238 1238 or changesets that have children.
1239 1239
1240 1240 See :hg:`help dates` for a list of formats valid for -d/--date.
1241 1241
1242 1242 Returns 0 on success, 1 if nothing changed.
1243 1243 """
1244 1244 if opts.get('subrepos'):
1245 1245         # Let --subrepos on the command line override the config setting.
1246 1246 ui.setconfig('ui', 'commitsubrepos', True)
1247 1247
1248 1248 extra = {}
1249 1249 if opts.get('close_branch'):
1250 1250 if repo['.'].node() not in repo.branchheads():
1251 1251 # The topo heads set is included in the branch heads set of the
1252 1252 # current branch, so it's sufficient to test branchheads
1253 1253 raise util.Abort(_('can only close branch heads'))
1254 1254 extra['close'] = 1
1255 1255
1256 1256 branch = repo[None].branch()
1257 1257 bheads = repo.branchheads(branch)
1258 1258
1259 1259 if opts.get('amend'):
1260 1260 if ui.configbool('ui', 'commitsubrepos'):
1261 1261 raise util.Abort(_('cannot amend recursively'))
1262 1262
1263 1263 old = repo['.']
1264 1264 if old.phase() == phases.public:
1265 1265 raise util.Abort(_('cannot amend public changesets'))
1266 1266 if len(old.parents()) > 1:
1267 1267 raise util.Abort(_('cannot amend merge changesets'))
1268 1268 if len(repo[None].parents()) > 1:
1269 1269 raise util.Abort(_('cannot amend while merging'))
1270 1270 if old.children():
1271 1271 raise util.Abort(_('cannot amend changeset with children'))
1272 1272
1273 1273 e = cmdutil.commiteditor
1274 1274 if opts.get('force_editor'):
1275 1275 e = cmdutil.commitforceeditor
1276 1276
1277 1277 def commitfunc(ui, repo, message, match, opts):
1278 1278 editor = e
1279 1279             # message contains text from -m or -l; if it's empty,
1280 1280 # open the editor with the old message
1281 1281 if not message:
1282 1282 message = old.description()
1283 1283 editor = cmdutil.commitforceeditor
1284 1284 return repo.commit(message,
1285 1285 opts.get('user') or old.user(),
1286 1286 opts.get('date') or old.date(),
1287 1287 match,
1288 1288 editor=editor,
1289 1289 extra=extra)
1290 1290
1291 1291 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1292 1292 if node == old.node():
1293 1293 ui.status(_("nothing changed\n"))
1294 1294 return 1
1295 1295 else:
1296 1296 e = cmdutil.commiteditor
1297 1297 if opts.get('force_editor'):
1298 1298 e = cmdutil.commitforceeditor
1299 1299
1300 1300 def commitfunc(ui, repo, message, match, opts):
1301 1301 return repo.commit(message, opts.get('user'), opts.get('date'),
1302 1302 match, editor=e, extra=extra)
1303 1303
1304 1304 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1305 1305
1306 1306 if not node:
1307 1307 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1308 1308 if stat[3]:
1309 1309 ui.status(_("nothing changed (%d missing files, see "
1310 1310 "'hg status')\n") % len(stat[3]))
1311 1311 else:
1312 1312 ui.status(_("nothing changed\n"))
1313 1313 return 1
1314 1314
1315 1315 ctx = repo[node]
1316 1316 parents = ctx.parents()
1317 1317
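              # A "created new head" hint is printed only when the branch already
              # had heads and none of this commit's parents was one of them on the
              # same branch, i.e. the commit added a head rather than advancing an
              # existing one (the check is skipped entirely for amends).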
1318 1318 if (not opts.get('amend') and bheads and node not in bheads and not
1319 1319 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1320 1320 ui.status(_('created new head\n'))
1321 1321 # The message is not printed for initial roots. For the other
1322 1322 # changesets, it is printed in the following situations:
1323 1323 #
1324 1324 # Par column: for the 2 parents with ...
1325 1325 # N: null or no parent
1326 1326 # B: parent is on another named branch
1327 1327 # C: parent is a regular non head changeset
1328 1328 # H: parent was a branch head of the current branch
1329 1329 # Msg column: whether we print "created new head" message
1330 1330 # In the following, it is assumed that there already exists some
1331 1331 # initial branch heads of the current branch, otherwise nothing is
1332 1332 # printed anyway.
1333 1333 #
1334 1334 # Par Msg Comment
1335 1335 # NN y additional topo root
1336 1336 #
1337 1337 # BN y additional branch root
1338 1338 # CN y additional topo head
1339 1339 # HN n usual case
1340 1340 #
1341 1341 # BB y weird additional branch root
1342 1342 # CB y branch merge
1343 1343 # HB n merge with named branch
1344 1344 #
1345 1345 # CC y additional head from merge
1346 1346 # CH n merge with a head
1347 1347 #
1348 1348 # HH n head merge: head count decreases
1349 1349
1350 1350 if not opts.get('close_branch'):
1351 1351 for r in parents:
1352 1352 if r.extra().get('close') and r.branch() == branch:
1353 1353 ui.status(_('reopening closed branch head %d\n') % r)
1354 1354
1355 1355 if ui.debugflag:
1356 1356 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1357 1357 elif ui.verbose:
1358 1358 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1359 1359
1360 1360 @command('copy|cp',
1361 1361 [('A', 'after', None, _('record a copy that has already occurred')),
1362 1362 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1363 1363 ] + walkopts + dryrunopts,
1364 1364 _('[OPTION]... [SOURCE]... DEST'))
1365 1365 def copy(ui, repo, *pats, **opts):
1366 1366 """mark files as copied for the next commit
1367 1367
1368 1368 Mark dest as having copies of source files. If dest is a
1369 1369 directory, copies are put in that directory. If dest is a file,
1370 1370 the source must be a single file.
1371 1371
1372 1372 By default, this command copies the contents of files as they
1373 1373 exist in the working directory. If invoked with -A/--after, the
1374 1374 operation is recorded, but no copying is performed.
1375 1375
1376 1376 This command takes effect with the next commit. To undo a copy
1377 1377 before that, see :hg:`revert`.
1378 1378
1379 1379 Returns 0 on success, 1 if errors are encountered.
1380 1380 """
1381 1381 wlock = repo.wlock(False)
1382 1382 try:
1383 1383 return cmdutil.copy(ui, repo, pats, opts)
1384 1384 finally:
1385 1385 wlock.release()
1386 1386
1387 1387 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1388 1388 def debugancestor(ui, repo, *args):
1389 1389 """find the ancestor revision of two revisions in a given index"""
1390 1390 if len(args) == 3:
1391 1391 index, rev1, rev2 = args
1392 1392 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1393 1393 lookup = r.lookup
1394 1394 elif len(args) == 2:
1395 1395 if not repo:
1396 1396 raise util.Abort(_("there is no Mercurial repository here "
1397 1397 "(.hg not found)"))
1398 1398 rev1, rev2 = args
1399 1399 r = repo.changelog
1400 1400 lookup = repo.lookup
1401 1401 else:
1402 1402 raise util.Abort(_('either two or three arguments required'))
1403 1403 a = r.ancestor(lookup(rev1), lookup(rev2))
1404 1404 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1405 1405
1406 1406 @command('debugbuilddag',
1407 1407 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1408 1408 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1409 1409 ('n', 'new-file', None, _('add new file at each rev'))],
1410 1410 _('[OPTION]... [TEXT]'))
1411 1411 def debugbuilddag(ui, repo, text=None,
1412 1412 mergeable_file=False,
1413 1413 overwritten_file=False,
1414 1414 new_file=False):
1415 1415 """builds a repo with a given DAG from scratch in the current empty repo
1416 1416
1417 1417 The description of the DAG is read from stdin if not given on the
1418 1418 command line.
1419 1419
1420 1420 Elements:
1421 1421
1422 1422 - "+n" is a linear run of n nodes based on the current default parent
1423 1423 - "." is a single node based on the current default parent
1424 1424 - "$" resets the default parent to null (implied at the start);
1425 1425 otherwise the default parent is always the last node created
1426 1426 - "<p" sets the default parent to the backref p
1427 1427 - "*p" is a fork at parent p, which is a backref
1428 1428 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1429 1429 - "/p2" is a merge of the preceding node and p2
1430 1430 - ":tag" defines a local tag for the preceding node
1431 1431 - "@branch" sets the named branch for subsequent nodes
1432 1432 - "#...\\n" is a comment up to the end of the line
1433 1433
1434 1434 Whitespace between the above elements is ignored.
1435 1435
1436 1436 A backref is either
1437 1437
1438 1438 - a number n, which references the node curr-n, where curr is the current
1439 1439 node, or
1440 1440 - the name of a local tag you placed earlier using ":tag", or
1441 1441 - empty to denote the default parent.
1442 1442
1443 1443     All string-valued elements must be either strictly alphanumeric or
1444 1444     enclosed in double quotes ("..."), with "\\" as escape character.
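
              As an illustration (this exact text has not been verified against
              the parser), a small DAG with a fork and a merge could be
              written as::

                +3 :first      # three linear nodes, tag the last one "first"
                +2 :second     # two more on top, tag the last one "second"
                <first +2      # jump back to "first" and build a two-node fork
                /second        # merge the fork head with "second"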
1445 1445 """
1446 1446
1447 1447 if text is None:
1448 1448 ui.status(_("reading DAG from stdin\n"))
1449 1449 text = ui.fin.read()
1450 1450
1451 1451 cl = repo.changelog
1452 1452 if len(cl) > 0:
1453 1453 raise util.Abort(_('repository is not empty'))
1454 1454
1455 1455 # determine number of revs in DAG
1456 1456 total = 0
1457 1457 for type, data in dagparser.parsedag(text):
1458 1458 if type == 'n':
1459 1459 total += 1
1460 1460
1461 1461 if mergeable_file:
1462 1462 linesperrev = 2
1463 1463 # make a file with k lines per rev
1464 1464 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1465 1465 initialmergedlines.append("")
1466 1466
1467 1467 tags = []
1468 1468
1469 1469 lock = tr = None
1470 1470 try:
1471 1471 lock = repo.lock()
1472 1472 tr = repo.transaction("builddag")
1473 1473
1474 1474 at = -1
1475 1475 atbranch = 'default'
1476 1476 nodeids = []
1477 1477 id = 0
1478 1478 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1479 1479 for type, data in dagparser.parsedag(text):
1480 1480 if type == 'n':
1481 1481 ui.note('node %s\n' % str(data))
1482 1482 id, ps = data
1483 1483
1484 1484 files = []
1485 1485 fctxs = {}
1486 1486
1487 1487 p2 = None
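                          # For the shared "mf" file, synthesize mergeable
                          # content: three-way merge the parents' copies on a
                          # merge, otherwise extend the single parent's copy,
                          # then stamp this revision's own line into it.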
1488 1488 if mergeable_file:
1489 1489 fn = "mf"
1490 1490 p1 = repo[ps[0]]
1491 1491 if len(ps) > 1:
1492 1492 p2 = repo[ps[1]]
1493 1493 pa = p1.ancestor(p2)
1494 1494 base, local, other = [x[fn].data() for x in pa, p1, p2]
1495 1495 m3 = simplemerge.Merge3Text(base, local, other)
1496 1496 ml = [l.strip() for l in m3.merge_lines()]
1497 1497 ml.append("")
1498 1498 elif at > 0:
1499 1499 ml = p1[fn].data().split("\n")
1500 1500 else:
1501 1501 ml = initialmergedlines
1502 1502 ml[id * linesperrev] += " r%i" % id
1503 1503 mergedtext = "\n".join(ml)
1504 1504 files.append(fn)
1505 1505 fctxs[fn] = context.memfilectx(fn, mergedtext)
1506 1506
1507 1507 if overwritten_file:
1508 1508 fn = "of"
1509 1509 files.append(fn)
1510 1510 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1511 1511
1512 1512 if new_file:
1513 1513 fn = "nf%i" % id
1514 1514 files.append(fn)
1515 1515 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1516 1516 if len(ps) > 1:
1517 1517 if not p2:
1518 1518 p2 = repo[ps[1]]
1519 1519 for fn in p2:
1520 1520 if fn.startswith("nf"):
1521 1521 files.append(fn)
1522 1522 fctxs[fn] = p2[fn]
1523 1523
1524 1524 def fctxfn(repo, cx, path):
1525 1525 return fctxs.get(path)
1526 1526
1527 1527 if len(ps) == 0 or ps[0] < 0:
1528 1528 pars = [None, None]
1529 1529 elif len(ps) == 1:
1530 1530 pars = [nodeids[ps[0]], None]
1531 1531 else:
1532 1532 pars = [nodeids[p] for p in ps]
1533 1533 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1534 1534 date=(id, 0),
1535 1535 user="debugbuilddag",
1536 1536 extra={'branch': atbranch})
1537 1537 nodeid = repo.commitctx(cx)
1538 1538 nodeids.append(nodeid)
1539 1539 at = id
1540 1540 elif type == 'l':
1541 1541 id, name = data
1542 1542 ui.note('tag %s\n' % name)
1543 1543 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1544 1544 elif type == 'a':
1545 1545 ui.note('branch %s\n' % data)
1546 1546 atbranch = data
1547 1547 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1548 1548 tr.close()
1549 1549
1550 1550 if tags:
1551 1551 repo.opener.write("localtags", "".join(tags))
1552 1552 finally:
1553 1553 ui.progress(_('building'), None)
1554 1554 release(tr, lock)
1555 1555
1556 1556 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1557 1557 def debugbundle(ui, bundlepath, all=None, **opts):
1558 1558 """lists the contents of a bundle"""
1559 1559 f = url.open(ui, bundlepath)
1560 1560 try:
1561 1561 gen = changegroup.readbundle(f, bundlepath)
1562 1562 if all:
1563 1563 ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
1564 1564
1565 1565 def showchunks(named):
1566 1566 ui.write("\n%s\n" % named)
1567 1567 chain = None
1568 1568 while True:
1569 1569 chunkdata = gen.deltachunk(chain)
1570 1570 if not chunkdata:
1571 1571 break
1572 1572 node = chunkdata['node']
1573 1573 p1 = chunkdata['p1']
1574 1574 p2 = chunkdata['p2']
1575 1575 cs = chunkdata['cs']
1576 1576 deltabase = chunkdata['deltabase']
1577 1577 delta = chunkdata['delta']
1578 1578 ui.write("%s %s %s %s %s %s\n" %
1579 1579 (hex(node), hex(p1), hex(p2),
1580 1580 hex(cs), hex(deltabase), len(delta)))
1581 1581 chain = node
1582 1582
1583 1583 chunkdata = gen.changelogheader()
1584 1584 showchunks("changelog")
1585 1585 chunkdata = gen.manifestheader()
1586 1586 showchunks("manifest")
1587 1587 while True:
1588 1588 chunkdata = gen.filelogheader()
1589 1589 if not chunkdata:
1590 1590 break
1591 1591 fname = chunkdata['filename']
1592 1592 showchunks(fname)
1593 1593 else:
1594 1594 chunkdata = gen.changelogheader()
1595 1595 chain = None
1596 1596 while True:
1597 1597 chunkdata = gen.deltachunk(chain)
1598 1598 if not chunkdata:
1599 1599 break
1600 1600 node = chunkdata['node']
1601 1601 ui.write("%s\n" % hex(node))
1602 1602 chain = node
1603 1603 finally:
1604 1604 f.close()
1605 1605
1606 1606 @command('debugcheckstate', [], '')
1607 1607 def debugcheckstate(ui, repo):
1608 1608 """validate the correctness of the current dirstate"""
1609 1609 parent1, parent2 = repo.dirstate.parents()
1610 1610 m1 = repo[parent1].manifest()
1611 1611 m2 = repo[parent2].manifest()
1612 1612 errors = 0
1613 1613 for f in repo.dirstate:
1614 1614 state = repo.dirstate[f]
1615 1615 if state in "nr" and f not in m1:
1616 1616 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1617 1617 errors += 1
1618 1618 if state in "a" and f in m1:
1619 1619 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1620 1620 errors += 1
1621 1621 if state in "m" and f not in m1 and f not in m2:
1622 1622 ui.warn(_("%s in state %s, but not in either manifest\n") %
1623 1623 (f, state))
1624 1624 errors += 1
1625 1625 for f in m1:
1626 1626 state = repo.dirstate[f]
1627 1627 if state not in "nrm":
1628 1628             ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1629 1629 errors += 1
1630 1630 if errors:
1631 1631 error = _(".hg/dirstate inconsistent with current parent's manifest")
1632 1632 raise util.Abort(error)
1633 1633
1634 1634 @command('debugcommands', [], _('[COMMAND]'))
1635 1635 def debugcommands(ui, cmd='', *args):
1636 1636 """list all available commands and options"""
1637 1637 for cmd, vals in sorted(table.iteritems()):
1638 1638 cmd = cmd.split('|')[0].strip('^')
1639 1639 opts = ', '.join([i[1] for i in vals[1]])
1640 1640 ui.write('%s: %s\n' % (cmd, opts))
1641 1641
1642 1642 @command('debugcomplete',
1643 1643 [('o', 'options', None, _('show the command options'))],
1644 1644 _('[-o] CMD'))
1645 1645 def debugcomplete(ui, cmd='', **opts):
1646 1646 """returns the completion list associated with the given command"""
1647 1647
1648 1648 if opts.get('options'):
1649 1649 options = []
1650 1650 otables = [globalopts]
1651 1651 if cmd:
1652 1652 aliases, entry = cmdutil.findcmd(cmd, table, False)
1653 1653 otables.append(entry[1])
1654 1654 for t in otables:
1655 1655 for o in t:
1656 1656 if "(DEPRECATED)" in o[3]:
1657 1657 continue
1658 1658 if o[0]:
1659 1659 options.append('-%s' % o[0])
1660 1660 options.append('--%s' % o[1])
1661 1661 ui.write("%s\n" % "\n".join(options))
1662 1662 return
1663 1663
1664 1664 cmdlist = cmdutil.findpossible(cmd, table)
1665 1665 if ui.verbose:
1666 1666 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1667 1667 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1668 1668
1669 1669 @command('debugdag',
1670 1670 [('t', 'tags', None, _('use tags as labels')),
1671 1671 ('b', 'branches', None, _('annotate with branch names')),
1672 1672 ('', 'dots', None, _('use dots for runs')),
1673 1673 ('s', 'spaces', None, _('separate elements by spaces'))],
1674 1674 _('[OPTION]... [FILE [REV]...]'))
1675 1675 def debugdag(ui, repo, file_=None, *revs, **opts):
1676 1676 """format the changelog or an index DAG as a concise textual description
1677 1677
1678 1678 If you pass a revlog index, the revlog's DAG is emitted. If you list
1679 1679 revision numbers, they get labelled in the output as rN.
1680 1680
1681 1681 Otherwise, the changelog DAG of the current repo is emitted.
1682 1682 """
1683 1683 spaces = opts.get('spaces')
1684 1684 dots = opts.get('dots')
1685 1685 if file_:
1686 1686 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1687 1687 revs = set((int(r) for r in revs))
1688 1688 def events():
1689 1689 for r in rlog:
1690 1690 yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
1691 1691 if p != -1)))
1692 1692 if r in revs:
1693 1693 yield 'l', (r, "r%i" % r)
1694 1694 elif repo:
1695 1695 cl = repo.changelog
1696 1696 tags = opts.get('tags')
1697 1697 branches = opts.get('branches')
1698 1698 if tags:
1699 1699 labels = {}
1700 1700 for l, n in repo.tags().items():
1701 1701 labels.setdefault(cl.rev(n), []).append(l)
1702 1702 def events():
1703 1703 b = "default"
1704 1704 for r in cl:
1705 1705 if branches:
1706 1706 newb = cl.read(cl.node(r))[5]['branch']
1707 1707 if newb != b:
1708 1708 yield 'a', newb
1709 1709 b = newb
1710 1710 yield 'n', (r, list(set(p for p in cl.parentrevs(r)
1711 1711 if p != -1)))
1712 1712 if tags:
1713 1713 ls = labels.get(r)
1714 1714 if ls:
1715 1715 for l in ls:
1716 1716 yield 'l', (r, l)
1717 1717 else:
1718 1718 raise util.Abort(_('need repo for changelog dag'))
1719 1719
1720 1720 for line in dagparser.dagtextlines(events(),
1721 1721 addspaces=spaces,
1722 1722 wraplabels=True,
1723 1723 wrapannotations=True,
1724 1724 wrapnonlinear=dots,
1725 1725 usedots=dots,
1726 1726 maxlinewidth=70):
1727 1727 ui.write(line)
1728 1728 ui.write("\n")
1729 1729
1730 1730 @command('debugdata',
1731 1731 [('c', 'changelog', False, _('open changelog')),
1732 1732 ('m', 'manifest', False, _('open manifest'))],
1733 1733 _('-c|-m|FILE REV'))
1734 1734 def debugdata(ui, repo, file_, rev = None, **opts):
1735 1735 """dump the contents of a data file revision"""
1736 1736 if opts.get('changelog') or opts.get('manifest'):
1737 1737 file_, rev = None, file_
1738 1738 elif rev is None:
1739 1739 raise error.CommandError('debugdata', _('invalid arguments'))
1740 1740 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1741 1741 try:
1742 1742 ui.write(r.revision(r.lookup(rev)))
1743 1743 except KeyError:
1744 1744 raise util.Abort(_('invalid revision identifier %s') % rev)
1745 1745
1746 1746 @command('debugdate',
1747 1747 [('e', 'extended', None, _('try extended date formats'))],
1748 1748 _('[-e] DATE [RANGE]'))
1749 1749 def debugdate(ui, date, range=None, **opts):
1750 1750 """parse and display a date"""
1751 1751 if opts["extended"]:
1752 1752 d = util.parsedate(date, util.extendeddateformats)
1753 1753 else:
1754 1754 d = util.parsedate(date)
1755 1755 ui.write("internal: %s %s\n" % d)
1756 1756 ui.write("standard: %s\n" % util.datestr(d))
1757 1757 if range:
1758 1758 m = util.matchdate(range)
1759 1759 ui.write("match: %s\n" % m(d[0]))
1760 1760
1761 1761 @command('debugdiscovery',
1762 1762 [('', 'old', None, _('use old-style discovery')),
1763 1763 ('', 'nonheads', None,
1764 1764 _('use old-style discovery with non-heads included')),
1765 1765 ] + remoteopts,
1766 1766 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
1767 1767 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1768 1768 """runs the changeset discovery protocol in isolation"""
1769 1769 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
1770 1770 opts.get('branch'))
1771 1771 remote = hg.peer(repo, opts, remoteurl)
1772 1772 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1773 1773
1774 1774 # make sure tests are repeatable
1775 1775 random.seed(12323)
1776 1776
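              # Run one round of discovery and report the common heads found,
              # plus whether either side's heads all lie within the common set.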
1777 1777 def doit(localheads, remoteheads):
1778 1778 if opts.get('old'):
1779 1779 if localheads:
1780 1780 raise util.Abort('cannot use localheads with old style '
1781 1781 'discovery')
1782 1782 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1783 1783 force=True)
1784 1784 common = set(common)
1785 1785 if not opts.get('nonheads'):
1786 1786 ui.write("unpruned common: %s\n" % " ".join([short(n)
1787 1787 for n in common]))
1788 1788 dag = dagutil.revlogdag(repo.changelog)
1789 1789 all = dag.ancestorset(dag.internalizeall(common))
1790 1790 common = dag.externalizeall(dag.headsetofconnecteds(all))
1791 1791 else:
1792 1792 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1793 1793 common = set(common)
1794 1794 rheads = set(hds)
1795 1795 lheads = set(repo.heads())
1796 1796 ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
1797 1797 if lheads <= common:
1798 1798 ui.write("local is subset\n")
1799 1799 elif rheads <= common:
1800 1800 ui.write("remote is subset\n")
1801 1801
1802 1802 serverlogs = opts.get('serverlog')
1803 1803 if serverlogs:
1804 1804 for filename in serverlogs:
1805 1805 logfile = open(filename, 'r')
1806 1806 try:
1807 1807 line = logfile.readline()
1808 1808 while line:
1809 1809 parts = line.strip().split(';')
1810 1810 op = parts[1]
1811 1811 if op == 'cg':
1812 1812 pass
1813 1813 elif op == 'cgss':
1814 1814 doit(parts[2].split(' '), parts[3].split(' '))
1815 1815 elif op == 'unb':
1816 1816 doit(parts[3].split(' '), parts[2].split(' '))
1817 1817 line = logfile.readline()
1818 1818 finally:
1819 1819 logfile.close()
1820 1820
1821 1821 else:
1822 1822 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
1823 1823 opts.get('remote_head'))
1824 1824 localrevs = opts.get('local_head')
1825 1825 doit(localrevs, remoterevs)
1826 1826
1827 1827 @command('debugfileset', [], ('REVSPEC'))
1828 1828 def debugfileset(ui, repo, expr):
1829 1829 '''parse and apply a fileset specification'''
1830 1830 if ui.verbose:
1831 1831 tree = fileset.parse(expr)[0]
1832 1832 ui.note(tree, "\n")
1833 1833
1834 1834 for f in fileset.getfileset(repo[None], expr):
1835 1835 ui.write("%s\n" % f)
1836 1836
1837 1837 @command('debugfsinfo', [], _('[PATH]'))
1838 1838 def debugfsinfo(ui, path = "."):
1839 1839 """show information detected about current filesystem"""
1840 1840 util.writefile('.debugfsinfo', '')
1841 1841 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1842 1842 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1843 1843 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1844 1844 and 'yes' or 'no'))
1845 1845 os.unlink('.debugfsinfo')
1846 1846
1847 1847 @command('debuggetbundle',
1848 1848 [('H', 'head', [], _('id of head node'), _('ID')),
1849 1849 ('C', 'common', [], _('id of common node'), _('ID')),
1850 1850 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1851 1851 _('REPO FILE [-H|-C ID]...'))
1852 1852 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1853 1853 """retrieves a bundle from a repo
1854 1854
1855 1855 Every ID must be a full-length hex node id string. Saves the bundle to the
1856 1856 given file.
1857 1857 """
1858 1858 repo = hg.peer(ui, opts, repopath)
1859 1859 if not repo.capable('getbundle'):
1860 1860 raise util.Abort("getbundle() not supported by target repository")
1861 1861 args = {}
1862 1862 if common:
1863 1863 args['common'] = [bin(s) for s in common]
1864 1864 if head:
1865 1865 args['heads'] = [bin(s) for s in head]
1866 1866 bundle = repo.getbundle('debug', **args)
1867 1867
1868 1868 bundletype = opts.get('type', 'bzip2').lower()
1869 1869 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1870 1870 bundletype = btypes.get(bundletype)
1871 1871 if bundletype not in changegroup.bundletypes:
1872 1872 raise util.Abort(_('unknown bundle type specified with --type'))
1873 1873 changegroup.writebundle(bundle, bundlepath, bundletype)
1874 1874
1875 1875 @command('debugignore', [], '')
1876 1876 def debugignore(ui, repo, *values, **opts):
1877 1877 """display the combined ignore pattern"""
1878 1878 ignore = repo.dirstate._ignore
1879 1879 includepat = getattr(ignore, 'includepat', None)
1880 1880 if includepat is not None:
1881 1881 ui.write("%s\n" % includepat)
1882 1882 else:
1883 1883 raise util.Abort(_("no ignore patterns found"))
1884 1884
1885 1885 @command('debugindex',
1886 1886 [('c', 'changelog', False, _('open changelog')),
1887 1887 ('m', 'manifest', False, _('open manifest')),
1888 1888 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1889 1889 _('[-f FORMAT] -c|-m|FILE'))
1890 1890 def debugindex(ui, repo, file_ = None, **opts):
1891 1891 """dump the contents of an index file"""
1892 1892 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1893 1893 format = opts.get('format', 0)
1894 1894 if format not in (0, 1):
1895 1895 raise util.Abort(_("unknown format %d") % format)
1896 1896
1897 1897 generaldelta = r.version & revlog.REVLOGGENERALDELTA
1898 1898 if generaldelta:
1899 1899 basehdr = ' delta'
1900 1900 else:
1901 1901 basehdr = ' base'
1902 1902
1903 1903 if format == 0:
1904 1904 ui.write(" rev offset length " + basehdr + " linkrev"
1905 1905 " nodeid p1 p2\n")
1906 1906 elif format == 1:
1907 1907 ui.write(" rev flag offset length"
1908 1908 " size " + basehdr + " link p1 p2"
1909 1909 " nodeid\n")
1910 1910
1911 1911 for i in r:
1912 1912 node = r.node(i)
1913 1913 if generaldelta:
1914 1914 base = r.deltaparent(i)
1915 1915 else:
1916 1916 base = r.chainbase(i)
1917 1917 if format == 0:
1918 1918 try:
1919 1919 pp = r.parents(node)
1920 1920 except Exception:
1921 1921 pp = [nullid, nullid]
1922 1922 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1923 1923 i, r.start(i), r.length(i), base, r.linkrev(i),
1924 1924 short(node), short(pp[0]), short(pp[1])))
1925 1925 elif format == 1:
1926 1926 pr = r.parentrevs(i)
1927 1927 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1928 1928 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1929 1929 base, r.linkrev(i), pr[0], pr[1], short(node)))
1930 1930
1931 1931 @command('debugindexdot', [], _('FILE'))
1932 1932 def debugindexdot(ui, repo, file_):
1933 1933 """dump an index DAG as a graphviz dot file"""
1934 1934 r = None
1935 1935 if repo:
1936 1936 filelog = repo.file(file_)
1937 1937 if len(filelog):
1938 1938 r = filelog
1939 1939 if not r:
1940 1940 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1941 1941 ui.write("digraph G {\n")
1942 1942 for i in r:
1943 1943 node = r.node(i)
1944 1944 pp = r.parents(node)
1945 1945 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1946 1946 if pp[1] != nullid:
1947 1947 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1948 1948 ui.write("}\n")
1949 1949
1950 1950 @command('debuginstall', [], '')
1951 1951 def debuginstall(ui):
1952 1952 '''test Mercurial installation
1953 1953
1954 1954 Returns 0 on success.
1955 1955 '''
1956 1956
1957 1957 def writetemp(contents):
1958 1958 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1959 1959 f = os.fdopen(fd, "wb")
1960 1960 f.write(contents)
1961 1961 f.close()
1962 1962 return name
1963 1963
1964 1964 problems = 0
1965 1965
1966 1966 # encoding
1967 1967 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1968 1968 try:
1969 1969 encoding.fromlocal("test")
1970 1970 except util.Abort, inst:
1971 1971 ui.write(" %s\n" % inst)
1972 1972 ui.write(_(" (check that your locale is properly set)\n"))
1973 1973 problems += 1
1974 1974
1975 1975 # compiled modules
1976 1976 ui.status(_("Checking installed modules (%s)...\n")
1977 1977 % os.path.dirname(__file__))
1978 1978 try:
1979 1979 import bdiff, mpatch, base85, osutil
1980 1980 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1981 1981 except Exception, inst:
1982 1982 ui.write(" %s\n" % inst)
1983 1983 ui.write(_(" One or more extensions could not be found"))
1984 1984 ui.write(_(" (check that you compiled the extensions)\n"))
1985 1985 problems += 1
1986 1986
1987 1987 # templates
1988 1988 import templater
1989 1989 p = templater.templatepath()
1990 1990 ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
1991 1991 try:
1992 1992 templater.templater(templater.templatepath("map-cmdline.default"))
1993 1993 except Exception, inst:
1994 1994 ui.write(" %s\n" % inst)
1995 1995 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1996 1996 problems += 1
1997 1997
1998 1998 # editor
1999 1999 ui.status(_("Checking commit editor...\n"))
2000 2000 editor = ui.geteditor()
2001 2001 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2002 2002 if not cmdpath:
2003 2003 if editor == 'vi':
2004 2004 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2005 2005 ui.write(_(" (specify a commit editor in your configuration"
2006 2006 " file)\n"))
2007 2007 else:
2008 2008 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2009 2009 ui.write(_(" (specify a commit editor in your configuration"
2010 2010 " file)\n"))
2011 2011 problems += 1
2012 2012
2013 2013 # check username
2014 2014 ui.status(_("Checking username...\n"))
2015 2015 try:
2016 2016 ui.username()
2017 2017 except util.Abort, e:
2018 2018 ui.write(" %s\n" % e)
2019 2019 ui.write(_(" (specify a username in your configuration file)\n"))
2020 2020 problems += 1
2021 2021
2022 2022 if not problems:
2023 2023 ui.status(_("No problems detected\n"))
2024 2024 else:
2025 2025 ui.write(_("%s problems detected,"
2026 2026 " please check your install!\n") % problems)
2027 2027
2028 2028 return problems
2029 2029
2030 2030 @command('debugknown', [], _('REPO ID...'))
2031 2031 def debugknown(ui, repopath, *ids, **opts):
2032 2032 """test whether node ids are known to a repo
2033 2033
2034 2034 Every ID must be a full-length hex node id string. Returns a list of 0s
2035 2035 and 1s indicating unknown/known.
2036 2036 """
2037 2037 repo = hg.peer(ui, opts, repopath)
2038 2038 if not repo.capable('known'):
2039 2039 raise util.Abort("known() not supported by target repository")
2040 2040 flags = repo.known([bin(s) for s in ids])
2041 2041 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2042 2042
2043 2043 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
2044 2044 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2045 2045 '''access the pushkey key/value protocol
2046 2046
2047 2047 With two args, list the keys in the given namespace.
2048 2048
2049 2049 With five args, set a key to new if it currently is set to old.
2050 2050 Reports success or failure.
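
              For instance (the repository paths and key values here are purely
              illustrative), listing a namespace and then updating a bookmark
              key might look like::

                hg debugpushkey ../other namespaces
                hg debugpushkey ../other bookmarks mybook OLDNODE NEWNODE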
2051 2051 '''
2052 2052
2053 2053 target = hg.peer(ui, {}, repopath)
2054 2054 if keyinfo:
2055 2055 key, old, new = keyinfo
2056 2056 r = target.pushkey(namespace, key, old, new)
2057 2057 ui.status(str(r) + '\n')
2058 2058 return not r
2059 2059 else:
2060 2060 for k, v in target.listkeys(namespace).iteritems():
2061 2061 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2062 2062 v.encode('string-escape')))
2063 2063
2064 2064 @command('debugpvec', [], _('A B'))
2065 2065 def debugpvec(ui, repo, a, b=None):
2066 2066 ca = scmutil.revsingle(repo, a)
2067 2067 cb = scmutil.revsingle(repo, b)
2068 2068 pa = pvec.ctxpvec(ca)
2069 2069 pb = pvec.ctxpvec(cb)
2070 2070 if pa == pb:
2071 2071 rel = "="
2072 2072 elif pa > pb:
2073 2073 rel = ">"
2074 2074 elif pa < pb:
2075 2075 rel = "<"
2076 2076 elif pa | pb:
2077 2077 rel = "|"
2078 2078 ui.write(_("a: %s\n") % pa)
2079 2079 ui.write(_("b: %s\n") % pb)
2080 2080 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2081 2081 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2082 2082 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2083 2083 pa.distance(pb), rel))
2084 2084
2085 2085 @command('debugrebuildstate',
2086 2086 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2087 2087 _('[-r REV] [REV]'))
2088 2088 def debugrebuildstate(ui, repo, rev="tip"):
2089 2089 """rebuild the dirstate as it would look like for the given revision"""
2090 2090 ctx = scmutil.revsingle(repo, rev)
2091 2091 wlock = repo.wlock()
2092 2092 try:
2093 2093 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2094 2094 finally:
2095 2095 wlock.release()
2096 2096
2097 2097 @command('debugrename',
2098 2098 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2099 2099 _('[-r REV] FILE'))
2100 2100 def debugrename(ui, repo, file1, *pats, **opts):
2101 2101 """dump rename information"""
2102 2102
2103 2103 ctx = scmutil.revsingle(repo, opts.get('rev'))
2104 2104 m = scmutil.match(ctx, (file1,) + pats, opts)
2105 2105 for abs in ctx.walk(m):
2106 2106 fctx = ctx[abs]
2107 2107 o = fctx.filelog().renamed(fctx.filenode())
2108 2108 rel = m.rel(abs)
2109 2109 if o:
2110 2110 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2111 2111 else:
2112 2112 ui.write(_("%s not renamed\n") % rel)
2113 2113
2114 2114 @command('debugrevlog',
2115 2115 [('c', 'changelog', False, _('open changelog')),
2116 2116 ('m', 'manifest', False, _('open manifest')),
2117 2117 ('d', 'dump', False, _('dump index data'))],
2118 2118 _('-c|-m|FILE'))
2119 2119 def debugrevlog(ui, repo, file_ = None, **opts):
2120 2120 """show data and statistics about a revlog"""
2121 2121 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2122 2122
2123 2123 if opts.get("dump"):
2124 2124 numrevs = len(r)
2125 2125 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2126 2126 " rawsize totalsize compression heads\n")
2127 2127 ts = 0
2128 2128 heads = set()
2129 2129 for rev in xrange(numrevs):
2130 2130 dbase = r.deltaparent(rev)
2131 2131 if dbase == -1:
2132 2132 dbase = rev
2133 2133 cbase = r.chainbase(rev)
2134 2134 p1, p2 = r.parentrevs(rev)
2135 2135 rs = r.rawsize(rev)
2136 2136 ts = ts + rs
2137 2137 heads -= set(r.parentrevs(rev))
2138 2138 heads.add(rev)
2139 2139 ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
2140 2140 (rev, p1, p2, r.start(rev), r.end(rev),
2141 2141 r.start(dbase), r.start(cbase),
2142 2142 r.start(p1), r.start(p2),
2143 2143 rs, ts, ts / r.end(rev), len(heads)))
2144 2144 return 0
2145 2145
2146 2146 v = r.version
2147 2147 format = v & 0xFFFF
2148 2148 flags = []
2149 2149 gdelta = False
2150 2150 if v & revlog.REVLOGNGINLINEDATA:
2151 2151 flags.append('inline')
2152 2152 if v & revlog.REVLOGGENERALDELTA:
2153 2153 gdelta = True
2154 2154 flags.append('generaldelta')
2155 2155 if not flags:
2156 2156 flags = ['(none)']
2157 2157
2158 2158 nummerges = 0
2159 2159 numfull = 0
2160 2160 numprev = 0
2161 2161 nump1 = 0
2162 2162 nump2 = 0
2163 2163 numother = 0
2164 2164 nump1prev = 0
2165 2165 nump2prev = 0
2166 2166 chainlengths = []
2167 2167
2168 2168 datasize = [None, 0, 0L]
2169 2169 fullsize = [None, 0, 0L]
2170 2170 deltasize = [None, 0, 0L]
2171 2171
2172 2172 def addsize(size, l):
2173 2173 if l[0] is None or size < l[0]:
2174 2174 l[0] = size
2175 2175 if size > l[1]:
2176 2176 l[1] = size
2177 2177 l[2] += size
2178 2178
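              # Walk every revision, classifying its delta base (previous
              # revision, p1, p2, or another revision) and accumulating size
              # and delta-chain statistics for the report below.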
2179 2179 numrevs = len(r)
2180 2180 for rev in xrange(numrevs):
2181 2181 p1, p2 = r.parentrevs(rev)
2182 2182 delta = r.deltaparent(rev)
2183 2183 if format > 0:
2184 2184 addsize(r.rawsize(rev), datasize)
2185 2185 if p2 != nullrev:
2186 2186 nummerges += 1
2187 2187 size = r.length(rev)
2188 2188 if delta == nullrev:
2189 2189 chainlengths.append(0)
2190 2190 numfull += 1
2191 2191 addsize(size, fullsize)
2192 2192 else:
2193 2193 chainlengths.append(chainlengths[delta] + 1)
2194 2194 addsize(size, deltasize)
2195 2195 if delta == rev - 1:
2196 2196 numprev += 1
2197 2197 if delta == p1:
2198 2198 nump1prev += 1
2199 2199 elif delta == p2:
2200 2200 nump2prev += 1
2201 2201 elif delta == p1:
2202 2202 nump1 += 1
2203 2203 elif delta == p2:
2204 2204 nump2 += 1
2205 2205 elif delta != nullrev:
2206 2206 numother += 1
2207 2207
2208 2208 numdeltas = numrevs - numfull
2209 2209 numoprev = numprev - nump1prev - nump2prev
2210 2210 totalrawsize = datasize[2]
2211 2211 datasize[2] /= numrevs
2212 2212 fulltotal = fullsize[2]
2213 2213 fullsize[2] /= numfull
2214 2214 deltatotal = deltasize[2]
2215 2215 deltasize[2] /= numrevs - numfull
2216 2216 totalsize = fulltotal + deltatotal
2217 2217 avgchainlen = sum(chainlengths) / numrevs
2218 2218 compratio = totalrawsize / totalsize
2219 2219
2220 2220 basedfmtstr = '%%%dd\n'
2221 2221 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2222 2222
2223 2223 def dfmtstr(max):
2224 2224 return basedfmtstr % len(str(max))
2225 2225 def pcfmtstr(max, padding=0):
2226 2226 return basepcfmtstr % (len(str(max)), ' ' * padding)
2227 2227
2228 2228 def pcfmt(value, total):
2229 2229 return (value, 100 * float(value) / total)
2230 2230
2231 2231 ui.write('format : %d\n' % format)
2232 2232 ui.write('flags : %s\n' % ', '.join(flags))
2233 2233
2234 2234 ui.write('\n')
2235 2235 fmt = pcfmtstr(totalsize)
2236 2236 fmt2 = dfmtstr(totalsize)
2237 2237 ui.write('revisions : ' + fmt2 % numrevs)
2238 2238 ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs))
2239 2239 ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
2240 2240 ui.write('revisions : ' + fmt2 % numrevs)
2241 2241 ui.write(' full : ' + fmt % pcfmt(numfull, numrevs))
2242 2242 ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2243 2243 ui.write('revision size : ' + fmt2 % totalsize)
2244 2244 ui.write(' full : ' + fmt % pcfmt(fulltotal, totalsize))
2245 2245 ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2246 2246
2247 2247 ui.write('\n')
2248 2248 fmt = dfmtstr(max(avgchainlen, compratio))
2249 2249 ui.write('avg chain length : ' + fmt % avgchainlen)
2250 2250 ui.write('compression ratio : ' + fmt % compratio)
2251 2251
2252 2252 if format > 0:
2253 2253 ui.write('\n')
2254 2254 ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
2255 2255 % tuple(datasize))
2256 2256 ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
2257 2257 % tuple(fullsize))
2258 2258 ui.write('delta size (min/max/avg) : %d / %d / %d\n'
2259 2259 % tuple(deltasize))
2260 2260
2261 2261 if numdeltas > 0:
2262 2262 ui.write('\n')
2263 2263 fmt = pcfmtstr(numdeltas)
2264 2264 fmt2 = pcfmtstr(numdeltas, 4)
2265 2265 ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
2266 2266 if numprev > 0:
2267 2267 ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev,
2268 2268 numprev))
2269 2269 ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev,
2270 2270 numprev))
2271 2271 ui.write(' other : ' + fmt2 % pcfmt(numoprev,
2272 2272 numprev))
2273 2273 if gdelta:
2274 2274 ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
2275 2275 ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
2276 2276 ui.write('deltas against other : ' + fmt % pcfmt(numother,
2277 2277 numdeltas))
2278 2278
2279 2279 @command('debugrevspec', [], ('REVSPEC'))
2280 2280 def debugrevspec(ui, repo, expr):
2281 2281 """parse and apply a revision specification
2282 2282
2283 2283     Use --verbose to print the parsed tree before and after alias
2284 2284 expansion.
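
              A hypothetical invocation (the revset itself is only an example)
              might be::

                hg debugrevspec "branch(default) and not merge()"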
2285 2285 """
2286 2286 if ui.verbose:
2287 2287 tree = revset.parse(expr)[0]
2288 2288 ui.note(revset.prettyformat(tree), "\n")
2289 2289 newtree = revset.findaliases(ui, tree)
2290 2290 if newtree != tree:
2291 2291 ui.note(revset.prettyformat(newtree), "\n")
2292 2292 func = revset.match(ui, expr)
2293 2293 for c in func(repo, range(len(repo))):
2294 2294 ui.write("%s\n" % c)
2295 2295
2296 2296 @command('debugsetparents', [], _('REV1 [REV2]'))
2297 2297 def debugsetparents(ui, repo, rev1, rev2=None):
2298 2298 """manually set the parents of the current working directory
2299 2299
2300 2300 This is useful for writing repository conversion tools, but should
2301 2301 be used with care.
2302 2302
2303 2303 Returns 0 on success.
2304 2304 """
2305 2305
2306 2306 r1 = scmutil.revsingle(repo, rev1).node()
2307 2307 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2308 2308
2309 2309 wlock = repo.wlock()
2310 2310 try:
2311 2311 repo.setparents(r1, r2)
2312 2312 finally:
2313 2313 wlock.release()
2314 2314
2315 2315 @command('debugstate',
2316 2316 [('', 'nodates', None, _('do not display the saved mtime')),
2317 2317 ('', 'datesort', None, _('sort by saved mtime'))],
2318 2318 _('[OPTION]...'))
2319 2319 def debugstate(ui, repo, nodates=None, datesort=None):
2320 2320 """show the contents of the current dirstate"""
2321 2321 timestr = ""
2322 2322 showdate = not nodates
2323 2323 if datesort:
2324 2324 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2325 2325 else:
2326 2326 keyfunc = None # sort by filename
2327 2327 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2328 2328 if showdate:
2329 2329 if ent[3] == -1:
2330 2330 # Pad or slice to locale representation
2331 2331 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2332 2332 time.localtime(0)))
2333 2333 timestr = 'unset'
2334 2334 timestr = (timestr[:locale_len] +
2335 2335 ' ' * (locale_len - len(timestr)))
2336 2336 else:
2337 2337 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2338 2338 time.localtime(ent[3]))
2339 2339 if ent[1] & 020000:
2340 2340 mode = 'lnk'
2341 2341 else:
2342 2342 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2343 2343 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2344 2344 for f in repo.dirstate.copies():
2345 2345 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2346 2346
2347 2347 @command('debugsub',
2348 2348 [('r', 'rev', '',
2349 2349 _('revision to check'), _('REV'))],
2350 2350 _('[-r REV] [REV]'))
2351 2351 def debugsub(ui, repo, rev=None):
2352 2352 ctx = scmutil.revsingle(repo, rev, None)
2353 2353 for k, v in sorted(ctx.substate.items()):
2354 2354 ui.write('path %s\n' % k)
2355 2355 ui.write(' source %s\n' % v[0])
2356 2356 ui.write(' revision %s\n' % v[1])
2357 2357
2358 2358 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2359 2359 def debugwalk(ui, repo, *pats, **opts):
2360 2360 """show how files match on given patterns"""
2361 2361 m = scmutil.match(repo[None], pats, opts)
2362 2362 items = list(repo.walk(m))
2363 2363 if not items:
2364 2364 return
2365 2365 fmt = 'f %%-%ds %%-%ds %%s' % (
2366 2366 max([len(abs) for abs in items]),
2367 2367 max([len(m.rel(abs)) for abs in items]))
2368 2368 for abs in items:
2369 2369 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
2370 2370 ui.write("%s\n" % line.rstrip())
2371 2371
2372 2372 @command('debugwireargs',
2373 2373 [('', 'three', '', 'three'),
2374 2374 ('', 'four', '', 'four'),
2375 2375 ('', 'five', '', 'five'),
2376 2376 ] + remoteopts,
2377 2377 _('REPO [OPTIONS]... [ONE [TWO]]'))
2378 2378 def debugwireargs(ui, repopath, *vals, **opts):
2379 2379 repo = hg.peer(ui, opts, repopath)
2380 2380 for opt in remoteopts:
2381 2381 del opts[opt[1]]
2382 2382 args = {}
2383 2383 for k, v in opts.iteritems():
2384 2384 if v:
2385 2385 args[k] = v
2386 2386 # run twice to check that we don't mess up the stream for the next command
2387 2387 res1 = repo.debugwireargs(*vals, **args)
2388 2388 res2 = repo.debugwireargs(*vals, **args)
2389 2389 ui.write("%s\n" % res1)
2390 2390 if res1 != res2:
2391 2391 ui.warn("%s\n" % res2)
2392 2392
2393 2393 @command('^diff',
2394 2394 [('r', 'rev', [], _('revision'), _('REV')),
2395 2395 ('c', 'change', '', _('change made by revision'), _('REV'))
2396 2396 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2397 2397 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2398 2398 def diff(ui, repo, *pats, **opts):
2399 2399 """diff repository (or selected files)
2400 2400
2401 2401 Show differences between revisions for the specified files.
2402 2402
2403 2403 Differences between files are shown using the unified diff format.
2404 2404
2405 2405 .. note::
2406 2406 diff may generate unexpected results for merges, as it will
2407 2407 default to comparing against the working directory's first
2408 2408 parent changeset if no revisions are specified.
2409 2409
2410 2410 When two revision arguments are given, then changes are shown
2411 2411 between those revisions. If only one revision is specified then
2412 2412 that revision is compared to the working directory, and, when no
2413 2413 revisions are specified, the working directory files are compared
2414 2414 to its parent.
2415 2415
2416 2416 Alternatively you can specify -c/--change with a revision to see
2417 2417 the changes in that changeset relative to its first parent.
2418 2418
2419 2419 Without the -a/--text option, diff will avoid generating diffs of
2420 2420 files it detects as binary. With -a, diff will generate a diff
2421 2421 anyway, probably with undesirable results.
2422 2422
2423 2423 Use the -g/--git option to generate diffs in the git extended diff
2424 2424 format. For more information, read :hg:`help diffs`.
2425 2425
2426 2426 .. container:: verbose
2427 2427
2428 2428 Examples:
2429 2429
2430 2430 - compare a file in the current working directory to its parent::
2431 2431
2432 2432 hg diff foo.c
2433 2433
2434 2434 - compare two historical versions of a directory, with rename info::
2435 2435
2436 2436 hg diff --git -r 1.0:1.2 lib/
2437 2437
2438 2438 - get change stats relative to the last change on some date::
2439 2439
2440 2440 hg diff --stat -r "date('may 2')"
2441 2441
2442 2442 - diff all newly-added files that contain a keyword::
2443 2443
2444 2444 hg diff "set:added() and grep(GNU)"
2445 2445
2446 2446 - compare a revision and its parents::
2447 2447
2448 2448 hg diff -c 9353 # compare against first parent
2449 2449 hg diff -r 9353^:9353 # same using revset syntax
2450 2450 hg diff -r 9353^2:9353 # compare against the second parent
2451 2451
2452 2452 Returns 0 on success.
2453 2453 """
2454 2454
2455 2455 revs = opts.get('rev')
2456 2456 change = opts.get('change')
2457 2457 stat = opts.get('stat')
2458 2458 reverse = opts.get('reverse')
2459 2459
2460 2460 if revs and change:
2461 2461 msg = _('cannot specify --rev and --change at the same time')
2462 2462 raise util.Abort(msg)
2463 2463 elif change:
2464 2464 node2 = scmutil.revsingle(repo, change, None).node()
2465 2465 node1 = repo[node2].p1().node()
2466 2466 else:
2467 2467 node1, node2 = scmutil.revpair(repo, revs)
2468 2468
2469 2469 if reverse:
2470 2470 node1, node2 = node2, node1
2471 2471
2472 2472 diffopts = patch.diffopts(ui, opts)
2473 2473 m = scmutil.match(repo[node2], pats, opts)
2474 2474 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2475 2475 listsubrepos=opts.get('subrepos'))
2476 2476
2477 2477 @command('^export',
2478 2478 [('o', 'output', '',
2479 2479 _('print output to file with formatted name'), _('FORMAT')),
2480 2480 ('', 'switch-parent', None, _('diff against the second parent')),
2481 2481 ('r', 'rev', [], _('revisions to export'), _('REV')),
2482 2482 ] + diffopts,
2483 2483 _('[OPTION]... [-o OUTFILESPEC] REV...'))
2484 2484 def export(ui, repo, *changesets, **opts):
2485 2485 """dump the header and diffs for one or more changesets
2486 2486
2487 2487 Print the changeset header and diffs for one or more revisions.
2488 2488
2489 2489 The information shown in the changeset header is: author, date,
2490 2490 branch name (if non-default), changeset hash, parent(s) and commit
2491 2491 comment.
2492 2492
2493 2493 .. note::
2494 2494 export may generate unexpected diff output for merge
2495 2495 changesets, as it will compare the merge changeset against its
2496 2496 first parent only.
2497 2497
2498 2498 Output may be to a file, in which case the name of the file is
2499 2499 given using a format string. The formatting rules are as follows:
2500 2500
2501 2501 :``%%``: literal "%" character
2502 2502 :``%H``: changeset hash (40 hexadecimal digits)
2503 2503 :``%N``: number of patches being generated
2504 2504 :``%R``: changeset revision number
2505 2505 :``%b``: basename of the exporting repository
2506 2506 :``%h``: short-form changeset hash (12 hexadecimal digits)
2507 2507 :``%m``: first line of the commit message (only alphanumeric characters)
2508 2508 :``%n``: zero-padded sequence number, starting at 1
2509 2509 :``%r``: zero-padded changeset revision number
2510 2510
2511 2511 Without the -a/--text option, export will avoid generating diffs
2512 2512 of files it detects as binary. With -a, export will generate a
2513 2513 diff anyway, probably with undesirable results.
2514 2514
2515 2515 Use the -g/--git option to generate diffs in the git extended diff
2516 2516 format. See :hg:`help diffs` for more information.
2517 2517
2518 2518 With the --switch-parent option, the diff will be against the
2519 2519 second parent. It can be useful to review a merge.
2520 2520
2521 2521 .. container:: verbose
2522 2522
2523 2523 Examples:
2524 2524
2525 2525 - use export and import to transplant a bugfix to the current
2526 2526 branch::
2527 2527
2528 2528 hg export -r 9353 | hg import -
2529 2529
2530 2530 - export all the changesets between two revisions to a file with
2531 2531 rename information::
2532 2532
2533 2533 hg export --git -r 123:150 > changes.txt
2534 2534
2535 2535 - split outgoing changes into a series of patches with
2536 2536 descriptive names::
2537 2537
2538 2538 hg export -r "outgoing()" -o "%n-%m.patch"
2539 2539
2540 2540 Returns 0 on success.
2541 2541 """
2542 2542 changesets += tuple(opts.get('rev', []))
2543 2543 revs = scmutil.revrange(repo, changesets)
2544 2544 if not revs:
2545 2545 raise util.Abort(_("export requires at least one changeset"))
2546 2546 if len(revs) > 1:
2547 2547 ui.note(_('exporting patches:\n'))
2548 2548 else:
2549 2549 ui.note(_('exporting patch:\n'))
2550 2550 cmdutil.export(repo, revs, template=opts.get('output'),
2551 2551 switch_parent=opts.get('switch_parent'),
2552 2552 opts=patch.diffopts(ui, opts))
2553 2553
2554 2554 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2555 2555 def forget(ui, repo, *pats, **opts):
2556 2556 """forget the specified files on the next commit
2557 2557
2558 2558 Mark the specified files so they will no longer be tracked
2559 2559 after the next commit.
2560 2560
2561 2561 This only removes files from the current branch, not from the
2562 2562 entire project history, and it does not delete them from the
2563 2563 working directory.
2564 2564
2565 2565 To undo a forget before the next commit, see :hg:`add`.
2566 2566
2567 2567 .. container:: verbose
2568 2568
2569 2569 Examples:
2570 2570
2571 2571 - forget newly-added binary files::
2572 2572
2573 2573 hg forget "set:added() and binary()"
2574 2574
2575 2575 - forget files that would be excluded by .hgignore::
2576 2576
2577 2577 hg forget "set:hgignore()"
2578 2578
2579 2579 Returns 0 on success.
2580 2580 """
2581 2581
2582 2582 if not pats:
2583 2583 raise util.Abort(_('no files specified'))
2584 2584
2585 2585 m = scmutil.match(repo[None], pats, opts)
2586 2586 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2587 2587 return rejected and 1 or 0
2588 2588
2589 2589 @command(
2590 2590 'graft',
2591 2591 [('c', 'continue', False, _('resume interrupted graft')),
2592 2592 ('e', 'edit', False, _('invoke editor on commit messages')),
2593 2593 ('', 'log', None, _('append graft info to log message')),
2594 2594 ('D', 'currentdate', False,
2595 2595 _('record the current date as commit date')),
2596 2596 ('U', 'currentuser', False,
2597 2597 _('record the current user as committer'))]
2598 2598 + commitopts2 + mergetoolopts + dryrunopts,
2599 2599 _('[OPTION]... REVISION...'))
2600 2600 def graft(ui, repo, *revs, **opts):
2601 2601 '''copy changes from other branches onto the current branch
2602 2602
2603 2603 This command uses Mercurial's merge logic to copy individual
2604 2604 changes from other branches without merging branches in the
2605 2605 history graph. This is sometimes known as 'backporting' or
2606 2606 'cherry-picking'. By default, graft will copy user, date, and
2607 2607 description from the source changesets.
2608 2608
2609 2609 Changesets that are ancestors of the current revision, that have
2610 2610 already been grafted, or that are merges will be skipped.
2611 2611
2612 2612 If --log is specified, log messages will have a comment appended
2613 2613 of the form::
2614 2614
2615 2615 (grafted from CHANGESETHASH)
2616 2616
2617 2617 If a graft merge results in conflicts, the graft process is
2618 2618 interrupted so that the current merge can be manually resolved.
2619 2619 Once all conflicts are addressed, the graft process can be
2620 2620 continued with the -c/--continue option.
2621 2621
2622 2622 .. note::
2623 2623 The -c/--continue option does not reapply earlier options.
2624 2624
2625 2625 .. container:: verbose
2626 2626
2627 2627 Examples:
2628 2628
2629 2629 - copy a single change to the stable branch and edit its description::
2630 2630
2631 2631 hg update stable
2632 2632 hg graft --edit 9393
2633 2633
2634 2634 - graft a range of changesets with one exception, updating dates::
2635 2635
2636 2636 hg graft -D "2085::2093 and not 2091"
2637 2637
2638 2638 - continue a graft after resolving conflicts::
2639 2639
2640 2640 hg graft -c
2641 2641
2642 2642 - show the source of a grafted changeset::
2643 2643
2644 2644 hg log --debug -r tip
2645 2645
2646 2646 Returns 0 on successful completion.
2647 2647 '''
2648 2648
2649 2649 if not opts.get('user') and opts.get('currentuser'):
2650 2650 opts['user'] = ui.username()
2651 2651 if not opts.get('date') and opts.get('currentdate'):
2652 2652 opts['date'] = "%d %d" % util.makedate()
2653 2653
2654 2654 editor = None
2655 2655 if opts.get('edit'):
2656 2656 editor = cmdutil.commitforceeditor
2657 2657
2658 2658 cont = False
2659 2659 if opts['continue']:
2660 2660 cont = True
2661 2661 if revs:
2662 2662 raise util.Abort(_("can't specify --continue and revisions"))
2663 2663 # read in unfinished revisions
2664 2664 try:
2665 2665 nodes = repo.opener.read('graftstate').splitlines()
2666 2666 revs = [repo[node].rev() for node in nodes]
2667 2667 except IOError, inst:
2668 2668 if inst.errno != errno.ENOENT:
2669 2669 raise
2670 2670 raise util.Abort(_("no graft state found, can't continue"))
2671 2671 else:
2672 2672 cmdutil.bailifchanged(repo)
2673 2673 if not revs:
2674 2674 raise util.Abort(_('no revisions specified'))
2675 2675 revs = scmutil.revrange(repo, revs)
2676 2676
2677 2677 # check for merges
2678 2678 for rev in repo.revs('%ld and merge()', revs):
2679 2679 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2680 2680 revs.remove(rev)
2681 2681 if not revs:
2682 2682 return -1
2683 2683
2684 2684 # check for ancestors of dest branch
2685 2685 for rev in repo.revs('::. and %ld', revs):
2686 2686 ui.warn(_('skipping ancestor revision %s\n') % rev)
2687 2687 revs.remove(rev)
2688 2688 if not revs:
2689 2689 return -1
2690 2690
2691 2691 # analyze revs for earlier grafts
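# Map each changeset hash (and, for changesets that were themselves
# grafts, the hash stored in their 'source' extra) to the rev being
# grafted, so already-grafted copies can be detected below.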
2692 2692 ids = {}
2693 2693 for ctx in repo.set("%ld", revs):
2694 2694 ids[ctx.hex()] = ctx.rev()
2695 2695 n = ctx.extra().get('source')
2696 2696 if n:
2697 2697 ids[n] = ctx.rev()
2698 2698
2699 2699 # check ancestors for earlier grafts
2700 2700 ui.debug('scanning for duplicate grafts\n')
2701 2701 for ctx in repo.set("::. - ::%ld", revs):
2702 2702 n = ctx.extra().get('source')
2703 2703 if n in ids:
2704 2704 r = repo[n].rev()
2705 2705 if r in revs:
2706 2706 ui.warn(_('skipping already grafted revision %s\n') % r)
2707 2707 revs.remove(r)
2708 2708 elif ids[n] in revs:
2709 2709 ui.warn(_('skipping already grafted revision %s '
2710 2710 '(same origin %d)\n') % (ids[n], r))
2711 2711 revs.remove(ids[n])
2712 2712 elif ctx.hex() in ids:
2713 2713 r = ids[ctx.hex()]
2714 2714 ui.warn(_('skipping already grafted revision %s '
2715 2715 '(was grafted from %d)\n') % (r, ctx.rev()))
2716 2716 revs.remove(r)
2717 2717 if not revs:
2718 2718 return -1
2719 2719
2720 2720 wlock = repo.wlock()
2721 2721 try:
2722 2722 for pos, ctx in enumerate(repo.set("%ld", revs)):
2723 2723 current = repo['.']
2724 2724
2725 2725 ui.status(_('grafting revision %s\n') % ctx.rev())
2726 2726 if opts.get('dry_run'):
2727 2727 continue
2728 2728
2729 2729 # we don't merge the first commit when continuing
2730 2730 if not cont:
2731 2731 # perform the graft merge with p1(rev) as 'ancestor'
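# Using p1(rev) as the ancestor limits the merge to the changes
# introduced by rev itself, i.e. a cherry-pick style update.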
2732 2732 try:
2733 2733 # ui.forcemerge is an internal variable, do not document
2734 2734 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2735 2735 stats = mergemod.update(repo, ctx.node(), True, True, False,
2736 2736 ctx.p1().node())
2737 2737 finally:
2738 2738 ui.setconfig('ui', 'forcemerge', '')
2739 2739 # drop the second merge parent
2740 2740 repo.setparents(current.node(), nullid)
2741 2741 repo.dirstate.write()
2742 2742 # fix up dirstate for copies and renames
2743 2743 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
2744 2744 # report any conflicts
2745 2745 if stats and stats[3] > 0:
2746 2746 # write out state for --continue
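# graftstate stores the hex nodes of the revisions still to be
# grafted, one per line; --continue reads this file back above.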
2747 2747 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2748 2748 repo.opener.write('graftstate', ''.join(nodelines))
2749 2749 raise util.Abort(
2750 2750 _("unresolved conflicts, can't continue"),
2751 2751 hint=_('use hg resolve and hg graft --continue'))
2752 2752 else:
2753 2753 cont = False
2754 2754
2755 2755 # commit
2756 2756 source = ctx.extra().get('source')
2757 2757 if not source:
2758 2758 source = ctx.hex()
2759 2759 extra = {'source': source}
2760 2760 user = ctx.user()
2761 2761 if opts.get('user'):
2762 2762 user = opts['user']
2763 2763 date = ctx.date()
2764 2764 if opts.get('date'):
2765 2765 date = opts['date']
2766 2766 message = ctx.description()
2767 2767 if opts.get('log'):
2768 2768 message += '\n(grafted from %s)' % ctx.hex()
2769 2769 node = repo.commit(text=message, user=user,
2770 2770 date=date, extra=extra, editor=editor)
2771 2771 if node is None:
2772 2772 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
2773 2773 finally:
2774 2774 wlock.release()
2775 2775
2776 2776 # remove state when we complete successfully
2777 2777 if not opts.get('dry_run') and os.path.exists(repo.join('graftstate')):
2778 2778 util.unlinkpath(repo.join('graftstate'))
2779 2779
2780 2780 return 0
2781 2781
2782 2782 @command('grep',
2783 2783 [('0', 'print0', None, _('end fields with NUL')),
2784 2784 ('', 'all', None, _('print all revisions that match')),
2785 2785 ('a', 'text', None, _('treat all files as text')),
2786 2786 ('f', 'follow', None,
2787 2787 _('follow changeset history,'
2788 2788 ' or file history across copies and renames')),
2789 2789 ('i', 'ignore-case', None, _('ignore case when matching')),
2790 2790 ('l', 'files-with-matches', None,
2791 2791 _('print only filenames and revisions that match')),
2792 2792 ('n', 'line-number', None, _('print matching line numbers')),
2793 2793 ('r', 'rev', [],
2794 2794 _('only search files changed within revision range'), _('REV')),
2795 2795 ('u', 'user', None, _('list the author (long with -v)')),
2796 2796 ('d', 'date', None, _('list the date (short with -q)')),
2797 2797 ] + walkopts,
2798 2798 _('[OPTION]... PATTERN [FILE]...'))
2799 2799 def grep(ui, repo, pattern, *pats, **opts):
2800 2800 """search for a pattern in specified files and revisions
2801 2801
2802 2802 Search revisions of files for a regular expression.
2803 2803
2804 2804 This command behaves differently than Unix grep. It only accepts
2805 2805 Python/Perl regexps. It searches repository history, not the
2806 2806 working directory. It always prints the revision number in which a
2807 2807 match appears.
2808 2808
2809 2809 By default, grep only prints output for the first revision of a
2810 2810 file in which it finds a match. To get it to print every revision
2811 2811 that contains a change in match status ("-" for a match that
2812 2812 becomes a non-match, or "+" for a non-match that becomes a match),
2813 2813 use the --all flag.
2814 2814
2815 2815 Returns 0 if a match is found, 1 otherwise.
2816 2816 """
2817 2817 reflags = re.M
2818 2818 if opts.get('ignore_case'):
2819 2819 reflags |= re.I
2820 2820 try:
2821 2821 regexp = re.compile(pattern, reflags)
2822 2822 except re.error, inst:
2823 2823 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2824 2824 return 1
2825 2825 sep, eol = ':', '\n'
2826 2826 if opts.get('print0'):
2827 2827 sep = eol = '\0'
2828 2828
2829 2829 getfile = util.lrucachefunc(repo.file)
2830 2830
2831 2831 def matchlines(body):
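# Yield (line number, match start, match end, line text) for every
# regexp match in body; columns are relative to the matching line.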
2832 2832 begin = 0
2833 2833 linenum = 0
2834 2834 while True:
2835 2835 match = regexp.search(body, begin)
2836 2836 if not match:
2837 2837 break
2838 2838 mstart, mend = match.span()
2839 2839 linenum += body.count('\n', begin, mstart) + 1
2840 2840 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2841 2841 begin = body.find('\n', mend) + 1 or len(body) + 1
2842 2842 lend = begin - 1
2843 2843 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2844 2844
2845 2845 class linestate(object):
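# One matching line in one file revision; equality and hashing ignore
# the match columns so difflib can pair lines across revisions.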
2846 2846 def __init__(self, line, linenum, colstart, colend):
2847 2847 self.line = line
2848 2848 self.linenum = linenum
2849 2849 self.colstart = colstart
2850 2850 self.colend = colend
2851 2851
2852 2852 def __hash__(self):
2853 2853 return hash((self.linenum, self.line))
2854 2854
2855 2855 def __eq__(self, other):
2856 2856 return self.line == other.line
2857 2857
2858 2858 matches = {}
2859 2859 copies = {}
2860 2860 def grepbody(fn, rev, body):
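# Record every matching line of fn at rev as a linestate in
# matches[rev][fn].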
2861 2861 matches[rev].setdefault(fn, [])
2862 2862 m = matches[rev][fn]
2863 2863 for lnum, cstart, cend, line in matchlines(body):
2864 2864 s = linestate(line, lnum, cstart, cend)
2865 2865 m.append(s)
2866 2866
2867 2867 def difflinestates(a, b):
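# Diff the match states of parent and child revisions, yielding
# ('+', state) for lines that gained a match and ('-', state) for
# lines that lost one (used by --all).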
2868 2868 sm = difflib.SequenceMatcher(None, a, b)
2869 2869 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2870 2870 if tag == 'insert':
2871 2871 for i in xrange(blo, bhi):
2872 2872 yield ('+', b[i])
2873 2873 elif tag == 'delete':
2874 2874 for i in xrange(alo, ahi):
2875 2875 yield ('-', a[i])
2876 2876 elif tag == 'replace':
2877 2877 for i in xrange(alo, ahi):
2878 2878 yield ('-', a[i])
2879 2879 for i in xrange(blo, bhi):
2880 2880 yield ('+', b[i])
2881 2881
2882 2882 def display(fn, ctx, pstates, states):
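# Print the matches for one file at one revision, one line per match
# (or per match-status change with --all), honoring the -n/-u/-d/-l
# and --print0 output options.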
2883 2883 rev = ctx.rev()
2884 2884 datefunc = ui.quiet and util.shortdate or util.datestr
2885 2885 found = False
2886 2886 filerevmatches = {}
2887 2887 def binary():
2888 2888 flog = getfile(fn)
2889 2889 return util.binary(flog.read(ctx.filenode(fn)))
2890 2890
2891 2891 if opts.get('all'):
2892 2892 iter = difflinestates(pstates, states)
2893 2893 else:
2894 2894 iter = [('', l) for l in states]
2895 2895 for change, l in iter:
2896 2896 cols = [fn, str(rev)]
2897 2897 before, match, after = None, None, None
2898 2898 if opts.get('line_number'):
2899 2899 cols.append(str(l.linenum))
2900 2900 if opts.get('all'):
2901 2901 cols.append(change)
2902 2902 if opts.get('user'):
2903 2903 cols.append(ui.shortuser(ctx.user()))
2904 2904 if opts.get('date'):
2905 2905 cols.append(datefunc(ctx.date()))
2906 2906 if opts.get('files_with_matches'):
2907 2907 c = (fn, rev)
2908 2908 if c in filerevmatches:
2909 2909 continue
2910 2910 filerevmatches[c] = 1
2911 2911 else:
2912 2912 before = l.line[:l.colstart]
2913 2913 match = l.line[l.colstart:l.colend]
2914 2914 after = l.line[l.colend:]
2915 2915 ui.write(sep.join(cols))
2916 2916 if before is not None:
2917 2917 if not opts.get('text') and binary():
2918 2918 ui.write(sep + " Binary file matches")
2919 2919 else:
2920 2920 ui.write(sep + before)
2921 2921 ui.write(match, label='grep.match')
2922 2922 ui.write(after)
2923 2923 ui.write(eol)
2924 2924 found = True
2925 2925 return found
2926 2926
2927 2927 skip = {}
2928 2928 revfiles = {}
2929 2929 matchfn = scmutil.match(repo[None], pats, opts)
2930 2930 found = False
2931 2931 follow = opts.get('follow')
2932 2932
2933 2933 def prep(ctx, fns):
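# walkchangerevs callback: cache match states for each file touched by
# ctx and for its first parent (following renames when --follow is
# set), so the loop below can compare them.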
2934 2934 rev = ctx.rev()
2935 2935 pctx = ctx.p1()
2936 2936 parent = pctx.rev()
2937 2937 matches.setdefault(rev, {})
2938 2938 matches.setdefault(parent, {})
2939 2939 files = revfiles.setdefault(rev, [])
2940 2940 for fn in fns:
2941 2941 flog = getfile(fn)
2942 2942 try:
2943 2943 fnode = ctx.filenode(fn)
2944 2944 except error.LookupError:
2945 2945 continue
2946 2946
2947 2947 copied = flog.renamed(fnode)
2948 2948 copy = follow and copied and copied[0]
2949 2949 if copy:
2950 2950 copies.setdefault(rev, {})[fn] = copy
2951 2951 if fn in skip:
2952 2952 if copy:
2953 2953 skip[copy] = True
2954 2954 continue
2955 2955 files.append(fn)
2956 2956
2957 2957 if fn not in matches[rev]:
2958 2958 grepbody(fn, rev, flog.read(fnode))
2959 2959
2960 2960 pfn = copy or fn
2961 2961 if pfn not in matches[parent]:
2962 2962 try:
2963 2963 fnode = pctx.filenode(pfn)
2964 2964 grepbody(pfn, parent, flog.read(fnode))
2965 2965 except error.LookupError:
2966 2966 pass
2967 2967
2968 2968 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2969 2969 rev = ctx.rev()
2970 2970 parent = ctx.p1().rev()
2971 2971 for fn in sorted(revfiles.get(rev, [])):
2972 2972 states = matches[rev][fn]
2973 2973 copy = copies.get(rev, {}).get(fn)
2974 2974 if fn in skip:
2975 2975 if copy:
2976 2976 skip[copy] = True
2977 2977 continue
2978 2978 pstates = matches.get(parent, {}).get(copy or fn, [])
2979 2979 if pstates or states:
2980 2980 r = display(fn, ctx, pstates, states)
2981 2981 found = found or r
2982 2982 if r and not opts.get('all'):
2983 2983 skip[fn] = True
2984 2984 if copy:
2985 2985 skip[copy] = True
2986 2986 del matches[rev]
2987 2987 del revfiles[rev]
2988 2988
2989 2989 return not found
2990 2990
2991 2991 @command('heads',
2992 2992 [('r', 'rev', '',
2993 2993 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2994 2994 ('t', 'topo', False, _('show topological heads only')),
2995 2995 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2996 2996 ('c', 'closed', False, _('show normal and closed branch heads')),
2997 2997 ] + templateopts,
2998 2998 _('[-ac] [-r STARTREV] [REV]...'))
2999 2999 def heads(ui, repo, *branchrevs, **opts):
3000 3000 """show current repository heads or show branch heads
3001 3001
3002 3002 With no arguments, show all repository branch heads.
3003 3003
3004 3004 Repository "heads" are changesets with no child changesets. They are
3005 3005 where development generally takes place and are the usual targets
3006 3006 for update and merge operations. Branch heads are changesets that have
3007 3007 no child changeset on the same branch.
3008 3008
3009 3009 If one or more REVs are given, only branch heads on the branches
3010 3010 associated with the specified changesets are shown. This means
3011 3011 that you can use :hg:`heads foo` to see the heads on a branch
3012 3012 named ``foo``.
3013 3013
3014 3014 If -c/--closed is specified, also show branch heads marked closed
3015 3015 (see :hg:`commit --close-branch`).
3016 3016
3017 3017 If STARTREV is specified, only those heads that are descendants of
3018 3018 STARTREV will be displayed.
3019 3019
3020 3020 If -t/--topo is specified, named branch mechanics will be ignored and only
3021 3021 changesets without children will be shown.
3022 3022
3023 3023 Returns 0 if matching heads are found, 1 if not.
3024 3024 """
3025 3025
3026 3026 start = None
3027 3027 if 'rev' in opts:
3028 3028 start = scmutil.revsingle(repo, opts['rev'], None).node()
3029 3029
3030 3030 if opts.get('topo'):
3031 3031 heads = [repo[h] for h in repo.heads(start)]
3032 3032 else:
3033 3033 heads = []
3034 3034 for branch in repo.branchmap():
3035 3035 heads += repo.branchheads(branch, start, opts.get('closed'))
3036 3036 heads = [repo[h] for h in heads]
3037 3037
3038 3038 if branchrevs:
3039 3039 branches = set(repo[br].branch() for br in branchrevs)
3040 3040 heads = [h for h in heads if h.branch() in branches]
3041 3041
3042 3042 if opts.get('active') and branchrevs:
3043 3043 dagheads = repo.heads(start)
3044 3044 heads = [h for h in heads if h.node() in dagheads]
3045 3045
3046 3046 if branchrevs:
3047 3047 haveheads = set(h.branch() for h in heads)
3048 3048 if branches - haveheads:
3049 3049 headless = ', '.join(b for b in branches - haveheads)
3050 3050 msg = _('no open branch heads found on branches %s')
3051 3051 if opts.get('rev'):
3052 3052 msg += _(' (started at %s)') % opts['rev']
3053 3053 ui.warn((msg + '\n') % headless)
3054 3054
3055 3055 if not heads:
3056 3056 return 1
3057 3057
3058 3058 heads = sorted(heads, key=lambda x: -x.rev())
3059 3059 displayer = cmdutil.show_changeset(ui, repo, opts)
3060 3060 for ctx in heads:
3061 3061 displayer.show(ctx)
3062 3062 displayer.close()
3063 3063
3064 3064 @command('help',
3065 3065 [('e', 'extension', None, _('show only help for extensions')),
3066 3066 ('c', 'command', None, _('show only help for commands')),
3067 3067 ('k', 'keyword', '', _('show topics matching keyword')),
3068 3068 ],
3069 3069 _('[-ec] [TOPIC]'))
3070 3070 def help_(ui, name=None, unknowncmd=False, full=True, **opts):
3071 3071 """show help for a given topic or a help overview
3072 3072
3073 3073 With no arguments, print a list of commands with short help messages.
3074 3074
3075 3075 Given a topic, extension, or command name, print help for that
3076 3076 topic.
3077 3077
3078 3078 Returns 0 if successful.
3079 3079 """
3080 3080
3081 3081 textwidth = min(ui.termwidth(), 80) - 2
3082 3082
3083 3083 def optrst(options):
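# Render an option list as a miniRST table of (short opt, long opt,
# description); list-valued options get a "[+]" marker and a footnote.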
3084 3084 data = []
3085 3085 multioccur = False
3086 3086 for option in options:
3087 3087 if len(option) == 5:
3088 3088 shortopt, longopt, default, desc, optlabel = option
3089 3089 else:
3090 3090 shortopt, longopt, default, desc = option
3091 3091 optlabel = _("VALUE") # default label
3092 3092
3093 3093 if _("DEPRECATED") in desc and not ui.verbose:
3094 3094 continue
3095 3095
3096 3096 so = ''
3097 3097 if shortopt:
3098 3098 so = '-' + shortopt
3099 3099 lo = '--' + longopt
3100 3100 if default:
3101 3101 desc += _(" (default: %s)") % default
3102 3102
3103 3103 if isinstance(default, list):
3104 3104 lo += " %s [+]" % optlabel
3105 3105 multioccur = True
3106 3106 elif (default is not None) and not isinstance(default, bool):
3107 3107 lo += " %s" % optlabel
3108 3108
3109 3109 data.append((so, lo, desc))
3110 3110
3111 3111 rst = minirst.maketable(data, 1)
3112 3112
3113 3113 if multioccur:
3114 3114 rst += _("\n[+] marked option can be specified multiple times\n")
3115 3115
3116 3116 return rst
3117 3117
3118 3118 # list all option lists
3119 3119 def opttext(optlist, width):
3120 3120 rst = ''
3121 3121 if not optlist:
3122 3122 return ''
3123 3123
3124 3124 for title, options in optlist:
3125 3125 rst += '\n%s\n' % title
3126 3126 if options:
3127 3127 rst += "\n"
3128 3128 rst += optrst(options)
3129 3129 rst += '\n'
3130 3130
3131 3131 return '\n' + minirst.format(rst, width)
3132 3132
3133 3133 def addglobalopts(optlist, aliases):
3134 3134 if ui.quiet:
3135 3135 return []
3136 3136
3137 3137 if ui.verbose:
3138 3138 optlist.append((_("global options:"), globalopts))
3139 3139 if name == 'shortlist':
3140 3140 optlist.append((_('use "hg help" for the full list '
3141 3141 'of commands'), ()))
3142 3142 else:
3143 3143 if name == 'shortlist':
3144 3144 msg = _('use "hg help" for the full list of commands '
3145 3145 'or "hg -v" for details')
3146 3146 elif name and not full:
3147 3147 msg = _('use "hg help %s" to show the full help text') % name
3148 3148 elif aliases:
3149 3149 msg = _('use "hg -v help%s" to show builtin aliases and '
3150 3150 'global options') % (name and " " + name or "")
3151 3151 else:
3152 3152 msg = _('use "hg -v help %s" to show more info') % name
3153 3153 optlist.append((msg, ()))
3154 3154
3155 3155 def helpcmd(name):
3156 3156 try:
3157 3157 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
3158 3158 except error.AmbiguousCommand, inst:
3159 3159 # py3k fix: except vars can't be used outside the scope of the
3160 3160 # except block, nor can they be used inside a lambda. See Python issue 4617.
3161 3161 prefix = inst.args[0]
3162 3162 select = lambda c: c.lstrip('^').startswith(prefix)
3163 3163 helplist(select)
3164 3164 return
3165 3165
3166 3166 # check if it's an invalid alias and display its error if it is
3167 3167 if getattr(entry[0], 'badalias', False):
3168 3168 if not unknowncmd:
3169 3169 entry[0](ui)
3170 3170 return
3171 3171
3172 3172 rst = ""
3173 3173
3174 3174 # synopsis
3175 3175 if len(entry) > 2:
3176 3176 if entry[2].startswith('hg'):
3177 3177 rst += "%s\n" % entry[2]
3178 3178 else:
3179 3179 rst += 'hg %s %s\n' % (aliases[0], entry[2])
3180 3180 else:
3181 3181 rst += 'hg %s\n' % aliases[0]
3182 3182
3183 3183 # aliases
3184 3184 if full and not ui.quiet and len(aliases) > 1:
3185 3185 rst += _("\naliases: %s\n") % ', '.join(aliases[1:])
3186 3186
3187 3187 # description
3188 3188 doc = gettext(entry[0].__doc__)
3189 3189 if not doc:
3190 3190 doc = _("(no help text available)")
3191 3191 if util.safehasattr(entry[0], 'definition'): # aliased command
3192 3192 if entry[0].definition.startswith('!'): # shell alias
3193 3193 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
3194 3194 else:
3195 3195 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
3196 3196 if ui.quiet or not full:
3197 3197 doc = doc.splitlines()[0]
3198 3198 rst += "\n" + doc + "\n"
3199 3199
3200 3200 # check if this command shadows a non-trivial (multi-line)
3201 3201 # extension help text
3202 3202 try:
3203 3203 mod = extensions.find(name)
3204 3204 doc = gettext(mod.__doc__) or ''
3205 3205 if '\n' in doc.strip():
3206 3206 msg = _('use "hg help -e %s" to show help for '
3207 3207 'the %s extension') % (name, name)
3208 3208 rst += '\n%s\n' % msg
3209 3209 except KeyError:
3210 3210 pass
3211 3211
3212 3212 # options
3213 3213 if not ui.quiet and entry[1]:
3214 3214 rst += '\n'
3215 3215 rst += _("options:")
3216 3216 rst += '\n\n'
3217 3217 rst += optrst(entry[1])
3218 3218
3219 3219 if ui.verbose:
3220 3220 rst += '\n'
3221 3221 rst += _("global options:")
3222 3222 rst += '\n\n'
3223 3223 rst += optrst(globalopts)
3224 3224
3225 3225 keep = ui.verbose and ['verbose'] or []
3226 3226 formatted, pruned = minirst.format(rst, textwidth, keep=keep)
3227 3227 ui.write(formatted)
3228 3228
3229 3229 if not ui.verbose:
3230 3230 if not full:
3231 3231 ui.write(_('\nuse "hg help %s" to show the full help text\n')
3232 3232 % name)
3233 3233 elif not ui.quiet:
3234 3234 ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)
3235 3235
3236 3236
3237 3237 def helplist(select=None):
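# Print the basic or full command list, optionally filtered through the
# select(name) predicate; with no name given, also list enabled
# extensions and additional help topics.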
3238 3238 # list of commands
3239 3239 if name == "shortlist":
3240 3240 header = _('basic commands:\n\n')
3241 3241 else:
3242 3242 header = _('list of commands:\n\n')
3243 3243
3244 3244 h = {}
3245 3245 cmds = {}
3246 3246 for c, e in table.iteritems():
3247 3247 f = c.split("|", 1)[0]
3248 3248 if select and not select(f):
3249 3249 continue
3250 3250 if (not select and name != 'shortlist' and
3251 3251 e[0].__module__ != __name__):
3252 3252 continue
3253 3253 if name == "shortlist" and not f.startswith("^"):
3254 3254 continue
3255 3255 f = f.lstrip("^")
3256 3256 if not ui.debugflag and f.startswith("debug"):
3257 3257 continue
3258 3258 doc = e[0].__doc__
3259 3259 if doc and 'DEPRECATED' in doc and not ui.verbose:
3260 3260 continue
3261 3261 doc = gettext(doc)
3262 3262 if not doc:
3263 3263 doc = _("(no help text available)")
3264 3264 h[f] = doc.splitlines()[0].rstrip()
3265 3265 cmds[f] = c.lstrip("^")
3266 3266
3267 3267 if not h:
3268 3268 ui.status(_('no commands defined\n'))
3269 3269 return
3270 3270
3271 3271 ui.status(header)
3272 3272 fns = sorted(h)
3273 3273 m = max(map(len, fns))
3274 3274 for f in fns:
3275 3275 if ui.verbose:
3276 3276 commands = cmds[f].replace("|",", ")
3277 3277 ui.write(" %s:\n %s\n"%(commands, h[f]))
3278 3278 else:
3279 3279 ui.write('%s\n' % (util.wrap(h[f], textwidth,
3280 3280 initindent=' %-*s ' % (m, f),
3281 3281 hangindent=' ' * (m + 4))))
3282 3282
3283 3283 if not name:
3284 3284 text = help.listexts(_('enabled extensions:'), extensions.enabled())
3285 3285 if text:
3286 3286 ui.write("\n%s" % minirst.format(text, textwidth))
3287 3287
3288 3288 ui.write(_("\nadditional help topics:\n\n"))
3289 3289 topics = []
3290 3290 for names, header, doc in help.helptable:
3291 3291 topics.append((sorted(names, key=len, reverse=True)[0], header))
3292 3292 topics_len = max([len(s[0]) for s in topics])
3293 3293 for t, desc in topics:
3294 3294 ui.write(" %-*s %s\n" % (topics_len, t, desc))
3295 3295
3296 3296 optlist = []
3297 3297 addglobalopts(optlist, True)
3298 3298 ui.write(opttext(optlist, textwidth))
3299 3299
3300 3300 def helptopic(name):
3301 3301 for names, header, doc in help.helptable:
3302 3302 if name in names:
3303 3303 break
3304 3304 else:
3305 3305 raise error.UnknownCommand(name)
3306 3306
3307 3307 # description
3308 3308 if not doc:
3309 3309 doc = _("(no help text available)")
3310 3310 if util.safehasattr(doc, '__call__'):
3311 3311 doc = doc()
3312 3312
3313 3313 ui.write("%s\n\n" % header)
3314 3314 ui.write(minirst.format(doc, textwidth, indent=4))
3315 3315 try:
3316 3316 cmdutil.findcmd(name, table)
3317 3317 ui.write(_('\nuse "hg help -c %s" to see help for '
3318 3318 'the %s command\n') % (name, name))
3319 3319 except error.UnknownCommand:
3320 3320 pass
3321 3321
3322 3322 def helpext(name):
3323 3323 try:
3324 3324 mod = extensions.find(name)
3325 3325 doc = gettext(mod.__doc__) or _('no help text available')
3326 3326 except KeyError:
3327 3327 mod = None
3328 3328 doc = extensions.disabledext(name)
3329 3329 if not doc:
3330 3330 raise error.UnknownCommand(name)
3331 3331
3332 3332 if '\n' not in doc:
3333 3333 head, tail = doc, ""
3334 3334 else:
3335 3335 head, tail = doc.split('\n', 1)
3336 3336 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
3337 3337 if tail:
3338 3338 ui.write(minirst.format(tail, textwidth))
3339 3339 ui.status('\n')
3340 3340
3341 3341 if mod:
3342 3342 try:
3343 3343 ct = mod.cmdtable
3344 3344 except AttributeError:
3345 3345 ct = {}
3346 3346 modcmds = set([c.split('|', 1)[0] for c in ct])
3347 3347 helplist(modcmds.__contains__)
3348 3348 else:
3349 3349 ui.write(_('use "hg help extensions" for information on enabling '
3350 3350 'extensions\n'))
3351 3351
3352 3352 def helpextcmd(name):
3353 3353 cmd, ext, mod = extensions.disabledcmd(ui, name,
3354 3354 ui.configbool('ui', 'strict'))
3355 3355 doc = gettext(mod.__doc__).splitlines()[0]
3356 3356
3357 3357 msg = help.listexts(_("'%s' is provided by the following "
3358 3358 "extension:") % cmd, {ext: doc}, indent=4)
3359 3359 ui.write(minirst.format(msg, textwidth))
3360 3360 ui.write('\n')
3361 3361 ui.write(_('use "hg help extensions" for information on enabling '
3362 3362 'extensions\n'))
3363 3363
3364 3364 kw = opts.get('keyword')
3365 3365 if kw:
3366 3366 matches = help.topicmatch(kw)
3367 3367 for t, title in (('topics', _('Topics')),
3368 3368 ('commands', _('Commands')),
3369 3369 ('extensions', _('Extensions')),
3370 3370 ('extensioncommands', _('Extension Commands'))):
3371 3371 if matches[t]:
3372 3372 ui.write('%s:\n\n' % title)
3373 3373 ui.write(minirst.format(minirst.maketable(matches[t], 1)))
3374 3374 return
3375 3375
3376 3376 if name and name != 'shortlist':
3377 3377 i = None
3378 3378 if unknowncmd:
3379 3379 queries = (helpextcmd,)
3380 3380 elif opts.get('extension'):
3381 3381 queries = (helpext,)
3382 3382 elif opts.get('command'):
3383 3383 queries = (helpcmd,)
3384 3384 else:
3385 3385 queries = (helptopic, helpcmd, helpext, helpextcmd)
3386 3386 for f in queries:
3387 3387 try:
3388 3388 f(name)
3389 3389 i = None
3390 3390 break
3391 3391 except error.UnknownCommand, inst:
3392 3392 i = inst
3393 3393 if i:
3394 3394 raise i
3395 3395 else:
3396 3396 # program name
3397 3397 ui.status(_("Mercurial Distributed SCM\n"))
3398 3398 ui.status('\n')
3399 3399 helplist()
3400 3400
3401 3401
3402 3402 @command('identify|id',
3403 3403 [('r', 'rev', '',
3404 3404 _('identify the specified revision'), _('REV')),
3405 3405 ('n', 'num', None, _('show local revision number')),
3406 3406 ('i', 'id', None, _('show global revision id')),
3407 3407 ('b', 'branch', None, _('show branch')),
3408 3408 ('t', 'tags', None, _('show tags')),
3409 3409 ('B', 'bookmarks', None, _('show bookmarks')),
3410 3410 ] + remoteopts,
3411 3411 _('[-nibtB] [-r REV] [SOURCE]'))
3412 3412 def identify(ui, repo, source=None, rev=None,
3413 3413 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3414 3414 """identify the working copy or specified revision
3415 3415
3416 3416 Print a summary identifying the repository state at REV using one or
3417 3417 two parent hash identifiers, followed by a "+" if the working
3418 3418 directory has uncommitted changes, the branch name (if not default),
3419 3419 a list of tags, and a list of bookmarks.
3420 3420
3421 3421 When REV is not given, print a summary of the current state of the
3422 3422 repository.
3423 3423
3424 3424 Specifying a path to a repository root or Mercurial bundle will
3425 3425 cause lookup to operate on that repository/bundle.
3426 3426
3427 3427 .. container:: verbose
3428 3428
3429 3429 Examples:
3430 3430
3431 3431 - generate a build identifier for the working directory::
3432 3432
3433 3433 hg id --id > build-id.dat
3434 3434
3435 3435 - find the revision corresponding to a tag::
3436 3436
3437 3437 hg id -n -r 1.3
3438 3438
3439 3439 - check the most recent revision of a remote repository::
3440 3440
3441 3441 hg id -r tip http://selenic.com/hg/
3442 3442
3443 3443 Returns 0 if successful.
3444 3444 """
3445 3445
3446 3446 if not repo and not source:
3447 3447 raise util.Abort(_("there is no Mercurial repository here "
3448 3448 "(.hg not found)"))
3449 3449
3450 3450 hexfunc = ui.debugflag and hex or short
3451 3451 default = not (num or id or branch or tags or bookmarks)
3452 3452 output = []
3453 3453 revs = []
3454 3454
3455 3455 if source:
3456 3456 source, branches = hg.parseurl(ui.expandpath(source))
3457 3457 repo = hg.peer(ui, opts, source)
3458 3458 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3459 3459
3460 3460 if not repo.local():
3461 3461 if num or branch or tags:
3462 3462 raise util.Abort(
3463 3463 _("can't query remote revision number, branch, or tags"))
3464 3464 if not rev and revs:
3465 3465 rev = revs[0]
3466 3466 if not rev:
3467 3467 rev = "tip"
3468 3468
3469 3469 remoterev = repo.lookup(rev)
3470 3470 if default or id:
3471 3471 output = [hexfunc(remoterev)]
3472 3472
3473 3473 def getbms():
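# Return the names of remote bookmarks that point at the resolved
# remote revision.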
3474 3474 bms = []
3475 3475
3476 3476 if 'bookmarks' in repo.listkeys('namespaces'):
3477 3477 hexremoterev = hex(remoterev)
3478 3478 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
3479 3479 if bmr == hexremoterev]
3480 3480
3481 3481 return bms
3482 3482
3483 3483 if bookmarks:
3484 3484 output.extend(getbms())
3485 3485 elif default and not ui.quiet:
3486 3486 # multiple bookmarks for a single parent separated by '/'
3487 3487 bm = '/'.join(getbms())
3488 3488 if bm:
3489 3489 output.append(bm)
3490 3490 else:
3491 3491 if not rev:
3492 3492 ctx = repo[None]
3493 3493 parents = ctx.parents()
3494 3494 changed = ""
3495 3495 if default or id or num:
3496 3496 changed = util.any(repo.status()) and "+" or ""
3497 3497 if default or id:
3498 3498 output = ["%s%s" %
3499 3499 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3500 3500 if num:
3501 3501 output.append("%s%s" %
3502 3502 ('+'.join([str(p.rev()) for p in parents]), changed))
3503 3503 else:
3504 3504 ctx = scmutil.revsingle(repo, rev)
3505 3505 if default or id:
3506 3506 output = [hexfunc(ctx.node())]
3507 3507 if num:
3508 3508 output.append(str(ctx.rev()))
3509 3509
3510 3510 if default and not ui.quiet:
3511 3511 b = ctx.branch()
3512 3512 if b != 'default':
3513 3513 output.append("(%s)" % b)
3514 3514
3515 3515 # multiple tags for a single parent separated by '/'
3516 3516 t = '/'.join(ctx.tags())
3517 3517 if t:
3518 3518 output.append(t)
3519 3519
3520 3520 # multiple bookmarks for a single parent separated by '/'
3521 3521 bm = '/'.join(ctx.bookmarks())
3522 3522 if bm:
3523 3523 output.append(bm)
3524 3524 else:
3525 3525 if branch:
3526 3526 output.append(ctx.branch())
3527 3527
3528 3528 if tags:
3529 3529 output.extend(ctx.tags())
3530 3530
3531 3531 if bookmarks:
3532 3532 output.extend(ctx.bookmarks())
3533 3533
3534 3534 ui.write("%s\n" % ' '.join(output))
3535 3535
3536 3536 @command('import|patch',
3537 3537 [('p', 'strip', 1,
3538 3538 _('directory strip option for patch. This has the same '
3539 3539 'meaning as the corresponding patch option'), _('NUM')),
3540 3540 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3541 3541 ('e', 'edit', False, _('invoke editor on commit messages')),
3542 3542 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3543 3543 ('', 'no-commit', None,
3544 3544 _("don't commit, just update the working directory")),
3545 3545 ('', 'bypass', None,
3546 3546 _("apply patch without touching the working directory")),
3547 3547 ('', 'exact', None,
3548 3548 _('apply patch to the nodes from which it was generated')),
3549 3549 ('', 'import-branch', None,
3550 3550 _('use any branch information in patch (implied by --exact)'))] +
3551 3551 commitopts + commitopts2 + similarityopts,
3552 3552 _('[OPTION]... PATCH...'))
3553 3553 def import_(ui, repo, patch1=None, *patches, **opts):
3554 3554 """import an ordered set of patches
3555 3555
3556 3556 Import a list of patches and commit them individually (unless
3557 3557 --no-commit is specified).
3558 3558
3559 3559 If there are outstanding changes in the working directory, import
3560 3560 will abort unless given the -f/--force flag.
3561 3561
3562 3562 You can import a patch straight from a mail message. Even patches
3563 3563 as attachments work (to use the body part, it must have type
3564 3564 text/plain or text/x-patch). The From and Subject headers of the email
3565 3565 message are used as the default committer and commit message. All
3566 3566 text/plain body parts before the first diff are added to the commit
3567 3567 message.
3568 3568
3569 3569 If the imported patch was generated by :hg:`export`, user and
3570 3570 description from patch override values from message headers and
3571 3571 body. Values given on command line with -m/--message and -u/--user
3572 3572 override these.
3573 3573
3574 3574 If --exact is specified, import will set the working directory to
3575 3575 the parent of each patch before applying it, and will abort if the
3576 3576 resulting changeset has a different ID than the one recorded in
3577 3577 the patch. This may happen due to character set problems or other
3578 3578 deficiencies in the text patch format.
3579 3579
3580 3580 Use --bypass to apply and commit patches directly to the
3581 3581 repository, not touching the working directory. Without --exact,
3582 3582 patches will be applied on top of the working directory parent
3583 3583 revision.
3584 3584
3585 3585 With -s/--similarity, hg will attempt to discover renames and
3586 3586 copies in the patch in the same way as :hg:`addremove`.
3587 3587
3588 3588 To read a patch from standard input, use "-" as the patch name. If
3589 3589 a URL is specified, the patch will be downloaded from it.
3590 3590 See :hg:`help dates` for a list of formats valid for -d/--date.
3591 3591
3592 3592 .. container:: verbose
3593 3593
3594 3594 Examples:
3595 3595
3596 3596 - import a traditional patch from a website and detect renames::
3597 3597
3598 3598 hg import -s 80 http://example.com/bugfix.patch
3599 3599
3600 3600 - import a changeset from an hgweb server::
3601 3601
3602 3602 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3603 3603
3604 3604 - import all the patches in a Unix-style mbox::
3605 3605
3606 3606 hg import incoming-patches.mbox
3607 3607
3608 3608 - attempt to exactly restore an exported changeset (not always
3609 3609 possible)::
3610 3610
3611 3611 hg import --exact proposed-fix.patch
3612 3612
3613 3613 Returns 0 on success.
3614 3614 """
3615 3615
3616 3616 if not patch1:
3617 3617 raise util.Abort(_('need at least one patch to import'))
3618 3618
3619 3619 patches = (patch1,) + patches
3620 3620
3621 3621 date = opts.get('date')
3622 3622 if date:
3623 3623 opts['date'] = util.parsedate(date)
3624 3624
3625 3625 editor = cmdutil.commiteditor
3626 3626 if opts.get('edit'):
3627 3627 editor = cmdutil.commitforceeditor
3628 3628
3629 3629 update = not opts.get('bypass')
3630 3630 if not update and opts.get('no_commit'):
3631 3631 raise util.Abort(_('cannot use --no-commit with --bypass'))
3632 3632 try:
3633 3633 sim = float(opts.get('similarity') or 0)
3634 3634 except ValueError:
3635 3635 raise util.Abort(_('similarity must be a number'))
3636 3636 if sim < 0 or sim > 100:
3637 3637 raise util.Abort(_('similarity must be between 0 and 100'))
3638 3638 if sim and not update:
3639 3639 raise util.Abort(_('cannot use --similarity with --bypass'))
3640 3640
3641 3641 if (opts.get('exact') or not opts.get('force')) and update:
3642 3642 cmdutil.bailifchanged(repo)
3643 3643
3644 3644 base = opts["base"]
3645 3645 strip = opts["strip"]
3646 3646 wlock = lock = tr = None
3647 3647 msgs = []
3648 3648
3649 3649 def checkexact(repo, n, nodeid):
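# With --exact, the committed node must match the node recorded in
# the patch header; otherwise roll back and abort.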
3650 3650 if opts.get('exact') and hex(n) != nodeid:
3651 3651 repo.rollback()
3652 3652 raise util.Abort(_('patch is damaged or loses information'))
3653 3653
3654 3654 def tryone(ui, hunk, parents):
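# Apply a single patch extracted from the input and return a tuple of
# (status message, committed node or None).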
3655 3655 tmpname, message, user, date, branch, nodeid, p1, p2 = \
3656 3656 patch.extract(ui, hunk)
3657 3657
3658 3658 if not tmpname:
3659 3659 return (None, None)
3660 3660 msg = _('applied to working directory')
3661 3661
3662 3662 try:
3663 3663 cmdline_message = cmdutil.logmessage(ui, opts)
3664 3664 if cmdline_message:
3665 3665 # pickup the cmdline msg
3666 3666 message = cmdline_message
3667 3667 elif message:
3668 3668 # pickup the patch msg
3669 3669 message = message.strip()
3670 3670 else:
3671 3671 # launch the editor
3672 3672 message = None
3673 3673 ui.debug('message:\n%s\n' % message)
3674 3674
3675 3675 if len(parents) == 1:
3676 3676 parents.append(repo[nullid])
3677 3677 if opts.get('exact'):
3678 3678 if not nodeid or not p1:
3679 3679 raise util.Abort(_('not a Mercurial patch'))
3680 3680 p1 = repo[p1]
3681 3681 p2 = repo[p2 or nullid]
3682 3682 elif p2:
3683 3683 try:
3684 3684 p1 = repo[p1]
3685 3685 p2 = repo[p2]
3686 3686 # Without any options, consider p2 only if the
3687 3687 # patch is being applied on top of the recorded
3688 3688 # first parent.
3689 3689 if p1 != parents[0]:
3690 3690 p1 = parents[0]
3691 3691 p2 = repo[nullid]
3692 3692 except error.RepoError:
3693 3693 p1, p2 = parents
3694 3694 else:
3695 3695 p1, p2 = parents
3696 3696
3697 3697 n = None
3698 3698 if update:
3699 3699 if p1 != parents[0]:
3700 3700 hg.clean(repo, p1.node())
3701 3701 if p2 != parents[1]:
3702 3702 repo.setparents(p1.node(), p2.node())
3703 3703
3704 3704 if opts.get('exact') or opts.get('import_branch'):
3705 3705 repo.dirstate.setbranch(branch or 'default')
3706 3706
3707 3707 files = set()
3708 3708 patch.patch(ui, repo, tmpname, strip=strip, files=files,
3709 3709 eolmode=None, similarity=sim / 100.0)
3710 3710 files = list(files)
3711 3711 if opts.get('no_commit'):
3712 3712 if message:
3713 3713 msgs.append(message)
3714 3714 else:
3715 3715 if opts.get('exact') or p2:
3716 3716 # If you got here, you either used --force and know what
3717 3717 # you are doing, used --exact, or applied a merge patch while
3718 3718 # the working directory was at its first parent.
3719 3719 m = None
3720 3720 else:
3721 3721 m = scmutil.matchfiles(repo, files or [])
3722 3722 n = repo.commit(message, opts.get('user') or user,
3723 3723 opts.get('date') or date, match=m,
3724 3724 editor=editor)
3725 3725 checkexact(repo, n, nodeid)
3726 3726 else:
3727 3727 if opts.get('exact') or opts.get('import_branch'):
3728 3728 branch = branch or 'default'
3729 3729 else:
3730 3730 branch = p1.branch()
3731 3731 store = patch.filestore()
3732 3732 try:
3733 3733 files = set()
3734 3734 try:
3735 3735 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
3736 3736 files, eolmode=None)
3737 3737 except patch.PatchError, e:
3738 3738 raise util.Abort(str(e))
3739 3739 memctx = patch.makememctx(repo, (p1.node(), p2.node()),
3740 3740 message,
3741 3741 opts.get('user') or user,
3742 3742 opts.get('date') or date,
3743 3743 branch, files, store,
3744 3744 editor=cmdutil.commiteditor)
3745 3745 repo.savecommitmessage(memctx.description())
3746 3746 n = memctx.commit()
3747 3747 checkexact(repo, n, nodeid)
3748 3748 finally:
3749 3749 store.close()
3750 3750 if n:
3751 3751 # i18n: refers to a short changeset id
3752 3752 msg = _('created %s') % short(n)
3753 3753 return (msg, n)
3754 3754 finally:
3755 3755 os.unlink(tmpname)
3756 3756
3757 3757 try:
3758 3758 try:
3759 3759 wlock = repo.wlock()
3760 3760 if not opts.get('no_commit'):
3761 3761 lock = repo.lock()
3762 3762 tr = repo.transaction('import')
3763 3763 parents = repo.parents()
3764 3764 for patchurl in patches:
3765 3765 if patchurl == '-':
3766 3766 ui.status(_('applying patch from stdin\n'))
3767 3767 patchfile = ui.fin
3768 3768 patchurl = 'stdin' # for error message
3769 3769 else:
3770 3770 patchurl = os.path.join(base, patchurl)
3771 3771 ui.status(_('applying %s\n') % patchurl)
3772 3772 patchfile = url.open(ui, patchurl)
3773 3773
3774 3774 haspatch = False
3775 3775 for hunk in patch.split(patchfile):
3776 3776 (msg, node) = tryone(ui, hunk, parents)
3777 3777 if msg:
3778 3778 haspatch = True
3779 3779 ui.note(msg + '\n')
3780 3780 if update or opts.get('exact'):
3781 3781 parents = repo.parents()
3782 3782 else:
3783 3783 parents = [repo[node]]
3784 3784
3785 3785 if not haspatch:
3786 3786 raise util.Abort(_('%s: no diffs found') % patchurl)
3787 3787
3788 3788 if tr:
3789 3789 tr.close()
3790 3790 if msgs:
3791 3791 repo.savecommitmessage('\n* * *\n'.join(msgs))
3792 3792 except: # re-raises
3793 3793 # wlock.release() indirectly calls dirstate.write(): since
3794 3794 # we're crashing, we do not want to change the working dir
3795 3795 # parent after all, so make sure it writes nothing
3796 3796 repo.dirstate.invalidate()
3797 3797 raise
3798 3798 finally:
3799 3799 if tr:
3800 3800 tr.release()
3801 3801 release(lock, wlock)
3802 3802
3803 3803 @command('incoming|in',
3804 3804 [('f', 'force', None,
3805 3805 _('run even if remote repository is unrelated')),
3806 3806 ('n', 'newest-first', None, _('show newest record first')),
3807 3807 ('', 'bundle', '',
3808 3808 _('file to store the bundles into'), _('FILE')),
3809 3809 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3810 3810 ('B', 'bookmarks', False, _("compare bookmarks")),
3811 3811 ('b', 'branch', [],
3812 3812 _('a specific branch you would like to pull'), _('BRANCH')),
3813 3813 ] + logopts + remoteopts + subrepoopts,
3814 3814 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3815 3815 def incoming(ui, repo, source="default", **opts):
3816 3816 """show new changesets found in source
3817 3817
3818 3818 Show new changesets found in the specified path/URL or the default
3819 3819 pull location. These are the changesets that would have been pulled
3820 3820 if a pull were run at the time you issued this command.
3821 3821
3822 3822 For a remote repository, using --bundle avoids downloading the
3823 3823 changesets twice if this command is followed by a pull.
3824 3824
3825 3825 See pull for valid source format details.
3826 3826
3827 3827 Returns 0 if there are incoming changes, 1 otherwise.
3828 3828 """
3829 3829 if opts.get('bundle') and opts.get('subrepos'):
3830 3830 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3831 3831
3832 3832 if opts.get('bookmarks'):
3833 3833 source, branches = hg.parseurl(ui.expandpath(source),
3834 3834 opts.get('branch'))
3835 3835 other = hg.peer(repo, opts, source)
3836 3836 if 'bookmarks' not in other.listkeys('namespaces'):
3837 3837 ui.warn(_("remote doesn't support bookmarks\n"))
3838 3838 return 0
3839 3839 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3840 3840 return bookmarks.diff(ui, repo, other)
3841 3841
3842 3842 repo._subtoppath = ui.expandpath(source)
3843 3843 try:
3844 3844 return hg.incoming(ui, repo, source, opts)
3845 3845 finally:
3846 3846 del repo._subtoppath
3847 3847
3848 3848
3849 3849 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3850 3850 def init(ui, dest=".", **opts):
3851 3851 """create a new repository in the given directory
3852 3852
3853 3853 Initialize a new repository in the given directory. If the given
3854 3854 directory does not exist, it will be created.
3855 3855
3856 3856 If no directory is given, the current directory is used.
3857 3857
3858 3858 It is possible to specify an ``ssh://`` URL as the destination.
3859 3859 See :hg:`help urls` for more information.
3860 3860
3861 3861 Returns 0 on success.
3862 3862 """
3863 3863 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3864 3864
3865 3865 @command('locate',
3866 3866 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3867 3867 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3868 3868 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3869 3869 ] + walkopts,
3870 3870 _('[OPTION]... [PATTERN]...'))
3871 3871 def locate(ui, repo, *pats, **opts):
3872 3872 """locate files matching specific patterns
3873 3873
3874 3874 Print files under Mercurial control in the working directory whose
3875 3875 names match the given patterns.
3876 3876
3877 3877 By default, this command searches all directories in the working
3878 3878 directory. To search just the current directory and its
3879 3879 subdirectories, use "--include .".
3880 3880
3881 3881 If no patterns are given to match, this command prints the names
3882 3882 of all files under Mercurial control in the working directory.
3883 3883
3884 3884 If you want to feed the output of this command into the "xargs"
3885 3885 command, use the -0 option to both this command and "xargs". This
3886 3886 will avoid the problem of "xargs" treating single filenames that
3887 3887 contain whitespace as multiple filenames.
3888 3888
3889 3889 Returns 0 if a match is found, 1 otherwise.
3890 3890 """
3891 3891 end = opts.get('print0') and '\0' or '\n'
3892 3892 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3893 3893
3894 3894 ret = 1
3895 3895 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3896 3896 m.bad = lambda x, y: False
3897 3897 for abs in repo[rev].walk(m):
3898 3898 if not rev and abs not in repo.dirstate:
3899 3899 continue
3900 3900 if opts.get('fullpath'):
3901 3901 ui.write(repo.wjoin(abs), end)
3902 3902 else:
3903 3903 ui.write(((pats and m.rel(abs)) or abs), end)
3904 3904 ret = 0
3905 3905
3906 3906 return ret
3907 3907
3908 3908 @command('^log|history',
3909 3909 [('f', 'follow', None,
3910 3910 _('follow changeset history, or file history across copies and renames')),
3911 3911 ('', 'follow-first', None,
3912 3912 _('only follow the first parent of merge changesets (DEPRECATED)')),
3913 3913 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3914 3914 ('C', 'copies', None, _('show copied files')),
3915 3915 ('k', 'keyword', [],
3916 3916 _('do case-insensitive search for a given text'), _('TEXT')),
3917 3917 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3918 3918 ('', 'removed', None, _('include revisions where files were removed')),
3919 3919 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3920 3920 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3921 3921 ('', 'only-branch', [],
3922 3922 _('show only changesets within the given named branch (DEPRECATED)'),
3923 3923 _('BRANCH')),
3924 3924 ('b', 'branch', [],
3925 3925 _('show changesets within the given named branch'), _('BRANCH')),
3926 3926 ('P', 'prune', [],
3927 3927 _('do not display revision or any of its ancestors'), _('REV')),
3928 3928 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
3929 3929 ] + logopts + walkopts,
3930 3930 _('[OPTION]... [FILE]'))
3931 3931 def log(ui, repo, *pats, **opts):
3932 3932 """show revision history of entire repository or files
3933 3933
3934 3934 Print the revision history of the specified files or the entire
3935 3935 project.
3936 3936
3937 3937 If no revision range is specified, the default is ``tip:0`` unless
3938 3938 --follow is set, in which case the working directory parent is
3939 3939 used as the starting revision.
3940 3940
3941 3941 File history is shown without following rename or copy history of
3942 3942 files. Use -f/--follow with a filename to follow history across
3943 3943 renames and copies. --follow without a filename will only show
3944 3944 ancestors or descendants of the starting revision.
3945 3945
3946 3946 By default this command prints revision number and changeset id,
3947 3947 tags, non-trivial parents, user, date and time, and a summary for
3948 3948 each commit. When the -v/--verbose switch is used, the list of
3949 3949 changed files and full commit message are shown.
3950 3950
3951 3951 .. note::
3952 3952 log -p/--patch may generate unexpected diff output for merge
3953 3953 changesets, as it will only compare the merge changeset against
3954 3954 its first parent. Also, only files different from BOTH parents
3955 3955 will appear in files:.
3956 3956
3957 3957 .. note::
3958 3958 for performance reasons, log FILE may omit duplicate changes
3959 3959 made on branches and will not show deletions. To see all
3960 3960 changes including duplicates and deletions, use the --removed
3961 3961 switch.
3962 3962
3963 3963 .. container:: verbose
3964 3964
3965 3965 Some examples:
3966 3966
3967 3967 - changesets with full descriptions and file lists::
3968 3968
3969 3969 hg log -v
3970 3970
3971 3971 - changesets ancestral to the working directory::
3972 3972
3973 3973 hg log -f
3974 3974
3975 3975 - last 10 commits on the current branch::
3976 3976
3977 3977 hg log -l 10 -b .
3978 3978
3979 3979 - changesets showing all modifications of a file, including removals::
3980 3980
3981 3981 hg log --removed file.c
3982 3982
3983 3983 - all changesets that touch a directory, with diffs, excluding merges::
3984 3984
3985 3985 hg log -Mp lib/
3986 3986
3987 3987 - all revision numbers that match a keyword::
3988 3988
3989 3989 hg log -k bug --template "{rev}\\n"
3990 3990
3991 3991 - check if a given changeset is included in a tagged release::
3992 3992
3993 3993 hg log -r "a21ccf and ancestor(1.9)"
3994 3994
3995 3995 - find all changesets by some user in a date range::
3996 3996
3997 3997 hg log -k alice -d "may 2008 to jul 2008"
3998 3998
3999 3999 - summary of all changesets after the last tag::
4000 4000
4001 4001 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4002 4002
4003 4003 See :hg:`help dates` for a list of formats valid for -d/--date.
4004 4004
4005 4005 See :hg:`help revisions` and :hg:`help revsets` for more about
4006 4006 specifying revisions.
4007 4007
4008 4008 See :hg:`help templates` for more about pre-packaged styles and
4009 4009 specifying custom templates.
4010 4010
4011 4011 Returns 0 on success.
4012 4012 """
4013 4013
4014 4014 matchfn = scmutil.match(repo[None], pats, opts)
4015 4015 limit = cmdutil.loglimit(opts)
4016 4016 count = 0
4017 4017
4018 4018 getrenamed, endrev = None, None
4019 4019 if opts.get('copies'):
4020 4020 if opts.get('rev'):
4021 4021 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4022 4022 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4023 4023
4024 4024 df = False
4025 4025 if opts["date"]:
4026 4026 df = util.matchdate(opts["date"])
4027 4027
4028 4028 branches = opts.get('branch', []) + opts.get('only_branch', [])
4029 4029 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4030 4030
4031 4031 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4032 4032 def prep(ctx, fns):
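# walkchangerevs callback: filter each candidate changeset against the
# merge, branch, hidden, date, user and keyword options, then hand it
# to the displayer together with any rename information.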
4033 4033 rev = ctx.rev()
4034 4034 parents = [p for p in repo.changelog.parentrevs(rev)
4035 4035 if p != nullrev]
4036 4036 if opts.get('no_merges') and len(parents) == 2:
4037 4037 return
4038 4038 if opts.get('only_merges') and len(parents) != 2:
4039 4039 return
4040 4040 if opts.get('branch') and ctx.branch() not in opts['branch']:
4041 4041 return
4042 4042 if not opts.get('hidden') and ctx.hidden():
4043 4043 return
4044 4044 if df and not df(ctx.date()[0]):
4045 4045 return
4046 4046
4047 4047 lower = encoding.lower
4048 4048 if opts.get('user'):
4049 4049 luser = lower(ctx.user())
4050 4050 for k in [lower(x) for x in opts['user']]:
4051 4051 if (k in luser):
4052 4052 break
4053 4053 else:
4054 4054 return
4055 4055 if opts.get('keyword'):
4056 4056 luser = lower(ctx.user())
4057 4057 ldesc = lower(ctx.description())
4058 4058 lfiles = lower(" ".join(ctx.files()))
4059 4059 for k in [lower(x) for x in opts['keyword']]:
4060 4060 if (k in luser or k in ldesc or k in lfiles):
4061 4061 break
4062 4062 else:
4063 4063 return
4064 4064
4065 4065 copies = None
4066 4066 if getrenamed is not None and rev:
4067 4067 copies = []
4068 4068 for fn in ctx.files():
4069 4069 rename = getrenamed(fn, rev)
4070 4070 if rename:
4071 4071 copies.append((fn, rename[0]))
4072 4072
4073 4073 revmatchfn = None
4074 4074 if opts.get('patch') or opts.get('stat'):
4075 4075 if opts.get('follow') or opts.get('follow_first'):
4076 4076 # note: this might be wrong when following through merges
4077 4077 revmatchfn = scmutil.match(repo[None], fns, default='path')
4078 4078 else:
4079 4079 revmatchfn = matchfn
4080 4080
4081 4081 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4082 4082
4083 4083 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4084 4084 if count == limit:
4085 4085 break
4086 4086 if displayer.flush(ctx.rev()):
4087 4087 count += 1
4088 4088 displayer.close()
4089 4089
4090 4090 @command('manifest',
4091 4091 [('r', 'rev', '', _('revision to display'), _('REV')),
4092 4092 ('', 'all', False, _("list files from all revisions"))],
4093 4093 _('[-r REV]'))
4094 4094 def manifest(ui, repo, node=None, rev=None, **opts):
4095 4095 """output the current or given revision of the project manifest
4096 4096
4097 4097 Print a list of version controlled files for the given revision.
4098 4098 If no revision is given, the first parent of the working directory
4099 4099 is used, or the null revision if no revision is checked out.
4100 4100
4101 4101 With -v, print file permissions, symlink and executable bits.
4102 4102 With --debug, print file revision hashes.
4103 4103
4104 4104 If option --all is specified, the list of all files from all revisions
4105 4105 is printed. This includes deleted and renamed files.
4106 4106
4107 4107 Returns 0 on success.
4108 4108 """
4109 4109 if opts.get('all'):
4110 4110 if rev or node:
4111 4111 raise util.Abort(_("can't specify a revision with --all"))
4112 4112
4113 4113 res = []
4114 4114 prefix = "data/"
4115 4115 suffix = ".i"
4116 4116 plen = len(prefix)
4117 4117 slen = len(suffix)
4118 4118 lock = repo.lock()
4119 4119 try:
4120 4120 for fn, b, size in repo.store.datafiles():
4121 4121 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4122 4122 res.append(fn[plen:-slen])
4123 4123 finally:
4124 4124 lock.release()
4125 4125 for f in sorted(res):
4126 4126 ui.write("%s\n" % f)
4127 4127 return
4128 4128
4129 4129 if rev and node:
4130 4130 raise util.Abort(_("please specify just one revision"))
4131 4131
4132 4132 if not node:
4133 4133 node = rev
4134 4134
4135 4135 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
4136 4136 ctx = scmutil.revsingle(repo, node)
4137 4137 for f in ctx:
4138 4138 if ui.debugflag:
4139 4139 ui.write("%40s " % hex(ctx.manifest()[f]))
4140 4140 if ui.verbose:
4141 4141 ui.write(decor[ctx.flags(f)])
4142 4142 ui.write("%s\n" % f)
4143 4143
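# A minimal sketch of the --all scan above (helper name hypothetical): every
# path ever tracked corresponds to a non-empty "data/<path>.i" revlog in the
# store, so listing those entries yields all files from all revisions.
def _alltrackedfiles(repo):
    prefix, suffix = "data/", ".i"
    res = []
    lock = repo.lock()
    try:
        for fn, b, size in repo.store.datafiles():
            if size != 0 and fn.startswith(prefix) and fn.endswith(suffix):
                res.append(fn[len(prefix):-len(suffix)])
    finally:
        lock.release()
    return sorted(res)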
4144 4144 @command('^merge',
4145 4145 [('f', 'force', None, _('force a merge with outstanding changes')),
4146 4146 ('r', 'rev', '', _('revision to merge'), _('REV')),
4147 4147 ('P', 'preview', None,
4148 4148 _('review revisions to merge (no merge is performed)'))
4149 4149 ] + mergetoolopts,
4150 4150 _('[-P] [-f] [[-r] REV]'))
4151 4151 def merge(ui, repo, node=None, **opts):
4152 4152 """merge working directory with another revision
4153 4153
4154 4154 The current working directory is updated with all changes made in
4155 4155 the requested revision since the last common predecessor revision.
4156 4156
4157 4157 Files that changed between either parent are marked as changed for
4158 4158 the next commit and a commit must be performed before any further
4159 4159 updates to the repository are allowed. The next commit will have
4160 4160 two parents.
4161 4161
4162 4162 ``--tool`` can be used to specify the merge tool used for file
4163 4163 merges. It overrides the HGMERGE environment variable and your
4164 4164 configuration files. See :hg:`help merge-tools` for options.
4165 4165
4166 4166 If no revision is specified, the working directory's parent is a
4167 4167 head revision, and the current branch contains exactly one other
4168 4168 head, the other head is merged with by default. Otherwise, an
4169 4169 explicit revision with which to merge must be provided.
4170 4170
4171 4171 :hg:`resolve` must be used to resolve unresolved files.
4172 4172
4173 4173 To undo an uncommitted merge, use :hg:`update --clean .` which
4174 4174 will check out a clean copy of the original merge parent, losing
4175 4175 all changes.
4176 4176
4177 4177 Returns 0 on success, 1 if there are unresolved files.
4178 4178 """
4179 4179
4180 4180 if opts.get('rev') and node:
4181 4181 raise util.Abort(_("please specify just one revision"))
4182 4182 if not node:
4183 4183 node = opts.get('rev')
4184 4184
4185 4185 if node:
4186 4186 node = scmutil.revsingle(repo, node).node()
4187 4187
4188 4188 if not node and repo._bookmarkcurrent:
4189 4189 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4190 4190 curhead = repo[repo._bookmarkcurrent]
4191 4191 if len(bmheads) == 2:
4192 4192 if curhead == bmheads[0]:
4193 4193 node = bmheads[1]
4194 4194 else:
4195 4195 node = bmheads[0]
4196 4196 elif len(bmheads) > 2:
4197 4197 raise util.Abort(_("multiple matching bookmarks to merge - "
4198 4198 "please merge with an explicit rev or bookmark"),
4199 4199 hint=_("run 'hg heads' to see all heads"))
4200 4200 elif len(bmheads) <= 1:
4201 4201 raise util.Abort(_("no matching bookmark to merge - "
4202 4202 "please merge with an explicit rev or bookmark"),
4203 4203 hint=_("run 'hg heads' to see all heads"))
4204 4204
4205 4205 if not node and not repo._bookmarkcurrent:
4206 4206 branch = repo[None].branch()
4207 4207 bheads = repo.branchheads(branch)
4208 4208 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4209 4209
4210 4210 if len(nbhs) > 2:
4211 4211 raise util.Abort(_("branch '%s' has %d heads - "
4212 4212 "please merge with an explicit rev")
4213 4213 % (branch, len(bheads)),
4214 4214 hint=_("run 'hg heads .' to see heads"))
4215 4215
4216 4216 parent = repo.dirstate.p1()
4217 4217 if len(nbhs) == 1:
4218 4218 if len(bheads) > 1:
4219 4219 raise util.Abort(_("heads are bookmarked - "
4220 4220 "please merge with an explicit rev"),
4221 4221 hint=_("run 'hg heads' to see all heads"))
4222 4222 if len(repo.heads()) > 1:
4223 4223 raise util.Abort(_("branch '%s' has one head - "
4224 4224 "please merge with an explicit rev")
4225 4225 % branch,
4226 4226 hint=_("run 'hg heads' to see all heads"))
4227 4227 msg, hint = _('nothing to merge'), None
4228 4228 if parent != repo.lookup(branch):
4229 4229 hint = _("use 'hg update' instead")
4230 4230 raise util.Abort(msg, hint=hint)
4231 4231
4232 4232 if parent not in bheads:
4233 4233 raise util.Abort(_('working directory not at a head revision'),
4234 4234 hint=_("use 'hg update' or merge with an "
4235 4235 "explicit revision"))
4236 4236 if parent == nbhs[0]:
4237 4237 node = nbhs[-1]
4238 4238 else:
4239 4239 node = nbhs[0]
4240 4240
4241 4241 if opts.get('preview'):
4242 4242 # find nodes that are ancestors of p2 but not of p1
4243 4243 p1 = repo.lookup('.')
4244 4244 p2 = repo.lookup(node)
4245 4245 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4246 4246
4247 4247 displayer = cmdutil.show_changeset(ui, repo, opts)
4248 4248 for node in nodes:
4249 4249 displayer.show(repo[node])
4250 4250 displayer.close()
4251 4251 return 0
4252 4252
4253 4253 try:
4254 4254 # ui.forcemerge is an internal variable, do not document
4255 4255 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4256 4256 return hg.merge(repo, node, force=opts.get('force'))
4257 4257 finally:
4258 4258 ui.setconfig('ui', 'forcemerge', '')
4259 4259
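# A minimal sketch of the implicit head selection above (helper name
# hypothetical): with no revision and no active bookmark, a bare "hg merge"
# only succeeds when the current branch has exactly two non-bookmarked heads
# and the working directory sits on one of them; the other head is the target.
def _implicitmergetarget(repo):
    branch = repo[None].branch()
    bheads = repo.branchheads(branch)
    nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
    parent = repo.dirstate.p1()
    if len(nbhs) != 2 or parent not in bheads:
        return None
    if parent == nbhs[0]:
        return nbhs[1]
    return nbhs[0]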
4260 4260 @command('outgoing|out',
4261 4261 [('f', 'force', None, _('run even when the destination is unrelated')),
4262 4262 ('r', 'rev', [],
4263 4263 _('a changeset intended to be included in the destination'), _('REV')),
4264 4264 ('n', 'newest-first', None, _('show newest record first')),
4265 4265 ('B', 'bookmarks', False, _('compare bookmarks')),
4266 4266 ('b', 'branch', [], _('a specific branch you would like to push'),
4267 4267 _('BRANCH')),
4268 4268 ] + logopts + remoteopts + subrepoopts,
4269 4269 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4270 4270 def outgoing(ui, repo, dest=None, **opts):
4271 4271 """show changesets not found in the destination
4272 4272
4273 4273 Show changesets not found in the specified destination repository
4274 4274 or the default push location. These are the changesets that would
4275 4275 be pushed if a push was requested.
4276 4276
4277 4277 See pull for details of valid destination formats.
4278 4278
4279 4279 Returns 0 if there are outgoing changes, 1 otherwise.
4280 4280 """
4281 4281
4282 4282 if opts.get('bookmarks'):
4283 4283 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4284 4284 dest, branches = hg.parseurl(dest, opts.get('branch'))
4285 4285 other = hg.peer(repo, opts, dest)
4286 4286 if 'bookmarks' not in other.listkeys('namespaces'):
4287 4287 ui.warn(_("remote doesn't support bookmarks\n"))
4288 4288 return 0
4289 4289 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4290 4290 return bookmarks.diff(ui, other, repo)
4291 4291
4292 4292 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4293 4293 try:
4294 4294 return hg.outgoing(ui, repo, dest, opts)
4295 4295 finally:
4296 4296 del repo._subtoppath
4297 4297
4298 4298 @command('parents',
4299 4299 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4300 4300 ] + templateopts,
4301 4301 _('[-r REV] [FILE]'))
4302 4302 def parents(ui, repo, file_=None, **opts):
4303 4303 """show the parents of the working directory or revision
4304 4304
4305 4305 Print the working directory's parent revisions. If a revision is
4306 4306 given via -r/--rev, the parent of that revision will be printed.
4307 4307 If a file argument is given, the revision in which the file was
4308 4308 last changed (before the working directory revision or the
4309 4309 argument to --rev if given) is printed.
4310 4310
4311 4311 Returns 0 on success.
4312 4312 """
4313 4313
4314 4314 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4315 4315
4316 4316 if file_:
4317 4317 m = scmutil.match(ctx, (file_,), opts)
4318 4318 if m.anypats() or len(m.files()) != 1:
4319 4319 raise util.Abort(_('can only specify an explicit filename'))
4320 4320 file_ = m.files()[0]
4321 4321 filenodes = []
4322 4322 for cp in ctx.parents():
4323 4323 if not cp:
4324 4324 continue
4325 4325 try:
4326 4326 filenodes.append(cp.filenode(file_))
4327 4327 except error.LookupError:
4328 4328 pass
4329 4329 if not filenodes:
4330 4330 raise util.Abort(_("'%s' not found in manifest!") % file_)
4331 4331 fl = repo.file(file_)
4332 4332 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
4333 4333 else:
4334 4334 p = [cp.node() for cp in ctx.parents()]
4335 4335
4336 4336 displayer = cmdutil.show_changeset(ui, repo, opts)
4337 4337 for n in p:
4338 4338 if n != nullid:
4339 4339 displayer.show(repo[n])
4340 4340 displayer.close()
4341 4341
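# A minimal sketch of the file-argument handling above (helper name
# hypothetical): a file's manifest entry points into its filelog, and
# linkrev() maps that filelog revision back to the changelog revision
# that introduced it.
def _lastchangedrev(repo, ctx, path):
    try:
        fnode = ctx.filenode(path)
    except error.LookupError:
        return None
    fl = repo.file(path)
    return fl.linkrev(fl.rev(fnode))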
4342 4342 @command('paths', [], _('[NAME]'))
4343 4343 def paths(ui, repo, search=None):
4344 4344 """show aliases for remote repositories
4345 4345
4346 4346 Show definition of symbolic path name NAME. If no name is given,
4347 4347 show definition of all available names.
4348 4348
4349 4349 Option -q/--quiet suppresses all output when searching for NAME
4350 4350 and shows only the path names when listing all definitions.
4351 4351
4352 4352 Path names are defined in the [paths] section of your
4353 4353 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4354 4354 repository, ``.hg/hgrc`` is used, too.
4355 4355
4356 4356 The path names ``default`` and ``default-push`` have a special
4357 4357 meaning. When performing a push or pull operation, they are used
4358 4358 as fallbacks if no location is specified on the command-line.
4359 4359 When ``default-push`` is set, it will be used for push and
4360 4360 ``default`` will be used for pull; otherwise ``default`` is used
4361 4361 as the fallback for both. When cloning a repository, the clone
4362 4362 source is written as ``default`` in ``.hg/hgrc``. Note that
4363 4363 ``default`` and ``default-push`` apply to all inbound (e.g.
4364 4364 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4365 4365 :hg:`bundle`) operations.
4366 4366
4367 4367 See :hg:`help urls` for more information.
4368 4368
4369 4369 Returns 0 on success.
4370 4370 """
4371 4371 if search:
4372 4372 for name, path in ui.configitems("paths"):
4373 4373 if name == search:
4374 4374 ui.status("%s\n" % util.hidepassword(path))
4375 4375 return
4376 4376 if not ui.quiet:
4377 4377 ui.warn(_("not found!\n"))
4378 4378 return 1
4379 4379 else:
4380 4380 for name, path in ui.configitems("paths"):
4381 4381 if ui.quiet:
4382 4382 ui.write("%s\n" % name)
4383 4383 else:
4384 4384 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4385 4385
4386 4386 @command('^phase',
4387 4387 [('p', 'public', False, _('set changeset phase to public')),
4388 4388 ('d', 'draft', False, _('set changeset phase to draft')),
4389 4389 ('s', 'secret', False, _('set changeset phase to secret')),
4390 4390 ('f', 'force', False, _('allow to move boundary backward')),
4391 4391 ('r', 'rev', [], _('target revision'), _('REV')),
4392 4392 ],
4393 4393 _('[-p|-d|-s] [-f] [-r] REV...'))
4394 4394 def phase(ui, repo, *revs, **opts):
4395 4395 """set or show the current phase name
4396 4396
4397 4397 With no argument, show the phase name of specified revisions.
4398 4398
4399 4399 With one of -p/--public, -d/--draft or -s/--secret, change the
4400 4400 phase value of the specified revisions.
4401 4401
4402 4402 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4403 4403 lower phase to a higher phase. Phases are ordered as follows::
4404 4404
4405 4405 public < draft < secret
4406 4406
4407 4407 Returns 0 on success, 1 if no phases were changed or some could not
4408 4408 be changed.
4409 4409 """
4410 4410 # search for a unique phase argument
4411 4411 targetphase = None
4412 4412 for idx, name in enumerate(phases.phasenames):
4413 4413 if opts[name]:
4414 4414 if targetphase is not None:
4415 4415 raise util.Abort(_('only one phase can be specified'))
4416 4416 targetphase = idx
4417 4417
4418 4418 # look for specified revision
4419 4419 revs = list(revs)
4420 4420 revs.extend(opts['rev'])
4421 4421 if not revs:
4422 4422 raise util.Abort(_('no revisions specified'))
4423 4423
4424 4424 revs = scmutil.revrange(repo, revs)
4425 4425
4426 4426 lock = None
4427 4427 ret = 0
4428 4428 if targetphase is None:
4429 4429 # display
4430 4430 for r in revs:
4431 4431 ctx = repo[r]
4432 4432 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4433 4433 else:
4434 4434 lock = repo.lock()
4435 4435 try:
4436 4436 # set phase
4437 4437 if not revs:
4438 4438 raise util.Abort(_('empty revision set'))
4439 4439 nodes = [repo[r].node() for r in revs]
4440 4440 olddata = repo._phasecache.getphaserevs(repo)[:]
4441 4441 phases.advanceboundary(repo, targetphase, nodes)
4442 4442 if opts['force']:
4443 4443 phases.retractboundary(repo, targetphase, nodes)
4444 4444 finally:
4445 4445 lock.release()
4446 4446 newdata = repo._phasecache.getphaserevs(repo)
4447 4447 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4448 4448 rejected = [n for n in nodes
4449 4449 if newdata[repo[n].rev()] < targetphase]
4450 4450 if rejected:
4451 4451 ui.warn(_('cannot move %i changesets to a more permissive '
4452 4452 'phase, use --force\n') % len(rejected))
4453 4453 ret = 1
4454 4454 if changes:
4455 4455 msg = _('phase changed for %i changesets\n') % changes
4456 4456 if ret:
4457 4457 ui.status(msg)
4458 4458 else:
4459 4459 ui.note(msg)
4460 4460 else:
4461 4461 ui.warn(_('no phases changed\n'))
4462 4462 ret = 1
4463 4463 return ret
4464 4464
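# A minimal sketch (helper name hypothetical) of the phase-name lookup above:
# the index of a name in phases.phasenames is its position in the
# public < draft < secret ordering that --force is needed to move against.
def _phaseindex(name):
    for idx, phasename in enumerate(phases.phasenames):
        if phasename == name:
            return idx
    raise util.Abort(_('unknown phase: %s') % name)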
4465 4465 def postincoming(ui, repo, modheads, optupdate, checkout):
4466 4466 if modheads == 0:
4467 4467 return
4468 4468 if optupdate:
4469 4469 movemarkfrom = repo['.'].node()
4470 4470 try:
4471 4471 ret = hg.update(repo, checkout)
4472 4472 except util.Abort, inst:
4473 4473 ui.warn(_("not updating: %s\n") % str(inst))
4474 4474 return 0
4475 4475 if not ret and not checkout:
4476 4476 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4477 4477 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4478 4478 return ret
4479 4479 if modheads > 1:
4480 4480 currentbranchheads = len(repo.branchheads())
4481 4481 if currentbranchheads == modheads:
4482 4482 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4483 4483 elif currentbranchheads > 1:
4484 4484 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4485 4485 "merge)\n"))
4486 4486 else:
4487 4487 ui.status(_("(run 'hg heads' to see heads)\n"))
4488 4488 else:
4489 4489 ui.status(_("(run 'hg update' to get a working copy)\n"))
4490 4490
4491 4491 @command('^pull',
4492 4492 [('u', 'update', None,
4493 4493 _('update to new branch head if changesets were pulled')),
4494 4494 ('f', 'force', None, _('run even when remote repository is unrelated')),
4495 4495 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4496 4496 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4497 4497 ('b', 'branch', [], _('a specific branch you would like to pull'),
4498 4498 _('BRANCH')),
4499 4499 ] + remoteopts,
4500 4500 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4501 4501 def pull(ui, repo, source="default", **opts):
4502 4502 """pull changes from the specified source
4503 4503
4504 4504 Pull changes from a remote repository to a local one.
4505 4505
4506 4506 This finds all changes from the repository at the specified path
4507 4507 or URL and adds them to a local repository (the current one unless
4508 4508 -R is specified). By default, this does not update the copy of the
4509 4509 project in the working directory.
4510 4510
4511 4511 Use :hg:`incoming` if you want to see what would have been added
4512 4512 by a pull at the time you issued this command. If you then decide
4513 4513 to add those changes to the repository, you should use :hg:`pull
4514 4514 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4515 4515
4516 4516 If SOURCE is omitted, the 'default' path will be used.
4517 4517 See :hg:`help urls` for more information.
4518 4518
4519 4519 Returns 0 on success, 1 if an update had unresolved files.
4520 4520 """
4521 4521 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4522 4522 other = hg.peer(repo, opts, source)
4523 4523 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4524 4524 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
4525 4525
4526 4526 if opts.get('bookmark'):
4527 4527 if not revs:
4528 4528 revs = []
4529 4529 rb = other.listkeys('bookmarks')
4530 4530 for b in opts['bookmark']:
4531 4531 if b not in rb:
4532 4532 raise util.Abort(_('remote bookmark %s not found!') % b)
4533 4533 revs.append(rb[b])
4534 4534
4535 4535 if revs:
4536 4536 try:
4537 4537 revs = [other.lookup(rev) for rev in revs]
4538 4538 except error.CapabilityError:
4539 4539 err = _("other repository doesn't support revision lookup, "
4540 4540 "so a rev cannot be specified.")
4541 4541 raise util.Abort(err)
4542 4542
4543 4543 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4544 4544 bookmarks.updatefromremote(ui, repo, other, source)
4545 4545 if checkout:
4546 4546 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4547 4547 repo._subtoppath = source
4548 4548 try:
4549 4549 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4550 4550
4551 4551 finally:
4552 4552 del repo._subtoppath
4553 4553
4554 4554 # update specified bookmarks
4555 4555 if opts.get('bookmark'):
4556 4556 for b in opts['bookmark']:
4557 4557 # explicit pull overrides local bookmark if any
4558 4558 ui.status(_("importing bookmark %s\n") % b)
4559 4559 repo._bookmarks[b] = repo[rb[b]].node()
4560 4560 bookmarks.write(repo)
4561 4561
4562 4562 return ret
4563 4563
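# A minimal sketch of the -B/--bookmark handling above (helper name
# hypothetical): remote bookmarks come back from listkeys() as hex nodes,
# which the peer's lookup() turns into the binary heads passed to pull().
def _bookmarkheads(other, names):
    rb = other.listkeys('bookmarks')
    heads = []
    for b in names:
        if b not in rb:
            raise util.Abort(_('remote bookmark %s not found!') % b)
        heads.append(other.lookup(rb[b]))
    return heads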
4564 4564 @command('^push',
4565 4565 [('f', 'force', None, _('force push')),
4566 4566 ('r', 'rev', [],
4567 4567 _('a changeset intended to be included in the destination'),
4568 4568 _('REV')),
4569 4569 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4570 4570 ('b', 'branch', [],
4571 4571 _('a specific branch you would like to push'), _('BRANCH')),
4572 4572 ('', 'new-branch', False, _('allow pushing a new branch')),
4573 4573 ] + remoteopts,
4574 4574 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4575 4575 def push(ui, repo, dest=None, **opts):
4576 4576 """push changes to the specified destination
4577 4577
4578 4578 Push changesets from the local repository to the specified
4579 4579 destination.
4580 4580
4581 4581 This operation is symmetrical to pull: it is identical to a pull
4582 4582 in the destination repository from the current one.
4583 4583
4584 4584 By default, push will not allow creation of new heads at the
4585 4585 destination, since multiple heads would make it unclear which head
4586 4586 to use. In this situation, it is recommended to pull and merge
4587 4587 before pushing.
4588 4588
4589 4589 Use --new-branch if you want to allow push to create a new named
4590 4590 branch that is not present at the destination. This allows you to
4591 4591 only create a new branch without forcing other changes.
4592 4592
4593 4593 Use -f/--force to override the default behavior and push all
4594 4594 changesets on all branches.
4595 4595
4596 4596 If -r/--rev is used, the specified revision and all its ancestors
4597 4597 will be pushed to the remote repository.
4598 4598
4599 4599 Please see :hg:`help urls` for important details about ``ssh://``
4600 4600 URLs. If DESTINATION is omitted, a default path will be used.
4601 4601
4602 4602 Returns 0 if push was successful, 1 if nothing to push.
4603 4603 """
4604 4604
4605 4605 if opts.get('bookmark'):
4606 4606 for b in opts['bookmark']:
4607 4607 # translate -B options to -r so changesets get pushed
4608 4608 if b in repo._bookmarks:
4609 4609 opts.setdefault('rev', []).append(b)
4610 4610 else:
4611 4611 # if we try to push a deleted bookmark, translate it to null
4612 4612 # this lets simultaneous -r, -b options continue working
4613 4613 opts.setdefault('rev', []).append("null")
4614 4614
4615 4615 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4616 4616 dest, branches = hg.parseurl(dest, opts.get('branch'))
4617 4617 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4618 4618 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4619 4619 other = hg.peer(repo, opts, dest)
4620 4620 if revs:
4621 4621 revs = [repo.lookup(rev) for rev in revs]
4622 4622
4623 4623 repo._subtoppath = dest
4624 4624 try:
4625 4625 # push subrepos depth-first for coherent ordering
4626 4626 c = repo['']
4627 4627 subs = c.substate # only repos that are committed
4628 4628 for s in sorted(subs):
4629 4629 if c.sub(s).push(opts) == 0:
4630 4630 return False
4631 4631 finally:
4632 4632 del repo._subtoppath
4633 4633 result = repo.push(other, opts.get('force'), revs=revs,
4634 4634 newbranch=opts.get('new_branch'))
4635 4635
4636 4636 result = not result
4637 4637
4638 4638 if opts.get('bookmark'):
4639 4639 rb = other.listkeys('bookmarks')
4640 4640 for b in opts['bookmark']:
4641 4641 # explicit push overrides remote bookmark if any
4642 4642 if b in repo._bookmarks:
4643 4643 ui.status(_("exporting bookmark %s\n") % b)
4644 4644 new = repo[b].hex()
4645 4645 elif b in rb:
4646 4646 ui.status(_("deleting remote bookmark %s\n") % b)
4647 4647 new = '' # delete
4648 4648 else:
4649 4649 ui.warn(_('bookmark %s does not exist on the local '
4650 4650 'or remote repository!\n') % b)
4651 4651 return 2
4652 4652 old = rb.get(b, '')
4653 4653 r = other.pushkey('bookmarks', b, old, new)
4654 4654 if not r:
4655 4655 ui.warn(_('updating bookmark %s failed!\n') % b)
4656 4656 if not result:
4657 4657 result = 2
4658 4658
4659 4659 return result
4660 4660
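# A minimal sketch of the bookmark export loop above (helper name
# hypothetical): pushing a single local bookmark is a pushkey transaction
# keyed on the remote's current value; an empty new value deletes it.
def _pushbookmark(ui, repo, other, name):
    rb = other.listkeys('bookmarks')
    if name in repo._bookmarks:
        new = repo[name].hex()
    elif name in rb:
        new = ''  # delete the remote bookmark
    else:
        return False
    old = rb.get(name, '')
    return bool(other.pushkey('bookmarks', name, old, new))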
4661 4661 @command('recover', [])
4662 4662 def recover(ui, repo):
4663 4663 """roll back an interrupted transaction
4664 4664
4665 4665 Recover from an interrupted commit or pull.
4666 4666
4667 4667 This command tries to fix the repository status after an
4668 4668 interrupted operation. It should only be necessary when Mercurial
4669 4669 suggests it.
4670 4670
4671 4671 Returns 0 if successful, 1 if nothing to recover or verify fails.
4672 4672 """
4673 4673 if repo.recover():
4674 4674 return hg.verify(repo)
4675 4675 return 1
4676 4676
4677 4677 @command('^remove|rm',
4678 4678 [('A', 'after', None, _('record delete for missing files')),
4679 4679 ('f', 'force', None,
4680 4680 _('remove (and delete) file even if added or modified')),
4681 4681 ] + walkopts,
4682 4682 _('[OPTION]... FILE...'))
4683 4683 def remove(ui, repo, *pats, **opts):
4684 4684 """remove the specified files on the next commit
4685 4685
4686 4686 Schedule the indicated files for removal from the current branch.
4687 4687
4688 4688 This command schedules the files to be removed at the next commit.
4689 4689 To undo a remove before that, see :hg:`revert`. To undo added
4690 4690 files, see :hg:`forget`.
4691 4691
4692 4692 .. container:: verbose
4693 4693
4694 4694 -A/--after can be used to remove only files that have already
4695 4695 been deleted, -f/--force can be used to force deletion, and -Af
4696 4696 can be used to remove files from the next revision without
4697 4697 deleting them from the working directory.
4698 4698
4699 4699 The following table details the behavior of remove for different
4700 4700 file states (columns) and option combinations (rows). The file
4701 4701 states are Added [A], Clean [C], Modified [M] and Missing [!]
4702 4702 (as reported by :hg:`status`). The actions are Warn, Remove
4703 4703 (from branch) and Delete (from disk):
4704 4704
4705 4705 ======= == == == ==
4706 4706 A C M !
4707 4707 ======= == == == ==
4708 4708 none W RD W R
4709 4709 -f R RD RD R
4710 4710 -A W W W R
4711 4711 -Af R R R R
4712 4712 ======= == == == ==
4713 4713
4714 4714 Note that remove never deletes files in Added [A] state from the
4715 4715 working directory, not even if option --force is specified.
4716 4716
4717 4717 Returns 0 on success, 1 if any warnings encountered.
4718 4718 """
4719 4719
4720 4720 ret = 0
4721 4721 after, force = opts.get('after'), opts.get('force')
4722 4722 if not pats and not after:
4723 4723 raise util.Abort(_('no files specified'))
4724 4724
4725 4725 m = scmutil.match(repo[None], pats, opts)
4726 4726 s = repo.status(match=m, clean=True)
4727 4727 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4728 4728
4729 4729 for f in m.files():
4730 4730 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
4731 4731 if os.path.exists(m.rel(f)):
4732 4732 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4733 4733 ret = 1
4734 4734
4735 4735 if force:
4736 4736 list = modified + deleted + clean + added
4737 4737 elif after:
4738 4738 list = deleted
4739 4739 for f in modified + added + clean:
4740 4740 ui.warn(_('not removing %s: file still exists (use -f'
4741 4741 ' to force removal)\n') % m.rel(f))
4742 4742 ret = 1
4743 4743 else:
4744 4744 list = deleted + clean
4745 4745 for f in modified:
4746 4746 ui.warn(_('not removing %s: file is modified (use -f'
4747 4747 ' to force removal)\n') % m.rel(f))
4748 4748 ret = 1
4749 4749 for f in added:
4750 4750 ui.warn(_('not removing %s: file has been marked for add'
4751 4751 ' (use forget to undo)\n') % m.rel(f))
4752 4752 ret = 1
4753 4753
4754 4754 for f in sorted(list):
4755 4755 if ui.verbose or not m.exact(f):
4756 4756 ui.status(_('removing %s\n') % m.rel(f))
4757 4757
4758 4758 wlock = repo.wlock()
4759 4759 try:
4760 4760 if not after:
4761 4761 for f in list:
4762 4762 if f in added:
4763 4763 continue # we never unlink added files on remove
4764 4764 try:
4765 4765 util.unlinkpath(repo.wjoin(f))
4766 4766 except OSError, inst:
4767 4767 if inst.errno != errno.ENOENT:
4768 4768 raise
4769 4769 repo[None].forget(list)
4770 4770 finally:
4771 4771 wlock.release()
4772 4772
4773 4773 return ret
4774 4774
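# A minimal sketch of the option table in the docstring above (helper name
# hypothetical): which matched files end up scheduled for removal for a given
# combination of -f/--force and -A/--after.
def _removelist(repo, pats, opts):
    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]
    if opts.get('force'):
        return modified + deleted + clean + added
    if opts.get('after'):
        return deleted
    return deleted + clean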
4775 4775 @command('rename|move|mv',
4776 4776 [('A', 'after', None, _('record a rename that has already occurred')),
4777 4777 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4778 4778 ] + walkopts + dryrunopts,
4779 4779 _('[OPTION]... SOURCE... DEST'))
4780 4780 def rename(ui, repo, *pats, **opts):
4781 4781 """rename files; equivalent of copy + remove
4782 4782
4783 4783 Mark dest as copies of sources; mark sources for deletion. If dest
4784 4784 is a directory, copies are put in that directory. If dest is a
4785 4785 file, there can only be one source.
4786 4786
4787 4787 By default, this command copies the contents of files as they
4788 4788 exist in the working directory. If invoked with -A/--after, the
4789 4789 operation is recorded, but no copying is performed.
4790 4790
4791 4791 This command takes effect at the next commit. To undo a rename
4792 4792 before that, see :hg:`revert`.
4793 4793
4794 4794 Returns 0 on success, 1 if errors are encountered.
4795 4795 """
4796 4796 wlock = repo.wlock(False)
4797 4797 try:
4798 4798 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4799 4799 finally:
4800 4800 wlock.release()
4801 4801
4802 4802 @command('resolve',
4803 4803 [('a', 'all', None, _('select all unresolved files')),
4804 4804 ('l', 'list', None, _('list state of files needing merge')),
4805 4805 ('m', 'mark', None, _('mark files as resolved')),
4806 4806 ('u', 'unmark', None, _('mark files as unresolved')),
4807 4807 ('n', 'no-status', None, _('hide status prefix'))]
4808 4808 + mergetoolopts + walkopts,
4809 4809 _('[OPTION]... [FILE]...'))
4810 4810 def resolve(ui, repo, *pats, **opts):
4811 4811 """redo merges or set/view the merge status of files
4812 4812
4813 4813 Merges with unresolved conflicts are often the result of
4814 4814 non-interactive merging using the ``internal:merge`` configuration
4815 4815 setting, or a command-line merge tool like ``diff3``. The resolve
4816 4816 command is used to manage the files involved in a merge, after
4817 4817 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4818 4818 working directory must have two parents). See :hg:`help
4819 4819 merge-tools` for information on configuring merge tools.
4820 4820
4821 4821 The resolve command can be used in the following ways:
4822 4822
4823 4823 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4824 4824 files, discarding any previous merge attempts. Re-merging is not
4825 4825 performed for files already marked as resolved. Use ``--all/-a``
4826 4826 to select all unresolved files. ``--tool`` can be used to specify
4827 4827 the merge tool used for the given files. It overrides the HGMERGE
4828 4828 environment variable and your configuration files. Previous file
4829 4829 contents are saved with a ``.orig`` suffix.
4830 4830
4831 4831 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4832 4832 (e.g. after having manually fixed-up the files). The default is
4833 4833 to mark all unresolved files.
4834 4834
4835 4835 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4836 4836 default is to mark all resolved files.
4837 4837
4838 4838 - :hg:`resolve -l`: list files which had or still have conflicts.
4839 4839 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4840 4840
4841 4841 Note that Mercurial will not let you commit files with unresolved
4842 4842 merge conflicts. You must use :hg:`resolve -m ...` before you can
4843 4843 commit after a conflicting merge.
4844 4844
4845 4845 Returns 0 on success, 1 if any files fail a resolve attempt.
4846 4846 """
4847 4847
4848 4848 all, mark, unmark, show, nostatus = \
4849 4849 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4850 4850
4851 4851 if (show and (mark or unmark)) or (mark and unmark):
4852 4852 raise util.Abort(_("too many options specified"))
4853 4853 if pats and all:
4854 4854 raise util.Abort(_("can't specify --all and patterns"))
4855 4855 if not (all or pats or show or mark or unmark):
4856 4856 raise util.Abort(_('no files or directories specified; '
4857 4857 'use --all to remerge all files'))
4858 4858
4859 4859 ms = mergemod.mergestate(repo)
4860 4860 m = scmutil.match(repo[None], pats, opts)
4861 4861 ret = 0
4862 4862
4863 4863 for f in ms:
4864 4864 if m(f):
4865 4865 if show:
4866 4866 if nostatus:
4867 4867 ui.write("%s\n" % f)
4868 4868 else:
4869 4869 ui.write("%s %s\n" % (ms[f].upper(), f),
4870 4870 label='resolve.' +
4871 4871 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4872 4872 elif mark:
4873 4873 ms.mark(f, "r")
4874 4874 elif unmark:
4875 4875 ms.mark(f, "u")
4876 4876 else:
4877 4877 wctx = repo[None]
4878 4878 mctx = wctx.parents()[-1]
4879 4879
4880 4880 # backup pre-resolve (merge uses .orig for its own purposes)
4881 4881 a = repo.wjoin(f)
4882 4882 util.copyfile(a, a + ".resolve")
4883 4883
4884 4884 try:
4885 4885 # resolve file
4886 4886 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4887 4887 if ms.resolve(f, wctx, mctx):
4888 4888 ret = 1
4889 4889 finally:
4890 4890 ui.setconfig('ui', 'forcemerge', '')
4891 4891
4892 4892 # replace filemerge's .orig file with our resolve file
4893 4893 util.rename(a + ".resolve", a + ".orig")
4894 4894
4895 4895 ms.commit()
4896 4896 return ret
4897 4897
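# A minimal sketch (helper name hypothetical): the merge state records each
# conflicted file as 'u' (unresolved) or 'r' (resolved); this returns what
# "hg resolve -l" would flag with U.
def _unresolved(repo):
    ms = mergemod.mergestate(repo)
    return [f for f in ms if ms[f] == 'u']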
4898 4898 @command('revert',
4899 4899 [('a', 'all', None, _('revert all changes when no arguments given')),
4900 4900 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4901 4901 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4902 4902 ('C', 'no-backup', None, _('do not save backup copies of files')),
4903 4903 ] + walkopts + dryrunopts,
4904 4904 _('[OPTION]... [-r REV] [NAME]...'))
4905 4905 def revert(ui, repo, *pats, **opts):
4906 4906 """restore files to their checkout state
4907 4907
4908 4908 .. note::
4909 4909 To check out earlier revisions, you should use :hg:`update REV`.
4910 4910 To cancel a merge (and lose your changes), use :hg:`update --clean .`.
4911 4911
4912 4912 With no revision specified, revert the specified files or directories
4913 4913 to the contents they had in the parent of the working directory.
4914 4914 This restores the contents of files to an unmodified
4915 4915 state and unschedules adds, removes, copies, and renames. If the
4916 4916 working directory has two parents, you must explicitly specify a
4917 4917 revision.
4918 4918
4919 4919 Using the -r/--rev or -d/--date options, revert the given files or
4920 4920 directories to their states as of a specific revision. Because
4921 4921 revert does not change the working directory parents, this will
4922 4922 cause these files to appear modified. This can be helpful to "back
4923 4923 out" some or all of an earlier change. See :hg:`backout` for a
4924 4924 related method.
4925 4925
4926 4926 Modified files are saved with a .orig suffix before reverting.
4927 4927 To disable these backups, use --no-backup.
4928 4928
4929 4929 See :hg:`help dates` for a list of formats valid for -d/--date.
4930 4930
4931 4931 Returns 0 on success.
4932 4932 """
4933 4933
4934 4934 if opts.get("date"):
4935 4935 if opts.get("rev"):
4936 4936 raise util.Abort(_("you can't specify a revision and a date"))
4937 4937 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4938 4938
4939 4939 parent, p2 = repo.dirstate.parents()
4940 4940 if not opts.get('rev') and p2 != nullid:
4941 4941 # revert after merge is a trap for new users (issue2915)
4942 4942 raise util.Abort(_('uncommitted merge with no revision specified'),
4943 4943 hint=_('use "hg update" or see "hg help revert"'))
4944 4944
4945 4945 ctx = scmutil.revsingle(repo, opts.get('rev'))
4946 4946
4947 4947 if not pats and not opts.get('all'):
4948 4948 msg = _("no files or directories specified")
4949 4949 if p2 != nullid:
4950 4950 hint = _("uncommitted merge, use --all to discard all changes,"
4951 4951 " or 'hg update -C .' to abort the merge")
4952 4952 raise util.Abort(msg, hint=hint)
4953 4953 dirty = util.any(repo.status())
4954 4954 node = ctx.node()
4955 4955 if node != parent:
4956 4956 if dirty:
4957 4957 hint = _("uncommitted changes, use --all to discard all"
4958 4958 " changes, or 'hg update %s' to update") % ctx.rev()
4959 4959 else:
4960 4960 hint = _("use --all to revert all files,"
4961 4961 " or 'hg update %s' to update") % ctx.rev()
4962 4962 elif dirty:
4963 4963 hint = _("uncommitted changes, use --all to discard all changes")
4964 4964 else:
4965 4965 hint = _("use --all to revert all files")
4966 4966 raise util.Abort(msg, hint=hint)
4967 4967
4968 4968 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4969 4969
4970 4970 @command('rollback', dryrunopts +
4971 4971 [('f', 'force', False, _('ignore safety measures'))])
4972 4972 def rollback(ui, repo, **opts):
4973 4973 """roll back the last transaction (dangerous)
4974 4974
4975 4975 This command should be used with care. There is only one level of
4976 4976 rollback, and there is no way to undo a rollback. It will also
4977 4977 restore the dirstate at the time of the last transaction, losing
4978 4978 any dirstate changes since that time. This command does not alter
4979 4979 the working directory.
4980 4980
4981 4981 Transactions are used to encapsulate the effects of all commands
4982 4982 that create new changesets or propagate existing changesets into a
4983 4983 repository. For example, the following commands are transactional,
4984 4984 and their effects can be rolled back:
4985 4985
4986 4986 - commit
4987 4987 - import
4988 4988 - pull
4989 4989 - push (with this repository as the destination)
4990 4990 - unbundle
4991 4991
4992 4992 To avoid permanent data loss, rollback will refuse to roll back a
4993 4993 commit transaction if it isn't checked out. Use --force to
4994 4994 override this protection.
4995 4995
4996 4996 This command is not intended for use on public repositories. Once
4997 4997 changes are visible for pull by other users, rolling a transaction
4998 4998 back locally is ineffective (someone else may already have pulled
4999 4999 the changes). Furthermore, a race is possible with readers of the
5000 5000 repository; for example an in-progress pull from the repository
5001 5001 may fail if a rollback is performed.
5002 5002
5003 5003 Returns 0 on success, 1 if no rollback data is available.
5004 5004 """
5005 5005 return repo.rollback(dryrun=opts.get('dry_run'),
5006 5006 force=opts.get('force'))
5007 5007
5008 5008 @command('root', [])
5009 5009 def root(ui, repo):
5010 5010 """print the root (top) of the current working directory
5011 5011
5012 5012 Print the root directory of the current repository.
5013 5013
5014 5014 Returns 0 on success.
5015 5015 """
5016 5016 ui.write(repo.root + "\n")
5017 5017
5018 5018 @command('^serve',
5019 5019 [('A', 'accesslog', '', _('name of access log file to write to'),
5020 5020 _('FILE')),
5021 5021 ('d', 'daemon', None, _('run server in background')),
5022 5022 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5023 5023 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5024 5024 # use string type, then we can check if something was passed
5025 5025 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5026 5026 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5027 5027 _('ADDR')),
5028 5028 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5029 5029 _('PREFIX')),
5030 5030 ('n', 'name', '',
5031 5031 _('name to show in web pages (default: working directory)'), _('NAME')),
5032 5032 ('', 'web-conf', '',
5033 5033 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5034 5034 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5035 5035 _('FILE')),
5036 5036 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5037 5037 ('', 'stdio', None, _('for remote clients')),
5038 5038 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5039 5039 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5040 5040 ('', 'style', '', _('template style to use'), _('STYLE')),
5041 5041 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5042 5042 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5043 5043 _('[OPTION]...'))
5044 5044 def serve(ui, repo, **opts):
5045 5045 """start stand-alone webserver
5046 5046
5047 5047 Start a local HTTP repository browser and pull server. You can use
5048 5048 this for ad-hoc sharing and browsing of repositories. It is
5049 5049 recommended to use a real web server to serve a repository for
5050 5050 longer periods of time.
5051 5051
5052 5052 Please note that the server does not implement access control.
5053 5053 This means that, by default, anybody can read from the server and
5054 5054 nobody can write to it. Set the ``web.allow_push``
5055 5055 option to ``*`` to allow everybody to push to the server. You
5056 5056 should use a real web server if you need to authenticate users.
5057 5057
5058 5058 By default, the server logs accesses to stdout and errors to
5059 5059 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5060 5060 files.
5061 5061
5062 5062 To have the server choose a free port number to listen on, specify
5063 5063 a port number of 0; in this case, the server will print the port
5064 5064 number it uses.
5065 5065
5066 5066 Returns 0 on success.
5067 5067 """
5068 5068
5069 5069 if opts["stdio"] and opts["cmdserver"]:
5070 5070 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5071 5071
5072 5072 def checkrepo():
5073 5073 if repo is None:
5074 5074 raise error.RepoError(_("There is no Mercurial repository here"
5075 5075 " (.hg not found)"))
5076 5076
5077 5077 if opts["stdio"]:
5078 5078 checkrepo()
5079 5079 s = sshserver.sshserver(ui, repo)
5080 5080 s.serve_forever()
5081 5081
5082 5082 if opts["cmdserver"]:
5083 5083 checkrepo()
5084 5084 s = commandserver.server(ui, repo, opts["cmdserver"])
5085 5085 return s.serve()
5086 5086
5087 5087 # this way we can check if something was given in the command-line
5088 5088 if opts.get('port'):
5089 5089 opts['port'] = util.getport(opts.get('port'))
5090 5090
5091 5091 baseui = repo and repo.baseui or ui
5092 5092 optlist = ("name templates style address port prefix ipv6"
5093 5093 " accesslog errorlog certificate encoding")
5094 5094 for o in optlist.split():
5095 5095 val = opts.get(o, '')
5096 5096 if val in (None, ''): # should check against default options instead
5097 5097 continue
5098 5098 baseui.setconfig("web", o, val)
5099 5099 if repo and repo.ui != baseui:
5100 5100 repo.ui.setconfig("web", o, val)
5101 5101
5102 5102 o = opts.get('web_conf') or opts.get('webdir_conf')
5103 5103 if not o:
5104 5104 if not repo:
5105 5105 raise error.RepoError(_("There is no Mercurial repository"
5106 5106 " here (.hg not found)"))
5107 5107 o = repo.root
5108 5108
5109 5109 app = hgweb.hgweb(o, baseui=ui)
5110 5110
5111 5111 class service(object):
5112 5112 def init(self):
5113 5113 util.setsignalhandler()
5114 5114 self.httpd = hgweb.server.create_server(ui, app)
5115 5115
5116 5116 if opts['port'] and not ui.verbose:
5117 5117 return
5118 5118
5119 5119 if self.httpd.prefix:
5120 5120 prefix = self.httpd.prefix.strip('/') + '/'
5121 5121 else:
5122 5122 prefix = ''
5123 5123
5124 5124 port = ':%d' % self.httpd.port
5125 5125 if port == ':80':
5126 5126 port = ''
5127 5127
5128 5128 bindaddr = self.httpd.addr
5129 5129 if bindaddr == '0.0.0.0':
5130 5130 bindaddr = '*'
5131 5131 elif ':' in bindaddr: # IPv6
5132 5132 bindaddr = '[%s]' % bindaddr
5133 5133
5134 5134 fqaddr = self.httpd.fqaddr
5135 5135 if ':' in fqaddr:
5136 5136 fqaddr = '[%s]' % fqaddr
5137 5137 if opts['port']:
5138 5138 write = ui.status
5139 5139 else:
5140 5140 write = ui.write
5141 5141 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5142 5142 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5143 5143
5144 5144 def run(self):
5145 5145 self.httpd.serve_forever()
5146 5146
5147 5147 service = service()
5148 5148
5149 5149 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5150 5150
5151 5151 @command('showconfig|debugconfig',
5152 5152 [('u', 'untrusted', None, _('show untrusted configuration options'))],
5153 5153 _('[-u] [NAME]...'))
5154 5154 def showconfig(ui, repo, *values, **opts):
5155 5155 """show combined config settings from all hgrc files
5156 5156
5157 5157 With no arguments, print names and values of all config items.
5158 5158
5159 5159 With one argument of the form section.name, print just the value
5160 5160 of that config item.
5161 5161
5162 5162 With multiple arguments, print names and values of all config
5163 5163 items with matching section names.
5164 5164
5165 5165 With --debug, the source (filename and line number) is printed
5166 5166 for each config item.
5167 5167
5168 5168 Returns 0 on success.
5169 5169 """
5170 5170
5171 5171 for f in scmutil.rcpath():
5172 5172 ui.debug('read config from: %s\n' % f)
5173 5173 untrusted = bool(opts.get('untrusted'))
5174 5174 if values:
5175 5175 sections = [v for v in values if '.' not in v]
5176 5176 items = [v for v in values if '.' in v]
5177 5177 if len(items) > 1 or items and sections:
5178 5178 raise util.Abort(_('only one config item permitted'))
5179 5179 for section, name, value in ui.walkconfig(untrusted=untrusted):
5180 5180 value = str(value).replace('\n', '\\n')
5181 5181 sectname = section + '.' + name
5182 5182 if values:
5183 5183 for v in values:
5184 5184 if v == section:
5185 5185 ui.debug('%s: ' %
5186 5186 ui.configsource(section, name, untrusted))
5187 5187 ui.write('%s=%s\n' % (sectname, value))
5188 5188 elif v == sectname:
5189 5189 ui.debug('%s: ' %
5190 5190 ui.configsource(section, name, untrusted))
5191 5191 ui.write(value, '\n')
5192 5192 else:
5193 5193 ui.debug('%s: ' %
5194 5194 ui.configsource(section, name, untrusted))
5195 5195 ui.write('%s=%s\n' % (sectname, value))
5196 5196
5197 5197 @command('^status|st',
5198 5198 [('A', 'all', None, _('show status of all files')),
5199 5199 ('m', 'modified', None, _('show only modified files')),
5200 5200 ('a', 'added', None, _('show only added files')),
5201 5201 ('r', 'removed', None, _('show only removed files')),
5202 5202 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5203 5203 ('c', 'clean', None, _('show only files without changes')),
5204 5204 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5205 5205 ('i', 'ignored', None, _('show only ignored files')),
5206 5206 ('n', 'no-status', None, _('hide status prefix')),
5207 5207 ('C', 'copies', None, _('show source of copied files')),
5208 5208 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5209 5209 ('', 'rev', [], _('show difference from revision'), _('REV')),
5210 5210 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5211 5211 ] + walkopts + subrepoopts,
5212 5212 _('[OPTION]... [FILE]...'))
5213 5213 def status(ui, repo, *pats, **opts):
5214 5214 """show changed files in the working directory
5215 5215
5216 5216 Show status of files in the repository. If names are given, only
5217 5217 files that match are shown. Files that are clean or ignored or
5218 5218 the source of a copy/move operation are not listed unless
5219 5219 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5220 5220 Unless options described with "show only ..." are given, the
5221 5221 options -mardu are used.
5222 5222
5223 5223 Option -q/--quiet hides untracked (unknown and ignored) files
5224 5224 unless explicitly requested with -u/--unknown or -i/--ignored.
5225 5225
5226 5226 .. note::
5227 5227 status may appear to disagree with diff if permissions have
5228 5228 changed or a merge has occurred. The standard diff format does
5229 5229 not report permission changes and diff only reports changes
5230 5230 relative to one merge parent.
5231 5231
5232 5232 If one revision is given, it is used as the base revision.
5233 5233 If two revisions are given, the differences between them are
5234 5234 shown. The --change option can also be used as a shortcut to list
5235 5235 the changed files of a revision from its first parent.
5236 5236
5237 5237 The codes used to show the status of files are::
5238 5238
5239 5239 M = modified
5240 5240 A = added
5241 5241 R = removed
5242 5242 C = clean
5243 5243 ! = missing (deleted by non-hg command, but still tracked)
5244 5244 ? = not tracked
5245 5245 I = ignored
5246 5246 = origin of the previous file listed as A (added)
5247 5247
5248 5248 .. container:: verbose
5249 5249
5250 5250 Examples:
5251 5251
5252 5252 - show changes in the working directory relative to a
5253 5253 changeset::
5254 5254
5255 5255 hg status --rev 9353
5256 5256
5257 5257 - show all changes including copies in an existing changeset::
5258 5258
5259 5259 hg status --copies --change 9353
5260 5260
5261 5261 - get a NUL separated list of added files, suitable for xargs::
5262 5262
5263 5263 hg status -an0
5264 5264
5265 5265 Returns 0 on success.
5266 5266 """
5267 5267
5268 5268 revs = opts.get('rev')
5269 5269 change = opts.get('change')
5270 5270
5271 5271 if revs and change:
5272 5272 msg = _('cannot specify --rev and --change at the same time')
5273 5273 raise util.Abort(msg)
5274 5274 elif change:
5275 5275 node2 = scmutil.revsingle(repo, change, None).node()
5276 5276 node1 = repo[node2].p1().node()
5277 5277 else:
5278 5278 node1, node2 = scmutil.revpair(repo, revs)
5279 5279
5280 5280 cwd = (pats and repo.getcwd()) or ''
5281 5281 end = opts.get('print0') and '\0' or '\n'
5282 5282 copy = {}
5283 5283 states = 'modified added removed deleted unknown ignored clean'.split()
5284 5284 show = [k for k in states if opts.get(k)]
5285 5285 if opts.get('all'):
5286 5286 show += ui.quiet and (states[:4] + ['clean']) or states
5287 5287 if not show:
5288 5288 show = ui.quiet and states[:4] or states[:5]
5289 5289
5290 5290 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5291 5291 'ignored' in show, 'clean' in show, 'unknown' in show,
5292 5292 opts.get('subrepos'))
5293 5293 changestates = zip(states, 'MAR!?IC', stat)
5294 5294
5295 5295 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5296 5296 copy = copies.pathcopies(repo[node1], repo[node2])
5297 5297
5298 5298 fm = ui.formatter('status', opts)
5299 5299 format = '%s %s' + end
5300 5300 if opts.get('no_status'):
5301 5301 format = '%.0s%s' + end
5302 5302
5303 5303 for state, char, files in changestates:
5304 5304 if state in show:
5305 5305 label = 'status.' + state
5306 5306 for f in files:
5307 5307 fm.startitem()
5308 5308 fm.write("status path", format, char,
5309 5309 repo.pathto(f, cwd), label=label)
5310 5310 if f in copy:
5311 5311 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5312 5312 label='status.copied')
5313 5313 fm.end()
5314 5314
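# A minimal sketch (helper name hypothetical) of the pairing above: the seven
# buckets returned by repo.status() line up, in order, with the one-letter
# codes M A R ! ? I C from the docstring.
def _statusbyletter(repo, node1='.', node2=None):
    stat = repo.status(node1, node2, ignored=True, clean=True, unknown=True)
    return dict(zip('MAR!?IC', stat))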
5315 5315 @command('^summary|sum',
5316 5316 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5317 5317 def summary(ui, repo, **opts):
5318 5318 """summarize working directory state
5319 5319
5320 5320 This generates a brief summary of the working directory state,
5321 5321 including parents, branch, commit status, and available updates.
5322 5322
5323 5323 With the --remote option, this will check the default paths for
5324 5324 incoming and outgoing changes. This can be time-consuming.
5325 5325
5326 5326 Returns 0 on success.
5327 5327 """
5328 5328
5329 5329 ctx = repo[None]
5330 5330 parents = ctx.parents()
5331 5331 pnode = parents[0].node()
5332 5332 marks = []
5333 5333
5334 5334 for p in parents:
5335 5335 # label with log.changeset (instead of log.parent) since this
5336 5336 # shows a working directory parent *changeset*:
5337 5337 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5338 5338 label='log.changeset')
5339 5339 ui.write(' '.join(p.tags()), label='log.tag')
5340 5340 if p.bookmarks():
5341 5341 marks.extend(p.bookmarks())
5342 5342 if p.rev() == -1:
5343 5343 if not len(repo):
5344 5344 ui.write(_(' (empty repository)'))
5345 5345 else:
5346 5346 ui.write(_(' (no revision checked out)'))
5347 5347 ui.write('\n')
5348 5348 if p.description():
5349 5349 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5350 5350 label='log.summary')
5351 5351
5352 5352 branch = ctx.branch()
5353 5353 bheads = repo.branchheads(branch)
5354 5354 m = _('branch: %s\n') % branch
5355 5355 if branch != 'default':
5356 5356 ui.write(m, label='log.branch')
5357 5357 else:
5358 5358 ui.status(m, label='log.branch')
5359 5359
5360 5360 if marks:
5361 5361 current = repo._bookmarkcurrent
5362 5362 ui.write(_('bookmarks:'), label='log.bookmark')
5363 5363 if current is not None:
5364 5364 try:
5365 5365 marks.remove(current)
5366 5366 ui.write(' *' + current, label='bookmarks.current')
5367 5367 except ValueError:
5368 5368 # current bookmark not in parent ctx marks
5369 5369 pass
5370 5370 for m in marks:
5371 5371 ui.write(' ' + m, label='log.bookmark')
5372 5372 ui.write('\n', label='log.bookmark')
5373 5373
5374 5374 st = list(repo.status(unknown=True))[:6]
5375 5375
5376 5376 c = repo.dirstate.copies()
5377 5377 copied, renamed = [], []
5378 5378 for d, s in c.iteritems():
5379 5379 if s in st[2]:
5380 5380 st[2].remove(s)
5381 5381 renamed.append(d)
5382 5382 else:
5383 5383 copied.append(d)
5384 5384 if d in st[1]:
5385 5385 st[1].remove(d)
5386 5386 st.insert(3, renamed)
5387 5387 st.insert(4, copied)
5388 5388
5389 5389 ms = mergemod.mergestate(repo)
5390 5390 st.append([f for f in ms if ms[f] == 'u'])
5391 5391
5392 5392 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5393 5393 st.append(subs)
5394 5394
5395 5395 labels = [ui.label(_('%d modified'), 'status.modified'),
5396 5396 ui.label(_('%d added'), 'status.added'),
5397 5397 ui.label(_('%d removed'), 'status.removed'),
5398 5398 ui.label(_('%d renamed'), 'status.copied'),
5399 5399 ui.label(_('%d copied'), 'status.copied'),
5400 5400 ui.label(_('%d deleted'), 'status.deleted'),
5401 5401 ui.label(_('%d unknown'), 'status.unknown'),
5402 5402 ui.label(_('%d ignored'), 'status.ignored'),
5403 5403 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5404 5404 ui.label(_('%d subrepos'), 'status.modified')]
5405 5405 t = []
5406 5406 for s, l in zip(st, labels):
5407 5407 if s:
5408 5408 t.append(l % len(s))
5409 5409
5410 5410 t = ', '.join(t)
5411 5411 cleanworkdir = False
5412 5412
5413 5413 if len(parents) > 1:
5414 5414 t += _(' (merge)')
5415 5415 elif branch != parents[0].branch():
5416 5416 t += _(' (new branch)')
5417 5417 elif (parents[0].extra().get('close') and
5418 5418 pnode in repo.branchheads(branch, closed=True)):
5419 5419 t += _(' (head closed)')
5420 5420 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5421 5421 t += _(' (clean)')
5422 5422 cleanworkdir = True
5423 5423 elif pnode not in bheads:
5424 5424 t += _(' (new branch head)')
5425 5425
5426 5426 if cleanworkdir:
5427 5427 ui.status(_('commit: %s\n') % t.strip())
5428 5428 else:
5429 5429 ui.write(_('commit: %s\n') % t.strip())
5430 5430
5431 5431 # all ancestors of branch heads - all ancestors of parent = new csets
5432 5432 new = [0] * len(repo)
5433 5433 cl = repo.changelog
5434 5434 for a in [cl.rev(n) for n in bheads]:
5435 5435 new[a] = 1
5436 5436 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5437 5437 new[a] = 1
5438 5438 for a in [p.rev() for p in parents]:
5439 5439 if a >= 0:
5440 5440 new[a] = 0
5441 5441 for a in cl.ancestors(*[p.rev() for p in parents]):
5442 5442 new[a] = 0
5443 5443 new = sum(new)
5444 5444
5445 5445 if new == 0:
5446 5446 ui.status(_('update: (current)\n'))
5447 5447 elif pnode not in bheads:
5448 5448 ui.write(_('update: %d new changesets (update)\n') % new)
5449 5449 else:
5450 5450 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5451 5451 (new, len(bheads)))
5452 5452
5453 5453 if opts.get('remote'):
5454 5454 t = []
5455 5455 source, branches = hg.parseurl(ui.expandpath('default'))
5456 5456 other = hg.peer(repo, {}, source)
5457 5457 revs, checkout = hg.addbranchrevs(repo, other, branches,
5458 5458 opts.get('rev'))
5459 5459 ui.debug('comparing with %s\n' % util.hidepassword(source))
5460 5460 repo.ui.pushbuffer()
5461 5461 commoninc = discovery.findcommonincoming(repo, other)
5462 5462 _common, incoming, _rheads = commoninc
5463 5463 repo.ui.popbuffer()
5464 5464 if incoming:
5465 5465 t.append(_('1 or more incoming'))
5466 5466
5467 5467 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5468 5468 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5469 5469 if source != dest:
5470 5470 other = hg.peer(repo, {}, dest)
5471 5471 commoninc = None
5472 5472 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5473 5473 repo.ui.pushbuffer()
5474 5474 outgoing = discovery.findcommonoutgoing(repo, other,
5475 5475 commoninc=commoninc)
5476 5476 repo.ui.popbuffer()
5477 5477 o = outgoing.missing
5478 5478 if o:
5479 5479 t.append(_('%d outgoing') % len(o))
5480 5480 if 'bookmarks' in other.listkeys('namespaces'):
5481 5481 lmarks = repo.listkeys('bookmarks')
5482 5482 rmarks = other.listkeys('bookmarks')
5483 5483 diff = set(rmarks) - set(lmarks)
5484 5484 if len(diff) > 0:
5485 5485 t.append(_('%d incoming bookmarks') % len(diff))
5486 5486 diff = set(lmarks) - set(rmarks)
5487 5487 if len(diff) > 0:
5488 5488 t.append(_('%d outgoing bookmarks') % len(diff))
5489 5489
5490 5490 if t:
5491 5491 ui.write(_('remote: %s\n') % (', '.join(t)))
5492 5492 else:
5493 5493 ui.status(_('remote: (synced)\n'))
5494 5494
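# A minimal sketch (helper name hypothetical) of the "update:" count above:
# changesets reachable from the branch heads but not from the working
# directory parents are exactly what an update or merge could bring in.
def _newchangesets(repo, bheads, parents):
    cl = repo.changelog
    headrevs = [cl.rev(n) for n in bheads]
    reachable = set(headrevs)
    if headrevs:
        reachable.update(cl.ancestors(*headrevs))
    parentrevs = [p.rev() for p in parents if p.rev() >= 0]
    reachable.difference_update(parentrevs)
    if parentrevs:
        reachable.difference_update(cl.ancestors(*parentrevs))
    return len(reachable)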
5495 5495 @command('tag',
5496 5496 [('f', 'force', None, _('force tag')),
5497 5497 ('l', 'local', None, _('make the tag local')),
5498 5498 ('r', 'rev', '', _('revision to tag'), _('REV')),
5499 5499 ('', 'remove', None, _('remove a tag')),
5500 5500 # -l/--local is already there, commitopts cannot be used
5501 5501 ('e', 'edit', None, _('edit commit message')),
5502 5502 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5503 5503 ] + commitopts2,
5504 5504 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5505 5505 def tag(ui, repo, name1, *names, **opts):
5506 5506 """add one or more tags for the current or given revision
5507 5507
5508 5508 Name a particular revision using <name>.
5509 5509
5510 5510 Tags are used to name particular revisions of the repository and are
5511 5511 very useful to compare different revisions, to go back to significant
5512 5512 earlier versions or to mark branch points as releases, etc. Changing
5513 5513 an existing tag is normally disallowed; use -f/--force to override.
5514 5514
5515 5515 If no revision is given, the parent of the working directory is
5516 5516 used, or tip if no revision is checked out.
5517 5517
5518 5518 To facilitate version control, distribution, and merging of tags,
5519 5519 they are stored as a file named ".hgtags" which is managed similarly
5520 5520 to other project files and can be hand-edited if necessary. This
5521 5521 also means that tagging creates a new commit. The file
5522 5522 ".hg/localtags" is used for local tags (not shared among
5523 5523 repositories).
5524 5524
5525 5525 Tag commits are usually made at the head of a branch. If the parent
5526 5526 of the working directory is not a branch head, :hg:`tag` aborts; use
5527 5527 -f/--force to force the tag commit to be based on a non-head
5528 5528 changeset.
5529 5529
5530 5530 See :hg:`help dates` for a list of formats valid for -d/--date.
5531 5531
5532 5532 Since tag names have priority over branch names during revision
5533 5533 lookup, using an existing branch name as a tag name is discouraged.
5534 5534
5535 5535 Returns 0 on success.
5536 5536 """
5537 5537 wlock = lock = None
5538 5538 try:
5539 5539 wlock = repo.wlock()
5540 5540 lock = repo.lock()
5541 5541 rev_ = "."
5542 5542 names = [t.strip() for t in (name1,) + names]
5543 5543 if len(names) != len(set(names)):
5544 5544 raise util.Abort(_('tag names must be unique'))
5545 5545 for n in names:
5546 5546 if n in ['tip', '.', 'null']:
5547 5547 raise util.Abort(_("the name '%s' is reserved") % n)
5548 5548 if not n:
5549 5549 raise util.Abort(_('tag names cannot consist entirely of '
5550 5550 'whitespace'))
5551 5551 if opts.get('rev') and opts.get('remove'):
5552 5552 raise util.Abort(_("--rev and --remove are incompatible"))
5553 5553 if opts.get('rev'):
5554 5554 rev_ = opts['rev']
5555 5555 message = opts.get('message')
5556 5556 if opts.get('remove'):
5557 5557 expectedtype = opts.get('local') and 'local' or 'global'
5558 5558 for n in names:
5559 5559 if not repo.tagtype(n):
5560 5560 raise util.Abort(_("tag '%s' does not exist") % n)
5561 5561 if repo.tagtype(n) != expectedtype:
5562 5562 if expectedtype == 'global':
5563 5563 raise util.Abort(_("tag '%s' is not a global tag") % n)
5564 5564 else:
5565 5565 raise util.Abort(_("tag '%s' is not a local tag") % n)
5566 5566 rev_ = nullid
5567 5567 if not message:
5568 5568 # we don't translate commit messages
5569 5569 message = 'Removed tag %s' % ', '.join(names)
5570 5570 elif not opts.get('force'):
5571 5571 for n in names:
5572 5572 if n in repo.tags():
5573 5573 raise util.Abort(_("tag '%s' already exists "
5574 5574 "(use -f to force)") % n)
5575 5575 if not opts.get('local'):
5576 5576 p1, p2 = repo.dirstate.parents()
5577 5577 if p2 != nullid:
5578 5578 raise util.Abort(_('uncommitted merge'))
5579 5579 bheads = repo.branchheads()
5580 5580 if not opts.get('force') and bheads and p1 not in bheads:
5581 5581 raise util.Abort(_('not at a branch head (use -f to force)'))
5582 5582 r = scmutil.revsingle(repo, rev_).node()
5583 5583
5584 5584 if not message:
5585 5585 # we don't translate commit messages
5586 5586 message = ('Added tag %s for changeset %s' %
5587 5587 (', '.join(names), short(r)))
5588 5588
5589 5589 date = opts.get('date')
5590 5590 if date:
5591 5591 date = util.parsedate(date)
5592 5592
5593 5593 if opts.get('edit'):
5594 5594 message = ui.edit(message, ui.username())
5595 5595
5596 5596 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5597 5597 finally:
5598 5598 release(lock, wlock)
5599 5599
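# Editor's sketch (not part of the original module): once the validation in
# tag() above passes, the command reduces to a single localrepo.tag() call.
# A minimal scripted equivalent, assuming an existing `repo` object and
# example values for the tag name and message, might look like this:
#
#     node = scmutil.revsingle(repo, '.').node()
#     repo.tag(['example-tag'], node,
#              'Added tag example-tag for changeset %s' % short(node),
#              False, None, None)   # local=False, user=None, date=None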
5600 5600 @command('tags', [], '')
5601 5601 def tags(ui, repo):
5602 5602 """list repository tags
5603 5603
5604 5604 This lists both regular and local tags. When the -v/--verbose
5605 5605 switch is used, a third column "local" is printed for local tags.
5606 5606
5607 5607 Returns 0 on success.
5608 5608 """
5609 5609
5610 5610 hexfunc = ui.debugflag and hex or short
5611 5611 tagtype = ""
5612 5612
5613 5613 for t, n in reversed(repo.tagslist()):
5614 5614 if ui.quiet:
5615 5615 ui.write("%s\n" % t, label='tags.normal')
5616 5616 continue
5617 5617
5618 5618 hn = hexfunc(n)
5619 5619 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5620 5620 rev = ui.label(r, 'log.changeset')
5621 5621 spaces = " " * (30 - encoding.colwidth(t))
5622 5622
5623 5623 tag = ui.label(t, 'tags.normal')
5624 5624 if ui.verbose:
5625 5625 if repo.tagtype(t) == 'local':
5626 5626 tagtype = " local"
5627 5627 tag = ui.label(t, 'tags.local')
5628 5628 else:
5629 5629 tagtype = ""
5630 5630 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5631 5631
5632 5632 @command('tip',
5633 5633 [('p', 'patch', None, _('show patch')),
5634 5634 ('g', 'git', None, _('use git extended diff format')),
5635 5635 ] + templateopts,
5636 5636 _('[-p] [-g]'))
5637 5637 def tip(ui, repo, **opts):
5638 5638 """show the tip revision
5639 5639
5640 5640 The tip revision (usually just called the tip) is the changeset
5641 5641 most recently added to the repository (and therefore the most
5642 5642 recently changed head).
5643 5643
5644 5644 If you have just made a commit, that commit will be the tip. If
5645 5645 you have just pulled changes from another repository, the tip of
5646 5646 that repository becomes the current tip. The "tip" tag is special
5647 5647 and cannot be renamed or assigned to a different changeset.
5648 5648
5649 5649 Returns 0 on success.
5650 5650 """
5651 5651 displayer = cmdutil.show_changeset(ui, repo, opts)
5652 5652 displayer.show(repo[len(repo) - 1])
5653 5653 displayer.close()
5654 5654
5655 5655 @command('unbundle',
5656 5656 [('u', 'update', None,
5657 5657 _('update to new branch head if changesets were unbundled'))],
5658 5658 _('[-u] FILE...'))
5659 5659 def unbundle(ui, repo, fname1, *fnames, **opts):
5660 5660 """apply one or more changegroup files
5661 5661
5662 5662 Apply one or more compressed changegroup files generated by the
5663 5663 bundle command.
5664 5664
5665 5665 Returns 0 on success, 1 if an update has unresolved files.
5666 5666 """
5667 5667 fnames = (fname1,) + fnames
5668 5668
5669 5669 lock = repo.lock()
5670 5670 wc = repo['.']
5671 5671 try:
5672 5672 for fname in fnames:
5673 5673 f = url.open(ui, fname)
5674 5674 gen = changegroup.readbundle(f, fname)
5675 5675 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5676 5676 finally:
5677 5677 lock.release()
5678 5678 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5679 5679 return postincoming(ui, repo, modheads, opts.get('update'), None)
5680 5680
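# Editor's sketch (not part of the original module): the core of unbundle()
# is the read/apply loop above. Applying a single bundle file programmatically,
# assuming `repo`, `ui` and a bundle path `fname`, uses the same three calls:
#
#     f = url.open(ui, fname)
#     gen = changegroup.readbundle(f, fname)
#     modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
#
# modheads summarizes the change in heads and is passed to postincoming(),
# exactly as in the function above.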
5681 5681 @command('^update|up|checkout|co',
5682 5682 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5683 5683 ('c', 'check', None,
5684 5684 _('update across branches if no uncommitted changes')),
5685 5685 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5686 5686 ('r', 'rev', '', _('revision'), _('REV'))],
5687 5687 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5688 5688 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5689 5689 """update working directory (or switch revisions)
5690 5690
5691 5691 Update the repository's working directory to the specified
5692 5692 changeset. If no changeset is specified, update to the tip of the
5693 5693 current named branch and move the current bookmark (see :hg:`help
5694 5694 bookmarks`).
5695 5695
5696 5696 If the changeset is not a descendant of the working directory's
5697 5697 parent, the update is aborted. With the -c/--check option, the
5698 5698 working directory is checked for uncommitted changes; if none are
5699 5699 found, the working directory is updated to the specified
5700 5700 changeset.
5701 5701
5702 5702 Update sets the working directory's parent revision to the specified
5703 5703 changeset (see :hg:`help parents`).
5704 5704
5705 5705 The following rules apply when the working directory contains
5706 5706 uncommitted changes:
5707 5707
5708 5708 1. If neither -c/--check nor -C/--clean is specified, and if
5709 5709 the requested changeset is an ancestor or descendant of
5710 5710 the working directory's parent, the uncommitted changes
5711 5711 are merged into the requested changeset and the merged
5712 5712 result is left uncommitted. If the requested changeset is
5713 5713 not an ancestor or descendant (that is, it is on another
5714 5714 branch), the update is aborted and the uncommitted changes
5715 5715 are preserved.
5716 5716
5717 5717 2. With the -c/--check option, the update is aborted and the
5718 5718 uncommitted changes are preserved.
5719 5719
5720 5720 3. With the -C/--clean option, uncommitted changes are discarded and
5721 5721 the working directory is updated to the requested changeset.
5722 5722
5723 5723 Use null as the changeset to remove the working directory (like
5724 5724 :hg:`clone -U`).
5725 5725
5726 5726 If you want to revert just one file to an older revision, use
5727 5727 :hg:`revert [-r REV] NAME`.
5728 5728
5729 5729 See :hg:`help dates` for a list of formats valid for -d/--date.
5730 5730
5731 5731 Returns 0 on success, 1 if there are unresolved files.
5732 5732 """
5733 5733 if rev and node:
5734 5734 raise util.Abort(_("please specify just one revision"))
5735 5735
5736 5736 if rev is None or rev == '':
5737 5737 rev = node
5738 5738
5739 5739 # with no argument, we also move the current bookmark, if any
5740 5740 movemarkfrom = None
5741 5741 if rev is None or node == '':
5742 5742 movemarkfrom = repo['.'].node()
5743 5743
5744 5744 # if we defined a bookmark, we have to remember the original bookmark name
5745 5745 brev = rev
5746 5746 rev = scmutil.revsingle(repo, rev, rev).rev()
5747 5747
5748 5748 if check and clean:
5749 5749 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5750 5750
5751 5751 if date:
5752 5752 if rev is not None:
5753 5753 raise util.Abort(_("you can't specify a revision and a date"))
5754 5754 rev = cmdutil.finddate(ui, repo, date)
5755 5755
5756 5756 if check:
5757 5757 c = repo[None]
5758 5758 if c.dirty(merge=False, branch=False):
5759 5759 raise util.Abort(_("uncommitted local changes"))
5760 5760 if rev is None:
5761 5761 rev = repo[repo[None].branch()].rev()
5762 5762 mergemod._checkunknown(repo, repo[None], repo[rev])
5763 5763
5764 5764 if clean:
5765 5765 ret = hg.clean(repo, rev)
5766 5766 else:
5767 5767 ret = hg.update(repo, rev)
5768 5768
5769 5769 if not ret and movemarkfrom:
5770 5770 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5771 5771 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5772 5772 elif brev in repo._bookmarks:
5773 5773 bookmarks.setcurrent(repo, brev)
5774 5774 elif brev:
5775 5775 bookmarks.unsetcurrent(repo)
5776 5776
5777 5777 return ret
5778 5778
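# Editor's sketch (not part of the original module): stripped of bookmark and
# date handling, the heart of update() is the clean/update dispatch above.
# A minimal equivalent, assuming `repo`, a revision spec `rev` and a boolean
# `clean`, would be:
#
#     rev = scmutil.revsingle(repo, rev, rev).rev()
#     if clean:
#         ret = hg.clean(repo, rev)    # discard uncommitted changes
#     else:
#         ret = hg.update(repo, rev)   # merge uncommitted changes
#     # ret is 0 on success, 1 if there are unresolved files (see docstring)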
5779 5779 @command('verify', [])
5780 5780 def verify(ui, repo):
5781 5781 """verify the integrity of the repository
5782 5782
5783 5783 Verify the integrity of the current repository.
5784 5784
5785 5785 This will perform an extensive check of the repository's
5786 5786 integrity, validating the hashes and checksums of each entry in
5787 5787 the changelog, manifest, and tracked files, as well as the
5788 5788 integrity of their crosslinks and indices.
5789 5789
5790 5790 Returns 0 on success, 1 if errors are encountered.
5791 5791 """
5792 5792 return hg.verify(repo)
5793 5793
5794 5794 @command('version', [])
5795 5795 def version_(ui):
5796 5796 """output version and copyright information"""
5797 5797 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5798 5798 % util.version())
5799 5799 ui.status(_(
5800 5800 "(see http://mercurial.selenic.com for more information)\n"
5801 5801 "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
5802 5802 "This is free software; see the source for copying conditions. "
5803 5803 "There is NO\nwarranty; "
5804 5804 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5805 5805 ))
5806 5806
5807 5807 norepo = ("clone init version help debugcommands debugcomplete"
5808 5808 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5809 5809 " debugknown debuggetbundle debugbundle")
5810 5810 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5811 5811 " debugdata debugindex debugindexdot debugrevlog")
@@ -1,1271 +1,1273 @@
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, short, hex, bin
9 9 from i18n import _
10 10 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 11 import copies
12 12 import match as matchmod
13 13 import os, errno, stat
14 14
15 15 propertycache = util.propertycache
16 16
17 17 class changectx(object):
18 18 """A changecontext object makes access to data related to a particular
19 19 changeset convenient."""
20 20 def __init__(self, repo, changeid=''):
21 21 """changeid is a revision number, node, or tag"""
22 22 if changeid == '':
23 23 changeid = '.'
24 24 self._repo = repo
25 25
26 26 if isinstance(changeid, int):
27 27 self._rev = changeid
28 28 self._node = repo.changelog.node(changeid)
29 29 return
30 30 if changeid == '.':
31 31 self._node = repo.dirstate.p1()
32 32 self._rev = repo.changelog.rev(self._node)
33 33 return
34 34 if changeid == 'null':
35 35 self._node = nullid
36 36 self._rev = nullrev
37 37 return
38 38 if changeid == 'tip':
39 39 self._rev = len(repo.changelog) - 1
40 40 self._node = repo.changelog.node(self._rev)
41 41 return
42 42 if len(changeid) == 20:
43 43 try:
44 44 self._node = changeid
45 45 self._rev = repo.changelog.rev(changeid)
46 46 return
47 47 except LookupError:
48 48 pass
49 49
50 50 try:
51 51 r = int(changeid)
52 52 if str(r) != changeid:
53 53 raise ValueError
54 54 l = len(repo.changelog)
55 55 if r < 0:
56 56 r += l
57 57 if r < 0 or r >= l:
58 58 raise ValueError
59 59 self._rev = r
60 60 self._node = repo.changelog.node(r)
61 61 return
62 62 except (ValueError, OverflowError):
63 63 pass
64 64
65 65 if len(changeid) == 40:
66 66 try:
67 67 self._node = bin(changeid)
68 68 self._rev = repo.changelog.rev(self._node)
69 69 return
70 70 except (TypeError, LookupError):
71 71 pass
72 72
73 73 if changeid in repo._bookmarks:
74 74 self._node = repo._bookmarks[changeid]
75 75 self._rev = repo.changelog.rev(self._node)
76 76 return
77 77 if changeid in repo._tagscache.tags:
78 78 self._node = repo._tagscache.tags[changeid]
79 79 self._rev = repo.changelog.rev(self._node)
80 80 return
81 if changeid in repo.branchtags():
82 self._node = repo.branchtags()[changeid]
81 try:
82 self._node = repo.branchtip(changeid)
83 83 self._rev = repo.changelog.rev(self._node)
84 84 return
85 except error.RepoLookupError:
86 pass
85 87
86 88 self._node = repo.changelog._partialmatch(changeid)
87 89 if self._node is not None:
88 90 self._rev = repo.changelog.rev(self._node)
89 91 return
90 92
91 93 # lookup failed
92 94 # check if it might have come from damaged dirstate
93 95 if changeid in repo.dirstate.parents():
94 96 raise error.Abort(_("working directory has unknown parent '%s'!")
95 97 % short(changeid))
96 98 try:
97 99 if len(changeid) == 20:
98 100 changeid = hex(changeid)
99 101 except TypeError:
100 102 pass
101 103 raise error.RepoLookupError(
102 104 _("unknown revision '%s'") % changeid)
103 105
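# Editor's note (added commentary, not in the original source): the
# constructor above resolves a changeid by trying, in order: integer rev,
# '.', 'null', 'tip', 20-byte binary node, decimal rev string, 40-char hex
# node, bookmark, tag, branch name (now via repo.branchtip(), the faster
# single-branch lookup this changeset introduces), and finally a unique
# node-prefix match before raising RepoLookupError.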
104 106 def __str__(self):
105 107 return short(self.node())
106 108
107 109 def __int__(self):
108 110 return self.rev()
109 111
110 112 def __repr__(self):
111 113 return "<changectx %s>" % str(self)
112 114
113 115 def __hash__(self):
114 116 try:
115 117 return hash(self._rev)
116 118 except AttributeError:
117 119 return id(self)
118 120
119 121 def __eq__(self, other):
120 122 try:
121 123 return self._rev == other._rev
122 124 except AttributeError:
123 125 return False
124 126
125 127 def __ne__(self, other):
126 128 return not (self == other)
127 129
128 130 def __nonzero__(self):
129 131 return self._rev != nullrev
130 132
131 133 @propertycache
132 134 def _changeset(self):
133 135 return self._repo.changelog.read(self.rev())
134 136
135 137 @propertycache
136 138 def _manifest(self):
137 139 return self._repo.manifest.read(self._changeset[0])
138 140
139 141 @propertycache
140 142 def _manifestdelta(self):
141 143 return self._repo.manifest.readdelta(self._changeset[0])
142 144
143 145 @propertycache
144 146 def _parents(self):
145 147 p = self._repo.changelog.parentrevs(self._rev)
146 148 if p[1] == nullrev:
147 149 p = p[:-1]
148 150 return [changectx(self._repo, x) for x in p]
149 151
150 152 @propertycache
151 153 def substate(self):
152 154 return subrepo.state(self, self._repo.ui)
153 155
154 156 def __contains__(self, key):
155 157 return key in self._manifest
156 158
157 159 def __getitem__(self, key):
158 160 return self.filectx(key)
159 161
160 162 def __iter__(self):
161 163 for f in sorted(self._manifest):
162 164 yield f
163 165
164 166 def changeset(self):
165 167 return self._changeset
166 168 def manifest(self):
167 169 return self._manifest
168 170 def manifestnode(self):
169 171 return self._changeset[0]
170 172
171 173 def rev(self):
172 174 return self._rev
173 175 def node(self):
174 176 return self._node
175 177 def hex(self):
176 178 return hex(self._node)
177 179 def user(self):
178 180 return self._changeset[1]
179 181 def date(self):
180 182 return self._changeset[2]
181 183 def files(self):
182 184 return self._changeset[3]
183 185 def description(self):
184 186 return self._changeset[4]
185 187 def branch(self):
186 188 return encoding.tolocal(self._changeset[5].get("branch"))
187 189 def extra(self):
188 190 return self._changeset[5]
189 191 def tags(self):
190 192 return self._repo.nodetags(self._node)
191 193 def bookmarks(self):
192 194 return self._repo.nodebookmarks(self._node)
193 195 def phase(self):
194 196 return self._repo._phasecache.phase(self._repo, self._rev)
195 197 def phasestr(self):
196 198 return phases.phasenames[self.phase()]
197 199 def mutable(self):
198 200 return self.phase() > phases.public
199 201 def hidden(self):
200 202 return self._rev in self._repo.changelog.hiddenrevs
201 203
202 204 def parents(self):
203 205 """return contexts for each parent changeset"""
204 206 return self._parents
205 207
206 208 def p1(self):
207 209 return self._parents[0]
208 210
209 211 def p2(self):
210 212 if len(self._parents) == 2:
211 213 return self._parents[1]
212 214 return changectx(self._repo, -1)
213 215
214 216 def children(self):
215 217 """return contexts for each child changeset"""
216 218 c = self._repo.changelog.children(self._node)
217 219 return [changectx(self._repo, x) for x in c]
218 220
219 221 def ancestors(self):
220 222 for a in self._repo.changelog.ancestors(self._rev):
221 223 yield changectx(self._repo, a)
222 224
223 225 def descendants(self):
224 226 for d in self._repo.changelog.descendants(self._rev):
225 227 yield changectx(self._repo, d)
226 228
227 229 def _fileinfo(self, path):
228 230 if '_manifest' in self.__dict__:
229 231 try:
230 232 return self._manifest[path], self._manifest.flags(path)
231 233 except KeyError:
232 234 raise error.LookupError(self._node, path,
233 235 _('not found in manifest'))
234 236 if '_manifestdelta' in self.__dict__ or path in self.files():
235 237 if path in self._manifestdelta:
236 238 return (self._manifestdelta[path],
237 239 self._manifestdelta.flags(path))
238 240 node, flag = self._repo.manifest.find(self._changeset[0], path)
239 241 if not node:
240 242 raise error.LookupError(self._node, path,
241 243 _('not found in manifest'))
242 244
243 245 return node, flag
244 246
245 247 def filenode(self, path):
246 248 return self._fileinfo(path)[0]
247 249
248 250 def flags(self, path):
249 251 try:
250 252 return self._fileinfo(path)[1]
251 253 except error.LookupError:
252 254 return ''
253 255
254 256 def filectx(self, path, fileid=None, filelog=None):
255 257 """get a file context from this changeset"""
256 258 if fileid is None:
257 259 fileid = self.filenode(path)
258 260 return filectx(self._repo, path, fileid=fileid,
259 261 changectx=self, filelog=filelog)
260 262
261 263 def ancestor(self, c2):
262 264 """
263 265 return the ancestor context of self and c2
264 266 """
265 267 # deal with workingctxs
266 268 n2 = c2._node
267 269 if n2 is None:
268 270 n2 = c2._parents[0]._node
269 271 n = self._repo.changelog.ancestor(self._node, n2)
270 272 return changectx(self._repo, n)
271 273
272 274 def walk(self, match):
273 275 fset = set(match.files())
274 276 # for dirstate.walk, files=['.'] means "walk the whole tree".
275 277 # follow that here, too
276 278 fset.discard('.')
277 279 for fn in self:
278 280 if fn in fset:
279 281 # specified pattern is the exact name
280 282 fset.remove(fn)
281 283 if match(fn):
282 284 yield fn
283 285 for fn in sorted(fset):
284 286 if fn in self._dirs:
285 287 # specified pattern is a directory
286 288 continue
287 289 if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
288 290 yield fn
289 291
290 292 def sub(self, path):
291 293 return subrepo.subrepo(self, path)
292 294
293 295 def match(self, pats=[], include=None, exclude=None, default='glob'):
294 296 r = self._repo
295 297 return matchmod.match(r.root, r.getcwd(), pats,
296 298 include, exclude, default,
297 299 auditor=r.auditor, ctx=self)
298 300
299 301 def diff(self, ctx2=None, match=None, **opts):
300 302 """Returns a diff generator for the given contexts and matcher"""
301 303 if ctx2 is None:
302 304 ctx2 = self.p1()
303 305 if ctx2 is not None and not isinstance(ctx2, changectx):
304 306 ctx2 = self._repo[ctx2]
305 307 diffopts = patch.diffopts(self._repo.ui, opts)
306 308 return patch.diff(self._repo, ctx2.node(), self.node(),
307 309 match=match, opts=diffopts)
308 310
309 311 @propertycache
310 312 def _dirs(self):
311 313 dirs = set()
312 314 for f in self._manifest:
313 315 pos = f.rfind('/')
314 316 while pos != -1:
315 317 f = f[:pos]
316 318 if f in dirs:
317 319 break # dirs already contains this and above
318 320 dirs.add(f)
319 321 pos = f.rfind('/')
320 322 return dirs
321 323
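# Editor's note (added commentary, not in the original source): _dirs
# collects every ancestor directory of every manifest entry, so a manifest
# containing 'a/b/c.txt' contributes both 'a/b' and 'a'; the early break
# avoids re-walking prefixes that are already present in the set.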
322 324 def dirs(self):
323 325 return self._dirs
324 326
325 327 class filectx(object):
326 328 """A filecontext object makes access to data related to a particular
327 329 filerevision convenient."""
328 330 def __init__(self, repo, path, changeid=None, fileid=None,
329 331 filelog=None, changectx=None):
330 332 """changeid can be a changeset revision, node, or tag.
331 333 fileid can be a file revision or node."""
332 334 self._repo = repo
333 335 self._path = path
334 336
335 337 assert (changeid is not None
336 338 or fileid is not None
337 339 or changectx is not None), \
338 340 ("bad args: changeid=%r, fileid=%r, changectx=%r"
339 341 % (changeid, fileid, changectx))
340 342
341 343 if filelog:
342 344 self._filelog = filelog
343 345
344 346 if changeid is not None:
345 347 self._changeid = changeid
346 348 if changectx is not None:
347 349 self._changectx = changectx
348 350 if fileid is not None:
349 351 self._fileid = fileid
350 352
351 353 @propertycache
352 354 def _changectx(self):
353 355 return changectx(self._repo, self._changeid)
354 356
355 357 @propertycache
356 358 def _filelog(self):
357 359 return self._repo.file(self._path)
358 360
359 361 @propertycache
360 362 def _changeid(self):
361 363 if '_changectx' in self.__dict__:
362 364 return self._changectx.rev()
363 365 else:
364 366 return self._filelog.linkrev(self._filerev)
365 367
366 368 @propertycache
367 369 def _filenode(self):
368 370 if '_fileid' in self.__dict__:
369 371 return self._filelog.lookup(self._fileid)
370 372 else:
371 373 return self._changectx.filenode(self._path)
372 374
373 375 @propertycache
374 376 def _filerev(self):
375 377 return self._filelog.rev(self._filenode)
376 378
377 379 @propertycache
378 380 def _repopath(self):
379 381 return self._path
380 382
381 383 def __nonzero__(self):
382 384 try:
383 385 self._filenode
384 386 return True
385 387 except error.LookupError:
386 388 # file is missing
387 389 return False
388 390
389 391 def __str__(self):
390 392 return "%s@%s" % (self.path(), short(self.node()))
391 393
392 394 def __repr__(self):
393 395 return "<filectx %s>" % str(self)
394 396
395 397 def __hash__(self):
396 398 try:
397 399 return hash((self._path, self._filenode))
398 400 except AttributeError:
399 401 return id(self)
400 402
401 403 def __eq__(self, other):
402 404 try:
403 405 return (self._path == other._path
404 406 and self._filenode == other._filenode)
405 407 except AttributeError:
406 408 return False
407 409
408 410 def __ne__(self, other):
409 411 return not (self == other)
410 412
411 413 def filectx(self, fileid):
412 414 '''opens an arbitrary revision of the file without
413 415 opening a new filelog'''
414 416 return filectx(self._repo, self._path, fileid=fileid,
415 417 filelog=self._filelog)
416 418
417 419 def filerev(self):
418 420 return self._filerev
419 421 def filenode(self):
420 422 return self._filenode
421 423 def flags(self):
422 424 return self._changectx.flags(self._path)
423 425 def filelog(self):
424 426 return self._filelog
425 427
426 428 def rev(self):
427 429 if '_changectx' in self.__dict__:
428 430 return self._changectx.rev()
429 431 if '_changeid' in self.__dict__:
430 432 return self._changectx.rev()
431 433 return self._filelog.linkrev(self._filerev)
432 434
433 435 def linkrev(self):
434 436 return self._filelog.linkrev(self._filerev)
435 437 def node(self):
436 438 return self._changectx.node()
437 439 def hex(self):
438 440 return hex(self.node())
439 441 def user(self):
440 442 return self._changectx.user()
441 443 def date(self):
442 444 return self._changectx.date()
443 445 def files(self):
444 446 return self._changectx.files()
445 447 def description(self):
446 448 return self._changectx.description()
447 449 def branch(self):
448 450 return self._changectx.branch()
449 451 def extra(self):
450 452 return self._changectx.extra()
451 453 def manifest(self):
452 454 return self._changectx.manifest()
453 455 def changectx(self):
454 456 return self._changectx
455 457
456 458 def data(self):
457 459 return self._filelog.read(self._filenode)
458 460 def path(self):
459 461 return self._path
460 462 def size(self):
461 463 return self._filelog.size(self._filerev)
462 464
463 465 def isbinary(self):
464 466 try:
465 467 return util.binary(self.data())
466 468 except IOError:
467 469 return False
468 470
469 471 def cmp(self, fctx):
470 472 """compare with other file context
471 473
472 474 returns True if different than fctx.
473 475 """
474 476 if (fctx._filerev is None
475 477 and (self._repo._encodefilterpats
476 478 # if file data starts with '\1\n', empty metadata block is
477 479 # prepended, which adds 4 bytes to filelog.size().
478 480 or self.size() - 4 == fctx.size())
479 481 or self.size() == fctx.size()):
480 482 return self._filelog.cmp(self._filenode, fctx.data())
481 483
482 484 return True
483 485
484 486 def renamed(self):
485 487 """check if file was actually renamed in this changeset revision
486 488
487 489 If a rename is logged in the file revision, we report the copy for the
488 490 changeset only if the file revision's linkrev points back to the changeset
489 491 in question or both changeset parents contain different file revisions.
490 492 """
491 493
492 494 renamed = self._filelog.renamed(self._filenode)
493 495 if not renamed:
494 496 return renamed
495 497
496 498 if self.rev() == self.linkrev():
497 499 return renamed
498 500
499 501 name = self.path()
500 502 fnode = self._filenode
501 503 for p in self._changectx.parents():
502 504 try:
503 505 if fnode == p.filenode(name):
504 506 return None
505 507 except error.LookupError:
506 508 pass
507 509 return renamed
508 510
509 511 def parents(self):
510 512 p = self._path
511 513 fl = self._filelog
512 514 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
513 515
514 516 r = self._filelog.renamed(self._filenode)
515 517 if r:
516 518 pl[0] = (r[0], r[1], None)
517 519
518 520 return [filectx(self._repo, p, fileid=n, filelog=l)
519 521 for p, n, l in pl if n != nullid]
520 522
521 523 def p1(self):
522 524 return self.parents()[0]
523 525
524 526 def p2(self):
525 527 p = self.parents()
526 528 if len(p) == 2:
527 529 return p[1]
528 530 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
529 531
530 532 def children(self):
531 533 # hard for renames
532 534 c = self._filelog.children(self._filenode)
533 535 return [filectx(self._repo, self._path, fileid=x,
534 536 filelog=self._filelog) for x in c]
535 537
536 538 def annotate(self, follow=False, linenumber=None, diffopts=None):
537 539 '''returns a list of tuples of (ctx, line) for each line
538 540 in the file, where ctx is the filectx of the node where
539 541 that line was last changed.
540 542 This returns tuples of ((ctx, linenumber), line) for each line,
541 543 if "linenumber" parameter is NOT "None".
542 544 In such tuples, the linenumber refers to the line's first appearance
543 545 in the managed file.
544 546 To reduce annotation cost,
545 547 this returns a fixed value (False) as the linenumber
546 548 if the "linenumber" parameter is "False".'''
547 549
548 550 def decorate_compat(text, rev):
549 551 return ([rev] * len(text.splitlines()), text)
550 552
551 553 def without_linenumber(text, rev):
552 554 return ([(rev, False)] * len(text.splitlines()), text)
553 555
554 556 def with_linenumber(text, rev):
555 557 size = len(text.splitlines())
556 558 return ([(rev, i) for i in xrange(1, size + 1)], text)
557 559
558 560 decorate = (((linenumber is None) and decorate_compat) or
559 561 (linenumber and with_linenumber) or
560 562 without_linenumber)
561 563
562 564 def pair(parent, child):
563 565 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
564 566 refine=True)
565 567 for (a1, a2, b1, b2), t in blocks:
566 568 # Changed blocks ('!') or blocks made only of blank lines ('~')
567 569 # belong to the child.
568 570 if t == '=':
569 571 child[0][b1:b2] = parent[0][a1:a2]
570 572 return child
571 573
572 574 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
573 575 def getctx(path, fileid):
574 576 log = path == self._path and self._filelog or getlog(path)
575 577 return filectx(self._repo, path, fileid=fileid, filelog=log)
576 578 getctx = util.lrucachefunc(getctx)
577 579
578 580 def parents(f):
579 581 # we want to reuse filectx objects as much as possible
580 582 p = f._path
581 583 if f._filerev is None: # working dir
582 584 pl = [(n.path(), n.filerev()) for n in f.parents()]
583 585 else:
584 586 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
585 587
586 588 if follow:
587 589 r = f.renamed()
588 590 if r:
589 591 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
590 592
591 593 return [getctx(p, n) for p, n in pl if n != nullrev]
592 594
593 595 # use linkrev to find the first changeset where self appeared
594 596 if self.rev() != self.linkrev():
595 597 base = self.filectx(self.filerev())
596 598 else:
597 599 base = self
598 600
599 601 # This algorithm would prefer to be recursive, but Python is a
600 602 # bit recursion-hostile. Instead we do an iterative
601 603 # depth-first search.
602 604
603 605 visit = [base]
604 606 hist = {}
605 607 pcache = {}
606 608 needed = {base: 1}
607 609 while visit:
608 610 f = visit[-1]
609 611 if f not in pcache:
610 612 pcache[f] = parents(f)
611 613
612 614 ready = True
613 615 pl = pcache[f]
614 616 for p in pl:
615 617 if p not in hist:
616 618 ready = False
617 619 visit.append(p)
618 620 needed[p] = needed.get(p, 0) + 1
619 621 if ready:
620 622 visit.pop()
621 623 curr = decorate(f.data(), f)
622 624 for p in pl:
623 625 curr = pair(hist[p], curr)
624 626 if needed[p] == 1:
625 627 del hist[p]
626 628 else:
627 629 needed[p] -= 1
628 630
629 631 hist[f] = curr
630 632 pcache[f] = []
631 633
632 634 return zip(hist[base][0], hist[base][1].splitlines(True))
633 635
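# Editor's note (added commentary, not in the original source): per the
# docstring above, annotate() yields one tuple per line of the file:
# (fctx, line) by default, or ((fctx, linenumber), line) when a linenumber
# mode is requested, where fctx is the filectx that last changed that line.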
634 636 def ancestor(self, fc2, actx):
635 637 """
636 638 find the common ancestor file context, if any, of self, and fc2
637 639
638 640 actx must be the changectx of the common ancestor
639 641 of self's and fc2's respective changesets.
640 642 """
641 643
642 644 # the easy case: no (relevant) renames
643 645 if fc2.path() == self.path() and self.path() in actx:
644 646 return actx[self.path()]
645 647
646 648 # the next easiest cases: unambiguous predecessor (name trumps
647 649 # history)
648 650 if self.path() in actx and fc2.path() not in actx:
649 651 return actx[self.path()]
650 652 if fc2.path() in actx and self.path() not in actx:
651 653 return actx[fc2.path()]
652 654
653 655 # prime the ancestor cache for the working directory
654 656 acache = {}
655 657 for c in (self, fc2):
656 658 if c._filerev is None:
657 659 pl = [(n.path(), n.filenode()) for n in c.parents()]
658 660 acache[(c._path, None)] = pl
659 661
660 662 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
661 663 def parents(vertex):
662 664 if vertex in acache:
663 665 return acache[vertex]
664 666 f, n = vertex
665 667 if f not in flcache:
666 668 flcache[f] = self._repo.file(f)
667 669 fl = flcache[f]
668 670 pl = [(f, p) for p in fl.parents(n) if p != nullid]
669 671 re = fl.renamed(n)
670 672 if re:
671 673 pl.append(re)
672 674 acache[vertex] = pl
673 675 return pl
674 676
675 677 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
676 678 v = ancestor.ancestor(a, b, parents)
677 679 if v:
678 680 f, n = v
679 681 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
680 682
681 683 return None
682 684
683 685 def ancestors(self, followfirst=False):
684 686 visit = {}
685 687 c = self
686 688 cut = followfirst and 1 or None
687 689 while True:
688 690 for parent in c.parents()[:cut]:
689 691 visit[(parent.rev(), parent.node())] = parent
690 692 if not visit:
691 693 break
692 694 c = visit.pop(max(visit))
693 695 yield c
694 696
695 697 def copies(self, c2):
696 698 if not util.safehasattr(self, "_copycache"):
697 699 self._copycache = {}
698 700 sc2 = str(c2)
699 701 if sc2 not in self._copycache:
700 702 self._copycache[sc2] = copies.pathcopies(c2)
701 703 return self._copycache[sc2]
702 704
703 705 class workingctx(changectx):
704 706 """A workingctx object makes access to data related to
705 707 the current working directory convenient.
706 708 date - any valid date string or (unixtime, offset), or None.
707 709 user - username string, or None.
708 710 extra - a dictionary of extra values, or None.
709 711 changes - a list of file lists as returned by localrepo.status()
710 712 or None to use the repository status.
711 713 """
712 714 def __init__(self, repo, text="", user=None, date=None, extra=None,
713 715 changes=None):
714 716 self._repo = repo
715 717 self._rev = None
716 718 self._node = None
717 719 self._text = text
718 720 if date:
719 721 self._date = util.parsedate(date)
720 722 if user:
721 723 self._user = user
722 724 if changes:
723 725 self._status = list(changes[:4])
724 726 self._unknown = changes[4]
725 727 self._ignored = changes[5]
726 728 self._clean = changes[6]
727 729 else:
728 730 self._unknown = None
729 731 self._ignored = None
730 732 self._clean = None
731 733
732 734 self._extra = {}
733 735 if extra:
734 736 self._extra = extra.copy()
735 737 if 'branch' not in self._extra:
736 738 try:
737 739 branch = encoding.fromlocal(self._repo.dirstate.branch())
738 740 except UnicodeDecodeError:
739 741 raise util.Abort(_('branch name not in UTF-8!'))
740 742 self._extra['branch'] = branch
741 743 if self._extra['branch'] == '':
742 744 self._extra['branch'] = 'default'
743 745
744 746 def __str__(self):
745 747 return str(self._parents[0]) + "+"
746 748
747 749 def __repr__(self):
748 750 return "<workingctx %s>" % str(self)
749 751
750 752 def __nonzero__(self):
751 753 return True
752 754
753 755 def __contains__(self, key):
754 756 return self._repo.dirstate[key] not in "?r"
755 757
756 758 def _buildflagfunc(self):
757 759 # Create a fallback function for getting file flags when the
758 760 # filesystem doesn't support them
759 761
760 762 copiesget = self._repo.dirstate.copies().get
761 763
762 764 if len(self._parents) < 2:
763 765 # when we have one parent, it's easy: copy from parent
764 766 man = self._parents[0].manifest()
765 767 def func(f):
766 768 f = copiesget(f, f)
767 769 return man.flags(f)
768 770 else:
769 771 # merges are tricky: we try to reconstruct the unstored
770 772 # result from the merge (issue1802)
771 773 p1, p2 = self._parents
772 774 pa = p1.ancestor(p2)
773 775 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
774 776
775 777 def func(f):
776 778 f = copiesget(f, f) # may be wrong for merges with copies
777 779 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
778 780 if fl1 == fl2:
779 781 return fl1
780 782 if fl1 == fla:
781 783 return fl2
782 784 if fl2 == fla:
783 785 return fl1
784 786 return '' # punt for conflicts
785 787
786 788 return func
787 789
788 790 @propertycache
789 791 def _flagfunc(self):
790 792 return self._repo.dirstate.flagfunc(self._buildflagfunc)
791 793
792 794 @propertycache
793 795 def _manifest(self):
794 796 """generate a manifest corresponding to the working directory"""
795 797
796 798 man = self._parents[0].manifest().copy()
797 799 if len(self._parents) > 1:
798 800 man2 = self.p2().manifest()
799 801 def getman(f):
800 802 if f in man:
801 803 return man
802 804 return man2
803 805 else:
804 806 getman = lambda f: man
805 807
806 808 copied = self._repo.dirstate.copies()
807 809 ff = self._flagfunc
808 810 modified, added, removed, deleted = self._status
809 811 for i, l in (("a", added), ("m", modified)):
810 812 for f in l:
811 813 orig = copied.get(f, f)
812 814 man[f] = getman(orig).get(orig, nullid) + i
813 815 try:
814 816 man.set(f, ff(f))
815 817 except OSError:
816 818 pass
817 819
818 820 for f in deleted + removed:
819 821 if f in man:
820 822 del man[f]
821 823
822 824 return man
823 825
824 826 def __iter__(self):
825 827 d = self._repo.dirstate
826 828 for f in d:
827 829 if d[f] != 'r':
828 830 yield f
829 831
830 832 @propertycache
831 833 def _status(self):
832 834 return self._repo.status()[:4]
833 835
834 836 @propertycache
835 837 def _user(self):
836 838 return self._repo.ui.username()
837 839
838 840 @propertycache
839 841 def _date(self):
840 842 return util.makedate()
841 843
842 844 @propertycache
843 845 def _parents(self):
844 846 p = self._repo.dirstate.parents()
845 847 if p[1] == nullid:
846 848 p = p[:-1]
847 849 self._parents = [changectx(self._repo, x) for x in p]
848 850 return self._parents
849 851
850 852 def status(self, ignored=False, clean=False, unknown=False):
851 853 """Explicit status query
852 854 Unless this method is used to query the working copy status, the
853 855 _status property will implicitly read the status using its default
854 856 arguments."""
855 857 stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
856 858 self._unknown = self._ignored = self._clean = None
857 859 if unknown:
858 860 self._unknown = stat[4]
859 861 if ignored:
860 862 self._ignored = stat[5]
861 863 if clean:
862 864 self._clean = stat[6]
863 865 self._status = stat[:4]
864 866 return stat
865 867
866 868 def manifest(self):
867 869 return self._manifest
868 870 def user(self):
869 871 return self._user or self._repo.ui.username()
870 872 def date(self):
871 873 return self._date
872 874 def description(self):
873 875 return self._text
874 876 def files(self):
875 877 return sorted(self._status[0] + self._status[1] + self._status[2])
876 878
877 879 def modified(self):
878 880 return self._status[0]
879 881 def added(self):
880 882 return self._status[1]
881 883 def removed(self):
882 884 return self._status[2]
883 885 def deleted(self):
884 886 return self._status[3]
885 887 def unknown(self):
886 888 assert self._unknown is not None # must call status first
887 889 return self._unknown
888 890 def ignored(self):
889 891 assert self._ignored is not None # must call status first
890 892 return self._ignored
891 893 def clean(self):
892 894 assert self._clean is not None # must call status first
893 895 return self._clean
894 896 def branch(self):
895 897 return encoding.tolocal(self._extra['branch'])
896 898 def extra(self):
897 899 return self._extra
898 900
899 901 def tags(self):
900 902 t = []
901 903 for p in self.parents():
902 904 t.extend(p.tags())
903 905 return t
904 906
905 907 def bookmarks(self):
906 908 b = []
907 909 for p in self.parents():
908 910 b.extend(p.bookmarks())
909 911 return b
910 912
911 913 def phase(self):
912 914 phase = phases.draft # default phase to draft
913 915 for p in self.parents():
914 916 phase = max(phase, p.phase())
915 917 return phase
916 918
917 919 def hidden(self):
918 920 return False
919 921
920 922 def children(self):
921 923 return []
922 924
923 925 def flags(self, path):
924 926 if '_manifest' in self.__dict__:
925 927 try:
926 928 return self._manifest.flags(path)
927 929 except KeyError:
928 930 return ''
929 931
930 932 try:
931 933 return self._flagfunc(path)
932 934 except OSError:
933 935 return ''
934 936
935 937 def filectx(self, path, filelog=None):
936 938 """get a file context from the working directory"""
937 939 return workingfilectx(self._repo, path, workingctx=self,
938 940 filelog=filelog)
939 941
940 942 def ancestor(self, c2):
941 943 """return the ancestor context of self and c2"""
942 944 return self._parents[0].ancestor(c2) # punt on two parents for now
943 945
944 946 def walk(self, match):
945 947 return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
946 948 True, False))
947 949
948 950 def dirty(self, missing=False, merge=True, branch=True):
949 951 "check whether a working directory is modified"
950 952 # check subrepos first
951 953 for s in self.substate:
952 954 if self.sub(s).dirty():
953 955 return True
954 956 # check current working dir
955 957 return ((merge and self.p2()) or
956 958 (branch and self.branch() != self.p1().branch()) or
957 959 self.modified() or self.added() or self.removed() or
958 960 (missing and self.deleted()))
959 961
960 962 def add(self, list, prefix=""):
961 963 join = lambda f: os.path.join(prefix, f)
962 964 wlock = self._repo.wlock()
963 965 ui, ds = self._repo.ui, self._repo.dirstate
964 966 try:
965 967 rejected = []
966 968 for f in list:
967 969 scmutil.checkportable(ui, join(f))
968 970 p = self._repo.wjoin(f)
969 971 try:
970 972 st = os.lstat(p)
971 973 except OSError:
972 974 ui.warn(_("%s does not exist!\n") % join(f))
973 975 rejected.append(f)
974 976 continue
975 977 if st.st_size > 10000000:
976 978 ui.warn(_("%s: up to %d MB of RAM may be required "
977 979 "to manage this file\n"
978 980 "(use 'hg revert %s' to cancel the "
979 981 "pending addition)\n")
980 982 % (f, 3 * st.st_size // 1000000, join(f)))
981 983 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
982 984 ui.warn(_("%s not added: only files and symlinks "
983 985 "supported currently\n") % join(f))
984 986 rejected.append(p)
985 987 elif ds[f] in 'amn':
986 988 ui.warn(_("%s already tracked!\n") % join(f))
987 989 elif ds[f] == 'r':
988 990 ds.normallookup(f)
989 991 else:
990 992 ds.add(f)
991 993 return rejected
992 994 finally:
993 995 wlock.release()
994 996
995 997 def forget(self, files, prefix=""):
996 998 join = lambda f: os.path.join(prefix, f)
997 999 wlock = self._repo.wlock()
998 1000 try:
999 1001 rejected = []
1000 1002 for f in files:
1001 1003 if f not in self._repo.dirstate:
1002 1004 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1003 1005 rejected.append(f)
1004 1006 elif self._repo.dirstate[f] != 'a':
1005 1007 self._repo.dirstate.remove(f)
1006 1008 else:
1007 1009 self._repo.dirstate.drop(f)
1008 1010 return rejected
1009 1011 finally:
1010 1012 wlock.release()
1011 1013
1012 1014 def ancestors(self):
1013 1015 for a in self._repo.changelog.ancestors(
1014 1016 *[p.rev() for p in self._parents]):
1015 1017 yield changectx(self._repo, a)
1016 1018
1017 1019 def undelete(self, list):
1018 1020 pctxs = self.parents()
1019 1021 wlock = self._repo.wlock()
1020 1022 try:
1021 1023 for f in list:
1022 1024 if self._repo.dirstate[f] != 'r':
1023 1025 self._repo.ui.warn(_("%s not removed!\n") % f)
1024 1026 else:
1025 1027 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1026 1028 t = fctx.data()
1027 1029 self._repo.wwrite(f, t, fctx.flags())
1028 1030 self._repo.dirstate.normal(f)
1029 1031 finally:
1030 1032 wlock.release()
1031 1033
1032 1034 def copy(self, source, dest):
1033 1035 p = self._repo.wjoin(dest)
1034 1036 if not os.path.lexists(p):
1035 1037 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1036 1038 elif not (os.path.isfile(p) or os.path.islink(p)):
1037 1039 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1038 1040 "symbolic link\n") % dest)
1039 1041 else:
1040 1042 wlock = self._repo.wlock()
1041 1043 try:
1042 1044 if self._repo.dirstate[dest] in '?r':
1043 1045 self._repo.dirstate.add(dest)
1044 1046 self._repo.dirstate.copy(source, dest)
1045 1047 finally:
1046 1048 wlock.release()
1047 1049
1048 1050 def dirs(self):
1049 1051 return self._repo.dirstate.dirs()
1050 1052
1051 1053 class workingfilectx(filectx):
1052 1054 """A workingfilectx object makes access to data related to a particular
1053 1055 file in the working directory convenient."""
1054 1056 def __init__(self, repo, path, filelog=None, workingctx=None):
1055 1057 """changeid can be a changeset revision, node, or tag.
1056 1058 fileid can be a file revision or node."""
1057 1059 self._repo = repo
1058 1060 self._path = path
1059 1061 self._changeid = None
1060 1062 self._filerev = self._filenode = None
1061 1063
1062 1064 if filelog:
1063 1065 self._filelog = filelog
1064 1066 if workingctx:
1065 1067 self._changectx = workingctx
1066 1068
1067 1069 @propertycache
1068 1070 def _changectx(self):
1069 1071 return workingctx(self._repo)
1070 1072
1071 1073 def __nonzero__(self):
1072 1074 return True
1073 1075
1074 1076 def __str__(self):
1075 1077 return "%s@%s" % (self.path(), self._changectx)
1076 1078
1077 1079 def __repr__(self):
1078 1080 return "<workingfilectx %s>" % str(self)
1079 1081
1080 1082 def data(self):
1081 1083 return self._repo.wread(self._path)
1082 1084 def renamed(self):
1083 1085 rp = self._repo.dirstate.copied(self._path)
1084 1086 if not rp:
1085 1087 return None
1086 1088 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1087 1089
1088 1090 def parents(self):
1089 1091 '''return parent filectxs, following copies if necessary'''
1090 1092 def filenode(ctx, path):
1091 1093 return ctx._manifest.get(path, nullid)
1092 1094
1093 1095 path = self._path
1094 1096 fl = self._filelog
1095 1097 pcl = self._changectx._parents
1096 1098 renamed = self.renamed()
1097 1099
1098 1100 if renamed:
1099 1101 pl = [renamed + (None,)]
1100 1102 else:
1101 1103 pl = [(path, filenode(pcl[0], path), fl)]
1102 1104
1103 1105 for pc in pcl[1:]:
1104 1106 pl.append((path, filenode(pc, path), fl))
1105 1107
1106 1108 return [filectx(self._repo, p, fileid=n, filelog=l)
1107 1109 for p, n, l in pl if n != nullid]
1108 1110
1109 1111 def children(self):
1110 1112 return []
1111 1113
1112 1114 def size(self):
1113 1115 return os.lstat(self._repo.wjoin(self._path)).st_size
1114 1116 def date(self):
1115 1117 t, tz = self._changectx.date()
1116 1118 try:
1117 1119 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
1118 1120 except OSError, err:
1119 1121 if err.errno != errno.ENOENT:
1120 1122 raise
1121 1123 return (t, tz)
1122 1124
1123 1125 def cmp(self, fctx):
1124 1126 """compare with other file context
1125 1127
1126 1128 returns True if different than fctx.
1127 1129 """
1128 1130 # fctx should be a filectx (not a wfctx)
1129 1131 # invert comparison to reuse the same code path
1130 1132 return fctx.cmp(self)
1131 1133
1132 1134 class memctx(object):
1133 1135 """Use memctx to perform in-memory commits via localrepo.commitctx().
1134 1136
1135 1137 Revision information is supplied at initialization time while
1136 1138 related file data is made available through a callback
1137 1139 mechanism. 'repo' is the current localrepo, 'parents' is a
1138 1140 sequence of two parent revisions identifiers (pass None for every
1139 1141 missing parent), 'text' is the commit message and 'files' lists
1140 1142 names of files touched by the revision (normalized and relative to
1141 1143 repository root).
1142 1144
1143 1145 filectxfn(repo, memctx, path) is a callable receiving the
1144 1146 repository, the current memctx object and the normalized path of
1145 1147 requested file, relative to repository root. It is fired by the
1146 1148 commit function for every file in 'files', but calls order is
1147 1149 undefined. If the file is available in the revision being
1148 1150 committed (updated or added), filectxfn returns a memfilectx
1149 1151 object. If the file was removed, filectxfn raises an
1150 1152 IOError. Moved files are represented by marking the source file
1151 1153 removed and the new file added with copy information (see
1152 1154 memfilectx).
1153 1155
1154 1156 user receives the committer name and defaults to current
1155 1157 repository username, date is the commit date in any format
1156 1158 supported by util.parsedate() and defaults to current date, extra
1157 1159 is a dictionary of metadata or is left empty.
1158 1160 """
1159 1161 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1160 1162 date=None, extra=None):
1161 1163 self._repo = repo
1162 1164 self._rev = None
1163 1165 self._node = None
1164 1166 self._text = text
1165 1167 self._date = date and util.parsedate(date) or util.makedate()
1166 1168 self._user = user
1167 1169 parents = [(p or nullid) for p in parents]
1168 1170 p1, p2 = parents
1169 1171 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1170 1172 files = sorted(set(files))
1171 1173 self._status = [files, [], [], [], []]
1172 1174 self._filectxfn = filectxfn
1173 1175
1174 1176 self._extra = extra and extra.copy() or {}
1175 1177 if self._extra.get('branch', '') == '':
1176 1178 self._extra['branch'] = 'default'
1177 1179
1178 1180 def __str__(self):
1179 1181 return str(self._parents[0]) + "+"
1180 1182
1181 1183 def __int__(self):
1182 1184 return self._rev
1183 1185
1184 1186 def __nonzero__(self):
1185 1187 return True
1186 1188
1187 1189 def __getitem__(self, key):
1188 1190 return self.filectx(key)
1189 1191
1190 1192 def p1(self):
1191 1193 return self._parents[0]
1192 1194 def p2(self):
1193 1195 return self._parents[1]
1194 1196
1195 1197 def user(self):
1196 1198 return self._user or self._repo.ui.username()
1197 1199 def date(self):
1198 1200 return self._date
1199 1201 def description(self):
1200 1202 return self._text
1201 1203 def files(self):
1202 1204 return self.modified()
1203 1205 def modified(self):
1204 1206 return self._status[0]
1205 1207 def added(self):
1206 1208 return self._status[1]
1207 1209 def removed(self):
1208 1210 return self._status[2]
1209 1211 def deleted(self):
1210 1212 return self._status[3]
1211 1213 def unknown(self):
1212 1214 return self._status[4]
1213 1215 def ignored(self):
1214 1216 return self._status[5]
1215 1217 def clean(self):
1216 1218 return self._status[6]
1217 1219 def branch(self):
1218 1220 return encoding.tolocal(self._extra['branch'])
1219 1221 def extra(self):
1220 1222 return self._extra
1221 1223 def flags(self, f):
1222 1224 return self[f].flags()
1223 1225
1224 1226 def parents(self):
1225 1227 """return contexts for each parent changeset"""
1226 1228 return self._parents
1227 1229
1228 1230 def filectx(self, path, filelog=None):
1229 1231 """get a file context from the working directory"""
1230 1232 return self._filectxfn(self._repo, self, path)
1231 1233
1232 1234 def commit(self):
1233 1235 """commit context to the repo"""
1234 1236 return self._repo.commitctx(self)
1235 1237
1236 1238 class memfilectx(object):
1237 1239 """memfilectx represents an in-memory file to commit.
1238 1240
1239 1241 See memctx for more details.
1240 1242 """
1241 1243 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1242 1244 """
1243 1245 path is the normalized file path relative to repository root.
1244 1246 data is the file content as a string.
1245 1247 islink is True if the file is a symbolic link.
1246 1248 isexec is True if the file is executable.
1247 1249 copied is the source file path if current file was copied in the
1248 1250 revision being committed, or None."""
1249 1251 self._path = path
1250 1252 self._data = data
1251 1253 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1252 1254 self._copied = None
1253 1255 if copied:
1254 1256 self._copied = (copied, nullid)
1255 1257
1256 1258 def __nonzero__(self):
1257 1259 return True
1258 1260 def __str__(self):
1259 1261 return "%s@%s" % (self.path(), self._changectx)
1260 1262 def path(self):
1261 1263 return self._path
1262 1264 def data(self):
1263 1265 return self._data
1264 1266 def flags(self):
1265 1267 return self._flags
1266 1268 def isexec(self):
1267 1269 return 'x' in self._flags
1268 1270 def islink(self):
1269 1271 return 'l' in self._flags
1270 1272 def renamed(self):
1271 1273 return self._copied
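# Editor's sketch (not part of the original module): a minimal in-memory
# commit using memctx/memfilectx as described in the memctx docstring above.
# `repo` is assumed to be an existing localrepo; the path, content and commit
# message are example values.
#
#     def filectxfn(repo, mctx, path):
#         if path == 'example.txt':
#             return memfilectx(path, 'example contents\n')
#         raise IOError            # any other path is treated as removed
#
#     p1 = repo['.'].node()
#     mctx = memctx(repo, (p1, None), 'example in-memory commit',
#                   ['example.txt'], filectxfn)
#     newnode = mctx.commit()      # delegates to repo.commitctx(self)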
@@ -1,857 +1,861 @@
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os, mimetypes, re, cgi, copy
9 9 import webutil
10 10 from mercurial import error, encoding, archival, templater, templatefilters
11 11 from mercurial.node import short, hex
12 12 from mercurial.util import binary
13 13 from common import paritygen, staticfile, get_contact, ErrorResponse
14 14 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
15 15 from mercurial import graphmod, patch
16 16 from mercurial import help as helpmod
17 17 from mercurial.i18n import _
18 18
19 19 # __all__ is populated with the allowed commands. Be sure to add to it if
20 20 # you're adding a new command, or the new command won't work.
21 21
22 22 __all__ = [
23 23 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
24 24 'manifest', 'tags', 'bookmarks', 'branches', 'summary', 'filediff', 'diff',
25 25 'annotate', 'filelog', 'archive', 'static', 'graph', 'help',
26 26 ]
27 27
28 28 def log(web, req, tmpl):
29 29 if 'file' in req.form and req.form['file'][0]:
30 30 return filelog(web, req, tmpl)
31 31 else:
32 32 return changelog(web, req, tmpl)
33 33
34 34 def rawfile(web, req, tmpl):
35 35 guessmime = web.configbool('web', 'guessmime', False)
36 36
37 37 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
38 38 if not path:
39 39 content = manifest(web, req, tmpl)
40 40 req.respond(HTTP_OK, web.ctype)
41 41 return content
42 42
43 43 try:
44 44 fctx = webutil.filectx(web.repo, req)
45 45 except error.LookupError, inst:
46 46 try:
47 47 content = manifest(web, req, tmpl)
48 48 req.respond(HTTP_OK, web.ctype)
49 49 return content
50 50 except ErrorResponse:
51 51 raise inst
52 52
53 53 path = fctx.path()
54 54 text = fctx.data()
55 55 mt = 'application/binary'
56 56 if guessmime:
57 57 mt = mimetypes.guess_type(path)[0]
58 58 if mt is None:
59 59 mt = binary(text) and 'application/binary' or 'text/plain'
60 60 if mt.startswith('text/'):
61 61 mt += '; charset="%s"' % encoding.encoding
62 62
63 63 req.respond(HTTP_OK, mt, path, len(text))
64 64 return [text]
65 65
66 66 def _filerevision(web, tmpl, fctx):
67 67 f = fctx.path()
68 68 text = fctx.data()
69 69 parity = paritygen(web.stripecount)
70 70
71 71 if binary(text):
72 72 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
73 73 text = '(binary:%s)' % mt
74 74
75 75 def lines():
76 76 for lineno, t in enumerate(text.splitlines(True)):
77 77 yield {"line": t,
78 78 "lineid": "l%d" % (lineno + 1),
79 79 "linenumber": "% 6d" % (lineno + 1),
80 80 "parity": parity.next()}
81 81
82 82 return tmpl("filerevision",
83 83 file=f,
84 84 path=webutil.up(f),
85 85 text=lines(),
86 86 rev=fctx.rev(),
87 87 node=fctx.hex(),
88 88 author=fctx.user(),
89 89 date=fctx.date(),
90 90 desc=fctx.description(),
91 91 branch=webutil.nodebranchnodefault(fctx),
92 92 parent=webutil.parents(fctx),
93 93 child=webutil.children(fctx),
94 94 rename=webutil.renamelink(fctx),
95 95 permissions=fctx.manifest().flags(f))
96 96
97 97 def file(web, req, tmpl):
98 98 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
99 99 if not path:
100 100 return manifest(web, req, tmpl)
101 101 try:
102 102 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
103 103 except error.LookupError, inst:
104 104 try:
105 105 return manifest(web, req, tmpl)
106 106 except ErrorResponse:
107 107 raise inst
108 108
109 109 def _search(web, req, tmpl):
110 110
111 111 query = req.form['rev'][0]
112 112 revcount = web.maxchanges
113 113 if 'revcount' in req.form:
114 114 revcount = int(req.form.get('revcount', [revcount])[0])
115 115 revcount = max(revcount, 1)
116 116 tmpl.defaults['sessionvars']['revcount'] = revcount
117 117
118 118 lessvars = copy.copy(tmpl.defaults['sessionvars'])
119 119 lessvars['revcount'] = max(revcount / 2, 1)
120 120 lessvars['rev'] = query
121 121 morevars = copy.copy(tmpl.defaults['sessionvars'])
122 122 morevars['revcount'] = revcount * 2
123 123 morevars['rev'] = query
124 124
125 125 def changelist(**map):
126 126 count = 0
127 127 lower = encoding.lower
128 128 qw = lower(query).split()
129 129
130 130 def revgen():
131 131 for i in xrange(len(web.repo) - 1, 0, -100):
132 132 l = []
133 133 for j in xrange(max(0, i - 100), i + 1):
134 134 ctx = web.repo[j]
135 135 l.append(ctx)
136 136 l.reverse()
137 137 for e in l:
138 138 yield e
139 139
140 140 for ctx in revgen():
141 141 miss = 0
142 142 for q in qw:
143 143 if not (q in lower(ctx.user()) or
144 144 q in lower(ctx.description()) or
145 145 q in lower(" ".join(ctx.files()))):
146 146 miss = 1
147 147 break
148 148 if miss:
149 149 continue
150 150
151 151 count += 1
152 152 n = ctx.node()
153 153 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
154 154 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
155 155
156 156 yield tmpl('searchentry',
157 157 parity=parity.next(),
158 158 author=ctx.user(),
159 159 parent=webutil.parents(ctx),
160 160 child=webutil.children(ctx),
161 161 changelogtag=showtags,
162 162 desc=ctx.description(),
163 163 date=ctx.date(),
164 164 files=files,
165 165 rev=ctx.rev(),
166 166 node=hex(n),
167 167 tags=webutil.nodetagsdict(web.repo, n),
168 168 bookmarks=webutil.nodebookmarksdict(web.repo, n),
169 169 inbranch=webutil.nodeinbranch(web.repo, ctx),
170 170 branches=webutil.nodebranchdict(web.repo, ctx))
171 171
172 172 if count >= revcount:
173 173 break
174 174
175 175 tip = web.repo['tip']
176 176 parity = paritygen(web.stripecount)
177 177
178 178 return tmpl('search', query=query, node=tip.hex(),
179 179 entries=changelist, archives=web.archivelist("tip"),
180 180 morevars=morevars, lessvars=lessvars)
181 181
182 182 def changelog(web, req, tmpl, shortlog=False):
183 183
184 184 if 'node' in req.form:
185 185 ctx = webutil.changectx(web.repo, req)
186 186 else:
187 187 if 'rev' in req.form:
188 188 hi = req.form['rev'][0]
189 189 else:
190 190 hi = len(web.repo) - 1
191 191 try:
192 192 ctx = web.repo[hi]
193 193 except error.RepoError:
194 194 return _search(web, req, tmpl) # XXX redirect to 404 page?
195 195
196 196 def changelist(limit=0, **map):
197 197 l = [] # build a list in forward order for efficiency
198 198 for i in xrange(start, end):
199 199 ctx = web.repo[i]
200 200 n = ctx.node()
201 201 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
202 202 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
203 203
204 204 l.insert(0, {"parity": parity.next(),
205 205 "author": ctx.user(),
206 206 "parent": webutil.parents(ctx, i - 1),
207 207 "child": webutil.children(ctx, i + 1),
208 208 "changelogtag": showtags,
209 209 "desc": ctx.description(),
210 210 "date": ctx.date(),
211 211 "files": files,
212 212 "rev": i,
213 213 "node": hex(n),
214 214 "tags": webutil.nodetagsdict(web.repo, n),
215 215 "bookmarks": webutil.nodebookmarksdict(web.repo, n),
216 216 "inbranch": webutil.nodeinbranch(web.repo, ctx),
217 217 "branches": webutil.nodebranchdict(web.repo, ctx)
218 218 })
219 219
220 220 if limit > 0:
221 221 l = l[:limit]
222 222
223 223 for e in l:
224 224 yield e
225 225
226 226 revcount = shortlog and web.maxshortchanges or web.maxchanges
227 227 if 'revcount' in req.form:
228 228 revcount = int(req.form.get('revcount', [revcount])[0])
229 229 revcount = max(revcount, 1)
230 230 tmpl.defaults['sessionvars']['revcount'] = revcount
231 231
232 232 lessvars = copy.copy(tmpl.defaults['sessionvars'])
233 233 lessvars['revcount'] = max(revcount / 2, 1)
234 234 morevars = copy.copy(tmpl.defaults['sessionvars'])
235 235 morevars['revcount'] = revcount * 2
236 236
237 237 count = len(web.repo)
238 238 pos = ctx.rev()
239 239 start = max(0, pos - revcount + 1)
240 240 end = min(count, start + revcount)
241 241 pos = end - 1
242 242 parity = paritygen(web.stripecount, offset=start - end)
243 243
244 244 changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx)
245 245
246 246 return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
247 247 node=ctx.hex(), rev=pos, changesets=count,
248 248 entries=lambda **x: changelist(limit=0,**x),
249 249 latestentry=lambda **x: changelist(limit=1,**x),
250 250 archives=web.archivelist("tip"), revcount=revcount,
251 251 morevars=morevars, lessvars=lessvars)
252 252
253 253 def shortlog(web, req, tmpl):
254 254 return changelog(web, req, tmpl, shortlog = True)
255 255
256 256 def changeset(web, req, tmpl):
257 257 ctx = webutil.changectx(web.repo, req)
258 258 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
259 259 showbookmarks = webutil.showbookmark(web.repo, tmpl, 'changesetbookmark',
260 260 ctx.node())
261 261 showbranch = webutil.nodebranchnodefault(ctx)
262 262
263 263 files = []
264 264 parity = paritygen(web.stripecount)
265 265 for blockno, f in enumerate(ctx.files()):
266 266 template = f in ctx and 'filenodelink' or 'filenolink'
267 267 files.append(tmpl(template,
268 268 node=ctx.hex(), file=f, blockno=blockno + 1,
269 269 parity=parity.next()))
270 270
271 271 style = web.config('web', 'style', 'paper')
272 272 if 'style' in req.form:
273 273 style = req.form['style'][0]
274 274
275 275 parity = paritygen(web.stripecount)
276 276 diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity, style)
277 277
278 278 parity = paritygen(web.stripecount)
279 279 diffstatgen = webutil.diffstatgen(ctx)
280 280 diffstat = webutil.diffstat(tmpl, ctx, diffstatgen, parity)
281 281
282 282 return tmpl('changeset',
283 283 diff=diffs,
284 284 rev=ctx.rev(),
285 285 node=ctx.hex(),
286 286 parent=webutil.parents(ctx),
287 287 child=webutil.children(ctx),
288 288 changesettag=showtags,
289 289 changesetbookmark=showbookmarks,
290 290 changesetbranch=showbranch,
291 291 author=ctx.user(),
292 292 desc=ctx.description(),
293 293 date=ctx.date(),
294 294 files=files,
295 295 diffsummary=lambda **x: webutil.diffsummary(diffstatgen),
296 296 diffstat=diffstat,
297 297 archives=web.archivelist(ctx.hex()),
298 298 tags=webutil.nodetagsdict(web.repo, ctx.node()),
299 299 bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()),
300 300 branch=webutil.nodebranchnodefault(ctx),
301 301 inbranch=webutil.nodeinbranch(web.repo, ctx),
302 302 branches=webutil.nodebranchdict(web.repo, ctx))
303 303
304 304 rev = changeset
305 305
306 306 def decodepath(path):
307 307 """Hook for mapping a path in the repository to a path in the
308 308 working copy.
309 309
310 310 Extensions (e.g., largefiles) can override this to remap files in
311 311 the virtual file system presented by the manifest command below."""
312 312 return path
313 313
314 314 def manifest(web, req, tmpl):
315 315 ctx = webutil.changectx(web.repo, req)
316 316 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
317 317 mf = ctx.manifest()
318 318 node = ctx.node()
319 319
320 320 files = {}
321 321 dirs = {}
322 322 parity = paritygen(web.stripecount)
323 323
324 324 if path and path[-1] != "/":
325 325 path += "/"
326 326 l = len(path)
327 327 abspath = "/" + path
328 328
329 329 for full, n in mf.iteritems():
330 330 # the virtual path (working copy path) used for the full
331 331 # (repository) path
332 332 f = decodepath(full)
333 333
334 334 if f[:l] != path:
335 335 continue
336 336 remain = f[l:]
337 337 elements = remain.split('/')
338 338 if len(elements) == 1:
339 339 files[remain] = full
340 340 else:
341 341 h = dirs # need to retain ref to dirs (root)
342 342 for elem in elements[0:-1]:
343 343 if elem not in h:
344 344 h[elem] = {}
345 345 h = h[elem]
346 346 if len(h) > 1:
347 347 break
348 348 h[None] = None # denotes files present
349 349
350 350 if mf and not files and not dirs:
351 351 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
352 352
353 353 def filelist(**map):
354 354 for f in sorted(files):
355 355 full = files[f]
356 356
357 357 fctx = ctx.filectx(full)
358 358 yield {"file": full,
359 359 "parity": parity.next(),
360 360 "basename": f,
361 361 "date": fctx.date(),
362 362 "size": fctx.size(),
363 363 "permissions": mf.flags(full)}
364 364
365 365 def dirlist(**map):
366 366 for d in sorted(dirs):
367 367
368 368 emptydirs = []
369 369 h = dirs[d]
370 370 while isinstance(h, dict) and len(h) == 1:
371 371 k, v = h.items()[0]
372 372 if v:
373 373 emptydirs.append(k)
374 374 h = v
375 375
376 376 path = "%s%s" % (abspath, d)
377 377 yield {"parity": parity.next(),
378 378 "path": path,
379 379 "emptydirs": "/".join(emptydirs),
380 380 "basename": d}
381 381
382 382 return tmpl("manifest",
383 383 rev=ctx.rev(),
384 384 node=hex(node),
385 385 path=abspath,
386 386 up=webutil.up(abspath),
387 387 upparity=parity.next(),
388 388 fentries=filelist,
389 389 dentries=dirlist,
390 390 archives=web.archivelist(hex(node)),
391 391 tags=webutil.nodetagsdict(web.repo, node),
392 392 bookmarks=webutil.nodebookmarksdict(web.repo, node),
393 393 inbranch=webutil.nodeinbranch(web.repo, ctx),
394 394 branches=webutil.nodebranchdict(web.repo, ctx))
395 395
396 396 def tags(web, req, tmpl):
397 397 i = web.repo.tagslist()
398 398 i.reverse()
399 399 parity = paritygen(web.stripecount)
400 400
401 401 def entries(notip=False, limit=0, **map):
402 402 count = 0
403 403 for k, n in i:
404 404 if notip and k == "tip":
405 405 continue
406 406 if limit > 0 and count >= limit:
407 407 continue
408 408 count = count + 1
409 409 yield {"parity": parity.next(),
410 410 "tag": k,
411 411 "date": web.repo[n].date(),
412 412 "node": hex(n)}
413 413
414 414 return tmpl("tags",
415 415 node=hex(web.repo.changelog.tip()),
416 416 entries=lambda **x: entries(False, 0, **x),
417 417 entriesnotip=lambda **x: entries(True, 0, **x),
418 418 latestentry=lambda **x: entries(True, 1, **x))
419 419
420 420 def bookmarks(web, req, tmpl):
421 421 i = web.repo._bookmarks.items()
422 422 parity = paritygen(web.stripecount)
423 423
424 424 def entries(limit=0, **map):
425 425 count = 0
426 426 for k, n in sorted(i):
427 427 if limit > 0 and count >= limit:
428 428 continue
429 429 count = count + 1
430 430 yield {"parity": parity.next(),
431 431 "bookmark": k,
432 432 "date": web.repo[n].date(),
433 433 "node": hex(n)}
434 434
435 435 return tmpl("bookmarks",
436 436 node=hex(web.repo.changelog.tip()),
437 437 entries=lambda **x: entries(0, **x),
438 438 latestentry=lambda **x: entries(1, **x))
439 439
440 440 def branches(web, req, tmpl):
441 441 tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
442 442 heads = web.repo.heads()
443 443 parity = paritygen(web.stripecount)
444 444 sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())
445 445
446 446 def entries(limit, **map):
447 447 count = 0
448 448 for ctx in sorted(tips, key=sortkey, reverse=True):
449 449 if limit > 0 and count >= limit:
450 450 return
451 451 count += 1
452 452 if not web.repo.branchheads(ctx.branch()):
453 453 status = 'closed'
454 454 elif ctx.node() not in heads:
455 455 status = 'inactive'
456 456 else:
457 457 status = 'open'
458 458 yield {'parity': parity.next(),
459 459 'branch': ctx.branch(),
460 460 'status': status,
461 461 'node': ctx.hex(),
462 462 'date': ctx.date()}
463 463
464 464 return tmpl('branches', node=hex(web.repo.changelog.tip()),
465 465 entries=lambda **x: entries(0, **x),
466 466 latestentry=lambda **x: entries(1, **x))
467 467
468 468 def summary(web, req, tmpl):
469 469 i = web.repo.tagslist()
470 470 i.reverse()
471 471
472 472 def tagentries(**map):
473 473 parity = paritygen(web.stripecount)
474 474 count = 0
475 475 for k, n in i:
476 476 if k == "tip": # skip tip
477 477 continue
478 478
479 479 count += 1
480 480 if count > 10: # limit to 10 tags
481 481 break
482 482
483 483 yield tmpl("tagentry",
484 484 parity=parity.next(),
485 485 tag=k,
486 486 node=hex(n),
487 487 date=web.repo[n].date())
488 488
489 489 def bookmarks(**map):
490 490 parity = paritygen(web.stripecount)
491 491 b = web.repo._bookmarks.items()
492 492 for k, n in sorted(b)[:10]: # limit to 10 bookmarks
493 493 yield {'parity': parity.next(),
494 494 'bookmark': k,
495 495 'date': web.repo[n].date(),
496 496 'node': hex(n)}
497 497
498 498 def branches(**map):
499 499 parity = paritygen(web.stripecount)
500 500
501 501 b = web.repo.branchtags()
502 502 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()]
503 503 for r, n, t in sorted(l):
504 504 yield {'parity': parity.next(),
505 505 'branch': t,
506 506 'node': hex(n),
507 507 'date': web.repo[n].date()}
508 508
509 509 def changelist(**map):
510 510 parity = paritygen(web.stripecount, offset=start - end)
511 511 l = [] # build a list in forward order for efficiency
512 512 for i in xrange(start, end):
513 513 ctx = web.repo[i]
514 514 n = ctx.node()
515 515 hn = hex(n)
516 516
517 517 l.insert(0, tmpl(
518 518 'shortlogentry',
519 519 parity=parity.next(),
520 520 author=ctx.user(),
521 521 desc=ctx.description(),
522 522 date=ctx.date(),
523 523 rev=i,
524 524 node=hn,
525 525 tags=webutil.nodetagsdict(web.repo, n),
526 526 bookmarks=webutil.nodebookmarksdict(web.repo, n),
527 527 inbranch=webutil.nodeinbranch(web.repo, ctx),
528 528 branches=webutil.nodebranchdict(web.repo, ctx)))
529 529
530 530 yield l
531 531
532 532 tip = web.repo['tip']
533 533 count = len(web.repo)
534 534 start = max(0, count - web.maxchanges)
535 535 end = min(count, start + web.maxchanges)
536 536
537 537 return tmpl("summary",
538 538 desc=web.config("web", "description", "unknown"),
539 539 owner=get_contact(web.config) or "unknown",
540 540 lastchange=tip.date(),
541 541 tags=tagentries,
542 542 bookmarks=bookmarks,
543 543 branches=branches,
544 544 shortlog=changelist,
545 545 node=tip.hex(),
546 546 archives=web.archivelist("tip"))
547 547
548 548 def filediff(web, req, tmpl):
549 549 fctx, ctx = None, None
550 550 try:
551 551 fctx = webutil.filectx(web.repo, req)
552 552 except LookupError:
553 553 ctx = webutil.changectx(web.repo, req)
554 554 path = webutil.cleanpath(web.repo, req.form['file'][0])
555 555 if path not in ctx.files():
556 556 raise
557 557
558 558 if fctx is not None:
559 559 n = fctx.node()
560 560 path = fctx.path()
561 561 else:
562 562 n = ctx.node()
563 563 # path already defined in except clause
564 564
565 565 parity = paritygen(web.stripecount)
566 566 style = web.config('web', 'style', 'paper')
567 567 if 'style' in req.form:
568 568 style = req.form['style'][0]
569 569
570 570 diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity, style)
571 571 rename = fctx and webutil.renamelink(fctx) or []
572 572 ctx = fctx and fctx or ctx
573 573 return tmpl("filediff",
574 574 file=path,
575 575 node=hex(n),
576 576 rev=ctx.rev(),
577 577 date=ctx.date(),
578 578 desc=ctx.description(),
579 579 author=ctx.user(),
580 580 rename=rename,
581 581 branch=webutil.nodebranchnodefault(ctx),
582 582 parent=webutil.parents(ctx),
583 583 child=webutil.children(ctx),
584 584 diff=diffs)
585 585
586 586 diff = filediff
587 587
588 588 def annotate(web, req, tmpl):
589 589 fctx = webutil.filectx(web.repo, req)
590 590 f = fctx.path()
591 591 parity = paritygen(web.stripecount)
592 592 diffopts = patch.diffopts(web.repo.ui, untrusted=True, section='annotate')
593 593
594 594 def annotate(**map):
595 595 last = None
596 596 if binary(fctx.data()):
597 597 mt = (mimetypes.guess_type(fctx.path())[0]
598 598 or 'application/octet-stream')
599 599 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
600 600 '(binary:%s)' % mt)])
601 601 else:
602 602 lines = enumerate(fctx.annotate(follow=True, linenumber=True,
603 603 diffopts=diffopts))
604 604 for lineno, ((f, targetline), l) in lines:
605 605 fnode = f.filenode()
606 606
607 607 if last != fnode:
608 608 last = fnode
609 609
610 610 yield {"parity": parity.next(),
611 611 "node": f.hex(),
612 612 "rev": f.rev(),
613 613 "author": f.user(),
614 614 "desc": f.description(),
615 615 "file": f.path(),
616 616 "targetline": targetline,
617 617 "line": l,
618 618 "lineid": "l%d" % (lineno + 1),
619 619 "linenumber": "% 6d" % (lineno + 1),
620 620 "revdate": f.date()}
621 621
622 622 return tmpl("fileannotate",
623 623 file=f,
624 624 annotate=annotate,
625 625 path=webutil.up(f),
626 626 rev=fctx.rev(),
627 627 node=fctx.hex(),
628 628 author=fctx.user(),
629 629 date=fctx.date(),
630 630 desc=fctx.description(),
631 631 rename=webutil.renamelink(fctx),
632 632 branch=webutil.nodebranchnodefault(fctx),
633 633 parent=webutil.parents(fctx),
634 634 child=webutil.children(fctx),
635 635 permissions=fctx.manifest().flags(f))
636 636
637 637 def filelog(web, req, tmpl):
638 638
639 639 try:
640 640 fctx = webutil.filectx(web.repo, req)
641 641 f = fctx.path()
642 642 fl = fctx.filelog()
643 643 except error.LookupError:
644 644 f = webutil.cleanpath(web.repo, req.form['file'][0])
645 645 fl = web.repo.file(f)
646 646 numrevs = len(fl)
647 647 if not numrevs: # file doesn't exist at all
648 648 raise
649 649 rev = webutil.changectx(web.repo, req).rev()
650 650 first = fl.linkrev(0)
651 651 if rev < first: # current rev is from before file existed
652 652 raise
653 653 frev = numrevs - 1
654 654 while fl.linkrev(frev) > rev:
655 655 frev -= 1
656 656 fctx = web.repo.filectx(f, fl.linkrev(frev))
657 657
658 658 revcount = web.maxshortchanges
659 659 if 'revcount' in req.form:
660 660 revcount = int(req.form.get('revcount', [revcount])[0])
661 661 revcount = max(revcount, 1)
662 662 tmpl.defaults['sessionvars']['revcount'] = revcount
663 663
664 664 lessvars = copy.copy(tmpl.defaults['sessionvars'])
665 665 lessvars['revcount'] = max(revcount / 2, 1)
666 666 morevars = copy.copy(tmpl.defaults['sessionvars'])
667 667 morevars['revcount'] = revcount * 2
668 668
669 669 count = fctx.filerev() + 1
670 670 start = max(0, fctx.filerev() - revcount + 1) # first rev on this page
671 671 end = min(count, start + revcount) # last rev on this page
672 672 parity = paritygen(web.stripecount, offset=start - end)
673 673
674 674 def entries(limit=0, **map):
675 675 l = []
676 676
677 677 repo = web.repo
678 678 for i in xrange(start, end):
679 679 iterfctx = fctx.filectx(i)
680 680
681 681 l.insert(0, {"parity": parity.next(),
682 682 "filerev": i,
683 683 "file": f,
684 684 "node": iterfctx.hex(),
685 685 "author": iterfctx.user(),
686 686 "date": iterfctx.date(),
687 687 "rename": webutil.renamelink(iterfctx),
688 688 "parent": webutil.parents(iterfctx),
689 689 "child": webutil.children(iterfctx),
690 690 "desc": iterfctx.description(),
691 691 "tags": webutil.nodetagsdict(repo, iterfctx.node()),
692 692 "bookmarks": webutil.nodebookmarksdict(
693 693 repo, iterfctx.node()),
694 694 "branch": webutil.nodebranchnodefault(iterfctx),
695 695 "inbranch": webutil.nodeinbranch(repo, iterfctx),
696 696 "branches": webutil.nodebranchdict(repo, iterfctx)})
697 697
698 698 if limit > 0:
699 699 l = l[:limit]
700 700
701 701 for e in l:
702 702 yield e
703 703
704 704 nodefunc = lambda x: fctx.filectx(fileid=x)
705 705 nav = webutil.revnavgen(end - 1, revcount, count, nodefunc)
706 706 return tmpl("filelog", file=f, node=fctx.hex(), nav=nav,
707 707 entries=lambda **x: entries(limit=0, **x),
708 708 latestentry=lambda **x: entries(limit=1, **x),
709 709 revcount=revcount, morevars=morevars, lessvars=lessvars)
710 710
711 711 def archive(web, req, tmpl):
712 712 type_ = req.form.get('type', [None])[0]
713 713 allowed = web.configlist("web", "allow_archive")
714 714 key = req.form['node'][0]
715 715
716 716 if type_ not in web.archives:
717 717 msg = 'Unsupported archive type: %s' % type_
718 718 raise ErrorResponse(HTTP_NOT_FOUND, msg)
719 719
720 720 if not ((type_ in allowed or
721 721 web.configbool("web", "allow" + type_, False))):
722 722 msg = 'Archive type not allowed: %s' % type_
723 723 raise ErrorResponse(HTTP_FORBIDDEN, msg)
724 724
725 725 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
726 726 cnode = web.repo.lookup(key)
727 727 arch_version = key
728 728 if cnode == key or key == 'tip':
729 729 arch_version = short(cnode)
730 730 name = "%s-%s" % (reponame, arch_version)
731 731 mimetype, artype, extension, encoding = web.archive_specs[type_]
732 732 headers = [
733 733 ('Content-Type', mimetype),
734 734 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
735 735 ]
736 736 if encoding:
737 737 headers.append(('Content-Encoding', encoding))
738 738 req.header(headers)
739 739 req.respond(HTTP_OK)
740 740 archival.archive(web.repo, req, cnode, artype, prefix=name)
741 741 return []
742 742
743 743
744 744 def static(web, req, tmpl):
745 745 fname = req.form['file'][0]
746 746 # a repo owner may set web.static in .hg/hgrc to get any file
747 747 # readable by the user running the CGI script
748 748 static = web.config("web", "static", None, untrusted=False)
749 749 if not static:
750 750 tp = web.templatepath or templater.templatepath()
751 751 if isinstance(tp, str):
752 752 tp = [tp]
753 753 static = [os.path.join(p, 'static') for p in tp]
754 754 return [staticfile(static, fname, req)]
755 755
756 756 def graph(web, req, tmpl):
757 757
758 758 rev = webutil.changectx(web.repo, req).rev()
759 759 bg_height = 39
760 760 revcount = web.maxshortchanges
761 761 if 'revcount' in req.form:
762 762 revcount = int(req.form.get('revcount', [revcount])[0])
763 763 revcount = max(revcount, 1)
764 764 tmpl.defaults['sessionvars']['revcount'] = revcount
765 765
766 766 lessvars = copy.copy(tmpl.defaults['sessionvars'])
767 767 lessvars['revcount'] = max(revcount / 2, 1)
768 768 morevars = copy.copy(tmpl.defaults['sessionvars'])
769 769 morevars['revcount'] = revcount * 2
770 770
771 771 max_rev = len(web.repo) - 1
772 772 revcount = min(max_rev, revcount)
773 773 revnode = web.repo.changelog.node(rev)
774 774 revnode_hex = hex(revnode)
775 775 uprev = min(max_rev, rev + revcount)
776 776 downrev = max(0, rev - revcount)
777 777 count = len(web.repo)
778 778 changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
779 779 startrev = rev
780 780 # if starting revision is less than maxshortchanges (60 by default), set it to uprev
781 781 if rev < web.maxshortchanges:
782 782 startrev = uprev
783 783
784 784 dag = graphmod.dagwalker(web.repo, range(startrev, downrev - 1, -1))
785 785 tree = list(graphmod.colored(dag, web.repo))
786 786 canvasheight = (len(tree) + 1) * bg_height - 27
787 787 data = []
788 788 for (id, type, ctx, vtx, edges) in tree:
789 789 if type != graphmod.CHANGESET:
790 790 continue
791 791 node = str(ctx)
792 792 age = templatefilters.age(ctx.date())
793 793 desc = templatefilters.firstline(ctx.description())
794 794 desc = cgi.escape(templatefilters.nonempty(desc))
795 795 user = cgi.escape(templatefilters.person(ctx.user()))
796 796 branch = ctx.branch()
797 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
797 try:
798 branchnode = web.repo.branchtip(branch)
799 except error.RepoLookupError:
800 branchnode = None
801 branch = branch, branchnode == ctx.node()
798 802 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags(),
799 803 ctx.bookmarks()))
800 804
801 805 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
802 806 lessvars=lessvars, morevars=morevars, downrev=downrev,
803 807 canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
804 808 node=revnode_hex, changenav=changenav)
805 809
806 810 def _getdoc(e):
807 811 doc = e[0].__doc__
808 812 if doc:
809 813 doc = _(doc).split('\n')[0]
810 814 else:
811 815 doc = _('(no help text available)')
812 816 return doc
813 817
814 818 def help(web, req, tmpl):
815 819 from mercurial import commands # avoid cycle
816 820
817 821 topicname = req.form.get('node', [None])[0]
818 822 if not topicname:
819 823 def topics(**map):
820 824 for entries, summary, _ in helpmod.helptable:
821 825 entries = sorted(entries, key=len)
822 826 yield {'topic': entries[-1], 'summary': summary}
823 827
824 828 early, other = [], []
825 829 primary = lambda s: s.split('|')[0]
826 830 for c, e in commands.table.iteritems():
827 831 doc = _getdoc(e)
828 832 if 'DEPRECATED' in doc or c.startswith('debug'):
829 833 continue
830 834 cmd = primary(c)
831 835 if cmd.startswith('^'):
832 836 early.append((cmd[1:], doc))
833 837 else:
834 838 other.append((cmd, doc))
835 839
836 840 early.sort()
837 841 other.sort()
838 842
839 843 def earlycommands(**map):
840 844 for c, doc in early:
841 845 yield {'topic': c, 'summary': doc}
842 846
843 847 def othercommands(**map):
844 848 for c, doc in other:
845 849 yield {'topic': c, 'summary': doc}
846 850
847 851 return tmpl('helptopics', topics=topics, earlycommands=earlycommands,
848 852 othercommands=othercommands, title='Index')
849 853
850 854 u = webutil.wsgiui()
851 855 u.pushbuffer()
852 856 try:
853 857 commands.help_(u, topicname)
854 858 except error.UnknownCommand:
855 859 raise ErrorResponse(HTTP_NOT_FOUND)
856 860 doc = u.popbuffer()
857 861 return tmpl('help', topic=topicname, doc=doc)
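The graph() change above and the webutil.py changes that follow replace whole-dictionary lookups via branchtags() with a single-branch query: ask the repository for one branch tip and treat an unknown branch as having no tip. A minimal sketch of that caller pattern, assuming a localrepository instance repo (branchtipornone is a hypothetical helper name, not part of the changeset):

    from mercurial import error

    def branchtipornone(repo, branch):
        # branchtip() raises RepoLookupError for branches that do not
        # exist in branchmap(); callers in this changeset fall back to None.
        try:
            return repo.branchtip(branch)
        except error.RepoLookupError:
            return None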
@@ -1,270 +1,277 b''
1 1 # hgweb/webutil.py - utility library for the web interface.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import os, copy
10 10 from mercurial import match, patch, scmutil, error, ui, util
11 11 from mercurial.i18n import _
12 12 from mercurial.node import hex, nullid
13 13
14 14 def up(p):
15 15 if p[0] != "/":
16 16 p = "/" + p
17 17 if p[-1] == "/":
18 18 p = p[:-1]
19 19 up = os.path.dirname(p)
20 20 if up == "/":
21 21 return "/"
22 22 return up + "/"
23 23
24 24 def revnavgen(pos, pagelen, limit, nodefunc):
25 25 def seq(factor, limit=None):
26 26 if limit:
27 27 yield limit
28 28 if limit >= 20 and limit <= 40:
29 29 yield 50
30 30 else:
31 31 yield 1 * factor
32 32 yield 3 * factor
33 33 for f in seq(factor * 10):
34 34 yield f
35 35
36 36 navbefore = []
37 37 navafter = []
38 38
39 39 last = 0
40 40 for f in seq(1, pagelen):
41 41 if f < pagelen or f <= last:
42 42 continue
43 43 if f > limit:
44 44 break
45 45 last = f
46 46 if pos + f < limit:
47 47 navafter.append(("+%d" % f, hex(nodefunc(pos + f).node())))
48 48 if pos - f >= 0:
49 49 navbefore.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
50 50
51 51 navafter.append(("tip", "tip"))
52 52 try:
53 53 navbefore.insert(0, ("(0)", hex(nodefunc('0').node())))
54 54 except error.RepoError:
55 55 pass
56 56
57 57 def gen(l):
58 58 def f(**map):
59 59 for label, node in l:
60 60 yield {"label": label, "node": node}
61 61 return f
62 62
63 63 return (dict(before=gen(navbefore), after=gen(navafter)),)
64 64
65 65 def _siblings(siblings=[], hiderev=None):
66 66 siblings = [s for s in siblings if s.node() != nullid]
67 67 if len(siblings) == 1 and siblings[0].rev() == hiderev:
68 68 return
69 69 for s in siblings:
70 70 d = {'node': s.hex(), 'rev': s.rev()}
71 71 d['user'] = s.user()
72 72 d['date'] = s.date()
73 73 d['description'] = s.description()
74 74 d['branch'] = s.branch()
75 75 if util.safehasattr(s, 'path'):
76 76 d['file'] = s.path()
77 77 yield d
78 78
79 79 def parents(ctx, hide=None):
80 80 return _siblings(ctx.parents(), hide)
81 81
82 82 def children(ctx, hide=None):
83 83 return _siblings(ctx.children(), hide)
84 84
85 85 def renamelink(fctx):
86 86 r = fctx.renamed()
87 87 if r:
88 88 return [dict(file=r[0], node=hex(r[1]))]
89 89 return []
90 90
91 91 def nodetagsdict(repo, node):
92 92 return [{"name": i} for i in repo.nodetags(node)]
93 93
94 94 def nodebookmarksdict(repo, node):
95 95 return [{"name": i} for i in repo.nodebookmarks(node)]
96 96
97 97 def nodebranchdict(repo, ctx):
98 98 branches = []
99 99 branch = ctx.branch()
100 100 # If this is an empty repo, ctx.node() == nullid,
101 # ctx.branch() == 'default', but branchtags() is
102 # an empty dict. Using dict.get avoids a traceback.
103 if repo.branchtags().get(branch) == ctx.node():
101 # ctx.branch() == 'default'.
102 try:
103 branchnode = repo.branchtip(branch)
104 except error.RepoLookupError:
105 branchnode = None
106 if branchnode == ctx.node():
104 107 branches.append({"name": branch})
105 108 return branches
106 109
107 110 def nodeinbranch(repo, ctx):
108 111 branches = []
109 112 branch = ctx.branch()
110 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
113 try:
114 branchnode = repo.branchtip(branch)
115 except error.RepoLookupError:
116 branchnode = None
117 if branch != 'default' and branchnode != ctx.node():
111 118 branches.append({"name": branch})
112 119 return branches
113 120
114 121 def nodebranchnodefault(ctx):
115 122 branches = []
116 123 branch = ctx.branch()
117 124 if branch != 'default':
118 125 branches.append({"name": branch})
119 126 return branches
120 127
121 128 def showtag(repo, tmpl, t1, node=nullid, **args):
122 129 for t in repo.nodetags(node):
123 130 yield tmpl(t1, tag=t, **args)
124 131
125 132 def showbookmark(repo, tmpl, t1, node=nullid, **args):
126 133 for t in repo.nodebookmarks(node):
127 134 yield tmpl(t1, bookmark=t, **args)
128 135
129 136 def cleanpath(repo, path):
130 137 path = path.lstrip('/')
131 138 return scmutil.canonpath(repo.root, '', path)
132 139
133 140 def changectx(repo, req):
134 141 changeid = "tip"
135 142 if 'node' in req.form:
136 143 changeid = req.form['node'][0]
137 144 elif 'manifest' in req.form:
138 145 changeid = req.form['manifest'][0]
139 146
140 147 try:
141 148 ctx = repo[changeid]
142 149 except error.RepoError:
143 150 man = repo.manifest
144 151 ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]
145 152
146 153 return ctx
147 154
148 155 def filectx(repo, req):
149 156 path = cleanpath(repo, req.form['file'][0])
150 157 if 'node' in req.form:
151 158 changeid = req.form['node'][0]
152 159 else:
153 160 changeid = req.form['filenode'][0]
154 161 try:
155 162 fctx = repo[changeid][path]
156 163 except error.RepoError:
157 164 fctx = repo.filectx(path, fileid=changeid)
158 165
159 166 return fctx
160 167
161 168 def listfilediffs(tmpl, files, node, max):
162 169 for f in files[:max]:
163 170 yield tmpl('filedifflink', node=hex(node), file=f)
164 171 if len(files) > max:
165 172 yield tmpl('fileellipses')
166 173
167 174 def diffs(repo, tmpl, ctx, files, parity, style):
168 175
169 176 def countgen():
170 177 start = 1
171 178 while True:
172 179 yield start
173 180 start += 1
174 181
175 182 blockcount = countgen()
176 183 def prettyprintlines(diff, blockno):
177 184 for lineno, l in enumerate(diff.splitlines(True)):
178 185 lineno = "%d.%d" % (blockno, lineno + 1)
179 186 if l.startswith('+'):
180 187 ltype = "difflineplus"
181 188 elif l.startswith('-'):
182 189 ltype = "difflineminus"
183 190 elif l.startswith('@'):
184 191 ltype = "difflineat"
185 192 else:
186 193 ltype = "diffline"
187 194 yield tmpl(ltype,
188 195 line=l,
189 196 lineid="l%s" % lineno,
190 197 linenumber="% 8s" % lineno)
191 198
192 199 if files:
193 200 m = match.exact(repo.root, repo.getcwd(), files)
194 201 else:
195 202 m = match.always(repo.root, repo.getcwd())
196 203
197 204 diffopts = patch.diffopts(repo.ui, untrusted=True)
198 205 parents = ctx.parents()
199 206 node1 = parents and parents[0].node() or nullid
200 207 node2 = ctx.node()
201 208
202 209 block = []
203 210 for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
204 211 if chunk.startswith('diff') and block:
205 212 blockno = blockcount.next()
206 213 yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
207 214 lines=prettyprintlines(''.join(block), blockno))
208 215 block = []
209 216 if chunk.startswith('diff') and style != 'raw':
210 217 chunk = ''.join(chunk.splitlines(True)[1:])
211 218 block.append(chunk)
212 219 blockno = blockcount.next()
213 220 yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
214 221 lines=prettyprintlines(''.join(block), blockno))
215 222
216 223 def diffstatgen(ctx):
217 224 '''Generator function that provides the diffstat data.'''
218 225
219 226 stats = patch.diffstatdata(util.iterlines(ctx.diff()))
220 227 maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
221 228 while True:
222 229 yield stats, maxname, maxtotal, addtotal, removetotal, binary
223 230
224 231 def diffsummary(statgen):
225 232 '''Return a short summary of the diff.'''
226 233
227 234 stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
228 235 return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
229 236 len(stats), addtotal, removetotal)
230 237
231 238 def diffstat(tmpl, ctx, statgen, parity):
232 239 '''Return a diffstat template for each file in the diff.'''
233 240
234 241 stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
235 242 files = ctx.files()
236 243
237 244 def pct(i):
238 245 if maxtotal == 0:
239 246 return 0
240 247 return (float(i) / maxtotal) * 100
241 248
242 249 fileno = 0
243 250 for filename, adds, removes, isbinary in stats:
244 251 template = filename in files and 'diffstatlink' or 'diffstatnolink'
245 252 total = adds + removes
246 253 fileno += 1
247 254 yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno,
248 255 total=total, addpct=pct(adds), removepct=pct(removes),
249 256 parity=parity.next())
250 257
251 258 class sessionvars(object):
252 259 def __init__(self, vars, start='?'):
253 260 self.start = start
254 261 self.vars = vars
255 262 def __getitem__(self, key):
256 263 return self.vars[key]
257 264 def __setitem__(self, key, value):
258 265 self.vars[key] = value
259 266 def __copy__(self):
260 267 return sessionvars(copy.copy(self.vars), self.start)
261 268 def __iter__(self):
262 269 separator = self.start
263 270 for key, value in self.vars.iteritems():
264 271 yield {'name': key, 'value': str(value), 'separator': separator}
265 272 separator = '&'
266 273
267 274 class wsgiui(ui.ui):
268 275 # default termwidth breaks under mod_wsgi
269 276 def termwidth(self):
270 277 return 80
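The localrepo.py diff below adds the branchtip()/_branchtip() pair used by the web code above. _branchtip() walks a branch's head list from newest to oldest and returns the first head whose changeset does not close the branch, falling back to the newest head when every head is closed; branchtip() validates the name against branchmap() and raises RepoLookupError for unknown branches, and branchtags() is rewritten on top of the same helper. A simplified standalone sketch of the selection rule (using a plain list of heads and a closed() predicate in place of reading the changelog entry):

    def tipmostopen(heads, closed):
        # heads are ordered oldest to newest, as in branchmap();
        # prefer the newest head that does not close the branch.
        tip = heads[-1]
        for h in reversed(heads):
            if not closed(h):
                tip = h
                break
        return tip

    # With heads [h1, h2, h3] where only h3 closes the branch,
    # tipmostopen() returns h2; if every head is closed, it returns h3.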
@@ -1,2401 +1,2411 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 12 import lock, transaction, store, encoding
13 13 import scmutil, util, extensions, hook, error, revset
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20 filecache = scmutil.filecache
21 21
22 22 class storecache(filecache):
23 23 """filecache for files in the store"""
24 24 def join(self, obj, fname):
25 25 return obj.sjoin(fname)
26 26
27 27 class localrepository(repo.repository):
28 28 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
29 29 'known', 'getbundle'))
30 30 supportedformats = set(('revlogv1', 'generaldelta'))
31 31 supported = supportedformats | set(('store', 'fncache', 'shared',
32 32 'dotencode'))
33 33
34 34 def __init__(self, baseui, path=None, create=False):
35 35 repo.repository.__init__(self)
36 36 self.root = os.path.realpath(util.expandpath(path))
37 37 self.path = os.path.join(self.root, ".hg")
38 38 self.origroot = path
39 39 self.auditor = scmutil.pathauditor(self.root, self._checknested)
40 40 self.opener = scmutil.opener(self.path)
41 41 self.wopener = scmutil.opener(self.root)
42 42 self.baseui = baseui
43 43 self.ui = baseui.copy()
44 44 # A list of callback to shape the phase if no data were found.
45 45 # Callback are in the form: func(repo, roots) --> processed root.
46 46 # This list it to be filled by extension during repo setup
47 47 self._phasedefaults = []
48 48
49 49 try:
50 50 self.ui.readconfig(self.join("hgrc"), self.root)
51 51 extensions.loadall(self.ui)
52 52 except IOError:
53 53 pass
54 54
55 55 if not os.path.isdir(self.path):
56 56 if create:
57 57 if not os.path.exists(path):
58 58 util.makedirs(path)
59 59 util.makedir(self.path, notindexed=True)
60 60 requirements = ["revlogv1"]
61 61 if self.ui.configbool('format', 'usestore', True):
62 62 os.mkdir(os.path.join(self.path, "store"))
63 63 requirements.append("store")
64 64 if self.ui.configbool('format', 'usefncache', True):
65 65 requirements.append("fncache")
66 66 if self.ui.configbool('format', 'dotencode', True):
67 67 requirements.append('dotencode')
68 68 # create an invalid changelog
69 69 self.opener.append(
70 70 "00changelog.i",
71 71 '\0\0\0\2' # represents revlogv2
72 72 ' dummy changelog to prevent using the old repo layout'
73 73 )
74 74 if self.ui.configbool('format', 'generaldelta', False):
75 75 requirements.append("generaldelta")
76 76 requirements = set(requirements)
77 77 else:
78 78 raise error.RepoError(_("repository %s not found") % path)
79 79 elif create:
80 80 raise error.RepoError(_("repository %s already exists") % path)
81 81 else:
82 82 try:
83 83 requirements = scmutil.readrequires(self.opener, self.supported)
84 84 except IOError, inst:
85 85 if inst.errno != errno.ENOENT:
86 86 raise
87 87 requirements = set()
88 88
89 89 self.sharedpath = self.path
90 90 try:
91 91 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
92 92 if not os.path.exists(s):
93 93 raise error.RepoError(
94 94 _('.hg/sharedpath points to nonexistent directory %s') % s)
95 95 self.sharedpath = s
96 96 except IOError, inst:
97 97 if inst.errno != errno.ENOENT:
98 98 raise
99 99
100 100 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
101 101 self.spath = self.store.path
102 102 self.sopener = self.store.opener
103 103 self.sjoin = self.store.join
104 104 self.opener.createmode = self.store.createmode
105 105 self._applyrequirements(requirements)
106 106 if create:
107 107 self._writerequirements()
108 108
109 109
110 110 self._branchcache = None
111 111 self._branchcachetip = None
112 112 self.filterpats = {}
113 113 self._datafilters = {}
114 114 self._transref = self._lockref = self._wlockref = None
115 115
116 116 # A cache for various files under .hg/ that tracks file changes,
117 117 # (used by the filecache decorator)
118 118 #
119 119 # Maps a property name to its util.filecacheentry
120 120 self._filecache = {}
121 121
122 122 def _applyrequirements(self, requirements):
123 123 self.requirements = requirements
124 124 openerreqs = set(('revlogv1', 'generaldelta'))
125 125 self.sopener.options = dict((r, 1) for r in requirements
126 126 if r in openerreqs)
127 127
128 128 def _writerequirements(self):
129 129 reqfile = self.opener("requires", "w")
130 130 for r in self.requirements:
131 131 reqfile.write("%s\n" % r)
132 132 reqfile.close()
133 133
134 134 def _checknested(self, path):
135 135 """Determine if path is a legal nested repository."""
136 136 if not path.startswith(self.root):
137 137 return False
138 138 subpath = path[len(self.root) + 1:]
139 139 normsubpath = util.pconvert(subpath)
140 140
141 141 # XXX: Checking against the current working copy is wrong in
142 142 # the sense that it can reject things like
143 143 #
144 144 # $ hg cat -r 10 sub/x.txt
145 145 #
146 146 # if sub/ is no longer a subrepository in the working copy
147 147 # parent revision.
148 148 #
149 149 # However, it can of course also allow things that would have
150 150 # been rejected before, such as the above cat command if sub/
151 151 # is a subrepository now, but was a normal directory before.
152 152 # The old path auditor would have rejected by mistake since it
153 153 # panics when it sees sub/.hg/.
154 154 #
155 155 # All in all, checking against the working copy seems sensible
156 156 # since we want to prevent access to nested repositories on
157 157 # the filesystem *now*.
158 158 ctx = self[None]
159 159 parts = util.splitpath(subpath)
160 160 while parts:
161 161 prefix = '/'.join(parts)
162 162 if prefix in ctx.substate:
163 163 if prefix == normsubpath:
164 164 return True
165 165 else:
166 166 sub = ctx.sub(prefix)
167 167 return sub.checknested(subpath[len(prefix) + 1:])
168 168 else:
169 169 parts.pop()
170 170 return False
171 171
172 172 @filecache('bookmarks')
173 173 def _bookmarks(self):
174 174 return bookmarks.read(self)
175 175
176 176 @filecache('bookmarks.current')
177 177 def _bookmarkcurrent(self):
178 178 return bookmarks.readcurrent(self)
179 179
180 180 def _writebookmarks(self, marks):
181 181 bookmarks.write(self)
182 182
183 183 def bookmarkheads(self, bookmark):
184 184 name = bookmark.split('@', 1)[0]
185 185 heads = []
186 186 for mark, n in self._bookmarks.iteritems():
187 187 if mark.split('@', 1)[0] == name:
188 188 heads.append(n)
189 189 return heads
190 190
191 191 @storecache('phaseroots')
192 192 def _phasecache(self):
193 193 return phases.phasecache(self, self._phasedefaults)
194 194
195 195 @storecache('00changelog.i')
196 196 def changelog(self):
197 197 c = changelog.changelog(self.sopener)
198 198 if 'HG_PENDING' in os.environ:
199 199 p = os.environ['HG_PENDING']
200 200 if p.startswith(self.root):
201 201 c.readpending('00changelog.i.a')
202 202 return c
203 203
204 204 @storecache('00manifest.i')
205 205 def manifest(self):
206 206 return manifest.manifest(self.sopener)
207 207
208 208 @filecache('dirstate')
209 209 def dirstate(self):
210 210 warned = [0]
211 211 def validate(node):
212 212 try:
213 213 self.changelog.rev(node)
214 214 return node
215 215 except error.LookupError:
216 216 if not warned[0]:
217 217 warned[0] = True
218 218 self.ui.warn(_("warning: ignoring unknown"
219 219 " working parent %s!\n") % short(node))
220 220 return nullid
221 221
222 222 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
223 223
224 224 def __getitem__(self, changeid):
225 225 if changeid is None:
226 226 return context.workingctx(self)
227 227 return context.changectx(self, changeid)
228 228
229 229 def __contains__(self, changeid):
230 230 try:
231 231 return bool(self.lookup(changeid))
232 232 except error.RepoLookupError:
233 233 return False
234 234
235 235 def __nonzero__(self):
236 236 return True
237 237
238 238 def __len__(self):
239 239 return len(self.changelog)
240 240
241 241 def __iter__(self):
242 242 for i in xrange(len(self)):
243 243 yield i
244 244
245 245 def revs(self, expr, *args):
246 246 '''Return a list of revisions matching the given revset'''
247 247 expr = revset.formatspec(expr, *args)
248 248 m = revset.match(None, expr)
249 249 return [r for r in m(self, range(len(self)))]
250 250
251 251 def set(self, expr, *args):
252 252 '''
253 253 Yield a context for each matching revision, after doing arg
254 254 replacement via revset.formatspec
255 255 '''
256 256 for r in self.revs(expr, *args):
257 257 yield self[r]
258 258
259 259 def url(self):
260 260 return 'file:' + self.root
261 261
262 262 def hook(self, name, throw=False, **args):
263 263 return hook.hook(self.ui, self, name, throw, **args)
264 264
265 265 tag_disallowed = ':\r\n'
266 266
267 267 def _tag(self, names, node, message, local, user, date, extra={}):
268 268 if isinstance(names, str):
269 269 allchars = names
270 270 names = (names,)
271 271 else:
272 272 allchars = ''.join(names)
273 273 for c in self.tag_disallowed:
274 274 if c in allchars:
275 275 raise util.Abort(_('%r cannot be used in a tag name') % c)
276 276
277 277 branches = self.branchmap()
278 278 for name in names:
279 279 self.hook('pretag', throw=True, node=hex(node), tag=name,
280 280 local=local)
281 281 if name in branches:
282 282 self.ui.warn(_("warning: tag %s conflicts with existing"
283 283 " branch name\n") % name)
284 284
285 285 def writetags(fp, names, munge, prevtags):
286 286 fp.seek(0, 2)
287 287 if prevtags and prevtags[-1] != '\n':
288 288 fp.write('\n')
289 289 for name in names:
290 290 m = munge and munge(name) or name
291 291 if (self._tagscache.tagtypes and
292 292 name in self._tagscache.tagtypes):
293 293 old = self.tags().get(name, nullid)
294 294 fp.write('%s %s\n' % (hex(old), m))
295 295 fp.write('%s %s\n' % (hex(node), m))
296 296 fp.close()
297 297
298 298 prevtags = ''
299 299 if local:
300 300 try:
301 301 fp = self.opener('localtags', 'r+')
302 302 except IOError:
303 303 fp = self.opener('localtags', 'a')
304 304 else:
305 305 prevtags = fp.read()
306 306
307 307 # local tags are stored in the current charset
308 308 writetags(fp, names, None, prevtags)
309 309 for name in names:
310 310 self.hook('tag', node=hex(node), tag=name, local=local)
311 311 return
312 312
313 313 try:
314 314 fp = self.wfile('.hgtags', 'rb+')
315 315 except IOError, e:
316 316 if e.errno != errno.ENOENT:
317 317 raise
318 318 fp = self.wfile('.hgtags', 'ab')
319 319 else:
320 320 prevtags = fp.read()
321 321
322 322 # committed tags are stored in UTF-8
323 323 writetags(fp, names, encoding.fromlocal, prevtags)
324 324
325 325 fp.close()
326 326
327 327 self.invalidatecaches()
328 328
329 329 if '.hgtags' not in self.dirstate:
330 330 self[None].add(['.hgtags'])
331 331
332 332 m = matchmod.exact(self.root, '', ['.hgtags'])
333 333 tagnode = self.commit(message, user, date, extra=extra, match=m)
334 334
335 335 for name in names:
336 336 self.hook('tag', node=hex(node), tag=name, local=local)
337 337
338 338 return tagnode
339 339
340 340 def tag(self, names, node, message, local, user, date):
341 341 '''tag a revision with one or more symbolic names.
342 342
343 343 names is a list of strings or, when adding a single tag, names may be a
344 344 string.
345 345
346 346 if local is True, the tags are stored in a per-repository file.
347 347 otherwise, they are stored in the .hgtags file, and a new
348 348 changeset is committed with the change.
349 349
350 350 keyword arguments:
351 351
352 352 local: whether to store tags in non-version-controlled file
353 353 (default False)
354 354
355 355 message: commit message to use if committing
356 356
357 357 user: name of user to use if committing
358 358
359 359 date: date tuple to use if committing'''
360 360
361 361 if not local:
362 362 for x in self.status()[:5]:
363 363 if '.hgtags' in x:
364 364 raise util.Abort(_('working copy of .hgtags is changed '
365 365 '(please commit .hgtags manually)'))
366 366
367 367 self.tags() # instantiate the cache
368 368 self._tag(names, node, message, local, user, date)
369 369
370 370 @propertycache
371 371 def _tagscache(self):
372 372 '''Returns a tagscache object that contains various tags related
373 373 caches.'''
374 374
375 375 # This simplifies its cache management by having one decorated
376 376 # function (this one) and the rest simply fetch things from it.
377 377 class tagscache(object):
378 378 def __init__(self):
379 379 # These two define the set of tags for this repository. tags
380 380 # maps tag name to node; tagtypes maps tag name to 'global' or
381 381 # 'local'. (Global tags are defined by .hgtags across all
382 382 # heads, and local tags are defined in .hg/localtags.)
383 383 # They constitute the in-memory cache of tags.
384 384 self.tags = self.tagtypes = None
385 385
386 386 self.nodetagscache = self.tagslist = None
387 387
388 388 cache = tagscache()
389 389 cache.tags, cache.tagtypes = self._findtags()
390 390
391 391 return cache
392 392
393 393 def tags(self):
394 394 '''return a mapping of tag to node'''
395 395 t = {}
396 396 for k, v in self._tagscache.tags.iteritems():
397 397 try:
398 398 # ignore tags to unknown nodes
399 399 self.changelog.rev(v)
400 400 t[k] = v
401 401 except (error.LookupError, ValueError):
402 402 pass
403 403 return t
404 404
405 405 def _findtags(self):
406 406 '''Do the hard work of finding tags. Return a pair of dicts
407 407 (tags, tagtypes) where tags maps tag name to node, and tagtypes
408 408 maps tag name to a string like \'global\' or \'local\'.
409 409 Subclasses or extensions are free to add their own tags, but
410 410 should be aware that the returned dicts will be retained for the
411 411 duration of the localrepo object.'''
412 412
413 413 # XXX what tagtype should subclasses/extensions use? Currently
414 414 # mq and bookmarks add tags, but do not set the tagtype at all.
415 415 # Should each extension invent its own tag type? Should there
416 416 # be one tagtype for all such "virtual" tags? Or is the status
417 417 # quo fine?
418 418
419 419 alltags = {} # map tag name to (node, hist)
420 420 tagtypes = {}
421 421
422 422 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
423 423 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
424 424
425 425 # Build the return dicts. Have to re-encode tag names because
426 426 # the tags module always uses UTF-8 (in order not to lose info
427 427 # writing to the cache), but the rest of Mercurial wants them in
428 428 # local encoding.
429 429 tags = {}
430 430 for (name, (node, hist)) in alltags.iteritems():
431 431 if node != nullid:
432 432 tags[encoding.tolocal(name)] = node
433 433 tags['tip'] = self.changelog.tip()
434 434 tagtypes = dict([(encoding.tolocal(name), value)
435 435 for (name, value) in tagtypes.iteritems()])
436 436 return (tags, tagtypes)
437 437
438 438 def tagtype(self, tagname):
439 439 '''
440 440 return the type of the given tag. result can be:
441 441
442 442 'local' : a local tag
443 443 'global' : a global tag
444 444 None : tag does not exist
445 445 '''
446 446
447 447 return self._tagscache.tagtypes.get(tagname)
448 448
449 449 def tagslist(self):
450 450 '''return a list of tags ordered by revision'''
451 451 if not self._tagscache.tagslist:
452 452 l = []
453 453 for t, n in self.tags().iteritems():
454 454 r = self.changelog.rev(n)
455 455 l.append((r, t, n))
456 456 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
457 457
458 458 return self._tagscache.tagslist
459 459
460 460 def nodetags(self, node):
461 461 '''return the tags associated with a node'''
462 462 if not self._tagscache.nodetagscache:
463 463 nodetagscache = {}
464 464 for t, n in self._tagscache.tags.iteritems():
465 465 nodetagscache.setdefault(n, []).append(t)
466 466 for tags in nodetagscache.itervalues():
467 467 tags.sort()
468 468 self._tagscache.nodetagscache = nodetagscache
469 469 return self._tagscache.nodetagscache.get(node, [])
470 470
471 471 def nodebookmarks(self, node):
472 472 marks = []
473 473 for bookmark, n in self._bookmarks.iteritems():
474 474 if n == node:
475 475 marks.append(bookmark)
476 476 return sorted(marks)
477 477
478 478 def _branchtags(self, partial, lrev):
479 479 # TODO: rename this function?
480 480 tiprev = len(self) - 1
481 481 if lrev != tiprev:
482 482 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
483 483 self._updatebranchcache(partial, ctxgen)
484 484 self._writebranchcache(partial, self.changelog.tip(), tiprev)
485 485
486 486 return partial
487 487
488 488 def updatebranchcache(self):
489 489 tip = self.changelog.tip()
490 490 if self._branchcache is not None and self._branchcachetip == tip:
491 491 return
492 492
493 493 oldtip = self._branchcachetip
494 494 self._branchcachetip = tip
495 495 if oldtip is None or oldtip not in self.changelog.nodemap:
496 496 partial, last, lrev = self._readbranchcache()
497 497 else:
498 498 lrev = self.changelog.rev(oldtip)
499 499 partial = self._branchcache
500 500
501 501 self._branchtags(partial, lrev)
502 502 # this private cache holds all heads (not just the branch tips)
503 503 self._branchcache = partial
504 504
505 505 def branchmap(self):
506 506 '''returns a dictionary {branch: [branchheads]}'''
507 507 self.updatebranchcache()
508 508 return self._branchcache
509 509
510 def _branchtip(self, heads):
511 '''return the tipmost branch head in heads'''
512 tip = heads[-1]
513 for h in reversed(heads):
514 if 'close' not in self.changelog.read(h)[5]:
515 tip = h
516 break
517 return tip
518
519 def branchtip(self, branch):
520 '''return the tip node for a given branch'''
521 if branch not in self.branchmap():
522 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
523 return self._branchtip(self.branchmap()[branch])
524
510 525 def branchtags(self):
511 526 '''return a dict where branch names map to the tipmost head of
512 527 the branch, open heads come before closed'''
513 528 bt = {}
514 529 for bn, heads in self.branchmap().iteritems():
515 tip = heads[-1]
516 for h in reversed(heads):
517 if 'close' not in self.changelog.read(h)[5]:
518 tip = h
519 break
520 bt[bn] = tip
530 bt[bn] = self._branchtip(heads)
521 531 return bt
522 532
523 533 def _readbranchcache(self):
524 534 partial = {}
525 535 try:
526 536 f = self.opener("cache/branchheads")
527 537 lines = f.read().split('\n')
528 538 f.close()
529 539 except (IOError, OSError):
530 540 return {}, nullid, nullrev
531 541
532 542 try:
533 543 last, lrev = lines.pop(0).split(" ", 1)
534 544 last, lrev = bin(last), int(lrev)
535 545 if lrev >= len(self) or self[lrev].node() != last:
536 546 # invalidate the cache
537 547 raise ValueError('invalidating branch cache (tip differs)')
538 548 for l in lines:
539 549 if not l:
540 550 continue
541 551 node, label = l.split(" ", 1)
542 552 label = encoding.tolocal(label.strip())
543 553 if not node in self:
544 554 raise ValueError('invalidating branch cache because node '+
545 555 '%s does not exist' % node)
546 556 partial.setdefault(label, []).append(bin(node))
547 557 except KeyboardInterrupt:
548 558 raise
549 559 except Exception, inst:
550 560 if self.ui.debugflag:
551 561 self.ui.warn(str(inst), '\n')
552 562 partial, last, lrev = {}, nullid, nullrev
553 563 return partial, last, lrev
554 564
555 565 def _writebranchcache(self, branches, tip, tiprev):
556 566 try:
557 567 f = self.opener("cache/branchheads", "w", atomictemp=True)
558 568 f.write("%s %s\n" % (hex(tip), tiprev))
559 569 for label, nodes in branches.iteritems():
560 570 for node in nodes:
561 571 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
562 572 f.close()
563 573 except (IOError, OSError):
564 574 pass
565 575
566 576 def _updatebranchcache(self, partial, ctxgen):
567 577 """Given a branchhead cache, partial, that may have extra nodes or be
568 578 missing heads, and a generator of nodes that are at least a superset of
569 579 the missing heads, this function updates partial to be correct.
570 580 """
571 581 # collect new branch entries
572 582 newbranches = {}
573 583 for c in ctxgen:
574 584 newbranches.setdefault(c.branch(), []).append(c.node())
575 585 # if older branchheads are reachable from new ones, they aren't
576 586 # really branchheads. Note checking parents is insufficient:
577 587 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
578 588 for branch, newnodes in newbranches.iteritems():
579 589 bheads = partial.setdefault(branch, [])
580 590 bheads.extend(newnodes)
581 591 # Remove duplicates - nodes that are in newnodes and are already in
582 592 # bheads. This can happen if you strip a node and its parent was
583 593 # already a head (because they're on different branches).
584 594 bheads = set(bheads)
585 595
586 596 # Remove candidate heads that no longer are in the repo (e.g., as
587 597 # the result of a strip that just happened).
588 598 # avoid using 'bhead in self' here because that dives down into
589 599 # branchcache code somewhat recursively.
590 600 bheads = [bhead for bhead in bheads \
591 601 if self.changelog.hasnode(bhead)]
592 602 if len(bheads) > 1:
593 603 bheads = sorted(bheads, key=lambda x: self[x].rev())
594 604 # starting from tip means fewer passes over reachable
595 605 while newnodes:
596 606 latest = newnodes.pop()
597 607 if latest not in bheads:
598 608 continue
599 609 minbhnode = self[bheads[0]].node()
600 610 reachable = self.changelog.reachable(latest, minbhnode)
601 611 reachable.remove(latest)
602 612 if reachable:
603 613 bheads = [b for b in bheads if b not in reachable]
604 614 partial[branch] = bheads
605 615
606 616 # There may be branches that cease to exist when the last commit in the
607 617 # branch was stripped. This code filters them out. Note that the
608 618 # branch that ceased to exist may not be in newbranches because
609 619 # newbranches is the set of candidate heads, which when you strip the
610 620 # last commit in a branch will be the parent branch.
611 621 for branch in partial.keys():
612 622 nodes = [head for head in partial[branch] \
613 623 if self.changelog.hasnode(head)]
614 624 if len(nodes) < 1:
615 625 del partial[branch]
616 626
617 627 def lookup(self, key):
618 628 return self[key].node()
619 629
620 630 def lookupbranch(self, key, remote=None):
621 631 repo = remote or self
622 632 if key in repo.branchmap():
623 633 return key
624 634
625 635 repo = (remote and remote.local()) and remote or self
626 636 return repo[key].branch()
627 637
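# Sketch (assumed names, not from the original source): lookup() turns any
# changeset identifier into a binary node, while lookupbranch() turns an
# identifier into a branch name.
#
#     node = repo.lookup('tip')               # node of the tip changeset
#     name = repo.lookupbranch('default')     # already a branch name
#     name = repo.lookupbranch('0')           # branch of revision 0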
628 638 def known(self, nodes):
629 639 nm = self.changelog.nodemap
630 640 pc = self._phasecache
631 641 result = []
632 642 for n in nodes:
633 643 r = nm.get(n)
634 644 resp = not (r is None or pc.phase(self, r) >= phases.secret)
635 645 result.append(resp)
636 646 return result
637 647
638 648 def local(self):
639 649 return self
640 650
641 651 def join(self, f):
642 652 return os.path.join(self.path, f)
643 653
644 654 def wjoin(self, f):
645 655 return os.path.join(self.root, f)
646 656
647 657 def file(self, f):
648 658 if f[0] == '/':
649 659 f = f[1:]
650 660 return filelog.filelog(self.sopener, f)
651 661
652 662 def changectx(self, changeid):
653 663 return self[changeid]
654 664
655 665 def parents(self, changeid=None):
656 666 '''get list of changectxs for parents of changeid'''
657 667 return self[changeid].parents()
658 668
659 669 def setparents(self, p1, p2=nullid):
660 670 copies = self.dirstate.setparents(p1, p2)
661 671 if copies:
662 672 # Adjust copy records, the dirstate cannot do it, it
663 673 # requires access to parents manifests. Preserve them
664 674 # only for entries added to first parent.
665 675 pctx = self[p1]
666 676 for f in copies:
667 677 if f not in pctx and copies[f] in pctx:
668 678 self.dirstate.copy(copies[f], f)
669 679
670 680 def filectx(self, path, changeid=None, fileid=None):
671 681 """changeid can be a changeset revision, node, or tag.
672 682 fileid can be a file revision or node."""
673 683 return context.filectx(self, path, changeid, fileid)
674 684
675 685 def getcwd(self):
676 686 return self.dirstate.getcwd()
677 687
678 688 def pathto(self, f, cwd=None):
679 689 return self.dirstate.pathto(f, cwd)
680 690
681 691 def wfile(self, f, mode='r'):
682 692 return self.wopener(f, mode)
683 693
684 694 def _link(self, f):
685 695 return os.path.islink(self.wjoin(f))
686 696
687 697 def _loadfilter(self, filter):
688 698 if filter not in self.filterpats:
689 699 l = []
690 700 for pat, cmd in self.ui.configitems(filter):
691 701 if cmd == '!':
692 702 continue
693 703 mf = matchmod.match(self.root, '', [pat])
694 704 fn = None
695 705 params = cmd
696 706 for name, filterfn in self._datafilters.iteritems():
697 707 if cmd.startswith(name):
698 708 fn = filterfn
699 709 params = cmd[len(name):].lstrip()
700 710 break
701 711 if not fn:
702 712 fn = lambda s, c, **kwargs: util.filter(s, c)
703 713 # Wrap old filters not supporting keyword arguments
704 714 if not inspect.getargspec(fn)[2]:
705 715 oldfn = fn
706 716 fn = lambda s, c, **kwargs: oldfn(s, c)
707 717 l.append((mf, fn, params))
708 718 self.filterpats[filter] = l
709 719 return self.filterpats[filter]
710 720
711 721 def _filter(self, filterpats, filename, data):
712 722 for mf, fn, cmd in filterpats:
713 723 if mf(filename):
714 724 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
715 725 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
716 726 break
717 727
718 728 return data
719 729
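# Illustrative sketch (an assumption, not from this file): _loadfilter() and
# _filter() are driven by the [encode] and [decode] hgrc sections. Each entry
# maps a file pattern to a shell command or to a data filter registered
# through adddatafilter(); the names below are hypothetical:
#
#     [encode]
#     **.dat = cleanencode:        # assumed registered data filter
#     **.txt = tr -d '\r'          # piped through a shell command
#     [decode]
#     **.dat = cleandecode:
#
# A pattern mapped to '!' is skipped entirely.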
720 730 @propertycache
721 731 def _encodefilterpats(self):
722 732 return self._loadfilter('encode')
723 733
724 734 @propertycache
725 735 def _decodefilterpats(self):
726 736 return self._loadfilter('decode')
727 737
728 738 def adddatafilter(self, name, filter):
729 739 self._datafilters[name] = filter
730 740
731 741 def wread(self, filename):
732 742 if self._link(filename):
733 743 data = os.readlink(self.wjoin(filename))
734 744 else:
735 745 data = self.wopener.read(filename)
736 746 return self._filter(self._encodefilterpats, filename, data)
737 747
738 748 def wwrite(self, filename, data, flags):
739 749 data = self._filter(self._decodefilterpats, filename, data)
740 750 if 'l' in flags:
741 751 self.wopener.symlink(data, filename)
742 752 else:
743 753 self.wopener.write(filename, data)
744 754 if 'x' in flags:
745 755 util.setflags(self.wjoin(filename), False, True)
746 756
747 757 def wwritedata(self, filename, data):
748 758 return self._filter(self._decodefilterpats, filename, data)
749 759
750 760 def transaction(self, desc):
751 761 tr = self._transref and self._transref() or None
752 762 if tr and tr.running():
753 763 return tr.nest()
754 764
755 765 # abort here if the journal already exists
756 766 if os.path.exists(self.sjoin("journal")):
757 767 raise error.RepoError(
758 768 _("abandoned transaction found - run hg recover"))
759 769
760 770 self._writejournal(desc)
761 771 renames = [(x, undoname(x)) for x in self._journalfiles()]
762 772
763 773 tr = transaction.transaction(self.ui.warn, self.sopener,
764 774 self.sjoin("journal"),
765 775 aftertrans(renames),
766 776 self.store.createmode)
767 777 self._transref = weakref.ref(tr)
768 778 return tr
769 779
770 780 def _journalfiles(self):
771 781 return (self.sjoin('journal'), self.join('journal.dirstate'),
772 782 self.join('journal.branch'), self.join('journal.desc'),
773 783 self.join('journal.bookmarks'),
774 784 self.sjoin('journal.phaseroots'))
775 785
776 786 def undofiles(self):
777 787 return [undoname(x) for x in self._journalfiles()]
778 788
779 789 def _writejournal(self, desc):
780 790 self.opener.write("journal.dirstate",
781 791 self.opener.tryread("dirstate"))
782 792 self.opener.write("journal.branch",
783 793 encoding.fromlocal(self.dirstate.branch()))
784 794 self.opener.write("journal.desc",
785 795 "%d\n%s\n" % (len(self), desc))
786 796 self.opener.write("journal.bookmarks",
787 797 self.opener.tryread("bookmarks"))
788 798 self.sopener.write("journal.phaseroots",
789 799 self.sopener.tryread("phaseroots"))
790 800
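# Sketch of the usual calling pattern around transaction(), mirroring how
# commitctx() and addchangegroup() below use it (names are illustrative):
#
#     lock = repo.lock()
#     try:
#         tr = repo.transaction('some-operation')
#         try:
#             # ...append data to revlogs through the transaction...
#             tr.close()           # keep the changes
#         finally:
#             tr.release()         # rolls back if close() was never reached
#     finally:
#         lock.release()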
791 801 def recover(self):
792 802 lock = self.lock()
793 803 try:
794 804 if os.path.exists(self.sjoin("journal")):
795 805 self.ui.status(_("rolling back interrupted transaction\n"))
796 806 transaction.rollback(self.sopener, self.sjoin("journal"),
797 807 self.ui.warn)
798 808 self.invalidate()
799 809 return True
800 810 else:
801 811 self.ui.warn(_("no interrupted transaction available\n"))
802 812 return False
803 813 finally:
804 814 lock.release()
805 815
806 816 def rollback(self, dryrun=False, force=False):
807 817 wlock = lock = None
808 818 try:
809 819 wlock = self.wlock()
810 820 lock = self.lock()
811 821 if os.path.exists(self.sjoin("undo")):
812 822 return self._rollback(dryrun, force)
813 823 else:
814 824 self.ui.warn(_("no rollback information available\n"))
815 825 return 1
816 826 finally:
817 827 release(lock, wlock)
818 828
819 829 def _rollback(self, dryrun, force):
820 830 ui = self.ui
821 831 try:
822 832 args = self.opener.read('undo.desc').splitlines()
823 833 (oldlen, desc, detail) = (int(args[0]), args[1], None)
824 834 if len(args) >= 3:
825 835 detail = args[2]
826 836 oldtip = oldlen - 1
827 837
828 838 if detail and ui.verbose:
829 839 msg = (_('repository tip rolled back to revision %s'
830 840 ' (undo %s: %s)\n')
831 841 % (oldtip, desc, detail))
832 842 else:
833 843 msg = (_('repository tip rolled back to revision %s'
834 844 ' (undo %s)\n')
835 845 % (oldtip, desc))
836 846 except IOError:
837 847 msg = _('rolling back unknown transaction\n')
838 848 desc = None
839 849
840 850 if not force and self['.'] != self['tip'] and desc == 'commit':
841 851 raise util.Abort(
842 852 _('rollback of last commit while not checked out '
843 853 'may lose data'), hint=_('use -f to force'))
844 854
845 855 ui.status(msg)
846 856 if dryrun:
847 857 return 0
848 858
849 859 parents = self.dirstate.parents()
850 860 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
851 861 if os.path.exists(self.join('undo.bookmarks')):
852 862 util.rename(self.join('undo.bookmarks'),
853 863 self.join('bookmarks'))
854 864 if os.path.exists(self.sjoin('undo.phaseroots')):
855 865 util.rename(self.sjoin('undo.phaseroots'),
856 866 self.sjoin('phaseroots'))
857 867 self.invalidate()
858 868
859 869 parentgone = (parents[0] not in self.changelog.nodemap or
860 870 parents[1] not in self.changelog.nodemap)
861 871 if parentgone:
862 872 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
863 873 try:
864 874 branch = self.opener.read('undo.branch')
865 875 self.dirstate.setbranch(branch)
866 876 except IOError:
867 877 ui.warn(_('named branch could not be reset: '
868 878 'current branch is still \'%s\'\n')
869 879 % self.dirstate.branch())
870 880
871 881 self.dirstate.invalidate()
872 882 parents = tuple([p.rev() for p in self.parents()])
873 883 if len(parents) > 1:
874 884 ui.status(_('working directory now based on '
875 885 'revisions %d and %d\n') % parents)
876 886 else:
877 887 ui.status(_('working directory now based on '
878 888 'revision %d\n') % parents)
879 889 # TODO: if we know which new heads may result from this rollback, pass
880 890 # them to destroyed(), which will prevent the branchhead cache from being
881 891 # invalidated.
882 892 self.destroyed()
883 893 return 0
884 894
885 895 def invalidatecaches(self):
886 896 def delcache(name):
887 897 try:
888 898 delattr(self, name)
889 899 except AttributeError:
890 900 pass
891 901
892 902 delcache('_tagscache')
893 903
894 904 self._branchcache = None # in UTF-8
895 905 self._branchcachetip = None
896 906
897 907 def invalidatedirstate(self):
898 908 '''Invalidates the dirstate, causing the next call to dirstate
899 909 to check if it was modified since the last time it was read,
900 910 rereading it if it has.
901 911
902 912 This is different from dirstate.invalidate() in that it doesn't
903 913 always reread the dirstate. Use dirstate.invalidate() if you want to
904 914 explicitly read the dirstate again (i.e. restoring it to a previous
905 915 known good state).'''
906 916 if 'dirstate' in self.__dict__:
907 917 for k in self.dirstate._filecache:
908 918 try:
909 919 delattr(self.dirstate, k)
910 920 except AttributeError:
911 921 pass
912 922 delattr(self, 'dirstate')
913 923
914 924 def invalidate(self):
915 925 for k in self._filecache:
916 926 # dirstate is invalidated separately in invalidatedirstate()
917 927 if k == 'dirstate':
918 928 continue
919 929
920 930 try:
921 931 delattr(self, k)
922 932 except AttributeError:
923 933 pass
924 934 self.invalidatecaches()
925 935
926 936 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
927 937 try:
928 938 l = lock.lock(lockname, 0, releasefn, desc=desc)
929 939 except error.LockHeld, inst:
930 940 if not wait:
931 941 raise
932 942 self.ui.warn(_("waiting for lock on %s held by %r\n") %
933 943 (desc, inst.locker))
934 944 # default to 600 seconds timeout
935 945 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
936 946 releasefn, desc=desc)
937 947 if acquirefn:
938 948 acquirefn()
939 949 return l
940 950
941 951 def _afterlock(self, callback):
942 952 """add a callback to the current repository lock.
943 953
944 954 The callback will be executed on lock release."""
945 955 l = self._lockref and self._lockref()
946 956 if l:
947 957 l.postrelease.append(callback)
948 958 else:
949 959 callback()
950 960
951 961 def lock(self, wait=True):
952 962 '''Lock the repository store (.hg/store) and return a weak reference
953 963 to the lock. Use this before modifying the store (e.g. committing or
954 964 stripping). If you are opening a transaction, get a lock as well.'''
955 965 l = self._lockref and self._lockref()
956 966 if l is not None and l.held:
957 967 l.lock()
958 968 return l
959 969
960 970 def unlock():
961 971 self.store.write()
962 972 if '_phasecache' in vars(self):
963 973 self._phasecache.write()
964 974 for k, ce in self._filecache.items():
965 975 if k == 'dirstate':
966 976 continue
967 977 ce.refresh()
968 978
969 979 l = self._lock(self.sjoin("lock"), wait, unlock,
970 980 self.invalidate, _('repository %s') % self.origroot)
971 981 self._lockref = weakref.ref(l)
972 982 return l
973 983
974 984 def wlock(self, wait=True):
975 985 '''Lock the non-store parts of the repository (everything under
976 986 .hg except .hg/store) and return a weak reference to the lock.
977 987 Use this before modifying files in .hg.'''
978 988 l = self._wlockref and self._wlockref()
979 989 if l is not None and l.held:
980 990 l.lock()
981 991 return l
982 992
983 993 def unlock():
984 994 self.dirstate.write()
985 995 ce = self._filecache.get('dirstate')
986 996 if ce:
987 997 ce.refresh()
988 998
989 999 l = self._lock(self.join("wlock"), wait, unlock,
990 1000 self.invalidatedirstate, _('working directory of %s') %
991 1001 self.origroot)
992 1002 self._wlockref = weakref.ref(l)
993 1003 return l
994 1004
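# Illustrative sketch: callers that need both locks take the working
# directory lock first and the store lock second, as rollback() above does
# ('release' is mercurial.lock.release):
#
#     wlock = lock = None
#     try:
#         wlock = repo.wlock()
#         lock = repo.lock()
#         # ...modify the working directory and the store...
#     finally:
#         release(lock, wlock)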
995 1005 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
996 1006 """
997 1007 commit an individual file as part of a larger transaction
998 1008 """
999 1009
1000 1010 fname = fctx.path()
1001 1011 text = fctx.data()
1002 1012 flog = self.file(fname)
1003 1013 fparent1 = manifest1.get(fname, nullid)
1004 1014 fparent2 = fparent2o = manifest2.get(fname, nullid)
1005 1015
1006 1016 meta = {}
1007 1017 copy = fctx.renamed()
1008 1018 if copy and copy[0] != fname:
1009 1019 # Mark the new revision of this file as a copy of another
1010 1020 # file. This copy data will effectively act as a parent
1011 1021 # of this new revision. If this is a merge, the first
1012 1022 # parent will be the nullid (meaning "look up the copy data")
1013 1023 # and the second one will be the other parent. For example:
1014 1024 #
1015 1025 # 0 --- 1 --- 3 rev1 changes file foo
1016 1026 # \ / rev2 renames foo to bar and changes it
1017 1027 # \- 2 -/ rev3 should have bar with all changes and
1018 1028 # should record that bar descends from
1019 1029 # bar in rev2 and foo in rev1
1020 1030 #
1021 1031 # this allows this merge to succeed:
1022 1032 #
1023 1033 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1024 1034 # \ / merging rev3 and rev4 should use bar@rev2
1025 1035 # \- 2 --- 4 as the merge base
1026 1036 #
1027 1037
1028 1038 cfname = copy[0]
1029 1039 crev = manifest1.get(cfname)
1030 1040 newfparent = fparent2
1031 1041
1032 1042 if manifest2: # branch merge
1033 1043 if fparent2 == nullid or crev is None: # copied on remote side
1034 1044 if cfname in manifest2:
1035 1045 crev = manifest2[cfname]
1036 1046 newfparent = fparent1
1037 1047
1038 1048 # find source in nearest ancestor if we've lost track
1039 1049 if not crev:
1040 1050 self.ui.debug(" %s: searching for copy revision for %s\n" %
1041 1051 (fname, cfname))
1042 1052 for ancestor in self[None].ancestors():
1043 1053 if cfname in ancestor:
1044 1054 crev = ancestor[cfname].filenode()
1045 1055 break
1046 1056
1047 1057 if crev:
1048 1058 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1049 1059 meta["copy"] = cfname
1050 1060 meta["copyrev"] = hex(crev)
1051 1061 fparent1, fparent2 = nullid, newfparent
1052 1062 else:
1053 1063 self.ui.warn(_("warning: can't find ancestor for '%s' "
1054 1064 "copied from '%s'!\n") % (fname, cfname))
1055 1065
1056 1066 elif fparent2 != nullid:
1057 1067 # is one parent an ancestor of the other?
1058 1068 fparentancestor = flog.ancestor(fparent1, fparent2)
1059 1069 if fparentancestor == fparent1:
1060 1070 fparent1, fparent2 = fparent2, nullid
1061 1071 elif fparentancestor == fparent2:
1062 1072 fparent2 = nullid
1063 1073
1064 1074 # is the file changed?
1065 1075 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1066 1076 changelist.append(fname)
1067 1077 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1068 1078
1069 1079 # are just the flags changed during merge?
1070 1080 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1071 1081 changelist.append(fname)
1072 1082
1073 1083 return fparent1
1074 1084
1075 1085 def commit(self, text="", user=None, date=None, match=None, force=False,
1076 1086 editor=False, extra={}):
1077 1087 """Add a new revision to current repository.
1078 1088
1079 1089 Revision information is gathered from the working directory,
1080 1090 match can be used to filter the committed files. If editor is
1081 1091 supplied, it is called to get a commit message.
1082 1092 """
1083 1093
1084 1094 def fail(f, msg):
1085 1095 raise util.Abort('%s: %s' % (f, msg))
1086 1096
1087 1097 if not match:
1088 1098 match = matchmod.always(self.root, '')
1089 1099
1090 1100 if not force:
1091 1101 vdirs = []
1092 1102 match.dir = vdirs.append
1093 1103 match.bad = fail
1094 1104
1095 1105 wlock = self.wlock()
1096 1106 try:
1097 1107 wctx = self[None]
1098 1108 merge = len(wctx.parents()) > 1
1099 1109
1100 1110 if (not force and merge and match and
1101 1111 (match.files() or match.anypats())):
1102 1112 raise util.Abort(_('cannot partially commit a merge '
1103 1113 '(do not specify files or patterns)'))
1104 1114
1105 1115 changes = self.status(match=match, clean=force)
1106 1116 if force:
1107 1117 changes[0].extend(changes[6]) # mq may commit unchanged files
1108 1118
1109 1119 # check subrepos
1110 1120 subs = []
1111 1121 commitsubs = set()
1112 1122 newstate = wctx.substate.copy()
1113 1123 # only manage subrepos and .hgsubstate if .hgsub is present
1114 1124 if '.hgsub' in wctx:
1115 1125 # we'll decide whether to track this ourselves, thanks
1116 1126 if '.hgsubstate' in changes[0]:
1117 1127 changes[0].remove('.hgsubstate')
1118 1128 if '.hgsubstate' in changes[2]:
1119 1129 changes[2].remove('.hgsubstate')
1120 1130
1121 1131 # compare current state to last committed state
1122 1132 # build new substate based on last committed state
1123 1133 oldstate = wctx.p1().substate
1124 1134 for s in sorted(newstate.keys()):
1125 1135 if not match(s):
1126 1136 # ignore working copy, use old state if present
1127 1137 if s in oldstate:
1128 1138 newstate[s] = oldstate[s]
1129 1139 continue
1130 1140 if not force:
1131 1141 raise util.Abort(
1132 1142 _("commit with new subrepo %s excluded") % s)
1133 1143 if wctx.sub(s).dirty(True):
1134 1144 if not self.ui.configbool('ui', 'commitsubrepos'):
1135 1145 raise util.Abort(
1136 1146 _("uncommitted changes in subrepo %s") % s,
1137 1147 hint=_("use --subrepos for recursive commit"))
1138 1148 subs.append(s)
1139 1149 commitsubs.add(s)
1140 1150 else:
1141 1151 bs = wctx.sub(s).basestate()
1142 1152 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1143 1153 if oldstate.get(s, (None, None, None))[1] != bs:
1144 1154 subs.append(s)
1145 1155
1146 1156 # check for removed subrepos
1147 1157 for p in wctx.parents():
1148 1158 r = [s for s in p.substate if s not in newstate]
1149 1159 subs += [s for s in r if match(s)]
1150 1160 if subs:
1151 1161 if (not match('.hgsub') and
1152 1162 '.hgsub' in (wctx.modified() + wctx.added())):
1153 1163 raise util.Abort(
1154 1164 _("can't commit subrepos without .hgsub"))
1155 1165 changes[0].insert(0, '.hgsubstate')
1156 1166
1157 1167 elif '.hgsub' in changes[2]:
1158 1168 # clean up .hgsubstate when .hgsub is removed
1159 1169 if ('.hgsubstate' in wctx and
1160 1170 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1161 1171 changes[2].insert(0, '.hgsubstate')
1162 1172
1163 1173 # make sure all explicit patterns are matched
1164 1174 if not force and match.files():
1165 1175 matched = set(changes[0] + changes[1] + changes[2])
1166 1176
1167 1177 for f in match.files():
1168 1178 if f == '.' or f in matched or f in wctx.substate:
1169 1179 continue
1170 1180 if f in changes[3]: # missing
1171 1181 fail(f, _('file not found!'))
1172 1182 if f in vdirs: # visited directory
1173 1183 d = f + '/'
1174 1184 for mf in matched:
1175 1185 if mf.startswith(d):
1176 1186 break
1177 1187 else:
1178 1188 fail(f, _("no match under directory!"))
1179 1189 elif f not in self.dirstate:
1180 1190 fail(f, _("file not tracked!"))
1181 1191
1182 1192 if (not force and not extra.get("close") and not merge
1183 1193 and not (changes[0] or changes[1] or changes[2])
1184 1194 and wctx.branch() == wctx.p1().branch()):
1185 1195 return None
1186 1196
1187 1197 if merge and changes[3]:
1188 1198 raise util.Abort(_("cannot commit merge with missing files"))
1189 1199
1190 1200 ms = mergemod.mergestate(self)
1191 1201 for f in changes[0]:
1192 1202 if f in ms and ms[f] == 'u':
1193 1203 raise util.Abort(_("unresolved merge conflicts "
1194 1204 "(see hg help resolve)"))
1195 1205
1196 1206 cctx = context.workingctx(self, text, user, date, extra, changes)
1197 1207 if editor:
1198 1208 cctx._text = editor(self, cctx, subs)
1199 1209 edited = (text != cctx._text)
1200 1210
1201 1211 # commit subs and write new state
1202 1212 if subs:
1203 1213 for s in sorted(commitsubs):
1204 1214 sub = wctx.sub(s)
1205 1215 self.ui.status(_('committing subrepository %s\n') %
1206 1216 subrepo.subrelpath(sub))
1207 1217 sr = sub.commit(cctx._text, user, date)
1208 1218 newstate[s] = (newstate[s][0], sr)
1209 1219 subrepo.writestate(self, newstate)
1210 1220
1211 1221 # Save commit message in case this transaction gets rolled back
1212 1222 # (e.g. by a pretxncommit hook). Leave the content alone on
1213 1223 # the assumption that the user will use the same editor again.
1214 1224 msgfn = self.savecommitmessage(cctx._text)
1215 1225
1216 1226 p1, p2 = self.dirstate.parents()
1217 1227 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1218 1228 try:
1219 1229 self.hook("precommit", throw=True, parent1=hookp1,
1220 1230 parent2=hookp2)
1221 1231 ret = self.commitctx(cctx, True)
1222 1232 except: # re-raises
1223 1233 if edited:
1224 1234 self.ui.write(
1225 1235 _('note: commit message saved in %s\n') % msgfn)
1226 1236 raise
1227 1237
1228 1238 # update bookmarks, dirstate and mergestate
1229 1239 bookmarks.update(self, [p1, p2], ret)
1230 1240 for f in changes[0] + changes[1]:
1231 1241 self.dirstate.normal(f)
1232 1242 for f in changes[2]:
1233 1243 self.dirstate.drop(f)
1234 1244 self.dirstate.setparents(ret)
1235 1245 ms.reset()
1236 1246 finally:
1237 1247 wlock.release()
1238 1248
1239 1249 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1240 1250 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1241 1251 self._afterlock(commithook)
1242 1252 return ret
1243 1253
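# Illustrative usage sketch (names are assumptions, not from this file):
#
#     node = repo.commit(text='fix the frobnicator',
#                        user='someone <someone@example.com>')
#     if node is None:
#         repo.ui.status('nothing changed\n')
#
# commit() returns the new changeset node, or None when there is nothing to
# commit on the current branch.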
1244 1254 def commitctx(self, ctx, error=False):
1245 1255 """Add a new revision to current repository.
1246 1256 Revision information is passed via the context argument.
1247 1257 """
1248 1258
1249 1259 tr = lock = None
1250 1260 removed = list(ctx.removed())
1251 1261 p1, p2 = ctx.p1(), ctx.p2()
1252 1262 user = ctx.user()
1253 1263
1254 1264 lock = self.lock()
1255 1265 try:
1256 1266 tr = self.transaction("commit")
1257 1267 trp = weakref.proxy(tr)
1258 1268
1259 1269 if ctx.files():
1260 1270 m1 = p1.manifest().copy()
1261 1271 m2 = p2.manifest()
1262 1272
1263 1273 # check in files
1264 1274 new = {}
1265 1275 changed = []
1266 1276 linkrev = len(self)
1267 1277 for f in sorted(ctx.modified() + ctx.added()):
1268 1278 self.ui.note(f + "\n")
1269 1279 try:
1270 1280 fctx = ctx[f]
1271 1281 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1272 1282 changed)
1273 1283 m1.set(f, fctx.flags())
1274 1284 except OSError, inst:
1275 1285 self.ui.warn(_("trouble committing %s!\n") % f)
1276 1286 raise
1277 1287 except IOError, inst:
1278 1288 errcode = getattr(inst, 'errno', errno.ENOENT)
1279 1289 if error or errcode and errcode != errno.ENOENT:
1280 1290 self.ui.warn(_("trouble committing %s!\n") % f)
1281 1291 raise
1282 1292 else:
1283 1293 removed.append(f)
1284 1294
1285 1295 # update manifest
1286 1296 m1.update(new)
1287 1297 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1288 1298 drop = [f for f in removed if f in m1]
1289 1299 for f in drop:
1290 1300 del m1[f]
1291 1301 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1292 1302 p2.manifestnode(), (new, drop))
1293 1303 files = changed + removed
1294 1304 else:
1295 1305 mn = p1.manifestnode()
1296 1306 files = []
1297 1307
1298 1308 # update changelog
1299 1309 self.changelog.delayupdate()
1300 1310 n = self.changelog.add(mn, files, ctx.description(),
1301 1311 trp, p1.node(), p2.node(),
1302 1312 user, ctx.date(), ctx.extra().copy())
1303 1313 p = lambda: self.changelog.writepending() and self.root or ""
1304 1314 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1305 1315 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1306 1316 parent2=xp2, pending=p)
1307 1317 self.changelog.finalize(trp)
1308 1318 # set the new commit in its proper phase
1309 1319 targetphase = phases.newcommitphase(self.ui)
1310 1320 if targetphase:
1311 1321 # retract boundary does not alter parent changesets.
1312 1322 # if a parent has a higher phase the resulting phase will
1313 1323 # be compliant anyway
1314 1324 #
1315 1325 # if minimal phase was 0 we don't need to retract anything
1316 1326 phases.retractboundary(self, targetphase, [n])
1317 1327 tr.close()
1318 1328 self.updatebranchcache()
1319 1329 return n
1320 1330 finally:
1321 1331 if tr:
1322 1332 tr.release()
1323 1333 lock.release()
1324 1334
1325 1335 def destroyed(self, newheadrevs=None):
1326 1336 '''Inform the repository that nodes have been destroyed.
1327 1337 Intended for use by strip and rollback, so there's a common
1328 1338 place for anything that has to be done after destroying history.
1329 1339
1330 1340 If you know the branchhead cache was up to date before nodes were removed
1331 1341 and you also know the set of candidate new heads that may have
1332 1342 resulted from the destruction, you can set newheadrevs. This will
1333 1343 enable the code to update the branchheads cache, rather than having
1334 1344 future code decide it is invalid and regenerate it.
1335 1345 '''
1336 1346 if newheadrevs:
1337 1347 tiprev = len(self) - 1
1338 1348 ctxgen = (self[rev] for rev in newheadrevs)
1339 1349 self._updatebranchcache(self._branchcache, ctxgen)
1340 1350 self._writebranchcache(self._branchcache, self.changelog.tip(),
1341 1351 tiprev)
1342 1352 else:
1343 1353 # No info to update the cache. If nodes were destroyed, the cache
1344 1354 # is stale and this will be caught the next time it is read.
1345 1355 pass
1346 1356
1347 1357 # Ensure the persistent tag cache is updated. Doing it now
1348 1358 # means that the tag cache only has to worry about destroyed
1349 1359 # heads immediately after a strip/rollback. That in turn
1350 1360 # guarantees that "cachetip == currenttip" (comparing both rev
1351 1361 # and node) always means no nodes have been added or destroyed.
1352 1362
1353 1363 # XXX this is suboptimal when qrefresh'ing: we strip the current
1354 1364 # head, refresh the tag cache, then immediately add a new head.
1355 1365 # But I think doing it this way is necessary for the "instant
1356 1366 # tag cache retrieval" case to work.
1357 1367 self.invalidatecaches()
1358 1368
1359 1369 # Discard all cache entries to force reloading everything.
1360 1370 self._filecache.clear()
1361 1371
1362 1372 def walk(self, match, node=None):
1363 1373 '''
1364 1374 walk recursively through the directory tree or a given
1365 1375 changeset, finding all files matched by the match
1366 1376 function
1367 1377 '''
1368 1378 return self[node].walk(match)
1369 1379
1370 1380 def status(self, node1='.', node2=None, match=None,
1371 1381 ignored=False, clean=False, unknown=False,
1372 1382 listsubrepos=False):
1373 1383 """return status of files between two nodes or node and working
1374 1384 directory.
1375 1385
1376 1386 If node1 is None, use the first dirstate parent instead.
1377 1387 If node2 is None, compare node1 with working directory.
1378 1388 """
1379 1389
1380 1390 def mfmatches(ctx):
1381 1391 mf = ctx.manifest().copy()
1382 1392 if match.always():
1383 1393 return mf
1384 1394 for fn in mf.keys():
1385 1395 if not match(fn):
1386 1396 del mf[fn]
1387 1397 return mf
1388 1398
1389 1399 if isinstance(node1, context.changectx):
1390 1400 ctx1 = node1
1391 1401 else:
1392 1402 ctx1 = self[node1]
1393 1403 if isinstance(node2, context.changectx):
1394 1404 ctx2 = node2
1395 1405 else:
1396 1406 ctx2 = self[node2]
1397 1407
1398 1408 working = ctx2.rev() is None
1399 1409 parentworking = working and ctx1 == self['.']
1400 1410 match = match or matchmod.always(self.root, self.getcwd())
1401 1411 listignored, listclean, listunknown = ignored, clean, unknown
1402 1412
1403 1413 # load earliest manifest first for caching reasons
1404 1414 if not working and ctx2.rev() < ctx1.rev():
1405 1415 ctx2.manifest()
1406 1416
1407 1417 if not parentworking:
1408 1418 def bad(f, msg):
1409 1419 # 'f' may be a directory pattern from 'match.files()',
1410 1420 # so 'f not in ctx1' is not enough
1411 1421 if f not in ctx1 and f not in ctx1.dirs():
1412 1422 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1413 1423 match.bad = bad
1414 1424
1415 1425 if working: # we need to scan the working dir
1416 1426 subrepos = []
1417 1427 if '.hgsub' in self.dirstate:
1418 1428 subrepos = ctx2.substate.keys()
1419 1429 s = self.dirstate.status(match, subrepos, listignored,
1420 1430 listclean, listunknown)
1421 1431 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1422 1432
1423 1433 # check for any possibly clean files
1424 1434 if parentworking and cmp:
1425 1435 fixup = []
1426 1436 # do a full compare of any files that might have changed
1427 1437 for f in sorted(cmp):
1428 1438 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1429 1439 or ctx1[f].cmp(ctx2[f])):
1430 1440 modified.append(f)
1431 1441 else:
1432 1442 fixup.append(f)
1433 1443
1434 1444 # update dirstate for files that are actually clean
1435 1445 if fixup:
1436 1446 if listclean:
1437 1447 clean += fixup
1438 1448
1439 1449 try:
1440 1450 # updating the dirstate is optional
1441 1451 # so we don't wait on the lock
1442 1452 wlock = self.wlock(False)
1443 1453 try:
1444 1454 for f in fixup:
1445 1455 self.dirstate.normal(f)
1446 1456 finally:
1447 1457 wlock.release()
1448 1458 except error.LockError:
1449 1459 pass
1450 1460
1451 1461 if not parentworking:
1452 1462 mf1 = mfmatches(ctx1)
1453 1463 if working:
1454 1464 # we are comparing working dir against non-parent
1455 1465 # generate a pseudo-manifest for the working dir
1456 1466 mf2 = mfmatches(self['.'])
1457 1467 for f in cmp + modified + added:
1458 1468 mf2[f] = None
1459 1469 mf2.set(f, ctx2.flags(f))
1460 1470 for f in removed:
1461 1471 if f in mf2:
1462 1472 del mf2[f]
1463 1473 else:
1464 1474 # we are comparing two revisions
1465 1475 deleted, unknown, ignored = [], [], []
1466 1476 mf2 = mfmatches(ctx2)
1467 1477
1468 1478 modified, added, clean = [], [], []
1469 1479 withflags = mf1.withflags() | mf2.withflags()
1470 1480 for fn in mf2:
1471 1481 if fn in mf1:
1472 1482 if (fn not in deleted and
1473 1483 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1474 1484 (mf1[fn] != mf2[fn] and
1475 1485 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1476 1486 modified.append(fn)
1477 1487 elif listclean:
1478 1488 clean.append(fn)
1479 1489 del mf1[fn]
1480 1490 elif fn not in deleted:
1481 1491 added.append(fn)
1482 1492 removed = mf1.keys()
1483 1493
1484 1494 if working and modified and not self.dirstate._checklink:
1485 1495 # Symlink placeholders may get non-symlink-like contents
1486 1496 # via user error or dereferencing by NFS or Samba servers,
1487 1497 # so we filter out any placeholders that don't look like a
1488 1498 # symlink
1489 1499 sane = []
1490 1500 for f in modified:
1491 1501 if ctx2.flags(f) == 'l':
1492 1502 d = ctx2[f].data()
1493 1503 if len(d) >= 1024 or '\n' in d or util.binary(d):
1494 1504 self.ui.debug('ignoring suspect symlink placeholder'
1495 1505 ' "%s"\n' % f)
1496 1506 continue
1497 1507 sane.append(f)
1498 1508 modified = sane
1499 1509
1500 1510 r = modified, added, removed, deleted, unknown, ignored, clean
1501 1511
1502 1512 if listsubrepos:
1503 1513 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1504 1514 if working:
1505 1515 rev2 = None
1506 1516 else:
1507 1517 rev2 = ctx2.substate[subpath][1]
1508 1518 try:
1509 1519 submatch = matchmod.narrowmatcher(subpath, match)
1510 1520 s = sub.status(rev2, match=submatch, ignored=listignored,
1511 1521 clean=listclean, unknown=listunknown,
1512 1522 listsubrepos=True)
1513 1523 for rfiles, sfiles in zip(r, s):
1514 1524 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1515 1525 except error.LookupError:
1516 1526 self.ui.status(_("skipping missing subrepository: %s\n")
1517 1527 % subpath)
1518 1528
1519 1529 for l in r:
1520 1530 l.sort()
1521 1531 return r
1522 1532
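# Sketch (illustrative unpacking, not part of the original file): status()
# returns seven sorted lists in a fixed order; unknown, ignored and clean are
# only populated when the matching keyword arguments are True:
#
#     (modified, added, removed, deleted,
#      unknown, ignored, clean) = repo.status(unknown=True, ignored=True,
#                                             clean=True)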
1523 1533 def heads(self, start=None):
1524 1534 heads = self.changelog.heads(start)
1525 1535 # sort the output in rev descending order
1526 1536 return sorted(heads, key=self.changelog.rev, reverse=True)
1527 1537
1528 1538 def branchheads(self, branch=None, start=None, closed=False):
1529 1539 '''return a (possibly filtered) list of heads for the given branch
1530 1540
1531 1541 Heads are returned in topological order, from newest to oldest.
1532 1542 If branch is None, use the dirstate branch.
1533 1543 If start is not None, return only heads reachable from start.
1534 1544 If closed is True, return heads that are marked as closed as well.
1535 1545 '''
1536 1546 if branch is None:
1537 1547 branch = self[None].branch()
1538 1548 branches = self.branchmap()
1539 1549 if branch not in branches:
1540 1550 return []
1541 1551 # the cache returns heads ordered lowest to highest
1542 1552 bheads = list(reversed(branches[branch]))
1543 1553 if start is not None:
1544 1554 # filter out the heads that cannot be reached from startrev
1545 1555 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1546 1556 bheads = [h for h in bheads if h in fbheads]
1547 1557 if not closed:
1548 1558 bheads = [h for h in bheads if
1549 1559 ('close' not in self.changelog.read(h)[5])]
1550 1560 return bheads
1551 1561
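# Sketch (assumed names): heads of the current or a named branch, newest
# first, open heads only unless closed=True:
#
#     bheads = repo.branchheads()                        # dirstate branch
#     allheads = repo.branchheads('default', closed=True)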
1552 1562 def branches(self, nodes):
1553 1563 if not nodes:
1554 1564 nodes = [self.changelog.tip()]
1555 1565 b = []
1556 1566 for n in nodes:
1557 1567 t = n
1558 1568 while True:
1559 1569 p = self.changelog.parents(n)
1560 1570 if p[1] != nullid or p[0] == nullid:
1561 1571 b.append((t, n, p[0], p[1]))
1562 1572 break
1563 1573 n = p[0]
1564 1574 return b
1565 1575
1566 1576 def between(self, pairs):
1567 1577 r = []
1568 1578
1569 1579 for top, bottom in pairs:
1570 1580 n, l, i = top, [], 0
1571 1581 f = 1
1572 1582
1573 1583 while n != bottom and n != nullid:
1574 1584 p = self.changelog.parents(n)[0]
1575 1585 if i == f:
1576 1586 l.append(n)
1577 1587 f = f * 2
1578 1588 n = p
1579 1589 i += 1
1580 1590
1581 1591 r.append(l)
1582 1592
1583 1593 return r
1584 1594
1585 1595 def pull(self, remote, heads=None, force=False):
1586 1596 lock = self.lock()
1587 1597 try:
1588 1598 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1589 1599 force=force)
1590 1600 common, fetch, rheads = tmp
1591 1601 if not fetch:
1592 1602 self.ui.status(_("no changes found\n"))
1593 1603 added = []
1594 1604 result = 0
1595 1605 else:
1596 1606 if heads is None and list(common) == [nullid]:
1597 1607 self.ui.status(_("requesting all changes\n"))
1598 1608 elif heads is None and remote.capable('changegroupsubset'):
1599 1609 # issue1320, avoid a race if remote changed after discovery
1600 1610 heads = rheads
1601 1611
1602 1612 if remote.capable('getbundle'):
1603 1613 cg = remote.getbundle('pull', common=common,
1604 1614 heads=heads or rheads)
1605 1615 elif heads is None:
1606 1616 cg = remote.changegroup(fetch, 'pull')
1607 1617 elif not remote.capable('changegroupsubset'):
1608 1618 raise util.Abort(_("partial pull cannot be done because "
1609 1619 "other repository doesn't support "
1610 1620 "changegroupsubset."))
1611 1621 else:
1612 1622 cg = remote.changegroupsubset(fetch, heads, 'pull')
1613 1623 clstart = len(self.changelog)
1614 1624 result = self.addchangegroup(cg, 'pull', remote.url())
1615 1625 clend = len(self.changelog)
1616 1626 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1617 1627
1618 1628 # compute target subset
1619 1629 if heads is None:
1620 1630 # We pulled everything possible
1621 1631 # sync on everything common
1622 1632 subset = common + added
1623 1633 else:
1624 1634 # We pulled a specific subset
1625 1635 # sync on this subset
1626 1636 subset = heads
1627 1637
1628 1638 # Get remote phases data from remote
1629 1639 remotephases = remote.listkeys('phases')
1630 1640 publishing = bool(remotephases.get('publishing', False))
1631 1641 if remotephases and not publishing:
1632 1642 # remote is new and unpublishing
1633 1643 pheads, _dr = phases.analyzeremotephases(self, subset,
1634 1644 remotephases)
1635 1645 phases.advanceboundary(self, phases.public, pheads)
1636 1646 phases.advanceboundary(self, phases.draft, subset)
1637 1647 else:
1638 1648 # Remote is old or publishing all common changesets
1639 1649 # should be seen as public
1640 1650 phases.advanceboundary(self, phases.public, subset)
1641 1651 finally:
1642 1652 lock.release()
1643 1653
1644 1654 return result
1645 1655
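# Illustrative sketch (assumed names; 'hg.peer' is mercurial.hg.peer):
#
#     other = hg.peer(repo, {}, 'http://example.com/repo')
#     result = repo.pull(other)
#     if result == 0:
#         repo.ui.status('nothing changed\n')
#
# The return value is 0 when nothing changed, otherwise the value reported by
# addchangegroup() further down.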
1646 1656 def checkpush(self, force, revs):
1647 1657 """Extensions can override this function if additional checks have
1648 1658 to be performed before pushing, or call it if they override push
1649 1659 command.
1650 1660 """
1651 1661 pass
1652 1662
1653 1663 def push(self, remote, force=False, revs=None, newbranch=False):
1654 1664 '''Push outgoing changesets (limited by revs) from the current
1655 1665 repository to remote. Return an integer:
1656 1666 - None means nothing to push
1657 1667 - 0 means HTTP error
1658 1668 - 1 means we pushed and remote head count is unchanged *or*
1659 1669 we have outgoing changesets but refused to push
1660 1670 - other values as described by addchangegroup()
1661 1671 '''
1662 1672 # there are two ways to push to remote repo:
1663 1673 #
1664 1674 # addchangegroup assumes local user can lock remote
1665 1675 # repo (local filesystem, old ssh servers).
1666 1676 #
1667 1677 # unbundle assumes local user cannot lock remote repo (new ssh
1668 1678 # servers, http servers).
1669 1679
1670 1680 # get local lock as we might write phase data
1671 1681 locallock = self.lock()
1672 1682 try:
1673 1683 self.checkpush(force, revs)
1674 1684 lock = None
1675 1685 unbundle = remote.capable('unbundle')
1676 1686 if not unbundle:
1677 1687 lock = remote.lock()
1678 1688 try:
1679 1689 # discovery
1680 1690 fci = discovery.findcommonincoming
1681 1691 commoninc = fci(self, remote, force=force)
1682 1692 common, inc, remoteheads = commoninc
1683 1693 fco = discovery.findcommonoutgoing
1684 1694 outgoing = fco(self, remote, onlyheads=revs,
1685 1695 commoninc=commoninc, force=force)
1686 1696
1687 1697
1688 1698 if not outgoing.missing:
1689 1699 # nothing to push
1690 1700 scmutil.nochangesfound(self.ui, outgoing.excluded)
1691 1701 ret = None
1692 1702 else:
1693 1703 # something to push
1694 1704 if not force:
1695 1705 discovery.checkheads(self, remote, outgoing,
1696 1706 remoteheads, newbranch,
1697 1707 bool(inc))
1698 1708
1699 1709 # create a changegroup from local
1700 1710 if revs is None and not outgoing.excluded:
1701 1711 # push everything,
1702 1712 # use the fast path, no race possible on push
1703 1713 cg = self._changegroup(outgoing.missing, 'push')
1704 1714 else:
1705 1715 cg = self.getlocalbundle('push', outgoing)
1706 1716
1707 1717 # apply changegroup to remote
1708 1718 if unbundle:
1709 1719 # local repo finds heads on server, finds out what
1710 1720 # revs it must push. once revs transferred, if server
1711 1721 # finds it has different heads (someone else won
1712 1722 # commit/push race), server aborts.
1713 1723 if force:
1714 1724 remoteheads = ['force']
1715 1725 # ssh: return remote's addchangegroup()
1716 1726 # http: return remote's addchangegroup() or 0 for error
1717 1727 ret = remote.unbundle(cg, remoteheads, 'push')
1718 1728 else:
1719 1729 # we return an integer indicating remote head count
1720 1730 # change
1721 1731 ret = remote.addchangegroup(cg, 'push', self.url())
1722 1732
1723 1733 if ret:
1724 1734 # push succeeded, synchronize target of the push
1725 1735 cheads = outgoing.missingheads
1726 1736 elif revs is None:
1727 1737 # All-out push failed. Synchronize all common
1728 1738 cheads = outgoing.commonheads
1729 1739 else:
1730 1740 # I want cheads = heads(::missingheads and ::commonheads)
1731 1741 # (missingheads is revs with secret changeset filtered out)
1732 1742 #
1733 1743 # This can be expressed as:
1734 1744 # cheads = ( (missingheads and ::commonheads)
1735 1745 # + (commonheads and ::missingheads))
1736 1746 # )
1737 1747 #
1738 1748 # while trying to push we already computed the following:
1739 1749 # common = (::commonheads)
1740 1750 # missing = ((commonheads::missingheads) - commonheads)
1741 1751 #
1742 1752 # We can pick:
1743 1753 # * missingheads part of common (::commonheads)
1744 1754 common = set(outgoing.common)
1745 1755 cheads = [node for node in revs if node in common]
1746 1756 # and
1747 1757 # * commonheads parents on missing
1748 1758 revset = self.set('%ln and parents(roots(%ln))',
1749 1759 outgoing.commonheads,
1750 1760 outgoing.missing)
1751 1761 cheads.extend(c.node() for c in revset)
1752 1762 # even when we don't push, exchanging phase data is useful
1753 1763 remotephases = remote.listkeys('phases')
1754 1764 if not remotephases: # old server or public only repo
1755 1765 phases.advanceboundary(self, phases.public, cheads)
1756 1766 # don't push any phase data as there is nothing to push
1757 1767 else:
1758 1768 ana = phases.analyzeremotephases(self, cheads, remotephases)
1759 1769 pheads, droots = ana
1760 1770 ### Apply remote phase on local
1761 1771 if remotephases.get('publishing', False):
1762 1772 phases.advanceboundary(self, phases.public, cheads)
1763 1773 else: # publish = False
1764 1774 phases.advanceboundary(self, phases.public, pheads)
1765 1775 phases.advanceboundary(self, phases.draft, cheads)
1766 1776 ### Apply local phase on remote
1767 1777
1768 1778 # Get the list of all revs draft on remote but public here.
1769 1779 # XXX Beware that the revset breaks if droots is not strictly
1770 1780 # XXX made of roots; we may want to ensure it is, but that is costly
1771 1781 outdated = self.set('heads((%ln::%ln) and public())',
1772 1782 droots, cheads)
1773 1783 for newremotehead in outdated:
1774 1784 r = remote.pushkey('phases',
1775 1785 newremotehead.hex(),
1776 1786 str(phases.draft),
1777 1787 str(phases.public))
1778 1788 if not r:
1779 1789 self.ui.warn(_('updating %s to public failed!\n')
1780 1790 % newremotehead)
1781 1791 finally:
1782 1792 if lock is not None:
1783 1793 lock.release()
1784 1794 finally:
1785 1795 locallock.release()
1786 1796
1787 1797 self.ui.debug("checking for updated bookmarks\n")
1788 1798 rb = remote.listkeys('bookmarks')
1789 1799 for k in rb.keys():
1790 1800 if k in self._bookmarks:
1791 1801 nr, nl = rb[k], hex(self._bookmarks[k])
1792 1802 if nr in self:
1793 1803 cr = self[nr]
1794 1804 cl = self[nl]
1795 1805 if cl in cr.descendants():
1796 1806 r = remote.pushkey('bookmarks', k, nr, nl)
1797 1807 if r:
1798 1808 self.ui.status(_("updating bookmark %s\n") % k)
1799 1809 else:
1800 1810 self.ui.warn(_('updating bookmark %s'
1801 1811 ' failed!\n') % k)
1802 1812
1803 1813 return ret
1804 1814
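# Illustrative sketch (assumed names) of interpreting push()'s return value
# as documented above:
#
#     ret = repo.push(other, newbranch=True)
#     if ret is None:
#         repo.ui.status('no changes to push\n')
#     elif ret == 0:
#         repo.ui.warn('push over HTTP failed\n')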
1805 1815 def changegroupinfo(self, nodes, source):
1806 1816 if self.ui.verbose or source == 'bundle':
1807 1817 self.ui.status(_("%d changesets found\n") % len(nodes))
1808 1818 if self.ui.debugflag:
1809 1819 self.ui.debug("list of changesets:\n")
1810 1820 for node in nodes:
1811 1821 self.ui.debug("%s\n" % hex(node))
1812 1822
1813 1823 def changegroupsubset(self, bases, heads, source):
1814 1824 """Compute a changegroup consisting of all the nodes that are
1815 1825 descendants of any of the bases and ancestors of any of the heads.
1816 1826 Return a chunkbuffer object whose read() method will return
1817 1827 successive changegroup chunks.
1818 1828
1819 1829 It is fairly complex as determining which filenodes and which
1820 1830 manifest nodes need to be included for the changeset to be complete
1821 1831 is non-trivial.
1822 1832
1823 1833 Another wrinkle is doing the reverse, figuring out which changeset in
1824 1834 the changegroup a particular filenode or manifestnode belongs to.
1825 1835 """
1826 1836 cl = self.changelog
1827 1837 if not bases:
1828 1838 bases = [nullid]
1829 1839 csets, bases, heads = cl.nodesbetween(bases, heads)
1830 1840 # We assume that all ancestors of bases are known
1831 1841 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1832 1842 return self._changegroupsubset(common, csets, heads, source)
1833 1843
1834 1844 def getlocalbundle(self, source, outgoing):
1835 1845 """Like getbundle, but taking a discovery.outgoing as an argument.
1836 1846
1837 1847 This is only implemented for local repos and reuses potentially
1838 1848 precomputed sets in outgoing."""
1839 1849 if not outgoing.missing:
1840 1850 return None
1841 1851 return self._changegroupsubset(outgoing.common,
1842 1852 outgoing.missing,
1843 1853 outgoing.missingheads,
1844 1854 source)
1845 1855
1846 1856 def getbundle(self, source, heads=None, common=None):
1847 1857 """Like changegroupsubset, but returns the set difference between the
1848 1858 ancestors of heads and the ancestors common.
1849 1859
1850 1860 If heads is None, use the local heads. If common is None, use [nullid].
1851 1861
1852 1862 The nodes in common might not all be known locally due to the way the
1853 1863 current discovery protocol works.
1854 1864 """
1855 1865 cl = self.changelog
1856 1866 if common:
1857 1867 nm = cl.nodemap
1858 1868 common = [n for n in common if n in nm]
1859 1869 else:
1860 1870 common = [nullid]
1861 1871 if not heads:
1862 1872 heads = cl.heads()
1863 1873 return self.getlocalbundle(source,
1864 1874 discovery.outgoing(cl, common, heads))
1865 1875
1866 1876 def _changegroupsubset(self, commonrevs, csets, heads, source):
1867 1877
1868 1878 cl = self.changelog
1869 1879 mf = self.manifest
1870 1880 mfs = {} # needed manifests
1871 1881 fnodes = {} # needed file nodes
1872 1882 changedfiles = set()
1873 1883 fstate = ['', {}]
1874 1884 count = [0, 0]
1875 1885
1876 1886 # can we go through the fast path?
1877 1887 heads.sort()
1878 1888 if heads == sorted(self.heads()):
1879 1889 return self._changegroup(csets, source)
1880 1890
1881 1891 # slow path
1882 1892 self.hook('preoutgoing', throw=True, source=source)
1883 1893 self.changegroupinfo(csets, source)
1884 1894
1885 1895 # filter any nodes that claim to be part of the known set
1886 1896 def prune(revlog, missing):
1887 1897 rr, rl = revlog.rev, revlog.linkrev
1888 1898 return [n for n in missing
1889 1899 if rl(rr(n)) not in commonrevs]
1890 1900
1891 1901 progress = self.ui.progress
1892 1902 _bundling = _('bundling')
1893 1903 _changesets = _('changesets')
1894 1904 _manifests = _('manifests')
1895 1905 _files = _('files')
1896 1906
1897 1907 def lookup(revlog, x):
1898 1908 if revlog == cl:
1899 1909 c = cl.read(x)
1900 1910 changedfiles.update(c[3])
1901 1911 mfs.setdefault(c[0], x)
1902 1912 count[0] += 1
1903 1913 progress(_bundling, count[0],
1904 1914 unit=_changesets, total=count[1])
1905 1915 return x
1906 1916 elif revlog == mf:
1907 1917 clnode = mfs[x]
1908 1918 mdata = mf.readfast(x)
1909 1919 for f, n in mdata.iteritems():
1910 1920 if f in changedfiles:
1911 1921 fnodes[f].setdefault(n, clnode)
1912 1922 count[0] += 1
1913 1923 progress(_bundling, count[0],
1914 1924 unit=_manifests, total=count[1])
1915 1925 return clnode
1916 1926 else:
1917 1927 progress(_bundling, count[0], item=fstate[0],
1918 1928 unit=_files, total=count[1])
1919 1929 return fstate[1][x]
1920 1930
1921 1931 bundler = changegroup.bundle10(lookup)
1922 1932 reorder = self.ui.config('bundle', 'reorder', 'auto')
1923 1933 if reorder == 'auto':
1924 1934 reorder = None
1925 1935 else:
1926 1936 reorder = util.parsebool(reorder)
1927 1937
1928 1938 def gengroup():
1929 1939 # Create a changenode group generator that will call our functions
1930 1940 # back to lookup the owning changenode and collect information.
1931 1941 count[:] = [0, len(csets)]
1932 1942 for chunk in cl.group(csets, bundler, reorder=reorder):
1933 1943 yield chunk
1934 1944 progress(_bundling, None)
1935 1945
1936 1946 # Create a generator for the manifestnodes that calls our lookup
1937 1947 # and data collection functions back.
1938 1948 for f in changedfiles:
1939 1949 fnodes[f] = {}
1940 1950 count[:] = [0, len(mfs)]
1941 1951 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1942 1952 yield chunk
1943 1953 progress(_bundling, None)
1944 1954
1945 1955 mfs.clear()
1946 1956
1947 1957 # Go through all our files in order sorted by name.
1948 1958 count[:] = [0, len(changedfiles)]
1949 1959 for fname in sorted(changedfiles):
1950 1960 filerevlog = self.file(fname)
1951 1961 if not len(filerevlog):
1952 1962 raise util.Abort(_("empty or missing revlog for %s")
1953 1963 % fname)
1954 1964 fstate[0] = fname
1955 1965 fstate[1] = fnodes.pop(fname, {})
1956 1966
1957 1967 nodelist = prune(filerevlog, fstate[1])
1958 1968 if nodelist:
1959 1969 count[0] += 1
1960 1970 yield bundler.fileheader(fname)
1961 1971 for chunk in filerevlog.group(nodelist, bundler, reorder):
1962 1972 yield chunk
1963 1973
1964 1974 # Signal that no more groups are left.
1965 1975 yield bundler.close()
1966 1976 progress(_bundling, None)
1967 1977
1968 1978 if csets:
1969 1979 self.hook('outgoing', node=hex(csets[0]), source=source)
1970 1980
1971 1981 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1972 1982
1973 1983 def changegroup(self, basenodes, source):
1974 1984 # to avoid a race we use changegroupsubset() (issue1320)
1975 1985 return self.changegroupsubset(basenodes, self.heads(), source)
1976 1986
1977 1987 def _changegroup(self, nodes, source):
1978 1988 """Compute the changegroup of all nodes that we have that a recipient
1979 1989 doesn't. Return a chunkbuffer object whose read() method will return
1980 1990 successive changegroup chunks.
1981 1991
1982 1992 This is much easier than the previous function as we can assume that
1983 1993 the recipient has any changenode we aren't sending them.
1984 1994
1985 1995 nodes is the set of nodes to send"""
1986 1996
1987 1997 cl = self.changelog
1988 1998 mf = self.manifest
1989 1999 mfs = {}
1990 2000 changedfiles = set()
1991 2001 fstate = ['']
1992 2002 count = [0, 0]
1993 2003
1994 2004 self.hook('preoutgoing', throw=True, source=source)
1995 2005 self.changegroupinfo(nodes, source)
1996 2006
1997 2007 revset = set([cl.rev(n) for n in nodes])
1998 2008
1999 2009 def gennodelst(log):
2000 2010 ln, llr = log.node, log.linkrev
2001 2011 return [ln(r) for r in log if llr(r) in revset]
2002 2012
2003 2013 progress = self.ui.progress
2004 2014 _bundling = _('bundling')
2005 2015 _changesets = _('changesets')
2006 2016 _manifests = _('manifests')
2007 2017 _files = _('files')
2008 2018
2009 2019 def lookup(revlog, x):
2010 2020 if revlog == cl:
2011 2021 c = cl.read(x)
2012 2022 changedfiles.update(c[3])
2013 2023 mfs.setdefault(c[0], x)
2014 2024 count[0] += 1
2015 2025 progress(_bundling, count[0],
2016 2026 unit=_changesets, total=count[1])
2017 2027 return x
2018 2028 elif revlog == mf:
2019 2029 count[0] += 1
2020 2030 progress(_bundling, count[0],
2021 2031 unit=_manifests, total=count[1])
2022 2032 return cl.node(revlog.linkrev(revlog.rev(x)))
2023 2033 else:
2024 2034 progress(_bundling, count[0], item=fstate[0],
2025 2035 total=count[1], unit=_files)
2026 2036 return cl.node(revlog.linkrev(revlog.rev(x)))
2027 2037
2028 2038 bundler = changegroup.bundle10(lookup)
2029 2039 reorder = self.ui.config('bundle', 'reorder', 'auto')
2030 2040 if reorder == 'auto':
2031 2041 reorder = None
2032 2042 else:
2033 2043 reorder = util.parsebool(reorder)
2034 2044
2035 2045 def gengroup():
2036 2046 '''yield a sequence of changegroup chunks (strings)'''
2037 2047 # construct a list of all changed files
2038 2048
2039 2049 count[:] = [0, len(nodes)]
2040 2050 for chunk in cl.group(nodes, bundler, reorder=reorder):
2041 2051 yield chunk
2042 2052 progress(_bundling, None)
2043 2053
2044 2054 count[:] = [0, len(mfs)]
2045 2055 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2046 2056 yield chunk
2047 2057 progress(_bundling, None)
2048 2058
2049 2059 count[:] = [0, len(changedfiles)]
2050 2060 for fname in sorted(changedfiles):
2051 2061 filerevlog = self.file(fname)
2052 2062 if not len(filerevlog):
2053 2063 raise util.Abort(_("empty or missing revlog for %s")
2054 2064 % fname)
2055 2065 fstate[0] = fname
2056 2066 nodelist = gennodelst(filerevlog)
2057 2067 if nodelist:
2058 2068 count[0] += 1
2059 2069 yield bundler.fileheader(fname)
2060 2070 for chunk in filerevlog.group(nodelist, bundler, reorder):
2061 2071 yield chunk
2062 2072 yield bundler.close()
2063 2073 progress(_bundling, None)
2064 2074
2065 2075 if nodes:
2066 2076 self.hook('outgoing', node=hex(nodes[0]), source=source)
2067 2077
2068 2078 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2069 2079
2070 2080 def addchangegroup(self, source, srctype, url, emptyok=False):
2071 2081 """Add the changegroup returned by source.read() to this repo.
2072 2082 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2073 2083 the URL of the repo where this changegroup is coming from.
2074 2084
2075 2085 Return an integer summarizing the change to this repo:
2076 2086 - nothing changed or no source: 0
2077 2087 - more heads than before: 1+added heads (2..n)
2078 2088 - fewer heads than before: -1-removed heads (-2..-n)
2079 2089 - number of heads stays the same: 1
2080 2090 """
2081 2091 def csmap(x):
2082 2092 self.ui.debug("add changeset %s\n" % short(x))
2083 2093 return len(cl)
2084 2094
2085 2095 def revmap(x):
2086 2096 return cl.rev(x)
2087 2097
2088 2098 if not source:
2089 2099 return 0
2090 2100
2091 2101 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2092 2102
2093 2103 changesets = files = revisions = 0
2094 2104 efiles = set()
2095 2105
2096 2106 # write changelog data to temp files so concurrent readers will not see
2097 2107 # inconsistent view
2098 2108 cl = self.changelog
2099 2109 cl.delayupdate()
2100 2110 oldheads = cl.heads()
2101 2111
2102 2112 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2103 2113 try:
2104 2114 trp = weakref.proxy(tr)
2105 2115 # pull off the changeset group
2106 2116 self.ui.status(_("adding changesets\n"))
2107 2117 clstart = len(cl)
2108 2118 class prog(object):
2109 2119 step = _('changesets')
2110 2120 count = 1
2111 2121 ui = self.ui
2112 2122 total = None
2113 2123 def __call__(self):
2114 2124 self.ui.progress(self.step, self.count, unit=_('chunks'),
2115 2125 total=self.total)
2116 2126 self.count += 1
2117 2127 pr = prog()
2118 2128 source.callback = pr
2119 2129
2120 2130 source.changelogheader()
2121 2131 srccontent = cl.addgroup(source, csmap, trp)
2122 2132 if not (srccontent or emptyok):
2123 2133 raise util.Abort(_("received changelog group is empty"))
2124 2134 clend = len(cl)
2125 2135 changesets = clend - clstart
2126 2136 for c in xrange(clstart, clend):
2127 2137 efiles.update(self[c].files())
2128 2138 efiles = len(efiles)
2129 2139 self.ui.progress(_('changesets'), None)
2130 2140
2131 2141 # pull off the manifest group
2132 2142 self.ui.status(_("adding manifests\n"))
2133 2143 pr.step = _('manifests')
2134 2144 pr.count = 1
2135 2145 pr.total = changesets # manifests <= changesets
2136 2146 # no need to check for empty manifest group here:
2137 2147 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2138 2148 # no new manifest will be created and the manifest group will
2139 2149 # be empty during the pull
2140 2150 source.manifestheader()
2141 2151 self.manifest.addgroup(source, revmap, trp)
2142 2152 self.ui.progress(_('manifests'), None)
2143 2153
2144 2154 needfiles = {}
2145 2155 if self.ui.configbool('server', 'validate', default=False):
2146 2156 # validate incoming csets have their manifests
2147 2157 for cset in xrange(clstart, clend):
2148 2158 mfest = self.changelog.read(self.changelog.node(cset))[0]
2149 2159 mfest = self.manifest.readdelta(mfest)
2150 2160 # store file nodes we must see
2151 2161 for f, n in mfest.iteritems():
2152 2162 needfiles.setdefault(f, set()).add(n)
2153 2163
2154 2164 # process the files
2155 2165 self.ui.status(_("adding file changes\n"))
2156 2166 pr.step = _('files')
2157 2167 pr.count = 1
2158 2168 pr.total = efiles
2159 2169 source.callback = None
2160 2170
2161 2171 while True:
2162 2172 chunkdata = source.filelogheader()
2163 2173 if not chunkdata:
2164 2174 break
2165 2175 f = chunkdata["filename"]
2166 2176 self.ui.debug("adding %s revisions\n" % f)
2167 2177 pr()
2168 2178 fl = self.file(f)
2169 2179 o = len(fl)
2170 2180 if not fl.addgroup(source, revmap, trp):
2171 2181 raise util.Abort(_("received file revlog group is empty"))
2172 2182 revisions += len(fl) - o
2173 2183 files += 1
2174 2184 if f in needfiles:
2175 2185 needs = needfiles[f]
2176 2186 for new in xrange(o, len(fl)):
2177 2187 n = fl.node(new)
2178 2188 if n in needs:
2179 2189 needs.remove(n)
2180 2190 if not needs:
2181 2191 del needfiles[f]
2182 2192 self.ui.progress(_('files'), None)
2183 2193
2184 2194 for f, needs in needfiles.iteritems():
2185 2195 fl = self.file(f)
2186 2196 for n in needs:
2187 2197 try:
2188 2198 fl.rev(n)
2189 2199 except error.LookupError:
2190 2200 raise util.Abort(
2191 2201 _('missing file data for %s:%s - run hg verify') %
2192 2202 (f, hex(n)))
2193 2203
2194 2204 dh = 0
2195 2205 if oldheads:
2196 2206 heads = cl.heads()
2197 2207 dh = len(heads) - len(oldheads)
2198 2208 for h in heads:
2199 2209 if h not in oldheads and 'close' in self[h].extra():
2200 2210 dh -= 1
2201 2211 htext = ""
2202 2212 if dh:
2203 2213 htext = _(" (%+d heads)") % dh
2204 2214
2205 2215 self.ui.status(_("added %d changesets"
2206 2216 " with %d changes to %d files%s\n")
2207 2217 % (changesets, revisions, files, htext))
2208 2218
2209 2219 if changesets > 0:
2210 2220 p = lambda: cl.writepending() and self.root or ""
2211 2221 self.hook('pretxnchangegroup', throw=True,
2212 2222 node=hex(cl.node(clstart)), source=srctype,
2213 2223 url=url, pending=p)
2214 2224
2215 2225 added = [cl.node(r) for r in xrange(clstart, clend)]
2216 2226 publishing = self.ui.configbool('phases', 'publish', True)
2217 2227 if srctype == 'push':
2218 2228 # Old servers cannot push the boundary themselves.
2219 2229 # New servers won't push the boundary if the changeset already
2220 2230 # existed locally as secret
2221 2231 #
2222 2232 # We should not use added here but the list of all changes in
2223 2233 # the bundle
2224 2234 if publishing:
2225 2235 phases.advanceboundary(self, phases.public, srccontent)
2226 2236 else:
2227 2237 phases.advanceboundary(self, phases.draft, srccontent)
2228 2238 phases.retractboundary(self, phases.draft, added)
2229 2239 elif srctype != 'strip':
2230 2240 # publishing only alters behavior during push
2231 2241 #
2232 2242 # strip should not touch boundary at all
2233 2243 phases.retractboundary(self, phases.draft, added)
2234 2244
2235 2245 # make changelog see real files again
2236 2246 cl.finalize(trp)
2237 2247
2238 2248 tr.close()
2239 2249
2240 2250 if changesets > 0:
2241 2251 def runhooks():
2242 2252 # forcefully update the on-disk branch cache
2243 2253 self.ui.debug("updating the branch cache\n")
2244 2254 self.updatebranchcache()
2245 2255 self.hook("changegroup", node=hex(cl.node(clstart)),
2246 2256 source=srctype, url=url)
2247 2257
2248 2258 for n in added:
2249 2259 self.hook("incoming", node=hex(n), source=srctype,
2250 2260 url=url)
2251 2261 self._afterlock(runhooks)
2252 2262
2253 2263 finally:
2254 2264 tr.release()
2255 2265 # never return 0 here:
2256 2266 if dh < 0:
2257 2267 return dh - 1
2258 2268 else:
2259 2269 return dh + 1
2260 2270
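A note on the return convention just above: addchangegroup() reserves 0 for "nothing pulled" and otherwise encodes the change in head count as dh + 1 or dh - 1, so callers can tell a no-op apart from a pull that left the head count unchanged. A minimal sketch of how a caller might decode that value (the helper name is hypothetical, not part of this changeset):

    def describe_changegroup_result(ret):
        # ret follows the addchangegroup() contract documented above
        if ret == 0:
            return "no changes added"
        if ret > 1:
            return "%d new head(s)" % (ret - 1)        # ret = 1 + added heads
        if ret < 0:
            return "%d head(s) removed" % (-ret - 1)   # ret = -1 - removed heads
        return "changes added, head count unchanged"   # ret == 1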
2261 2271 def stream_in(self, remote, requirements):
2262 2272 lock = self.lock()
2263 2273 try:
2264 2274 fp = remote.stream_out()
2265 2275 l = fp.readline()
2266 2276 try:
2267 2277 resp = int(l)
2268 2278 except ValueError:
2269 2279 raise error.ResponseError(
2270 2280 _('Unexpected response from remote server:'), l)
2271 2281 if resp == 1:
2272 2282 raise util.Abort(_('operation forbidden by server'))
2273 2283 elif resp == 2:
2274 2284 raise util.Abort(_('locking the remote repository failed'))
2275 2285 elif resp != 0:
2276 2286 raise util.Abort(_('the server sent an unknown error code'))
2277 2287 self.ui.status(_('streaming all changes\n'))
2278 2288 l = fp.readline()
2279 2289 try:
2280 2290 total_files, total_bytes = map(int, l.split(' ', 1))
2281 2291 except (ValueError, TypeError):
2282 2292 raise error.ResponseError(
2283 2293 _('Unexpected response from remote server:'), l)
2284 2294 self.ui.status(_('%d files to transfer, %s of data\n') %
2285 2295 (total_files, util.bytecount(total_bytes)))
2286 2296 start = time.time()
2287 2297 for i in xrange(total_files):
2288 2298 # XXX doesn't support '\n' or '\r' in filenames
2289 2299 l = fp.readline()
2290 2300 try:
2291 2301 name, size = l.split('\0', 1)
2292 2302 size = int(size)
2293 2303 except (ValueError, TypeError):
2294 2304 raise error.ResponseError(
2295 2305 _('Unexpected response from remote server:'), l)
2296 2306 if self.ui.debugflag:
2297 2307 self.ui.debug('adding %s (%s)\n' %
2298 2308 (name, util.bytecount(size)))
2299 2309 # for backwards compat, name was partially encoded
2300 2310 ofp = self.sopener(store.decodedir(name), 'w')
2301 2311 for chunk in util.filechunkiter(fp, limit=size):
2302 2312 ofp.write(chunk)
2303 2313 ofp.close()
2304 2314 elapsed = time.time() - start
2305 2315 if elapsed <= 0:
2306 2316 elapsed = 0.001
2307 2317 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2308 2318 (util.bytecount(total_bytes), elapsed,
2309 2319 util.bytecount(total_bytes / elapsed)))
2310 2320
2311 2321 # new requirements = old non-format requirements +
2312 2322 # new format-related
2313 2323 # requirements from the streamed-in repository
2314 2324 requirements.update(set(self.requirements) - self.supportedformats)
2315 2325 self._applyrequirements(requirements)
2316 2326 self._writerequirements()
2317 2327
2318 2328 self.invalidate()
2319 2329 return len(self.heads()) + 1
2320 2330 finally:
2321 2331 lock.release()
2322 2332
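For reference, stream_in() above reads the remote's stream in this shape: an integer status line (0 = ok, 1 = forbidden, 2 = lock failure), a line giving the total file and byte counts, then for each file a "name\0size" header line followed by exactly size bytes of revlog data. A minimal sketch of an emitter for that layout, assuming entries is an iterable of (name, data) byte-string pairs; the function is purely illustrative, not part of Mercurial's API:

    def emit_stream(out, entries):
        # entries: iterable of (name, data) pairs; both must be byte strings
        entries = list(entries)
        total_bytes = sum(len(data) for _name, data in entries)
        out.write(b"0\n")                                    # status line: 0 = ok
        out.write(b"%d %d\n" % (len(entries), total_bytes))  # "<files> <bytes>"
        for name, data in entries:
            out.write(b"%s\0%d\n" % (name, len(data)))       # "<name>\0<size>"
            out.write(data)                                   # raw file contents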
2323 2333 def clone(self, remote, heads=[], stream=False):
2324 2334 '''clone remote repository.
2325 2335
2326 2336 keyword arguments:
2327 2337 heads: list of revs to clone (forces use of pull)
2328 2338 stream: use streaming clone if possible'''
2329 2339
2330 2340 # now, all clients that can request uncompressed clones can
2331 2341 # read repo formats supported by all servers that can serve
2332 2342 # them.
2333 2343
2334 2344 # if revlog format changes, client will have to check version
2335 2345 # and format flags on "stream" capability, and use
2336 2346 # uncompressed only if compatible.
2337 2347
2338 2348 if not stream:
2339 2349 # if the server explicitly prefers to stream (for fast LANs)
2340 2350 stream = remote.capable('stream-preferred')
2341 2351
2342 2352 if stream and not heads:
2343 2353 # 'stream' means remote revlog format is revlogv1 only
2344 2354 if remote.capable('stream'):
2345 2355 return self.stream_in(remote, set(('revlogv1',)))
2346 2356 # otherwise, 'streamreqs' contains the remote revlog format
2347 2357 streamreqs = remote.capable('streamreqs')
2348 2358 if streamreqs:
2349 2359 streamreqs = set(streamreqs.split(','))
2350 2360 # if we support it, stream in and adjust our requirements
2351 2361 if not streamreqs - self.supportedformats:
2352 2362 return self.stream_in(remote, streamreqs)
2353 2363 return self.pull(remote, heads)
2354 2364
2355 2365 def pushkey(self, namespace, key, old, new):
2356 2366 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2357 2367 old=old, new=new)
2358 2368 ret = pushkey.push(self, namespace, key, old, new)
2359 2369 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2360 2370 ret=ret)
2361 2371 return ret
2362 2372
2363 2373 def listkeys(self, namespace):
2364 2374 self.hook('prelistkeys', throw=True, namespace=namespace)
2365 2375 values = pushkey.list(self, namespace)
2366 2376 self.hook('listkeys', namespace=namespace, values=values)
2367 2377 return values
2368 2378
2369 2379 def debugwireargs(self, one, two, three=None, four=None, five=None):
2370 2380 '''used to test argument passing over the wire'''
2371 2381 return "%s %s %s %s %s" % (one, two, three, four, five)
2372 2382
2373 2383 def savecommitmessage(self, text):
2374 2384 fp = self.opener('last-message.txt', 'wb')
2375 2385 try:
2376 2386 fp.write(text)
2377 2387 finally:
2378 2388 fp.close()
2379 2389 return self.pathto(fp.name[len(self.root)+1:])
2380 2390
2381 2391 # used to avoid circular references so destructors work
2382 2392 def aftertrans(files):
2383 2393 renamefiles = [tuple(t) for t in files]
2384 2394 def a():
2385 2395 for src, dest in renamefiles:
2386 2396 try:
2387 2397 util.rename(src, dest)
2388 2398 except OSError: # journal file does not yet exist
2389 2399 pass
2390 2400 return a
2391 2401
2392 2402 def undoname(fn):
2393 2403 base, name = os.path.split(fn)
2394 2404 assert name.startswith('journal')
2395 2405 return os.path.join(base, name.replace('journal', 'undo', 1))
2396 2406
2397 2407 def instance(ui, path, create):
2398 2408 return localrepository(ui, util.urllocalpath(path), create)
2399 2409
2400 2410 def islocal(path):
2401 2411 return True
@@ -1,614 +1,614 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, hex, bin
9 9 from i18n import _
10 import scmutil, util, filemerge, copies, subrepo
10 import error, scmutil, util, filemerge, copies, subrepo
11 11 import errno, os, shutil
12 12
13 13 class mergestate(object):
14 14 '''track 3-way merge state of individual files'''
15 15 def __init__(self, repo):
16 16 self._repo = repo
17 17 self._dirty = False
18 18 self._read()
19 19 def reset(self, node=None):
20 20 self._state = {}
21 21 if node:
22 22 self._local = node
23 23 shutil.rmtree(self._repo.join("merge"), True)
24 24 self._dirty = False
25 25 def _read(self):
26 26 self._state = {}
27 27 try:
28 28 f = self._repo.opener("merge/state")
29 29 for i, l in enumerate(f):
30 30 if i == 0:
31 31 self._local = bin(l[:-1])
32 32 else:
33 33 bits = l[:-1].split("\0")
34 34 self._state[bits[0]] = bits[1:]
35 35 f.close()
36 36 except IOError, err:
37 37 if err.errno != errno.ENOENT:
38 38 raise
39 39 self._dirty = False
40 40 def commit(self):
41 41 if self._dirty:
42 42 f = self._repo.opener("merge/state", "w")
43 43 f.write(hex(self._local) + "\n")
44 44 for d, v in self._state.iteritems():
45 45 f.write("\0".join([d] + v) + "\n")
46 46 f.close()
47 47 self._dirty = False
48 48 def add(self, fcl, fco, fca, fd, flags):
49 49 hash = util.sha1(fcl.path()).hexdigest()
50 50 self._repo.opener.write("merge/" + hash, fcl.data())
51 51 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
52 52 hex(fca.filenode()), fco.path(), flags]
53 53 self._dirty = True
54 54 def __contains__(self, dfile):
55 55 return dfile in self._state
56 56 def __getitem__(self, dfile):
57 57 return self._state[dfile][0]
58 58 def __iter__(self):
59 59 l = self._state.keys()
60 60 l.sort()
61 61 for f in l:
62 62 yield f
63 63 def mark(self, dfile, state):
64 64 self._state[dfile][0] = state
65 65 self._dirty = True
66 66 def resolve(self, dfile, wctx, octx):
67 67 if self[dfile] == 'r':
68 68 return 0
69 69 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
70 70 f = self._repo.opener("merge/" + hash)
71 71 self._repo.wwrite(dfile, f.read(), flags)
72 72 f.close()
73 73 fcd = wctx[dfile]
74 74 fco = octx[ofile]
75 75 fca = self._repo.filectx(afile, fileid=anode)
76 76 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
77 77 if r is None:
78 78 # no real conflict
79 79 del self._state[dfile]
80 80 elif not r:
81 81 self.mark(dfile, 'r')
82 82 return r
83 83
84 84 def _checkunknownfile(repo, wctx, mctx, f):
85 85 return (not repo.dirstate._ignore(f)
86 86 and os.path.isfile(repo.wjoin(f))
87 87 and repo.dirstate.normalize(f) not in repo.dirstate
88 88 and mctx[f].cmp(wctx[f]))
89 89
90 90 def _checkunknown(repo, wctx, mctx):
91 91 "check for collisions between unknown files and files in mctx"
92 92
93 93 error = False
94 94 for f in mctx:
95 95 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
96 96 error = True
97 97 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
98 98 if error:
99 99 raise util.Abort(_("untracked files in working directory differ "
100 100 "from files in requested revision"))
101 101
102 102 def _checkcollision(mctx, wctx):
103 103 "check for case folding collisions in the destination context"
104 104 folded = {}
105 105 for fn in mctx:
106 106 fold = util.normcase(fn)
107 107 if fold in folded:
108 108 raise util.Abort(_("case-folding collision between %s and %s")
109 109 % (fn, folded[fold]))
110 110 folded[fold] = fn
111 111
112 112 if wctx:
113 113 # class to delay looking up copy mapping
114 114 class pathcopies(object):
115 115 @util.propertycache
116 116 def map(self):
117 117 # {dst@mctx: src@wctx} copy mapping
118 118 return copies.pathcopies(wctx, mctx)
119 119 pc = pathcopies()
120 120
121 121 for fn in wctx:
122 122 fold = util.normcase(fn)
123 123 mfn = folded.get(fold, None)
124 124 if mfn and mfn != fn and pc.map.get(mfn) != fn:
125 125 raise util.Abort(_("case-folding collision between %s and %s")
126 126 % (mfn, fn))
127 127
128 128 def _forgetremoved(wctx, mctx, branchmerge):
129 129 """
130 130 Forget removed files
131 131
132 132 If we're jumping between revisions (as opposed to merging), and if
133 133 neither the working directory nor the target rev has the file,
134 134 then we need to remove it from the dirstate, to prevent the
135 135 dirstate from listing the file when it is no longer in the
136 136 manifest.
137 137
138 138 If we're merging, and the other revision has removed a file
139 139 that is not present in the working directory, we need to mark it
140 140 as removed.
141 141 """
142 142
143 143 action = []
144 144 state = branchmerge and 'r' or 'f'
145 145 for f in wctx.deleted():
146 146 if f not in mctx:
147 147 action.append((f, state))
148 148
149 149 if not branchmerge:
150 150 for f in wctx.removed():
151 151 if f not in mctx:
152 152 action.append((f, "f"))
153 153
154 154 return action
155 155
156 156 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
157 157 """
158 158 Merge p1 and p2 with ancestor pa and generate merge action list
159 159
160 160 overwrite = whether we clobber working files
161 161 partial = function to filter file lists
162 162 """
163 163
164 164 def fmerge(f, f2, fa):
165 165 """merge flags"""
166 166 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
167 167 if m == n: # flags agree
168 168 return m # unchanged
169 169 if m and n and not a: # flags set, don't agree, differ from parent
170 170 r = repo.ui.promptchoice(
171 171 _(" conflicting flags for %s\n"
172 172 "(n)one, e(x)ec or sym(l)ink?") % f,
173 173 (_("&None"), _("E&xec"), _("Sym&link")), 0)
174 174 if r == 1:
175 175 return "x" # Exec
176 176 if r == 2:
177 177 return "l" # Symlink
178 178 return ""
179 179 if m and m != a: # changed from a to m
180 180 return m
181 181 if n and n != a: # changed from a to n
182 182 if (n == 'l' or a == 'l') and m1.get(f) != ma.get(f):
183 183 # can't automatically merge symlink flag when there
184 184 # are file-level conflicts here, let filemerge take
185 185 # care of it
186 186 return m
187 187 return n
188 188 return '' # flag was cleared
189 189
190 190 def act(msg, m, f, *args):
191 191 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
192 192 action.append((f, m) + args)
193 193
194 194 action, copy = [], {}
195 195
196 196 if overwrite:
197 197 pa = p1
198 198 elif pa == p2: # backwards
199 199 pa = p1.p1()
200 200 elif pa and repo.ui.configbool("merge", "followcopies", True):
201 201 copy, diverge = copies.mergecopies(repo, p1, p2, pa)
202 202 for of, fl in diverge.iteritems():
203 203 act("divergent renames", "dr", of, fl)
204 204
205 205 repo.ui.note(_("resolving manifests\n"))
206 206 repo.ui.debug(" overwrite: %s, partial: %s\n"
207 207 % (bool(overwrite), bool(partial)))
208 208 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, p1, p2))
209 209
210 210 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
211 211 copied = set(copy.values())
212 212
213 213 if '.hgsubstate' in m1:
214 214 # check whether sub state is modified
215 215 for s in p1.substate:
216 216 if p1.sub(s).dirty():
217 217 m1['.hgsubstate'] += "+"
218 218 break
219 219
220 220 # Compare manifests
221 221 for f, n in m1.iteritems():
222 222 if partial and not partial(f):
223 223 continue
224 224 if f in m2:
225 225 rflags = fmerge(f, f, f)
226 226 a = ma.get(f, nullid)
227 227 if n == m2[f] or m2[f] == a: # same or local newer
228 228 # is file locally modified or flags need changing?
229 229 # dirstate flags may need to be made current
230 230 if m1.flags(f) != rflags or n[20:]:
231 231 act("update permissions", "e", f, rflags)
232 232 elif n == a: # remote newer
233 233 act("remote is newer", "g", f, rflags)
234 234 else: # both changed
235 235 act("versions differ", "m", f, f, f, rflags, False)
236 236 elif f in copied: # files we'll deal with on m2 side
237 237 pass
238 238 elif f in copy:
239 239 f2 = copy[f]
240 240 if f2 not in m2: # directory rename
241 241 act("remote renamed directory to " + f2, "d",
242 242 f, None, f2, m1.flags(f))
243 243 else: # case 2 A,B/B/B or case 4,21 A/B/B
244 244 act("local copied/moved to " + f2, "m",
245 245 f, f2, f, fmerge(f, f2, f2), False)
246 246 elif f in ma: # clean, a different, no remote
247 247 if n != ma[f]:
248 248 if repo.ui.promptchoice(
249 249 _(" local changed %s which remote deleted\n"
250 250 "use (c)hanged version or (d)elete?") % f,
251 251 (_("&Changed"), _("&Delete")), 0):
252 252 act("prompt delete", "r", f)
253 253 else:
254 254 act("prompt keep", "a", f)
255 255 elif n[20:] == "a": # added, no remote
256 256 act("remote deleted", "f", f)
257 257 else:
258 258 act("other deleted", "r", f)
259 259
260 260 for f, n in m2.iteritems():
261 261 if partial and not partial(f):
262 262 continue
263 263 if f in m1 or f in copied: # files already visited
264 264 continue
265 265 if f in copy:
266 266 f2 = copy[f]
267 267 if f2 not in m1: # directory rename
268 268 act("local renamed directory to " + f2, "d",
269 269 None, f, f2, m2.flags(f))
270 270 elif f2 in m2: # rename case 1, A/A,B/A
271 271 act("remote copied to " + f, "m",
272 272 f2, f, f, fmerge(f2, f, f2), False)
273 273 else: # case 3,20 A/B/A
274 274 act("remote moved to " + f, "m",
275 275 f2, f, f, fmerge(f2, f, f2), True)
276 276 elif f not in ma:
277 277 if (not overwrite
278 278 and _checkunknownfile(repo, p1, p2, f)):
279 279 rflags = fmerge(f, f, f)
280 280 act("remote differs from untracked local",
281 281 "m", f, f, f, rflags, False)
282 282 else:
283 283 act("remote created", "g", f, m2.flags(f))
284 284 elif n != ma[f]:
285 285 if repo.ui.promptchoice(
286 286 _("remote changed %s which local deleted\n"
287 287 "use (c)hanged version or leave (d)eleted?") % f,
288 288 (_("&Changed"), _("&Deleted")), 0) == 0:
289 289 act("prompt recreating", "g", f, m2.flags(f))
290 290
291 291 return action
292 292
293 293 def actionkey(a):
294 294 return a[1] == 'r' and -1 or 0, a
295 295
296 296 def applyupdates(repo, action, wctx, mctx, actx, overwrite):
297 297 """apply the merge action list to the working directory
298 298
299 299 wctx is the working copy context
300 300 mctx is the context to be merged into the working copy
301 301 actx is the context of the common ancestor
302 302
303 303 Return a tuple of counts (updated, merged, removed, unresolved) that
304 304 describes how many files were affected by the update.
305 305 """
306 306
307 307 updated, merged, removed, unresolved = 0, 0, 0, 0
308 308 ms = mergestate(repo)
309 309 ms.reset(wctx.p1().node())
310 310 moves = []
311 311 action.sort(key=actionkey)
312 312
313 313 # prescan for merges
314 314 for a in action:
315 315 f, m = a[:2]
316 316 if m == 'm': # merge
317 317 f2, fd, flags, move = a[2:]
318 318 if f == '.hgsubstate': # merged internally
319 319 continue
320 320 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
321 321 fcl = wctx[f]
322 322 fco = mctx[f2]
323 323 if mctx == actx: # backwards, use working dir parent as ancestor
324 324 if fcl.parents():
325 325 fca = fcl.p1()
326 326 else:
327 327 fca = repo.filectx(f, fileid=nullrev)
328 328 else:
329 329 fca = fcl.ancestor(fco, actx)
330 330 if not fca:
331 331 fca = repo.filectx(f, fileid=nullrev)
332 332 ms.add(fcl, fco, fca, fd, flags)
333 333 if f != fd and move:
334 334 moves.append(f)
335 335
336 336 audit = scmutil.pathauditor(repo.root)
337 337
338 338 # remove renamed files after safely stored
339 339 for f in moves:
340 340 if os.path.lexists(repo.wjoin(f)):
341 341 repo.ui.debug("removing %s\n" % f)
342 342 audit(f)
343 343 os.unlink(repo.wjoin(f))
344 344
345 345 numupdates = len(action)
346 346 for i, a in enumerate(action):
347 347 f, m = a[:2]
348 348 repo.ui.progress(_('updating'), i + 1, item=f, total=numupdates,
349 349 unit=_('files'))
350 350 if f and f[0] == "/":
351 351 continue
352 352 if m == "r": # remove
353 353 repo.ui.note(_("removing %s\n") % f)
354 354 audit(f)
355 355 if f == '.hgsubstate': # subrepo states need updating
356 356 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
357 357 try:
358 358 util.unlinkpath(repo.wjoin(f))
359 359 except OSError, inst:
360 360 if inst.errno != errno.ENOENT:
361 361 repo.ui.warn(_("update failed to remove %s: %s!\n") %
362 362 (f, inst.strerror))
363 363 removed += 1
364 364 elif m == "m": # merge
365 365 if f == '.hgsubstate': # subrepo states need updating
366 366 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
367 367 overwrite)
368 368 continue
369 369 f2, fd, flags, move = a[2:]
370 370 repo.wopener.audit(fd)
371 371 r = ms.resolve(fd, wctx, mctx)
372 372 if r is not None and r > 0:
373 373 unresolved += 1
374 374 else:
375 375 if r is None:
376 376 updated += 1
377 377 else:
378 378 merged += 1
379 379 if (move and repo.dirstate.normalize(fd) != f
380 380 and os.path.lexists(repo.wjoin(f))):
381 381 repo.ui.debug("removing %s\n" % f)
382 382 audit(f)
383 383 os.unlink(repo.wjoin(f))
384 384 elif m == "g": # get
385 385 flags = a[2]
386 386 repo.ui.note(_("getting %s\n") % f)
387 387 t = mctx.filectx(f).data()
388 388 repo.wwrite(f, t, flags)
389 389 t = None
390 390 updated += 1
391 391 if f == '.hgsubstate': # subrepo states need updating
392 392 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
393 393 elif m == "d": # directory rename
394 394 f2, fd, flags = a[2:]
395 395 if f:
396 396 repo.ui.note(_("moving %s to %s\n") % (f, fd))
397 397 audit(f)
398 398 t = wctx.filectx(f).data()
399 399 repo.wwrite(fd, t, flags)
400 400 util.unlinkpath(repo.wjoin(f))
401 401 if f2:
402 402 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
403 403 t = mctx.filectx(f2).data()
404 404 repo.wwrite(fd, t, flags)
405 405 updated += 1
406 406 elif m == "dr": # divergent renames
407 407 fl = a[2]
408 408 repo.ui.warn(_("note: possible conflict - %s was renamed "
409 409 "multiple times to:\n") % f)
410 410 for nf in fl:
411 411 repo.ui.warn(" %s\n" % nf)
412 412 elif m == "e": # exec
413 413 flags = a[2]
414 414 repo.wopener.audit(f)
415 415 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
416 416 ms.commit()
417 417 repo.ui.progress(_('updating'), None, total=numupdates, unit=_('files'))
418 418
419 419 return updated, merged, removed, unresolved
420 420
421 421 def recordupdates(repo, action, branchmerge):
422 422 "record merge actions to the dirstate"
423 423
424 424 for a in action:
425 425 f, m = a[:2]
426 426 if m == "r": # remove
427 427 if branchmerge:
428 428 repo.dirstate.remove(f)
429 429 else:
430 430 repo.dirstate.drop(f)
431 431 elif m == "a": # re-add
432 432 if not branchmerge:
433 433 repo.dirstate.add(f)
434 434 elif m == "f": # forget
435 435 repo.dirstate.drop(f)
436 436 elif m == "e": # exec change
437 437 repo.dirstate.normallookup(f)
438 438 elif m == "g": # get
439 439 if branchmerge:
440 440 repo.dirstate.otherparent(f)
441 441 else:
442 442 repo.dirstate.normal(f)
443 443 elif m == "m": # merge
444 444 f2, fd, flag, move = a[2:]
445 445 if branchmerge:
446 446 # We've done a branch merge, mark this file as merged
447 447 # so that we properly record the merger later
448 448 repo.dirstate.merge(fd)
449 449 if f != f2: # copy/rename
450 450 if move:
451 451 repo.dirstate.remove(f)
452 452 if f != fd:
453 453 repo.dirstate.copy(f, fd)
454 454 else:
455 455 repo.dirstate.copy(f2, fd)
456 456 else:
457 457 # We've update-merged a locally modified file, so
458 458 # we set the dirstate to emulate a normal checkout
459 459 # of that file some time in the past. Thus our
460 460 # merge will appear as a normal local file
461 461 # modification.
462 462 if f2 == fd: # file not locally copied/moved
463 463 repo.dirstate.normallookup(fd)
464 464 if move:
465 465 repo.dirstate.drop(f)
466 466 elif m == "d": # directory rename
467 467 f2, fd, flag = a[2:]
468 468 if not f2 and f not in repo.dirstate:
469 469 # untracked file moved
470 470 continue
471 471 if branchmerge:
472 472 repo.dirstate.add(fd)
473 473 if f:
474 474 repo.dirstate.remove(f)
475 475 repo.dirstate.copy(f, fd)
476 476 if f2:
477 477 repo.dirstate.copy(f2, fd)
478 478 else:
479 479 repo.dirstate.normal(fd)
480 480 if f:
481 481 repo.dirstate.drop(f)
482 482
483 483 def update(repo, node, branchmerge, force, partial, ancestor=None,
484 484 mergeancestor=False):
485 485 """
486 486 Perform a merge between the working directory and the given node
487 487
488 488 node = the node to update to, or None if unspecified
489 489 branchmerge = whether to merge between branches
490 490 force = whether to force branch merging or file overwriting
491 491 partial = a function to filter file lists (dirstate not updated)
492 492 mergeancestor = if false, merging with an ancestor (fast-forward)
493 493 is only allowed between different named branches. This flag
494 494 is used by rebase extension as a temporary fix and should be
495 495 avoided in general.
496 496
497 497 The table below shows all the behaviors of the update command
498 498 given the -c and -C or no options, whether the working directory
499 499 is dirty, whether a revision is specified, and the relationship of
500 500 the parent rev to the target rev (linear, on the same named
501 501 branch, or on another named branch).
502 502
503 503 This logic is tested by test-update-branches.t.
504 504
505 505 -c -C dirty rev | linear same cross
506 506 n n n n | ok (1) x
507 507 n n n y | ok ok ok
508 508 n n y * | merge (2) (2)
509 509 n y * * | --- discard ---
510 510 y n y * | --- (3) ---
511 511 y n n * | --- ok ---
512 512 y y * * | --- (4) ---
513 513
514 514 x = can't happen
515 515 * = don't-care
516 516 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
517 517 2 = abort: crosses branches (use 'hg merge' to merge or
518 518 use 'hg update -C' to discard changes)
519 519 3 = abort: uncommitted local changes
520 520 4 = incompatible options (checked in commands.py)
521 521
522 522 Return the same tuple as applyupdates().
523 523 """
524 524
525 525 onode = node
526 526 wlock = repo.wlock()
527 527 try:
528 528 wc = repo[None]
529 529 if node is None:
530 530 # tip of current branch
531 531 try:
532 node = repo.branchtags()[wc.branch()]
533 except KeyError:
532 node = repo.branchtip(wc.branch())
533 except error.RepoLookupError:
534 534 if wc.branch() == "default": # no default branch!
535 535 node = repo.lookup("tip") # update to tip
536 536 else:
537 537 raise util.Abort(_("branch %s not found") % wc.branch())
538 538 overwrite = force and not branchmerge
539 539 pl = wc.parents()
540 540 p1, p2 = pl[0], repo[node]
541 541 if ancestor:
542 542 pa = repo[ancestor]
543 543 else:
544 544 pa = p1.ancestor(p2)
545 545
546 546 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
547 547
548 548 ### check phase
549 549 if not overwrite and len(pl) > 1:
550 550 raise util.Abort(_("outstanding uncommitted merges"))
551 551 if branchmerge:
552 552 if pa == p2:
553 553 raise util.Abort(_("merging with a working directory ancestor"
554 554 " has no effect"))
555 555 elif pa == p1:
556 556 if not mergeancestor and p1.branch() == p2.branch():
557 557 raise util.Abort(_("nothing to merge"),
558 558 hint=_("use 'hg update' "
559 559 "or check 'hg heads'"))
560 560 if not force and (wc.files() or wc.deleted()):
561 561 raise util.Abort(_("outstanding uncommitted changes"),
562 562 hint=_("use 'hg status' to list changes"))
563 563 for s in wc.substate:
564 564 if wc.sub(s).dirty():
565 565 raise util.Abort(_("outstanding uncommitted changes in "
566 566 "subrepository '%s'") % s)
567 567
568 568 elif not overwrite:
569 569 if pa == p1 or pa == p2: # linear
570 570 pass # all good
571 571 elif wc.dirty(missing=True):
572 572 raise util.Abort(_("crosses branches (merge branches or use"
573 573 " --clean to discard changes)"))
574 574 elif onode is None:
575 575 raise util.Abort(_("crosses branches (merge branches or update"
576 576 " --check to force update)"))
577 577 else:
578 578 # Allow jumping branches if clean and specific rev given
579 579 pa = p1
580 580
581 581 ### calculate phase
582 582 action = []
583 583 folding = not util.checkcase(repo.path)
584 584 if folding:
585 585 # collision check is not needed for clean update
586 586 if (not branchmerge and
587 587 (force or not wc.dirty(missing=True, branch=False))):
588 588 _checkcollision(p2, None)
589 589 else:
590 590 _checkcollision(p2, wc)
591 591 if not force:
592 592 _checkunknown(repo, wc, p2)
593 593 action += _forgetremoved(wc, p2, branchmerge)
594 594 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
595 595
596 596 ### apply phase
597 597 if not branchmerge: # just jump to the new rev
598 598 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
599 599 if not partial:
600 600 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
601 601
602 602 stats = applyupdates(repo, action, wc, p2, pa, overwrite)
603 603
604 604 if not partial:
605 605 repo.setparents(fp1, fp2)
606 606 recordupdates(repo, action, branchmerge)
607 607 if not branchmerge:
608 608 repo.dirstate.setbranch(p2.branch())
609 609 finally:
610 610 wlock.release()
611 611
612 612 if not partial:
613 613 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
614 614 return stats
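The merge.py hunk above shows the caller-side pattern this changeset introduces: look up a single branch head with repo.branchtip() and treat error.RepoLookupError as "branch has no tip", instead of building the full branchtags() map and catching KeyError. A minimal sketch of that pattern as it might appear in an extension or script (the wrapper function is hypothetical):

    from mercurial import error

    def branch_tip_or_none(repo, branch):
        # branchtip() raises RepoLookupError when the branch does not exist
        try:
            return repo.branchtip(branch)
        except error.RepoLookupError:
            return None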