@@ -1,4093 +1,4098 @@
# commands.py - command processing for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import hex, nullid, nullrev, short
from lock import release
from i18n import _, gettext
import os, re, sys, difflib, time, tempfile
import hg, util, revlog, bundlerepo, extensions, copies, error
import patch, help, mdiff, url, encoding, templatekw
import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
import merge as mergemod
import minirst, revset

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c
    """

    bad = []
    names = []
    m = cmdutil.match(repo, pats, opts)
    oldbad = m.bad
    m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
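    # the matcher's bad-path callback is wrapped so that unmatched paths are
    # recorded (they drive the exit status below) while still being reported
    # through the original handler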

    for f in repo.walk(m):
        exact = m.exact(f)
        if exact or f not in repo.dirstate:
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % m.rel(f))
    if not opts.get('dry_run'):
        bad += [f for f in repo.add(names) if f in m.files()]
    return bad and 1 or 0

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    .hgignore. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. With a
    parameter greater than 0, this compares every removed file with
    every added file and records those similar enough as renames. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. Detecting renamed files this way
    can be expensive.

    Returns 0 if all files are successfully added.
    """
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
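
# Example (illustrative): "hg addremove -s 75" reaches cmdutil.addremove with
# similarity=0.75, so a removed file and an added file are recorded as a
# rename when they are at least 75% similar.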

def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = 1

    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('file', lambda x: x[0].path()),
            ]

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, pats, opts)
    follow = not opts.get('no_follow')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber)
        pieces = []
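        # build one column per requested field; each column is padded to the
        # width of its widest entry so the annotation prefixes line up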
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                ml = max(map(len, l))
                pieces.append(["%*s" % (ml, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``: tar archive, uncompressed
    :``tbz2``: tar archive, compressed using bzip2
    :``tgz``: tar archive, compressed using gzip
    :``uzip``: zip archive, uncompressed
    :``zip``: zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = repo[opts.get('rev')]
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    def guess_type():
        exttypes = {
            'tar': ['.tar'],
            'tbz2': ['.tbz2', '.tar.bz2'],
            'tgz': ['.tgz', '.tar.gz'],
            'zip': ['.zip'],
            }

        for type, extensions in exttypes.items():
            if util.any(dest.endswith(ext) for ext in extensions):
                return type
        return None

    kind = opts.get('type') or guess_type() or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.make_filename(repo, prefix, node)
    matchfn = cmdutil.match(repo, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix)
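
# Example (illustrative): "hg archive -r 1.0 project-1.0.tar.gz" infers the
# "tgz" type from the .tar.gz extension and, by default, prefixes every
# archive member with "project-1.0/" (the archive basename, suffixes removed).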

def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you backout a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by hand.
    The result of this merge is not committed, as with a normal merge.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot backout change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
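    # revert every file to its state in the backed-out revision's parent and
    # commit that reversal as the new backout changeset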
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        # we don't translate commit messages
        commit_opts['message'] = "Backed out changeset %s" % short(node)
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        hg.clean(repo, op1, show_stats=False)
        if opts.get('merge'):
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    Its exit status will be used to mark revisions as good or bad:
    status 0 means good, 125 means to skip the revision, 127
    (command not found) will abort the bisection, and any other
    non-zero exit status means the revision is bad.

    Returns 0 on success.
    """
    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                           "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                           "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    if reset:
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        changesets = 1
        try:
            while changesets:
                # update state
                status = util.system(command)
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = repo[rev or '.']
                state[transition].append(ctx.node())
                ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                # update to next check
                cmdutil.bail_if_changed(repo)
                hg.clean(repo, nodes[0], show_stats=False)
        finally:
            hbisect.save_state(repo, state)
        print_result(nodes, good)
        return

    # update state
    node = repo.lookup(rev or '.')
    if good or bad or skip:
        if good:
            state['good'].append(node)
        elif bad:
            state['bad'].append(node)
        elif skip:
            state['skip'].append(node)
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
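
# Example of an automated run driven by the exit-status rules documented above
# (sketch, revision numbers are illustrative):
#   hg bisect --reset
#   hg bisect --bad 127
#   hg bisect --good 38
#   hg bisect --command "make tests"
# Exit status 0 marks a revision good, 125 skips it, 127 aborts, and any other
# non-zero status marks it bad, until the offending changeset is isolated.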

def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    Returns 0 on success.
    """

    if opts.get('clean'):
        label = repo[None].parents()[0].branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
    elif label:
        utflabel = encoding.fromlocal(label)
        if not opts.get('force') and utflabel in repo.branchtags():
            if label not in [p.branch() for p in repo.parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   " (use 'hg update' to switch to it)"))
        repo.dirstate.setbranch(utflabel)
        ui.status(_('marked working directory as branch %s\n') % label)
    else:
        ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))

def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    If -a/--active is specified, only show active branches. A branch
    is considered active if it contains repository heads.

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    hexfunc = ui.debugflag and hex or short
    activebranches = [repo[n].branch() for n in repo.heads()]
    def testactive(tag, node):
        realhead = tag in activebranches
        open = node in repo.branchheads(tag, closed=False)
        return realhead and open
    branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
                       for tag, node in repo.branchtags().items()],
                      reverse=True)

    for isactive, node, tag in branches:
        if (not active) or isactive:
            encodedtag = encoding.tolocal(tag)
            if ui.quiet:
                ui.write("%s\n" % encodedtag)
            else:
                hn = repo.lookup(node)
                if isactive:
                    notice = ''
                elif hn not in repo.branchheads(tag, closed=False):
                    if not closed:
                        continue
                    notice = _(' (closed)')
                else:
                    notice = _(' (inactive)')
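                # right-align the revision number so the rev:node columns
                # line up; colwidth accounts for wide characters in the name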
                rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
                data = encodedtag, rev, hexfunc(hn), notice
                ui.write("%s %s:%s%s\n" % data)

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
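        # everything reachable from a --base node is treated as already
        # present in the destination; walk back from the requested revs (or
        # all heads) and collect the missing nodes whose parents are all
        # present - these roots seed changegroupsubset/changegroup below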
        o = []
        has = set((nullid,))
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
            has.difference_update(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                if n not in has:
                    o.append(n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.repository(hg.remoteui(repo, opts), dest)
        revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
        o = repo.findoutgoing(other, force=opts.get('force'))

    if not o:
        ui.status(_("no changes found\n"))
        return 1

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
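
# Example (illustrative): "hg bundle --base null all.hg" writes every
# changeset to all.hg; another repository can then apply it with
# "hg unbundle all.hg" or "hg pull all.hg".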

def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed

    Returns 0 on success.
    """
    ctx = repo[opts.get('rev')]
    err = 1
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
        data = ctx[abs].data()
        if opts.get('decode'):
            data = repo.wwritedata(abs, data)
        fp.write(data)
        err = 0
    return err
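
# Example (illustrative): "hg cat -r 1.0 -o '%d/%s.orig' foo/bar.c" writes the
# 1.0 revision of foo/bar.c to foo/bar.c.orig (%d is the dirname, %s the
# basename of the file being printed).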

def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    See :hg:`help urls` for valid source format details.

    It is possible to specify an ``ssh://`` URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Please see :hg:`help urls` for important details about ``ssh://`` URLs.

    A set of changesets (tags, or branch names) to pull may be specified
    by listing each changeset (tag, or branch name) with -r/--rev.
    If -r/--rev is used, the cloned repository will contain only a subset
    of the changesets of the source repository. Only the set of changesets
    defined by all -r/--rev options (including all their ancestors)
    will be pulled into the destination repository.
    No subsequent changesets (including subsequent tags) will be present
    in the destination.

    Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
    local source repositories.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the working directory). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and the working directory
    using full hardlinks with ::

      $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    Mercurial will update the working directory to the first applicable
    revision from this list:

    a) null if -U or the source repository has no changesets
    b) if -u . and the source repository is local, the first parent of
       the source repository's working directory
    c) the changeset specified with -u (if a branch name, this means the
       latest head of that branch)
    d) the changeset specified with -r
    e) the tipmost head specified with -b
    f) the tipmost head specified with the url#branch source syntax
    g) the tipmost head of the default branch
    h) tip

    Returns 0 on success.
    """
    if opts.get('noupdate') and opts.get('updaterev'):
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    r = hg.clone(hg.remoteui(ui, opts), source, dest,
                 pull=opts.get('pull'),
                 stream=opts.get('uncompressed'),
                 rev=opts.get('rev'),
                 update=opts.get('updaterev') or not opts.get('noupdate'),
                 branch=opts.get('branch'))

    return r is None
722 |
|
722 | |||
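A few illustrative invocations of the clone options described in the docstring above (the URLs, paths, revision and branch names are placeholders, not taken from this repository):

    $ hg clone http://example.com/repo local-copy
    $ hg clone -r 1.0 /srv/hg/repo repo-1.0          # only 1.0 and its ancestors; implies --pull
    $ hg clone -U --pull /srv/hg/repo /backup/repo   # no working directory, no hardlinks
    $ hg clone http://example.com/repo#stable work   # update to the tipmost head of branch "stable"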
723 | def commit(ui, repo, *pats, **opts): |
|
723 | def commit(ui, repo, *pats, **opts): | |
724 | """commit the specified files or all outstanding changes |
|
724 | """commit the specified files or all outstanding changes | |
725 |
|
725 | |||
726 | Commit changes to the given files into the repository. Unlike a |
|
726 | Commit changes to the given files into the repository. Unlike a | |
727 | centralized RCS, this is a local operation. See |
|
727 | centralized RCS, this is a local operation. See | |
728 | :hg:`push` for a way to actively distribute your changes. |
|
728 | :hg:`push` for a way to actively distribute your changes. | |
729 |
|
729 | |||
730 | If a list of files is omitted, all changes reported by :hg:`status` |
|
730 | If a list of files is omitted, all changes reported by :hg:`status` | |
731 | will be committed. |
|
731 | will be committed. | |
732 |
|
732 | |||
733 | If you are committing the result of a merge, do not provide any |
|
733 | If you are committing the result of a merge, do not provide any | |
734 | filenames or -I/-X filters. |
|
734 | filenames or -I/-X filters. | |
735 |
|
735 | |||
736 | If no commit message is specified, the configured editor is |
|
736 | If no commit message is specified, the configured editor is | |
737 | started to prompt you for a message. |
|
737 | started to prompt you for a message. | |
738 |
|
738 | |||
739 | See :hg:`help dates` for a list of formats valid for -d/--date. |
|
739 | See :hg:`help dates` for a list of formats valid for -d/--date. | |
740 |
|
740 | |||
741 | Returns 0 on success, 1 if nothing changed. |
|
741 | Returns 0 on success, 1 if nothing changed. | |
742 | """ |
|
742 | """ | |
743 | extra = {} |
|
743 | extra = {} | |
744 | if opts.get('close_branch'): |
|
744 | if opts.get('close_branch'): | |
745 | if repo['.'].node() not in repo.branchheads(): |
|
745 | if repo['.'].node() not in repo.branchheads(): | |
746 | # The topo heads set is included in the branch heads set of the |
|
746 | # The topo heads set is included in the branch heads set of the | |
747 | # current branch, so it's sufficient to test branchheads |
|
747 | # current branch, so it's sufficient to test branchheads | |
748 | raise util.Abort(_('can only close branch heads')) |
|
748 | raise util.Abort(_('can only close branch heads')) | |
749 | extra['close'] = 1 |
|
749 | extra['close'] = 1 | |
750 | e = cmdutil.commiteditor |
|
750 | e = cmdutil.commiteditor | |
751 | if opts.get('force_editor'): |
|
751 | if opts.get('force_editor'): | |
752 | e = cmdutil.commitforceeditor |
|
752 | e = cmdutil.commitforceeditor | |
753 |
|
753 | |||
754 | def commitfunc(ui, repo, message, match, opts): |
|
754 | def commitfunc(ui, repo, message, match, opts): | |
755 | return repo.commit(message, opts.get('user'), opts.get('date'), match, |
|
755 | return repo.commit(message, opts.get('user'), opts.get('date'), match, | |
756 | editor=e, extra=extra) |
|
756 | editor=e, extra=extra) | |
757 |
|
757 | |||
758 | branch = repo[None].branch() |
|
758 | branch = repo[None].branch() | |
759 | bheads = repo.branchheads(branch) |
|
759 | bheads = repo.branchheads(branch) | |
760 |
|
760 | |||
761 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) |
|
761 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) | |
762 | if not node: |
|
762 | if not node: | |
763 | ui.status(_("nothing changed\n")) |
|
763 | ui.status(_("nothing changed\n")) | |
764 | return 1 |
|
764 | return 1 | |
765 |
|
765 | |||
766 | ctx = repo[node] |
|
766 | ctx = repo[node] | |
767 | parents = ctx.parents() |
|
767 | parents = ctx.parents() | |
768 |
|
768 | |||
769 | if bheads and [x for x in parents |
|
769 | if bheads and [x for x in parents | |
770 | if x.node() not in bheads and x.branch() == branch]: |
|
770 | if x.node() not in bheads and x.branch() == branch]: | |
771 | ui.status(_('created new head\n')) |
|
771 | ui.status(_('created new head\n')) | |
772 |
|
772 | |||
773 | if not opts.get('close_branch'): |
|
773 | if not opts.get('close_branch'): | |
774 | for r in parents: |
|
774 | for r in parents: | |
775 | if r.extra().get('close'): |
|
775 | if r.extra().get('close'): | |
776 | ui.status(_('reopening closed branch head %d\n') % r) |
|
776 | ui.status(_('reopening closed branch head %d\n') % r) | |
777 |
|
777 | |||
778 | if ui.debugflag: |
|
778 | if ui.debugflag: | |
779 | ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex())) |
|
779 | ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex())) | |
780 | elif ui.verbose: |
|
780 | elif ui.verbose: | |
781 | ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx)) |
|
781 | ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx)) | |
782 |
|
782 | |||
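A sketch of typical commit usage based on the options documented above (the messages, date and branch situation are invented for illustration):

    $ hg commit -m "fix off-by-one in parser"
    $ hg commit -m "close this line of development" --close-branch
    $ hg commit -d "2010-05-01 12:00" -m "backdated fix"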
783 | def copy(ui, repo, *pats, **opts): |
|
783 | def copy(ui, repo, *pats, **opts): | |
784 | """mark files as copied for the next commit |
|
784 | """mark files as copied for the next commit | |
785 |
|
785 | |||
786 | Mark dest as having copies of source files. If dest is a |
|
786 | Mark dest as having copies of source files. If dest is a | |
787 | directory, copies are put in that directory. If dest is a file, |
|
787 | directory, copies are put in that directory. If dest is a file, | |
788 | the source must be a single file. |
|
788 | the source must be a single file. | |
789 |
|
789 | |||
790 | By default, this command copies the contents of files as they |
|
790 | By default, this command copies the contents of files as they | |
791 | exist in the working directory. If invoked with -A/--after, the |
|
791 | exist in the working directory. If invoked with -A/--after, the | |
792 | operation is recorded, but no copying is performed. |
|
792 | operation is recorded, but no copying is performed. | |
793 |
|
793 | |||
794 | This command takes effect with the next commit. To undo a copy |
|
794 | This command takes effect with the next commit. To undo a copy | |
795 | before that, see :hg:`revert`. |
|
795 | before that, see :hg:`revert`. | |
796 |
|
796 | |||
797 | Returns 0 on success, 1 if errors are encountered. |
|
797 | Returns 0 on success, 1 if errors are encountered. | |
798 | """ |
|
798 | """ | |
799 | wlock = repo.wlock(False) |
|
799 | wlock = repo.wlock(False) | |
800 | try: |
|
800 | try: | |
801 | return cmdutil.copy(ui, repo, pats, opts) |
|
801 | return cmdutil.copy(ui, repo, pats, opts) | |
802 | finally: |
|
802 | finally: | |
803 | wlock.release() |
|
803 | wlock.release() | |
804 |
|
804 | |||
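For example, assuming a tracked file util.c (the file names are illustrative):

    $ hg copy util.c compat.c            # copy the file and record the copy
    $ cp util.c legacy.c
    $ hg copy --after util.c legacy.c    # record a copy that was already made by hand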
805 | def debugancestor(ui, repo, *args): |
|
805 | def debugancestor(ui, repo, *args): | |
806 | """find the ancestor revision of two revisions in a given index""" |
|
806 | """find the ancestor revision of two revisions in a given index""" | |
807 | if len(args) == 3: |
|
807 | if len(args) == 3: | |
808 | index, rev1, rev2 = args |
|
808 | index, rev1, rev2 = args | |
809 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), index) |
|
809 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), index) | |
810 | lookup = r.lookup |
|
810 | lookup = r.lookup | |
811 | elif len(args) == 2: |
|
811 | elif len(args) == 2: | |
812 | if not repo: |
|
812 | if not repo: | |
813 | raise util.Abort(_("There is no Mercurial repository here " |
|
813 | raise util.Abort(_("There is no Mercurial repository here " | |
814 | "(.hg not found)")) |
|
814 | "(.hg not found)")) | |
815 | rev1, rev2 = args |
|
815 | rev1, rev2 = args | |
816 | r = repo.changelog |
|
816 | r = repo.changelog | |
817 | lookup = repo.lookup |
|
817 | lookup = repo.lookup | |
818 | else: |
|
818 | else: | |
819 | raise util.Abort(_('either two or three arguments required')) |
|
819 | raise util.Abort(_('either two or three arguments required')) | |
820 | a = r.ancestor(lookup(rev1), lookup(rev2)) |
|
820 | a = r.ancestor(lookup(rev1), lookup(rev2)) | |
821 | ui.write("%d:%s\n" % (r.rev(a), hex(a))) |
|
821 | ui.write("%d:%s\n" % (r.rev(a), hex(a))) | |
822 |
|
822 | |||
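Both calling forms accepted above can be exercised as follows (the revision numbers and the store path are illustrative; the three-argument form reads the given revlog index directly). Each prints the common ancestor as rev:node:

    $ hg debugancestor 10 12
    $ hg debugancestor .hg/store/00changelog.i 10 12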
823 | def debugcommands(ui, cmd='', *args): |
|
823 | def debugcommands(ui, cmd='', *args): | |
824 | """list all available commands and options""" |
|
824 | """list all available commands and options""" | |
825 | for cmd, vals in sorted(table.iteritems()): |
|
825 | for cmd, vals in sorted(table.iteritems()): | |
826 | cmd = cmd.split('|')[0].strip('^') |
|
826 | cmd = cmd.split('|')[0].strip('^') | |
827 | opts = ', '.join([i[1] for i in vals[1]]) |
|
827 | opts = ', '.join([i[1] for i in vals[1]]) | |
828 | ui.write('%s: %s\n' % (cmd, opts)) |
|
828 | ui.write('%s: %s\n' % (cmd, opts)) | |
829 |
|
829 | |||
830 | def debugcomplete(ui, cmd='', **opts): |
|
830 | def debugcomplete(ui, cmd='', **opts): | |
831 | """returns the completion list associated with the given command""" |
|
831 | """returns the completion list associated with the given command""" | |
832 |
|
832 | |||
833 | if opts.get('options'): |
|
833 | if opts.get('options'): | |
834 | options = [] |
|
834 | options = [] | |
835 | otables = [globalopts] |
|
835 | otables = [globalopts] | |
836 | if cmd: |
|
836 | if cmd: | |
837 | aliases, entry = cmdutil.findcmd(cmd, table, False) |
|
837 | aliases, entry = cmdutil.findcmd(cmd, table, False) | |
838 | otables.append(entry[1]) |
|
838 | otables.append(entry[1]) | |
839 | for t in otables: |
|
839 | for t in otables: | |
840 | for o in t: |
|
840 | for o in t: | |
841 | if "(DEPRECATED)" in o[3]: |
|
841 | if "(DEPRECATED)" in o[3]: | |
842 | continue |
|
842 | continue | |
843 | if o[0]: |
|
843 | if o[0]: | |
844 | options.append('-%s' % o[0]) |
|
844 | options.append('-%s' % o[0]) | |
845 | options.append('--%s' % o[1]) |
|
845 | options.append('--%s' % o[1]) | |
846 | ui.write("%s\n" % "\n".join(options)) |
|
846 | ui.write("%s\n" % "\n".join(options)) | |
847 | return |
|
847 | return | |
848 |
|
848 | |||
849 | cmdlist = cmdutil.findpossible(cmd, table) |
|
849 | cmdlist = cmdutil.findpossible(cmd, table) | |
850 | if ui.verbose: |
|
850 | if ui.verbose: | |
851 | cmdlist = [' '.join(c[0]) for c in cmdlist.values()] |
|
851 | cmdlist = [' '.join(c[0]) for c in cmdlist.values()] | |
852 | ui.write("%s\n" % "\n".join(sorted(cmdlist))) |
|
852 | ui.write("%s\n" % "\n".join(sorted(cmdlist))) | |
853 |
|
853 | |||
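For instance, assuming the 'options' opt read above is exposed as --options on the command line:

    $ hg debugcomplete com              # commands starting with "com"
    $ hg debugcomplete --options commit # flags accepted by the commit command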
854 | def debugfsinfo(ui, path = "."): |
|
854 | def debugfsinfo(ui, path = "."): | |
855 | """show information detected about current filesystem""" |
|
855 | """show information detected about current filesystem""" | |
856 | open('.debugfsinfo', 'w').write('') |
|
856 | open('.debugfsinfo', 'w').write('') | |
857 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) |
|
857 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) | |
858 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) |
|
858 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) | |
859 | ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') |
|
859 | ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') | |
860 | and 'yes' or 'no')) |
|
860 | and 'yes' or 'no')) | |
861 | os.unlink('.debugfsinfo') |
|
861 | os.unlink('.debugfsinfo') | |
862 |
|
862 | |||
863 | def debugrebuildstate(ui, repo, rev="tip"): |
|
863 | def debugrebuildstate(ui, repo, rev="tip"): | |
864 | """rebuild the dirstate as it would look like for the given revision""" |
|
864 | """rebuild the dirstate as it would look like for the given revision""" | |
865 | ctx = repo[rev] |
|
865 | ctx = repo[rev] | |
866 | wlock = repo.wlock() |
|
866 | wlock = repo.wlock() | |
867 | try: |
|
867 | try: | |
868 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) |
|
868 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) | |
869 | finally: |
|
869 | finally: | |
870 | wlock.release() |
|
870 | wlock.release() | |
871 |
|
871 | |||
872 | def debugcheckstate(ui, repo): |
|
872 | def debugcheckstate(ui, repo): | |
873 | """validate the correctness of the current dirstate""" |
|
873 | """validate the correctness of the current dirstate""" | |
874 | parent1, parent2 = repo.dirstate.parents() |
|
874 | parent1, parent2 = repo.dirstate.parents() | |
875 | m1 = repo[parent1].manifest() |
|
875 | m1 = repo[parent1].manifest() | |
876 | m2 = repo[parent2].manifest() |
|
876 | m2 = repo[parent2].manifest() | |
877 | errors = 0 |
|
877 | errors = 0 | |
878 | for f in repo.dirstate: |
|
878 | for f in repo.dirstate: | |
879 | state = repo.dirstate[f] |
|
879 | state = repo.dirstate[f] | |
880 | if state in "nr" and f not in m1: |
|
880 | if state in "nr" and f not in m1: | |
881 | ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) |
|
881 | ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) | |
882 | errors += 1 |
|
882 | errors += 1 | |
883 | if state in "a" and f in m1: |
|
883 | if state in "a" and f in m1: | |
884 | ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) |
|
884 | ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) | |
885 | errors += 1 |
|
885 | errors += 1 | |
886 | if state in "m" and f not in m1 and f not in m2: |
|
886 | if state in "m" and f not in m1 and f not in m2: | |
887 | ui.warn(_("%s in state %s, but not in either manifest\n") % |
|
887 | ui.warn(_("%s in state %s, but not in either manifest\n") % | |
888 | (f, state)) |
|
888 | (f, state)) | |
889 | errors += 1 |
|
889 | errors += 1 | |
890 | for f in m1: |
|
890 | for f in m1: | |
891 | state = repo.dirstate[f] |
|
891 | state = repo.dirstate[f] | |
892 | if state not in "nrm": |
|
892 | if state not in "nrm": | |
893 | ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) |
|
893 | ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) | |
894 | errors += 1 |
|
894 | errors += 1 | |
895 | if errors: |
|
895 | if errors: | |
896 | error = _(".hg/dirstate inconsistent with current parent's manifest") |
|
896 | error = _(".hg/dirstate inconsistent with current parent's manifest") | |
897 | raise util.Abort(error) |
|
897 | raise util.Abort(error) | |
898 |
|
898 | |||
899 | def showconfig(ui, repo, *values, **opts): |
|
899 | def showconfig(ui, repo, *values, **opts): | |
900 | """show combined config settings from all hgrc files |
|
900 | """show combined config settings from all hgrc files | |
901 |
|
901 | |||
902 | With no arguments, print names and values of all config items. |
|
902 | With no arguments, print names and values of all config items. | |
903 |
|
903 | |||
904 | With one argument of the form section.name, print just the value |
|
904 | With one argument of the form section.name, print just the value | |
905 | of that config item. |
|
905 | of that config item. | |
906 |
|
906 | |||
907 | With multiple arguments, print names and values of all config |
|
907 | With multiple arguments, print names and values of all config | |
908 | items with matching section names. |
|
908 | items with matching section names. | |
909 |
|
909 | |||
910 | With --debug, the source (filename and line number) is printed |
|
910 | With --debug, the source (filename and line number) is printed | |
911 | for each config item. |
|
911 | for each config item. | |
912 |
|
912 | |||
913 | Returns 0 on success. |
|
913 | Returns 0 on success. | |
914 | """ |
|
914 | """ | |
915 |
|
915 | |||
916 | for f in util.rcpath(): |
|
916 | for f in util.rcpath(): | |
917 | ui.debug(_('read config from: %s\n') % f) |
|
917 | ui.debug(_('read config from: %s\n') % f) | |
918 | untrusted = bool(opts.get('untrusted')) |
|
918 | untrusted = bool(opts.get('untrusted')) | |
919 | if values: |
|
919 | if values: | |
920 | if len([v for v in values if '.' in v]) > 1: |
|
920 | if len([v for v in values if '.' in v]) > 1: | |
921 | raise util.Abort(_('only one config item permitted')) |
|
921 | raise util.Abort(_('only one config item permitted')) | |
922 | for section, name, value in ui.walkconfig(untrusted=untrusted): |
|
922 | for section, name, value in ui.walkconfig(untrusted=untrusted): | |
923 | sectname = section + '.' + name |
|
923 | sectname = section + '.' + name | |
924 | if values: |
|
924 | if values: | |
925 | for v in values: |
|
925 | for v in values: | |
926 | if v == section: |
|
926 | if v == section: | |
927 | ui.debug('%s: ' % |
|
927 | ui.debug('%s: ' % | |
928 | ui.configsource(section, name, untrusted)) |
|
928 | ui.configsource(section, name, untrusted)) | |
929 | ui.write('%s=%s\n' % (sectname, value)) |
|
929 | ui.write('%s=%s\n' % (sectname, value)) | |
930 | elif v == sectname: |
|
930 | elif v == sectname: | |
931 | ui.debug('%s: ' % |
|
931 | ui.debug('%s: ' % | |
932 | ui.configsource(section, name, untrusted)) |
|
932 | ui.configsource(section, name, untrusted)) | |
933 | ui.write(value, '\n') |
|
933 | ui.write(value, '\n') | |
934 | else: |
|
934 | else: | |
935 | ui.debug('%s: ' % |
|
935 | ui.debug('%s: ' % | |
936 | ui.configsource(section, name, untrusted)) |
|
936 | ui.configsource(section, name, untrusted)) | |
937 | ui.write('%s=%s\n' % (sectname, value)) |
|
937 | ui.write('%s=%s\n' % (sectname, value)) | |
938 |
|
938 | |||
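Illustrative queries matching the three argument forms described in the docstring (the section and item names are just examples):

    $ hg showconfig                       # every configured item
    $ hg showconfig ui                    # all items in the [ui] section
    $ hg showconfig ui.username           # just one value
    $ hg showconfig --debug ui.username   # include the source file and line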
939 | def debugrevspec(ui, repo, expr): |
|
939 | def debugrevspec(ui, repo, expr): | |
940 | '''parse and apply a revision specification''' |
|
940 | '''parse and apply a revision specification''' | |
941 | if ui.verbose: |
|
941 | if ui.verbose: | |
942 | tree = revset.parse(expr) |
|
942 | tree = revset.parse(expr) | |
943 | ui.note(tree, "\n") |
|
943 | ui.note(tree, "\n") | |
944 | func = revset.match(expr) |
|
944 | func = revset.match(expr) | |
945 | for c in func(repo, range(len(repo))): |
|
945 | for c in func(repo, range(len(repo))): | |
946 | ui.write("%s\n" % c) |
|
946 | ui.write("%s\n" % c) | |
947 |
|
947 | |||
948 | def debugsetparents(ui, repo, rev1, rev2=None): |
|
948 | def debugsetparents(ui, repo, rev1, rev2=None): | |
949 | """manually set the parents of the current working directory |
|
949 | """manually set the parents of the current working directory | |
950 |
|
950 | |||
951 | This is useful for writing repository conversion tools, but should |
|
951 | This is useful for writing repository conversion tools, but should | |
952 | be used with care. |
|
952 | be used with care. | |
953 |
|
953 | |||
954 | Returns 0 on success. |
|
954 | Returns 0 on success. | |
955 | """ |
|
955 | """ | |
956 |
|
956 | |||
957 | if not rev2: |
|
957 | if not rev2: | |
958 | rev2 = hex(nullid) |
|
958 | rev2 = hex(nullid) | |
959 |
|
959 | |||
960 | wlock = repo.wlock() |
|
960 | wlock = repo.wlock() | |
961 | try: |
|
961 | try: | |
962 | repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) |
|
962 | repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) | |
963 | finally: |
|
963 | finally: | |
964 | wlock.release() |
|
964 | wlock.release() | |
965 |
|
965 | |||
966 | def debugstate(ui, repo, nodates=None): |
|
966 | def debugstate(ui, repo, nodates=None): | |
967 | """show the contents of the current dirstate""" |
|
967 | """show the contents of the current dirstate""" | |
968 | timestr = "" |
|
968 | timestr = "" | |
969 | showdate = not nodates |
|
969 | showdate = not nodates | |
970 | for file_, ent in sorted(repo.dirstate._map.iteritems()): |
|
970 | for file_, ent in sorted(repo.dirstate._map.iteritems()): | |
971 | if showdate: |
|
971 | if showdate: | |
972 | if ent[3] == -1: |
|
972 | if ent[3] == -1: | |
973 | # Pad or slice to locale representation |
|
973 | # Pad or slice to locale representation | |
974 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", |
|
974 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", | |
975 | time.localtime(0))) |
|
975 | time.localtime(0))) | |
976 | timestr = 'unset' |
|
976 | timestr = 'unset' | |
977 | timestr = (timestr[:locale_len] + |
|
977 | timestr = (timestr[:locale_len] + | |
978 | ' ' * (locale_len - len(timestr))) |
|
978 | ' ' * (locale_len - len(timestr))) | |
979 | else: |
|
979 | else: | |
980 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", |
|
980 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", | |
981 | time.localtime(ent[3])) |
|
981 | time.localtime(ent[3])) | |
982 | if ent[1] & 020000: |
|
982 | if ent[1] & 020000: | |
983 | mode = 'lnk' |
|
983 | mode = 'lnk' | |
984 | else: |
|
984 | else: | |
985 | mode = '%3o' % (ent[1] & 0777) |
|
985 | mode = '%3o' % (ent[1] & 0777) | |
986 | ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) |
|
986 | ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) | |
987 | for f in repo.dirstate.copies(): |
|
987 | for f in repo.dirstate.copies(): | |
988 | ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) |
|
988 | ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) | |
989 |
|
989 | |||
990 | def debugsub(ui, repo, rev=None): |
|
990 | def debugsub(ui, repo, rev=None): | |
991 | if rev == '': |
|
991 | if rev == '': | |
992 | rev = None |
|
992 | rev = None | |
993 | for k, v in sorted(repo[rev].substate.items()): |
|
993 | for k, v in sorted(repo[rev].substate.items()): | |
994 | ui.write('path %s\n' % k) |
|
994 | ui.write('path %s\n' % k) | |
995 | ui.write(' source %s\n' % v[0]) |
|
995 | ui.write(' source %s\n' % v[0]) | |
996 | ui.write(' revision %s\n' % v[1]) |
|
996 | ui.write(' revision %s\n' % v[1]) | |
997 |
|
997 | |||
998 | def debugdata(ui, file_, rev): |
|
998 | def debugdata(ui, file_, rev): | |
999 | """dump the contents of a data file revision""" |
|
999 | """dump the contents of a data file revision""" | |
1000 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i") |
|
1000 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i") | |
1001 | try: |
|
1001 | try: | |
1002 | ui.write(r.revision(r.lookup(rev))) |
|
1002 | ui.write(r.revision(r.lookup(rev))) | |
1003 | except KeyError: |
|
1003 | except KeyError: | |
1004 | raise util.Abort(_('invalid revision identifier %s') % rev) |
|
1004 | raise util.Abort(_('invalid revision identifier %s') % rev) | |
1005 |
|
1005 | |||
1006 | def debugdate(ui, date, range=None, **opts): |
|
1006 | def debugdate(ui, date, range=None, **opts): | |
1007 | """parse and display a date""" |
|
1007 | """parse and display a date""" | |
1008 | if opts["extended"]: |
|
1008 | if opts["extended"]: | |
1009 | d = util.parsedate(date, util.extendeddateformats) |
|
1009 | d = util.parsedate(date, util.extendeddateformats) | |
1010 | else: |
|
1010 | else: | |
1011 | d = util.parsedate(date) |
|
1011 | d = util.parsedate(date) | |
1012 | ui.write("internal: %s %s\n" % d) |
|
1012 | ui.write("internal: %s %s\n" % d) | |
1013 | ui.write("standard: %s\n" % util.datestr(d)) |
|
1013 | ui.write("standard: %s\n" % util.datestr(d)) | |
1014 | if range: |
|
1014 | if range: | |
1015 | m = util.matchdate(range) |
|
1015 | m = util.matchdate(range) | |
1016 | ui.write("match: %s\n" % m(d[0])) |
|
1016 | ui.write("match: %s\n" % m(d[0])) | |
1017 |
|
1017 | |||
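A small usage sketch (the dates are invented; --extended is assumed to map to the 'extended' opt read above):

    $ hg debugdate "2006-02-01 13:00:30"
    $ hg debugdate --extended 2006        # a bare year is only accepted with the extended formats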
1018 | def debugindex(ui, file_): |
|
1018 | def debugindex(ui, file_): | |
1019 | """dump the contents of an index file""" |
|
1019 | """dump the contents of an index file""" | |
1020 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
1020 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
1021 | ui.write(" rev offset length base linkrev" |
|
1021 | ui.write(" rev offset length base linkrev" | |
1022 | " nodeid p1 p2\n") |
|
1022 | " nodeid p1 p2\n") | |
1023 | for i in r: |
|
1023 | for i in r: | |
1024 | node = r.node(i) |
|
1024 | node = r.node(i) | |
1025 | try: |
|
1025 | try: | |
1026 | pp = r.parents(node) |
|
1026 | pp = r.parents(node) | |
1027 | except: |
|
1027 | except: | |
1028 | pp = [nullid, nullid] |
|
1028 | pp = [nullid, nullid] | |
1029 | ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( |
|
1029 | ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( | |
1030 | i, r.start(i), r.length(i), r.base(i), r.linkrev(i), |
|
1030 | i, r.start(i), r.length(i), r.base(i), r.linkrev(i), | |
1031 | short(node), short(pp[0]), short(pp[1]))) |
|
1031 | short(node), short(pp[0]), short(pp[1]))) | |
1032 |
|
1032 | |||
1033 | def debugindexdot(ui, file_): |
|
1033 | def debugindexdot(ui, file_): | |
1034 | """dump an index DAG as a graphviz dot file""" |
|
1034 | """dump an index DAG as a graphviz dot file""" | |
1035 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
1035 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
1036 | ui.write("digraph G {\n") |
|
1036 | ui.write("digraph G {\n") | |
1037 | for i in r: |
|
1037 | for i in r: | |
1038 | node = r.node(i) |
|
1038 | node = r.node(i) | |
1039 | pp = r.parents(node) |
|
1039 | pp = r.parents(node) | |
1040 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) |
|
1040 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) | |
1041 | if pp[1] != nullid: |
|
1041 | if pp[1] != nullid: | |
1042 | ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) |
|
1042 | ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) | |
1043 | ui.write("}\n") |
|
1043 | ui.write("}\n") | |
1044 |
|
1044 | |||
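For example, run from the repository root against a file revlog (the path is illustrative and assumes the standard store layout; dot is the Graphviz renderer):

    $ hg debugindex .hg/store/data/foo.c.i
    $ hg debugindexdot .hg/store/data/foo.c.i | dot -Tpng -o foo-dag.png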
1045 | def debuginstall(ui): |
|
1045 | def debuginstall(ui): | |
1046 | '''test Mercurial installation |
|
1046 | '''test Mercurial installation | |
1047 |
|
1047 | |||
1048 | Returns 0 on success. |
|
1048 | Returns 0 on success. | |
1049 | ''' |
|
1049 | ''' | |
1050 |
|
1050 | |||
1051 | def writetemp(contents): |
|
1051 | def writetemp(contents): | |
1052 | (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") |
|
1052 | (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") | |
1053 | f = os.fdopen(fd, "wb") |
|
1053 | f = os.fdopen(fd, "wb") | |
1054 | f.write(contents) |
|
1054 | f.write(contents) | |
1055 | f.close() |
|
1055 | f.close() | |
1056 | return name |
|
1056 | return name | |
1057 |
|
1057 | |||
1058 | problems = 0 |
|
1058 | problems = 0 | |
1059 |
|
1059 | |||
1060 | # encoding |
|
1060 | # encoding | |
1061 | ui.status(_("Checking encoding (%s)...\n") % encoding.encoding) |
|
1061 | ui.status(_("Checking encoding (%s)...\n") % encoding.encoding) | |
1062 | try: |
|
1062 | try: | |
1063 | encoding.fromlocal("test") |
|
1063 | encoding.fromlocal("test") | |
1064 | except util.Abort, inst: |
|
1064 | except util.Abort, inst: | |
1065 | ui.write(" %s\n" % inst) |
|
1065 | ui.write(" %s\n" % inst) | |
1066 | ui.write(_(" (check that your locale is properly set)\n")) |
|
1066 | ui.write(_(" (check that your locale is properly set)\n")) | |
1067 | problems += 1 |
|
1067 | problems += 1 | |
1068 |
|
1068 | |||
1069 | # compiled modules |
|
1069 | # compiled modules | |
1070 | ui.status(_("Checking extensions...\n")) |
|
1070 | ui.status(_("Checking extensions...\n")) | |
1071 | try: |
|
1071 | try: | |
1072 | import bdiff, mpatch, base85 |
|
1072 | import bdiff, mpatch, base85 | |
1073 | except Exception, inst: |
|
1073 | except Exception, inst: | |
1074 | ui.write(" %s\n" % inst) |
|
1074 | ui.write(" %s\n" % inst) | |
1075 | ui.write(_(" One or more extensions could not be found")) |
|
1075 | ui.write(_(" One or more extensions could not be found")) | |
1076 | ui.write(_(" (check that you compiled the extensions)\n")) |
|
1076 | ui.write(_(" (check that you compiled the extensions)\n")) | |
1077 | problems += 1 |
|
1077 | problems += 1 | |
1078 |
|
1078 | |||
1079 | # templates |
|
1079 | # templates | |
1080 | ui.status(_("Checking templates...\n")) |
|
1080 | ui.status(_("Checking templates...\n")) | |
1081 | try: |
|
1081 | try: | |
1082 | import templater |
|
1082 | import templater | |
1083 | templater.templater(templater.templatepath("map-cmdline.default")) |
|
1083 | templater.templater(templater.templatepath("map-cmdline.default")) | |
1084 | except Exception, inst: |
|
1084 | except Exception, inst: | |
1085 | ui.write(" %s\n" % inst) |
|
1085 | ui.write(" %s\n" % inst) | |
1086 | ui.write(_(" (templates seem to have been installed incorrectly)\n")) |
|
1086 | ui.write(_(" (templates seem to have been installed incorrectly)\n")) | |
1087 | problems += 1 |
|
1087 | problems += 1 | |
1088 |
|
1088 | |||
1089 | # patch |
|
1089 | # patch | |
1090 | ui.status(_("Checking patch...\n")) |
|
1090 | ui.status(_("Checking patch...\n")) | |
1091 | patchproblems = 0 |
|
1091 | patchproblems = 0 | |
1092 | a = "1\n2\n3\n4\n" |
|
1092 | a = "1\n2\n3\n4\n" | |
1093 | b = "1\n2\n3\ninsert\n4\n" |
|
1093 | b = "1\n2\n3\ninsert\n4\n" | |
1094 | fa = writetemp(a) |
|
1094 | fa = writetemp(a) | |
1095 | d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), |
|
1095 | d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), | |
1096 | os.path.basename(fa)) |
|
1096 | os.path.basename(fa)) | |
1097 | fd = writetemp(d) |
|
1097 | fd = writetemp(d) | |
1098 |
|
1098 | |||
1099 | files = {} |
|
1099 | files = {} | |
1100 | try: |
|
1100 | try: | |
1101 | patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) |
|
1101 | patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) | |
1102 | except util.Abort, e: |
|
1102 | except util.Abort, e: | |
1103 | ui.write(_(" patch call failed:\n")) |
|
1103 | ui.write(_(" patch call failed:\n")) | |
1104 | ui.write(" " + str(e) + "\n") |
|
1104 | ui.write(" " + str(e) + "\n") | |
1105 | patchproblems += 1 |
|
1105 | patchproblems += 1 | |
1106 | else: |
|
1106 | else: | |
1107 | if list(files) != [os.path.basename(fa)]: |
|
1107 | if list(files) != [os.path.basename(fa)]: | |
1108 | ui.write(_(" unexpected patch output!\n")) |
|
1108 | ui.write(_(" unexpected patch output!\n")) | |
1109 | patchproblems += 1 |
|
1109 | patchproblems += 1 | |
1110 | a = open(fa).read() |
|
1110 | a = open(fa).read() | |
1111 | if a != b: |
|
1111 | if a != b: | |
1112 | ui.write(_(" patch test failed!\n")) |
|
1112 | ui.write(_(" patch test failed!\n")) | |
1113 | patchproblems += 1 |
|
1113 | patchproblems += 1 | |
1114 |
|
1114 | |||
1115 | if patchproblems: |
|
1115 | if patchproblems: | |
1116 | if ui.config('ui', 'patch'): |
|
1116 | if ui.config('ui', 'patch'): | |
1117 | ui.write(_(" (Current patch tool may be incompatible with patch," |
|
1117 | ui.write(_(" (Current patch tool may be incompatible with patch," | |
1118 | " or misconfigured. Please check your .hgrc file)\n")) |
|
1118 | " or misconfigured. Please check your .hgrc file)\n")) | |
1119 | else: |
|
1119 | else: | |
1120 | ui.write(_(" Internal patcher failure, please report this error" |
|
1120 | ui.write(_(" Internal patcher failure, please report this error" | |
1121 | " to http://mercurial.selenic.com/bts/\n")) |
|
1121 | " to http://mercurial.selenic.com/bts/\n")) | |
1122 | problems += patchproblems |
|
1122 | problems += patchproblems | |
1123 |
|
1123 | |||
1124 | os.unlink(fa) |
|
1124 | os.unlink(fa) | |
1125 | os.unlink(fd) |
|
1125 | os.unlink(fd) | |
1126 |
|
1126 | |||
1127 | # editor |
|
1127 | # editor | |
1128 | ui.status(_("Checking commit editor...\n")) |
|
1128 | ui.status(_("Checking commit editor...\n")) | |
1129 | editor = ui.geteditor() |
|
1129 | editor = ui.geteditor() | |
1130 | cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) |
|
1130 | cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) | |
1131 | if not cmdpath: |
|
1131 | if not cmdpath: | |
1132 | if editor == 'vi': |
|
1132 | if editor == 'vi': | |
1133 | ui.write(_(" No commit editor set and can't find vi in PATH\n")) |
|
1133 | ui.write(_(" No commit editor set and can't find vi in PATH\n")) | |
1134 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) |
|
1134 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) | |
1135 | else: |
|
1135 | else: | |
1136 | ui.write(_(" Can't find editor '%s' in PATH\n") % editor) |
|
1136 | ui.write(_(" Can't find editor '%s' in PATH\n") % editor) | |
1137 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) |
|
1137 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) | |
1138 | problems += 1 |
|
1138 | problems += 1 | |
1139 |
|
1139 | |||
1140 | # check username |
|
1140 | # check username | |
1141 | ui.status(_("Checking username...\n")) |
|
1141 | ui.status(_("Checking username...\n")) | |
1142 | try: |
|
1142 | try: | |
1143 | user = ui.username() |
|
1143 | user = ui.username() | |
1144 | except util.Abort, e: |
|
1144 | except util.Abort, e: | |
1145 | ui.write(" %s\n" % e) |
|
1145 | ui.write(" %s\n" % e) | |
1146 | ui.write(_(" (specify a username in your .hgrc file)\n")) |
|
1146 | ui.write(_(" (specify a username in your .hgrc file)\n")) | |
1147 | problems += 1 |
|
1147 | problems += 1 | |
1148 |
|
1148 | |||
1149 | if not problems: |
|
1149 | if not problems: | |
1150 | ui.status(_("No problems detected\n")) |
|
1150 | ui.status(_("No problems detected\n")) | |
1151 | else: |
|
1151 | else: | |
1152 | ui.write(_("%s problems detected," |
|
1152 | ui.write(_("%s problems detected," | |
1153 | " please check your install!\n") % problems) |
|
1153 | " please check your install!\n") % problems) | |
1154 |
|
1154 | |||
1155 | return problems |
|
1155 | return problems | |
1156 |
|
1156 | |||
1157 | def debugrename(ui, repo, file1, *pats, **opts): |
|
1157 | def debugrename(ui, repo, file1, *pats, **opts): | |
1158 | """dump rename information""" |
|
1158 | """dump rename information""" | |
1159 |
|
1159 | |||
1160 | ctx = repo[opts.get('rev')] |
|
1160 | ctx = repo[opts.get('rev')] | |
1161 | m = cmdutil.match(repo, (file1,) + pats, opts) |
|
1161 | m = cmdutil.match(repo, (file1,) + pats, opts) | |
1162 | for abs in ctx.walk(m): |
|
1162 | for abs in ctx.walk(m): | |
1163 | fctx = ctx[abs] |
|
1163 | fctx = ctx[abs] | |
1164 | o = fctx.filelog().renamed(fctx.filenode()) |
|
1164 | o = fctx.filelog().renamed(fctx.filenode()) | |
1165 | rel = m.rel(abs) |
|
1165 | rel = m.rel(abs) | |
1166 | if o: |
|
1166 | if o: | |
1167 | ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) |
|
1167 | ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) | |
1168 | else: |
|
1168 | else: | |
1169 | ui.write(_("%s not renamed\n") % rel) |
|
1169 | ui.write(_("%s not renamed\n") % rel) | |
1170 |
|
1170 | |||
1171 | def debugwalk(ui, repo, *pats, **opts): |
|
1171 | def debugwalk(ui, repo, *pats, **opts): | |
1172 | """show how files match on given patterns""" |
|
1172 | """show how files match on given patterns""" | |
1173 | m = cmdutil.match(repo, pats, opts) |
|
1173 | m = cmdutil.match(repo, pats, opts) | |
1174 | items = list(repo.walk(m)) |
|
1174 | items = list(repo.walk(m)) | |
1175 | if not items: |
|
1175 | if not items: | |
1176 | return |
|
1176 | return | |
1177 | fmt = 'f %%-%ds %%-%ds %%s' % ( |
|
1177 | fmt = 'f %%-%ds %%-%ds %%s' % ( | |
1178 | max([len(abs) for abs in items]), |
|
1178 | max([len(abs) for abs in items]), | |
1179 | max([len(m.rel(abs)) for abs in items])) |
|
1179 | max([len(m.rel(abs)) for abs in items])) | |
1180 | for abs in items: |
|
1180 | for abs in items: | |
1181 | line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') |
|
1181 | line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') | |
1182 | ui.write("%s\n" % line.rstrip()) |
|
1182 | ui.write("%s\n" % line.rstrip()) | |
1183 |
|
1183 | |||
1184 | def diff(ui, repo, *pats, **opts): |
|
1184 | def diff(ui, repo, *pats, **opts): | |
1185 | """diff repository (or selected files) |
|
1185 | """diff repository (or selected files) | |
1186 |
|
1186 | |||
1187 | Show differences between revisions for the specified files. |
|
1187 | Show differences between revisions for the specified files. | |
1188 |
|
1188 | |||
1189 | Differences between files are shown using the unified diff format. |
|
1189 | Differences between files are shown using the unified diff format. | |
1190 |
|
1190 | |||
1191 | NOTE: diff may generate unexpected results for merges, as it will |
|
1191 | NOTE: diff may generate unexpected results for merges, as it will | |
1192 | default to comparing against the working directory's first parent |
|
1192 | default to comparing against the working directory's first parent | |
1193 | changeset if no revisions are specified. |
|
1193 | changeset if no revisions are specified. | |
1194 |
|
1194 | |||
1195 | When two revision arguments are given, then changes are shown |
|
1195 | When two revision arguments are given, then changes are shown | |
1196 | between those revisions. If only one revision is specified then |
|
1196 | between those revisions. If only one revision is specified then | |
1197 | that revision is compared to the working directory, and, when no |
|
1197 | that revision is compared to the working directory, and, when no | |
1198 | revisions are specified, the working directory files are compared |
|
1198 | revisions are specified, the working directory files are compared | |
1199 | to its first parent. |
|
1199 | to its first parent. | |
1200 |
|
1200 | |||
1201 | Alternatively you can specify -c/--change with a revision to see |
|
1201 | Alternatively you can specify -c/--change with a revision to see | |
1202 | the changes in that changeset relative to its first parent. |
|
1202 | the changes in that changeset relative to its first parent. | |
1203 |
|
1203 | |||
1204 | Without the -a/--text option, diff will avoid generating diffs of |
|
1204 | Without the -a/--text option, diff will avoid generating diffs of | |
1205 | files it detects as binary. With -a, diff will generate a diff |
|
1205 | files it detects as binary. With -a, diff will generate a diff | |
1206 | anyway, probably with undesirable results. |
|
1206 | anyway, probably with undesirable results. | |
1207 |
|
1207 | |||
1208 | Use the -g/--git option to generate diffs in the git extended diff |
|
1208 | Use the -g/--git option to generate diffs in the git extended diff | |
1209 | format. For more information, read :hg:`help diffs`. |
|
1209 | format. For more information, read :hg:`help diffs`. | |
1210 |
|
1210 | |||
1211 | Returns 0 on success. |
|
1211 | Returns 0 on success. | |
1212 | """ |
|
1212 | """ | |
1213 |
|
1213 | |||
1214 | revs = opts.get('rev') |
|
1214 | revs = opts.get('rev') | |
1215 | change = opts.get('change') |
|
1215 | change = opts.get('change') | |
1216 | stat = opts.get('stat') |
|
1216 | stat = opts.get('stat') | |
1217 | reverse = opts.get('reverse') |
|
1217 | reverse = opts.get('reverse') | |
1218 |
|
1218 | |||
1219 | if revs and change: |
|
1219 | if revs and change: | |
1220 | msg = _('cannot specify --rev and --change at the same time') |
|
1220 | msg = _('cannot specify --rev and --change at the same time') | |
1221 | raise util.Abort(msg) |
|
1221 | raise util.Abort(msg) | |
1222 | elif change: |
|
1222 | elif change: | |
1223 | node2 = repo.lookup(change) |
|
1223 | node2 = repo.lookup(change) | |
1224 | node1 = repo[node2].parents()[0].node() |
|
1224 | node1 = repo[node2].parents()[0].node() | |
1225 | else: |
|
1225 | else: | |
1226 | node1, node2 = cmdutil.revpair(repo, revs) |
|
1226 | node1, node2 = cmdutil.revpair(repo, revs) | |
1227 |
|
1227 | |||
1228 | if reverse: |
|
1228 | if reverse: | |
1229 | node1, node2 = node2, node1 |
|
1229 | node1, node2 = node2, node1 | |
1230 |
|
1230 | |||
1231 | diffopts = patch.diffopts(ui, opts) |
|
1231 | diffopts = patch.diffopts(ui, opts) | |
1232 | m = cmdutil.match(repo, pats, opts) |
|
1232 | m = cmdutil.match(repo, pats, opts) | |
1233 | cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat) |
|
1233 | cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat) | |
1234 |
|
1234 | |||
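Typical invocations of the revision-selection rules described above (revision numbers and file names are placeholders):

    $ hg diff                        # working directory against its first parent
    $ hg diff -r 1.0 -r 1.2 foo.c    # between two revisions, restricted to one file
    $ hg diff -c 2500 --git          # changes introduced by changeset 2500, git format
    $ hg diff --stat                 # diffstat-style summary instead of a patch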
1235 | def export(ui, repo, *changesets, **opts): |
|
1235 | def export(ui, repo, *changesets, **opts): | |
1236 | """dump the header and diffs for one or more changesets |
|
1236 | """dump the header and diffs for one or more changesets | |
1237 |
|
1237 | |||
1238 | Print the changeset header and diffs for one or more revisions. |
|
1238 | Print the changeset header and diffs for one or more revisions. | |
1239 |
|
1239 | |||
1240 | The information shown in the changeset header is: author, date, |
|
1240 | The information shown in the changeset header is: author, date, | |
1241 | branch name (if non-default), changeset hash, parent(s) and commit |
|
1241 | branch name (if non-default), changeset hash, parent(s) and commit | |
1242 | comment. |
|
1242 | comment. | |
1243 |
|
1243 | |||
1244 | NOTE: export may generate unexpected diff output for merge |
|
1244 | NOTE: export may generate unexpected diff output for merge | |
1245 | changesets, as it will compare the merge changeset against its |
|
1245 | changesets, as it will compare the merge changeset against its | |
1246 | first parent only. |
|
1246 | first parent only. | |
1247 |
|
1247 | |||
1248 | Output may be to a file, in which case the name of the file is |
|
1248 | Output may be to a file, in which case the name of the file is | |
1249 | given using a format string. The formatting rules are as follows: |
|
1249 | given using a format string. The formatting rules are as follows: | |
1250 |
|
1250 | |||
1251 | :``%%``: literal "%" character |
|
1251 | :``%%``: literal "%" character | |
1252 | :``%H``: changeset hash (40 bytes of hexadecimal) |
|
1252 | :``%H``: changeset hash (40 bytes of hexadecimal) | |
1253 | :``%N``: number of patches being generated |
|
1253 | :``%N``: number of patches being generated | |
1254 | :``%R``: changeset revision number |
|
1254 | :``%R``: changeset revision number | |
1255 | :``%b``: basename of the exporting repository |
|
1255 | :``%b``: basename of the exporting repository | |
1256 | :``%h``: short-form changeset hash (12 bytes of hexadecimal) |
|
1256 | :``%h``: short-form changeset hash (12 bytes of hexadecimal) | |
1257 | :``%n``: zero-padded sequence number, starting at 1 |
|
1257 | :``%n``: zero-padded sequence number, starting at 1 | |
1258 | :``%r``: zero-padded changeset revision number |
|
1258 | :``%r``: zero-padded changeset revision number | |
1259 |
|
1259 | |||
1260 | Without the -a/--text option, export will avoid generating diffs |
|
1260 | Without the -a/--text option, export will avoid generating diffs | |
1261 | of files it detects as binary. With -a, export will generate a |
|
1261 | of files it detects as binary. With -a, export will generate a | |
1262 | diff anyway, probably with undesirable results. |
|
1262 | diff anyway, probably with undesirable results. | |
1263 |
|
1263 | |||
1264 | Use the -g/--git option to generate diffs in the git extended diff |
|
1264 | Use the -g/--git option to generate diffs in the git extended diff | |
1265 | format. See :hg:`help diffs` for more information. |
|
1265 | format. See :hg:`help diffs` for more information. | |
1266 |
|
1266 | |||
1267 | With the --switch-parent option, the diff will be against the |
|
1267 | With the --switch-parent option, the diff will be against the | |
1268 | second parent. This can be useful when reviewing a merge. |
|
1268 | second parent. This can be useful when reviewing a merge. | |
1269 |
|
1269 | |||
1270 | Returns 0 on success. |
|
1270 | Returns 0 on success. | |
1271 | """ |
|
1271 | """ | |
1272 | changesets += tuple(opts.get('rev', [])) |
|
1272 | changesets += tuple(opts.get('rev', [])) | |
1273 | if not changesets: |
|
1273 | if not changesets: | |
1274 | raise util.Abort(_("export requires at least one changeset")) |
|
1274 | raise util.Abort(_("export requires at least one changeset")) | |
1275 | revs = cmdutil.revrange(repo, changesets) |
|
1275 | revs = cmdutil.revrange(repo, changesets) | |
1276 | if len(revs) > 1: |
|
1276 | if len(revs) > 1: | |
1277 | ui.note(_('exporting patches:\n')) |
|
1277 | ui.note(_('exporting patches:\n')) | |
1278 | else: |
|
1278 | else: | |
1279 | ui.note(_('exporting patch:\n')) |
|
1279 | ui.note(_('exporting patch:\n')) | |
1280 | cmdutil.export(repo, revs, template=opts.get('output'), |
|
1280 | cmdutil.export(repo, revs, template=opts.get('output'), | |
1281 | switch_parent=opts.get('switch_parent'), |
|
1281 | switch_parent=opts.get('switch_parent'), | |
1282 | opts=patch.diffopts(ui, opts)) |
|
1282 | opts=patch.diffopts(ui, opts)) | |
1283 |
|
1283 | |||
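An illustrative use of the output format codes listed above (revision numbers are placeholders; -o is the output option read by the function):

    $ hg export -r 100 > r100.patch
    $ hg export -o "%b-%n-of-%N-%h.patch" 100 101 102
    $ hg export --git --switch-parent -r 2500    # diff a merge against its second parent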
1284 | def forget(ui, repo, *pats, **opts): |
|
1284 | def forget(ui, repo, *pats, **opts): | |
1285 | """forget the specified files on the next commit |
|
1285 | """forget the specified files on the next commit | |
1286 |
|
1286 | |||
1287 | Mark the specified files so they will no longer be tracked |
|
1287 | Mark the specified files so they will no longer be tracked | |
1288 | after the next commit. |
|
1288 | after the next commit. | |
1289 |
|
1289 | |||
1290 | This only removes files from the current branch, not from the |
|
1290 | This only removes files from the current branch, not from the | |
1291 | entire project history, and it does not delete them from the |
|
1291 | entire project history, and it does not delete them from the | |
1292 | working directory. |
|
1292 | working directory. | |
1293 |
|
1293 | |||
1294 | To undo a forget before the next commit, see :hg:`add`. |
|
1294 | To undo a forget before the next commit, see :hg:`add`. | |
1295 |
|
1295 | |||
1296 | Returns 0 on success. |
|
1296 | Returns 0 on success. | |
1297 | """ |
|
1297 | """ | |
1298 |
|
1298 | |||
1299 | if not pats: |
|
1299 | if not pats: | |
1300 | raise util.Abort(_('no files specified')) |
|
1300 | raise util.Abort(_('no files specified')) | |
1301 |
|
1301 | |||
1302 | m = cmdutil.match(repo, pats, opts) |
|
1302 | m = cmdutil.match(repo, pats, opts) | |
1303 | s = repo.status(match=m, clean=True) |
|
1303 | s = repo.status(match=m, clean=True) | |
1304 | forget = sorted(s[0] + s[1] + s[3] + s[6]) |
|
1304 | forget = sorted(s[0] + s[1] + s[3] + s[6]) | |
1305 | errs = 0 |
|
1305 | errs = 0 | |
1306 |
|
1306 | |||
1307 | for f in m.files(): |
|
1307 | for f in m.files(): | |
1308 | if f not in repo.dirstate and not os.path.isdir(m.rel(f)): |
|
1308 | if f not in repo.dirstate and not os.path.isdir(m.rel(f)): | |
1309 | ui.warn(_('not removing %s: file is already untracked\n') |
|
1309 | ui.warn(_('not removing %s: file is already untracked\n') | |
1310 | % m.rel(f)) |
|
1310 | % m.rel(f)) | |
1311 | errs = 1 |
|
1311 | errs = 1 | |
1312 |
|
1312 | |||
1313 | for f in forget: |
|
1313 | for f in forget: | |
1314 | if ui.verbose or not m.exact(f): |
|
1314 | if ui.verbose or not m.exact(f): | |
1315 | ui.status(_('removing %s\n') % m.rel(f)) |
|
1315 | ui.status(_('removing %s\n') % m.rel(f)) | |
1316 |
|
1316 | |||
1317 | repo.remove(forget, unlink=False) |
|
1317 | repo.remove(forget, unlink=False) | |
1318 | return errs |
|
1318 | return errs | |
1319 |
|
1319 | |||
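For instance (the file name and pattern are invented for illustration):

    $ hg forget generated.h
    $ hg forget "glob:**.orig"       # stop tracking, but keep the files on disk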
1320 | def grep(ui, repo, pattern, *pats, **opts): |
|
1320 | def grep(ui, repo, pattern, *pats, **opts): | |
1321 | """search for a pattern in specified files and revisions |
|
1321 | """search for a pattern in specified files and revisions | |
1322 |
|
1322 | |||
1323 | Search revisions of files for a regular expression. |
|
1323 | Search revisions of files for a regular expression. | |
1324 |
|
1324 | |||
1325 | This command behaves differently than Unix grep. It only accepts |
|
1325 | This command behaves differently than Unix grep. It only accepts | |
1326 | Python/Perl regexps. It searches repository history, not the |
|
1326 | Python/Perl regexps. It searches repository history, not the | |
1327 | working directory. It always prints the revision number in which a |
|
1327 | working directory. It always prints the revision number in which a | |
1328 | match appears. |
|
1328 | match appears. | |
1329 |
|
1329 | |||
1330 | By default, grep only prints output for the first revision of a |
|
1330 | By default, grep only prints output for the first revision of a | |
1331 | file in which it finds a match. To get it to print every revision |
|
1331 | file in which it finds a match. To get it to print every revision | |
1332 | that contains a change in match status ("-" for a match that |
|
1332 | that contains a change in match status ("-" for a match that | |
1333 | becomes a non-match, or "+" for a non-match that becomes a match), |
|
1333 | becomes a non-match, or "+" for a non-match that becomes a match), | |
1334 | use the --all flag. |
|
1334 | use the --all flag. | |
1335 |
|
1335 | |||
1336 | Returns 0 if a match is found, 1 otherwise. |
|
1336 | Returns 0 if a match is found, 1 otherwise. | |
1337 | """ |
|
1337 | """ | |
1338 | reflags = 0 |
|
1338 | reflags = 0 | |
1339 | if opts.get('ignore_case'): |
|
1339 | if opts.get('ignore_case'): | |
1340 | reflags |= re.I |
|
1340 | reflags |= re.I | |
1341 | try: |
|
1341 | try: | |
1342 | regexp = re.compile(pattern, reflags) |
|
1342 | regexp = re.compile(pattern, reflags) | |
1343 | except Exception, inst: |
|
1343 | except Exception, inst: | |
1344 | ui.warn(_("grep: invalid match pattern: %s\n") % inst) |
|
1344 | ui.warn(_("grep: invalid match pattern: %s\n") % inst) | |
1345 | return 1 |
|
1345 | return 1 | |
1346 | sep, eol = ':', '\n' |
|
1346 | sep, eol = ':', '\n' | |
1347 | if opts.get('print0'): |
|
1347 | if opts.get('print0'): | |
1348 | sep = eol = '\0' |
|
1348 | sep = eol = '\0' | |
1349 |
|
1349 | |||
1350 | getfile = util.lrucachefunc(repo.file) |
|
1350 | getfile = util.lrucachefunc(repo.file) | |
1351 |
|
1351 | |||
1352 | def matchlines(body): |
|
1352 | def matchlines(body): | |
1353 | begin = 0 |
|
1353 | begin = 0 | |
1354 | linenum = 0 |
|
1354 | linenum = 0 | |
1355 | while True: |
|
1355 | while True: | |
1356 | match = regexp.search(body, begin) |
|
1356 | match = regexp.search(body, begin) | |
1357 | if not match: |
|
1357 | if not match: | |
1358 | break |
|
1358 | break | |
1359 | mstart, mend = match.span() |
|
1359 | mstart, mend = match.span() | |
1360 | linenum += body.count('\n', begin, mstart) + 1 |
|
1360 | linenum += body.count('\n', begin, mstart) + 1 | |
1361 | lstart = body.rfind('\n', begin, mstart) + 1 or begin |
|
1361 | lstart = body.rfind('\n', begin, mstart) + 1 or begin | |
1362 | begin = body.find('\n', mend) + 1 or len(body) |
|
1362 | begin = body.find('\n', mend) + 1 or len(body) | |
1363 | lend = begin - 1 |
|
1363 | lend = begin - 1 | |
1364 | yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] |
|
1364 | yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] | |
1365 |
|
1365 | |||
1366 | class linestate(object): |
|
1366 | class linestate(object): | |
1367 | def __init__(self, line, linenum, colstart, colend): |
|
1367 | def __init__(self, line, linenum, colstart, colend): | |
1368 | self.line = line |
|
1368 | self.line = line | |
1369 | self.linenum = linenum |
|
1369 | self.linenum = linenum | |
1370 | self.colstart = colstart |
|
1370 | self.colstart = colstart | |
1371 | self.colend = colend |
|
1371 | self.colend = colend | |
1372 |
|
1372 | |||
1373 | def __hash__(self): |
|
1373 | def __hash__(self): | |
1374 | return hash((self.linenum, self.line)) |
|
1374 | return hash((self.linenum, self.line)) | |
1375 |
|
1375 | |||
1376 | def __eq__(self, other): |
|
1376 | def __eq__(self, other): | |
1377 | return self.line == other.line |
|
1377 | return self.line == other.line | |
1378 |
|
1378 | |||
1379 | matches = {} |
|
1379 | matches = {} | |
1380 | copies = {} |
|
1380 | copies = {} | |
1381 | def grepbody(fn, rev, body): |
|
1381 | def grepbody(fn, rev, body): | |
1382 | matches[rev].setdefault(fn, []) |
|
1382 | matches[rev].setdefault(fn, []) | |
1383 | m = matches[rev][fn] |
|
1383 | m = matches[rev][fn] | |
1384 | for lnum, cstart, cend, line in matchlines(body): |
|
1384 | for lnum, cstart, cend, line in matchlines(body): | |
1385 | s = linestate(line, lnum, cstart, cend) |
|
1385 | s = linestate(line, lnum, cstart, cend) | |
1386 | m.append(s) |
|
1386 | m.append(s) | |
1387 |
|
1387 | |||
1388 | def difflinestates(a, b): |
|
1388 | def difflinestates(a, b): | |
1389 | sm = difflib.SequenceMatcher(None, a, b) |
|
1389 | sm = difflib.SequenceMatcher(None, a, b) | |
1390 | for tag, alo, ahi, blo, bhi in sm.get_opcodes(): |
|
1390 | for tag, alo, ahi, blo, bhi in sm.get_opcodes(): | |
1391 | if tag == 'insert': |
|
1391 | if tag == 'insert': | |
1392 | for i in xrange(blo, bhi): |
|
1392 | for i in xrange(blo, bhi): | |
1393 | yield ('+', b[i]) |
|
1393 | yield ('+', b[i]) | |
1394 | elif tag == 'delete': |
|
1394 | elif tag == 'delete': | |
1395 | for i in xrange(alo, ahi): |
|
1395 | for i in xrange(alo, ahi): | |
1396 | yield ('-', a[i]) |
|
1396 | yield ('-', a[i]) | |
1397 | elif tag == 'replace': |
|
1397 | elif tag == 'replace': | |
1398 | for i in xrange(alo, ahi): |
|
1398 | for i in xrange(alo, ahi): | |
1399 | yield ('-', a[i]) |
|
1399 | yield ('-', a[i]) | |
1400 | for i in xrange(blo, bhi): |
|
1400 | for i in xrange(blo, bhi): | |
1401 | yield ('+', b[i]) |
|
1401 | yield ('+', b[i]) | |
1402 |
|
1402 | |||
1403 | def display(fn, ctx, pstates, states): |
|
1403 | def display(fn, ctx, pstates, states): | |
1404 | rev = ctx.rev() |
|
1404 | rev = ctx.rev() | |
1405 | datefunc = ui.quiet and util.shortdate or util.datestr |
|
1405 | datefunc = ui.quiet and util.shortdate or util.datestr | |
1406 | found = False |
|
1406 | found = False | |
1407 | filerevmatches = {} |
|
1407 | filerevmatches = {} | |
1408 | if opts.get('all'): |
|
1408 | if opts.get('all'): | |
1409 | iter = difflinestates(pstates, states) |
|
1409 | iter = difflinestates(pstates, states) | |
1410 | else: |
|
1410 | else: | |
1411 | iter = [('', l) for l in states] |
|
1411 | iter = [('', l) for l in states] | |
1412 | for change, l in iter: |
|
1412 | for change, l in iter: | |
1413 | cols = [fn, str(rev)] |
|
1413 | cols = [fn, str(rev)] | |
1414 | before, match, after = None, None, None |
|
1414 | before, match, after = None, None, None | |
1415 | if opts.get('line_number'): |
|
1415 | if opts.get('line_number'): | |
1416 | cols.append(str(l.linenum)) |
|
1416 | cols.append(str(l.linenum)) | |
1417 | if opts.get('all'): |
|
1417 | if opts.get('all'): | |
1418 | cols.append(change) |
|
1418 | cols.append(change) | |
1419 | if opts.get('user'): |
|
1419 | if opts.get('user'): | |
1420 | cols.append(ui.shortuser(ctx.user())) |
|
1420 | cols.append(ui.shortuser(ctx.user())) | |
1421 | if opts.get('date'): |
|
1421 | if opts.get('date'): | |
1422 | cols.append(datefunc(ctx.date())) |
|
1422 | cols.append(datefunc(ctx.date())) | |
1423 | if opts.get('files_with_matches'): |
|
1423 | if opts.get('files_with_matches'): | |
1424 | c = (fn, rev) |
|
1424 | c = (fn, rev) | |
1425 | if c in filerevmatches: |
|
1425 | if c in filerevmatches: | |
1426 | continue |
|
1426 | continue | |
1427 | filerevmatches[c] = 1 |
|
1427 | filerevmatches[c] = 1 | |
1428 | else: |
|
1428 | else: | |
1429 | before = l.line[:l.colstart] |
|
1429 | before = l.line[:l.colstart] | |
1430 | match = l.line[l.colstart:l.colend] |
|
1430 | match = l.line[l.colstart:l.colend] | |
1431 | after = l.line[l.colend:] |
|
1431 | after = l.line[l.colend:] | |
1432 | ui.write(sep.join(cols)) |
|
1432 | ui.write(sep.join(cols)) | |
1433 | if before is not None: |
|
1433 | if before is not None: | |
1434 | ui.write(sep + before) |
|
1434 | ui.write(sep + before) | |
1435 | ui.write(match, label='grep.match') |
|
1435 | ui.write(match, label='grep.match') | |
1436 | ui.write(after) |
|
1436 | ui.write(after) | |
1437 | ui.write(eol) |
|
1437 | ui.write(eol) | |
1438 | found = True |
|
1438 | found = True | |
1439 | return found |
|
1439 | return found | |
1440 |
|
1440 | |||
1441 | skip = {} |
|
1441 | skip = {} | |
1442 | revfiles = {} |
|
1442 | revfiles = {} | |
1443 | matchfn = cmdutil.match(repo, pats, opts) |
|
1443 | matchfn = cmdutil.match(repo, pats, opts) | |
1444 | found = False |
|
1444 | found = False | |
1445 | follow = opts.get('follow') |
|
1445 | follow = opts.get('follow') | |
1446 |
|
1446 | |||
1447 | def prep(ctx, fns): |
|
1447 | def prep(ctx, fns): | |
1448 | rev = ctx.rev() |
|
1448 | rev = ctx.rev() | |
1449 | pctx = ctx.parents()[0] |
|
1449 | pctx = ctx.parents()[0] | |
1450 | parent = pctx.rev() |
|
1450 | parent = pctx.rev() | |
1451 | matches.setdefault(rev, {}) |
|
1451 | matches.setdefault(rev, {}) | |
1452 | matches.setdefault(parent, {}) |
|
1452 | matches.setdefault(parent, {}) | |
1453 | files = revfiles.setdefault(rev, []) |
|
1453 | files = revfiles.setdefault(rev, []) | |
1454 | for fn in fns: |
|
1454 | for fn in fns: | |
1455 | flog = getfile(fn) |
|
1455 | flog = getfile(fn) | |
1456 | try: |
|
1456 | try: | |
1457 | fnode = ctx.filenode(fn) |
|
1457 | fnode = ctx.filenode(fn) | |
1458 | except error.LookupError: |
|
1458 | except error.LookupError: | |
1459 | continue |
|
1459 | continue | |
1460 |
|
1460 | |||
1461 | copied = flog.renamed(fnode) |
|
1461 | copied = flog.renamed(fnode) | |
1462 | copy = follow and copied and copied[0] |
|
1462 | copy = follow and copied and copied[0] | |
1463 | if copy: |
|
1463 | if copy: | |
1464 | copies.setdefault(rev, {})[fn] = copy |
|
1464 | copies.setdefault(rev, {})[fn] = copy | |
1465 | if fn in skip: |
|
1465 | if fn in skip: | |
1466 | if copy: |
|
1466 | if copy: | |
1467 | skip[copy] = True |
|
1467 | skip[copy] = True | |
1468 | continue |
|
1468 | continue | |
1469 | files.append(fn) |
|
1469 | files.append(fn) | |
1470 |
|
1470 | |||
1471 | if fn not in matches[rev]: |
|
1471 | if fn not in matches[rev]: | |
1472 | grepbody(fn, rev, flog.read(fnode)) |
|
1472 | grepbody(fn, rev, flog.read(fnode)) | |
1473 |
|
1473 | |||
1474 | pfn = copy or fn |
|
1474 | pfn = copy or fn | |
1475 | if pfn not in matches[parent]: |
|
1475 | if pfn not in matches[parent]: | |
1476 | try: |
|
1476 | try: | |
1477 | fnode = pctx.filenode(pfn) |
|
1477 | fnode = pctx.filenode(pfn) | |
1478 | grepbody(pfn, parent, flog.read(fnode)) |
|
1478 | grepbody(pfn, parent, flog.read(fnode)) | |
1479 | except error.LookupError: |
|
1479 | except error.LookupError: | |
1480 | pass |
|
1480 | pass | |
1481 |
|
1481 | |||
1482 | for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): |
|
1482 | for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): | |
1483 | rev = ctx.rev() |
|
1483 | rev = ctx.rev() | |
1484 | parent = ctx.parents()[0].rev() |
|
1484 | parent = ctx.parents()[0].rev() | |
1485 | for fn in sorted(revfiles.get(rev, [])): |
|
1485 | for fn in sorted(revfiles.get(rev, [])): | |
1486 | states = matches[rev][fn] |
|
1486 | states = matches[rev][fn] | |
1487 | copy = copies.get(rev, {}).get(fn) |
|
1487 | copy = copies.get(rev, {}).get(fn) | |
1488 | if fn in skip: |
|
1488 | if fn in skip: | |
1489 | if copy: |
|
1489 | if copy: | |
1490 | skip[copy] = True |
|
1490 | skip[copy] = True | |
1491 | continue |
|
1491 | continue | |
1492 | pstates = matches.get(parent, {}).get(copy or fn, []) |
|
1492 | pstates = matches.get(parent, {}).get(copy or fn, []) | |
1493 | if pstates or states: |
|
1493 | if pstates or states: | |
1494 | r = display(fn, ctx, pstates, states) |
|
1494 | r = display(fn, ctx, pstates, states) | |
1495 | found = found or r |
|
1495 | found = found or r | |
1496 | if r and not opts.get('all'): |
|
1496 | if r and not opts.get('all'): | |
1497 | skip[fn] = True |
|
1497 | skip[fn] = True | |
1498 | if copy: |
|
1498 | if copy: | |
1499 | skip[copy] = True |
|
1499 | skip[copy] = True | |
1500 | del matches[rev] |
|
1500 | del matches[rev] | |
1501 | del revfiles[rev] |
|
1501 | del revfiles[rev] | |
1502 |
|
1502 | |||
1503 | return not found |
|
1503 | return not found | |
1504 |
|
1504 | |||
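Without --all, grep reports each file only for the newest revision where its match state changed, then records the file (and its rename source) in skip so older revisions stay quiet. A toy model of that walk over newest-first data, with invented field names::

    def first_hits(history):
        # history: newest-first list of (rev, filename, matched, copied_from)
        skip = set()
        hits = []
        for rev, fn, matched, copied in history:
            if fn in skip:
                if copied:
                    skip.add(copied)      # keep skipping across a rename
                continue
            if matched:
                hits.append((rev, fn))
                skip.add(fn)
                if copied:
                    skip.add(copied)
        return hits

    history = [(5, 'new.py', True, 'old.py'), (4, 'old.py', True, None)]
    print(first_hits(history))   # [(5, 'new.py')] -- rev 4 is skipped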
1505 | def heads(ui, repo, *branchrevs, **opts): |
|
1505 | def heads(ui, repo, *branchrevs, **opts): | |
1506 | """show current repository heads or show branch heads |
|
1506 | """show current repository heads or show branch heads | |
1507 |
|
1507 | |||
1508 | With no arguments, show all repository branch heads. |
|
1508 | With no arguments, show all repository branch heads. | |
1509 |
|
1509 | |||
1510 | Repository "heads" are changesets with no child changesets. They are |
|
1510 | Repository "heads" are changesets with no child changesets. They are | |
1511 | where development generally takes place and are the usual targets |
|
1511 | where development generally takes place and are the usual targets | |
1512 | for update and merge operations. Branch heads are changesets that have |
|
1512 | for update and merge operations. Branch heads are changesets that have | |
1513 | no child changeset on the same branch. |
|
1513 | no child changeset on the same branch. | |
1514 |
|
1514 | |||
1515 | If one or more REVs are given, only branch heads on the branches |
|
1515 | If one or more REVs are given, only branch heads on the branches | |
1516 | associated with the specified changesets are shown. |
|
1516 | associated with the specified changesets are shown. | |
1517 |
|
1517 | |||
1518 | If -c/--closed is specified, also show branch heads marked closed |
|
1518 | If -c/--closed is specified, also show branch heads marked closed | |
1519 | (see :hg:`commit --close-branch`). |
|
1519 | (see :hg:`commit --close-branch`). | |
1520 |
|
1520 | |||
1521 | If STARTREV is specified, only those heads that are descendants of |
|
1521 | If STARTREV is specified, only those heads that are descendants of | |
1522 | STARTREV will be displayed. |
|
1522 | STARTREV will be displayed. | |
1523 |
|
1523 | |||
1524 | If -t/--topo is specified, named branch mechanics will be ignored and only |
|
1524 | If -t/--topo is specified, named branch mechanics will be ignored and only | |
1525 | changesets without children will be shown. |
|
1525 | changesets without children will be shown. | |
1526 |
|
1526 | |||
1527 | Returns 0 if matching heads are found, 1 if not. |
|
1527 | Returns 0 if matching heads are found, 1 if not. | |
1528 | """ |
|
1528 | """ | |
1529 |
|
1529 | |||
1530 | if opts.get('rev'): |
|
1530 | if opts.get('rev'): | |
1531 | start = repo.lookup(opts['rev']) |
|
1531 | start = repo.lookup(opts['rev']) | |
1532 | else: |
|
1532 | else: | |
1533 | start = None |
|
1533 | start = None | |
1534 |
|
1534 | |||
1535 | if opts.get('topo'): |
|
1535 | if opts.get('topo'): | |
1536 | heads = [repo[h] for h in repo.heads(start)] |
|
1536 | heads = [repo[h] for h in repo.heads(start)] | |
1537 | else: |
|
1537 | else: | |
1538 | heads = [] |
|
1538 | heads = [] | |
1539 | for b, ls in repo.branchmap().iteritems(): |
|
1539 | for b, ls in repo.branchmap().iteritems(): | |
1540 | if start is None: |
|
1540 | if start is None: | |
1541 | heads += [repo[h] for h in ls] |
|
1541 | heads += [repo[h] for h in ls] | |
1542 | continue |
|
1542 | continue | |
1543 | startrev = repo.changelog.rev(start) |
|
1543 | startrev = repo.changelog.rev(start) | |
1544 | descendants = set(repo.changelog.descendants(startrev)) |
|
1544 | descendants = set(repo.changelog.descendants(startrev)) | |
1545 | descendants.add(startrev) |
|
1545 | descendants.add(startrev) | |
1546 | rev = repo.changelog.rev |
|
1546 | rev = repo.changelog.rev | |
1547 | heads += [repo[h] for h in ls if rev(h) in descendants] |
|
1547 | heads += [repo[h] for h in ls if rev(h) in descendants] | |
1548 |
|
1548 | |||
1549 | if branchrevs: |
|
1549 | if branchrevs: | |
1550 | decode, encode = encoding.fromlocal, encoding.tolocal |
|
1550 | decode, encode = encoding.fromlocal, encoding.tolocal | |
1551 | branches = set(repo[decode(br)].branch() for br in branchrevs) |
|
1551 | branches = set(repo[decode(br)].branch() for br in branchrevs) | |
1552 | heads = [h for h in heads if h.branch() in branches] |
|
1552 | heads = [h for h in heads if h.branch() in branches] | |
1553 |
|
1553 | |||
1554 | if not opts.get('closed'): |
|
1554 | if not opts.get('closed'): | |
1555 | heads = [h for h in heads if not h.extra().get('close')] |
|
1555 | heads = [h for h in heads if not h.extra().get('close')] | |
1556 |
|
1556 | |||
1557 | if opts.get('active') and branchrevs: |
|
1557 | if opts.get('active') and branchrevs: | |
1558 | dagheads = repo.heads(start) |
|
1558 | dagheads = repo.heads(start) | |
1559 | heads = [h for h in heads if h.node() in dagheads] |
|
1559 | heads = [h for h in heads if h.node() in dagheads] | |
1560 |
|
1560 | |||
1561 | if branchrevs: |
|
1561 | if branchrevs: | |
1562 | haveheads = set(h.branch() for h in heads) |
|
1562 | haveheads = set(h.branch() for h in heads) | |
1563 | if branches - haveheads: |
|
1563 | if branches - haveheads: | |
1564 | headless = ', '.join(encode(b) for b in branches - haveheads) |
|
1564 | headless = ', '.join(encode(b) for b in branches - haveheads) | |
1565 | msg = _('no open branch heads found on branches %s') |
|
1565 | msg = _('no open branch heads found on branches %s') | |
1566 | if opts.get('rev'): |
|
1566 | if opts.get('rev'): | |
1567 | msg += _(' (started at %s)') % opts['rev'] |
|
1567 | msg += _(' (started at %s)') % opts['rev'] | |
1568 | ui.warn((msg + '\n') % headless) |
|
1568 | ui.warn((msg + '\n') % headless) | |
1569 |
|
1569 | |||
1570 | if not heads: |
|
1570 | if not heads: | |
1571 | return 1 |
|
1571 | return 1 | |
1572 |
|
1572 | |||
1573 | heads = sorted(heads, key=lambda x: -x.rev()) |
|
1573 | heads = sorted(heads, key=lambda x: -x.rev()) | |
1574 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
1574 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
1575 | for ctx in heads: |
|
1575 | for ctx in heads: | |
1576 | displayer.show(ctx) |
|
1576 | displayer.show(ctx) | |
1577 | displayer.close() |
|
1577 | displayer.close() | |
1578 |
|
1578 | |||
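The docstring defines repository heads as changesets with no children and branch heads as changesets with no child on the same branch. A compact sketch of both computations over a plain parent map (dicts stand in for the changelog and branch map; this is not the branchmap API)::

    def find_heads(parents, branch=None, branches=None):
        # parents maps each node to its parent nodes; a node is a head when
        # no other node lists it as a parent.  With a branch given, only
        # children on the same branch as the parent disqualify it.
        has_child = set()
        for node, ps in parents.items():
            for p in ps:
                if branch is None or branches[node] == branches[p]:
                    has_child.add(p)
        return [n for n in parents if n not in has_child
                and (branch is None or branches[n] == branch)]

    parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['b']}
    branches = {'a': 'default', 'b': 'default', 'c': 'default', 'd': 'stable'}
    print(find_heads(parents))                       # ['c', 'd'] (topological)
    print(find_heads(parents, 'default', branches))  # ['c'] (branch head)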
1579 | def help_(ui, name=None, with_version=False, unknowncmd=False): |
|
1579 | def help_(ui, name=None, with_version=False, unknowncmd=False): | |
1580 | """show help for a given topic or a help overview |
|
1580 | """show help for a given topic or a help overview | |
1581 |
|
1581 | |||
1582 | With no arguments, print a list of commands with short help messages. |
|
1582 | With no arguments, print a list of commands with short help messages. | |
1583 |
|
1583 | |||
1584 | Given a topic, extension, or command name, print help for that |
|
1584 | Given a topic, extension, or command name, print help for that | |
1585 | topic. |
|
1585 | topic. | |
1586 |
|
1586 | |||
1587 | Returns 0 if successful. |
|
1587 | Returns 0 if successful. | |
1588 | """ |
|
1588 | """ | |
1589 | option_lists = [] |
|
1589 | option_lists = [] | |
1590 | textwidth = util.termwidth() - 2 |
|
1590 | textwidth = util.termwidth() - 2 | |
1591 |
|
1591 | |||
1592 | def addglobalopts(aliases): |
|
1592 | def addglobalopts(aliases): | |
1593 | if ui.verbose: |
|
1593 | if ui.verbose: | |
1594 | option_lists.append((_("global options:"), globalopts)) |
|
1594 | option_lists.append((_("global options:"), globalopts)) | |
1595 | if name == 'shortlist': |
|
1595 | if name == 'shortlist': | |
1596 | option_lists.append((_('use "hg help" for the full list ' |
|
1596 | option_lists.append((_('use "hg help" for the full list ' | |
1597 | 'of commands'), ())) |
|
1597 | 'of commands'), ())) | |
1598 | else: |
|
1598 | else: | |
1599 | if name == 'shortlist': |
|
1599 | if name == 'shortlist': | |
1600 | msg = _('use "hg help" for the full list of commands ' |
|
1600 | msg = _('use "hg help" for the full list of commands ' | |
1601 | 'or "hg -v" for details') |
|
1601 | 'or "hg -v" for details') | |
1602 | elif aliases: |
|
1602 | elif aliases: | |
1603 | msg = _('use "hg -v help%s" to show aliases and ' |
|
1603 | msg = _('use "hg -v help%s" to show aliases and ' | |
1604 | 'global options') % (name and " " + name or "") |
|
1604 | 'global options') % (name and " " + name or "") | |
1605 | else: |
|
1605 | else: | |
1606 | msg = _('use "hg -v help %s" to show global options') % name |
|
1606 | msg = _('use "hg -v help %s" to show global options') % name | |
1607 | option_lists.append((msg, ())) |
|
1607 | option_lists.append((msg, ())) | |
1608 |
|
1608 | |||
1609 | def helpcmd(name): |
|
1609 | def helpcmd(name): | |
1610 | if with_version: |
|
1610 | if with_version: | |
1611 | version_(ui) |
|
1611 | version_(ui) | |
1612 | ui.write('\n') |
|
1612 | ui.write('\n') | |
1613 |
|
1613 | |||
1614 | try: |
|
1614 | try: | |
1615 | aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd) |
|
1615 | aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd) | |
1616 | except error.AmbiguousCommand, inst: |
|
1616 | except error.AmbiguousCommand, inst: | |
1617 | # py3k fix: except vars can't be used outside the scope of the |
|
1617 | # py3k fix: except vars can't be used outside the scope of the | |
1618 | # except block, nor can they be used inside a lambda. python issue4617 |
|
1618 | # except block, nor can they be used inside a lambda. python issue4617 | |

1619 | prefix = inst.args[0] |
|
1619 | prefix = inst.args[0] | |
1620 | select = lambda c: c.lstrip('^').startswith(prefix) |
|
1620 | select = lambda c: c.lstrip('^').startswith(prefix) | |
1621 | helplist(_('list of commands:\n\n'), select) |
|
1621 | helplist(_('list of commands:\n\n'), select) | |
1622 | return |
|
1622 | return | |
1623 |
|
1623 | |||
1624 | # check if it's an invalid alias and display its error if it is |
|
1624 | # check if it's an invalid alias and display its error if it is | |
1625 | if getattr(entry[0], 'badalias', False): |
|
1625 | if getattr(entry[0], 'badalias', False): | |
1626 | if not unknowncmd: |
|
1626 | if not unknowncmd: | |
1627 | entry[0](ui) |
|
1627 | entry[0](ui) | |
1628 | return |
|
1628 | return | |
1629 |
|
1629 | |||
1630 | # synopsis |
|
1630 | # synopsis | |
1631 | if len(entry) > 2: |
|
1631 | if len(entry) > 2: | |
1632 | if entry[2].startswith('hg'): |
|
1632 | if entry[2].startswith('hg'): | |
1633 | ui.write("%s\n" % entry[2]) |
|
1633 | ui.write("%s\n" % entry[2]) | |
1634 | else: |
|
1634 | else: | |
1635 | ui.write('hg %s %s\n' % (aliases[0], entry[2])) |
|
1635 | ui.write('hg %s %s\n' % (aliases[0], entry[2])) | |
1636 | else: |
|
1636 | else: | |
1637 | ui.write('hg %s\n' % aliases[0]) |
|
1637 | ui.write('hg %s\n' % aliases[0]) | |
1638 |
|
1638 | |||
1639 | # aliases |
|
1639 | # aliases | |
1640 | if not ui.quiet and len(aliases) > 1: |
|
1640 | if not ui.quiet and len(aliases) > 1: | |
1641 | ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) |
|
1641 | ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) | |
1642 |
|
1642 | |||
1643 | # description |
|
1643 | # description | |
1644 | doc = gettext(entry[0].__doc__) |
|
1644 | doc = gettext(entry[0].__doc__) | |
1645 | if not doc: |
|
1645 | if not doc: | |
1646 | doc = _("(no help text available)") |
|
1646 | doc = _("(no help text available)") | |
1647 | if hasattr(entry[0], 'definition'): # aliased command |
|
1647 | if hasattr(entry[0], 'definition'): # aliased command | |
1648 | doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc) |
|
1648 | doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc) | |
1649 | if ui.quiet: |
|
1649 | if ui.quiet: | |
1650 | doc = doc.splitlines()[0] |
|
1650 | doc = doc.splitlines()[0] | |
1651 | keep = ui.verbose and ['verbose'] or [] |
|
1651 | keep = ui.verbose and ['verbose'] or [] | |
1652 | formatted, pruned = minirst.format(doc, textwidth, keep=keep) |
|
1652 | formatted, pruned = minirst.format(doc, textwidth, keep=keep) | |
1653 | ui.write("\n%s\n" % formatted) |
|
1653 | ui.write("\n%s\n" % formatted) | |
1654 | if pruned: |
|
1654 | if pruned: | |
1655 | ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name) |
|
1655 | ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name) | |
1656 |
|
1656 | |||
1657 | if not ui.quiet: |
|
1657 | if not ui.quiet: | |
1658 | # options |
|
1658 | # options | |
1659 | if entry[1]: |
|
1659 | if entry[1]: | |
1660 | option_lists.append((_("options:\n"), entry[1])) |
|
1660 | option_lists.append((_("options:\n"), entry[1])) | |
1661 |
|
1661 | |||
1662 | addglobalopts(False) |
|
1662 | addglobalopts(False) | |
1663 |
|
1663 | |||
1664 | def helplist(header, select=None): |
|
1664 | def helplist(header, select=None): | |
1665 | h = {} |
|
1665 | h = {} | |
1666 | cmds = {} |
|
1666 | cmds = {} | |
1667 | for c, e in table.iteritems(): |
|
1667 | for c, e in table.iteritems(): | |
1668 | f = c.split("|", 1)[0] |
|
1668 | f = c.split("|", 1)[0] | |
1669 | if select and not select(f): |
|
1669 | if select and not select(f): | |
1670 | continue |
|
1670 | continue | |
1671 | if (not select and name != 'shortlist' and |
|
1671 | if (not select and name != 'shortlist' and | |
1672 | e[0].__module__ != __name__): |
|
1672 | e[0].__module__ != __name__): | |
1673 | continue |
|
1673 | continue | |
1674 | if name == "shortlist" and not f.startswith("^"): |
|
1674 | if name == "shortlist" and not f.startswith("^"): | |
1675 | continue |
|
1675 | continue | |
1676 | f = f.lstrip("^") |
|
1676 | f = f.lstrip("^") | |
1677 | if not ui.debugflag and f.startswith("debug"): |
|
1677 | if not ui.debugflag and f.startswith("debug"): | |
1678 | continue |
|
1678 | continue | |
1679 | doc = e[0].__doc__ |
|
1679 | doc = e[0].__doc__ | |
1680 | if doc and 'DEPRECATED' in doc and not ui.verbose: |
|
1680 | if doc and 'DEPRECATED' in doc and not ui.verbose: | |
1681 | continue |
|
1681 | continue | |
1682 | doc = gettext(doc) |
|
1682 | doc = gettext(doc) | |
1683 | if not doc: |
|
1683 | if not doc: | |
1684 | doc = _("(no help text available)") |
|
1684 | doc = _("(no help text available)") | |
1685 | h[f] = doc.splitlines()[0].rstrip() |
|
1685 | h[f] = doc.splitlines()[0].rstrip() | |
1686 | cmds[f] = c.lstrip("^") |
|
1686 | cmds[f] = c.lstrip("^") | |
1687 |
|
1687 | |||
1688 | if not h: |
|
1688 | if not h: | |
1689 | ui.status(_('no commands defined\n')) |
|
1689 | ui.status(_('no commands defined\n')) | |
1690 | return |
|
1690 | return | |
1691 |
|
1691 | |||
1692 | ui.status(header) |
|
1692 | ui.status(header) | |
1693 | fns = sorted(h) |
|
1693 | fns = sorted(h) | |
1694 | m = max(map(len, fns)) |
|
1694 | m = max(map(len, fns)) | |
1695 | for f in fns: |
|
1695 | for f in fns: | |
1696 | if ui.verbose: |
|
1696 | if ui.verbose: | |
1697 | commands = cmds[f].replace("|",", ") |
|
1697 | commands = cmds[f].replace("|",", ") | |
1698 | ui.write(" %s:\n %s\n"%(commands, h[f])) |
|
1698 | ui.write(" %s:\n %s\n"%(commands, h[f])) | |
1699 | else: |
|
1699 | else: | |
1700 | ui.write(' |
|
1700 | ui.write('%s\n' % (util.wrap(h[f], | |
|
1701 | initindent=' %-*s ' % (m, f), | |||
|
1702 | hangindent=' ' * (m + 4)))) | |||
1701 |
|
1703 | |||
1702 | if not ui.quiet: |
|
1704 | if not ui.quiet: | |
1703 | addglobalopts(True) |
|
1705 | addglobalopts(True) | |
1704 |
|
1706 | |||
1705 | def helptopic(name): |
|
1707 | def helptopic(name): | |
1706 | for names, header, doc in help.helptable: |
|
1708 | for names, header, doc in help.helptable: | |
1707 | if name in names: |
|
1709 | if name in names: | |
1708 | break |
|
1710 | break | |
1709 | else: |
|
1711 | else: | |
1710 | raise error.UnknownCommand(name) |
|
1712 | raise error.UnknownCommand(name) | |
1711 |
|
1713 | |||
1712 | # description |
|
1714 | # description | |
1713 | if not doc: |
|
1715 | if not doc: | |
1714 | doc = _("(no help text available)") |
|
1716 | doc = _("(no help text available)") | |
1715 | if hasattr(doc, '__call__'): |
|
1717 | if hasattr(doc, '__call__'): | |
1716 | doc = doc() |
|
1718 | doc = doc() | |
1717 |
|
1719 | |||
1718 | ui.write("%s\n\n" % header) |
|
1720 | ui.write("%s\n\n" % header) | |
1719 | ui.write("%s\n" % minirst.format(doc, textwidth, indent=4)) |
|
1721 | ui.write("%s\n" % minirst.format(doc, textwidth, indent=4)) | |
1720 |
|
1722 | |||
1721 | def helpext(name): |
|
1723 | def helpext(name): | |
1722 | try: |
|
1724 | try: | |
1723 | mod = extensions.find(name) |
|
1725 | mod = extensions.find(name) | |
1724 | doc = gettext(mod.__doc__) or _('no help text available') |
|
1726 | doc = gettext(mod.__doc__) or _('no help text available') | |
1725 | except KeyError: |
|
1727 | except KeyError: | |
1726 | mod = None |
|
1728 | mod = None | |
1727 | doc = extensions.disabledext(name) |
|
1729 | doc = extensions.disabledext(name) | |
1728 | if not doc: |
|
1730 | if not doc: | |
1729 | raise error.UnknownCommand(name) |
|
1731 | raise error.UnknownCommand(name) | |
1730 |
|
1732 | |||
1731 | if '\n' not in doc: |
|
1733 | if '\n' not in doc: | |
1732 | head, tail = doc, "" |
|
1734 | head, tail = doc, "" | |
1733 | else: |
|
1735 | else: | |
1734 | head, tail = doc.split('\n', 1) |
|
1736 | head, tail = doc.split('\n', 1) | |
1735 | ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head)) |
|
1737 | ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head)) | |
1736 | if tail: |
|
1738 | if tail: | |
1737 | ui.write(minirst.format(tail, textwidth)) |
|
1739 | ui.write(minirst.format(tail, textwidth)) | |
1738 | ui.status('\n\n') |
|
1740 | ui.status('\n\n') | |
1739 |
|
1741 | |||
1740 | if mod: |
|
1742 | if mod: | |
1741 | try: |
|
1743 | try: | |
1742 | ct = mod.cmdtable |
|
1744 | ct = mod.cmdtable | |
1743 | except AttributeError: |
|
1745 | except AttributeError: | |
1744 | ct = {} |
|
1746 | ct = {} | |
1745 | modcmds = set([c.split('|', 1)[0] for c in ct]) |
|
1747 | modcmds = set([c.split('|', 1)[0] for c in ct]) | |
1746 | helplist(_('list of commands:\n\n'), modcmds.__contains__) |
|
1748 | helplist(_('list of commands:\n\n'), modcmds.__contains__) | |
1747 | else: |
|
1749 | else: | |
1748 | ui.write(_('use "hg help extensions" for information on enabling ' |
|
1750 | ui.write(_('use "hg help extensions" for information on enabling ' | |
1749 | 'extensions\n')) |
|
1751 | 'extensions\n')) | |
1750 |
|
1752 | |||
1751 | def helpextcmd(name): |
|
1753 | def helpextcmd(name): | |
1752 | cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) |
|
1754 | cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) | |
1753 | doc = gettext(mod.__doc__).splitlines()[0] |
|
1755 | doc = gettext(mod.__doc__).splitlines()[0] | |
1754 |
|
1756 | |||
1755 | msg = help.listexts(_("'%s' is provided by the following " |
|
1757 | msg = help.listexts(_("'%s' is provided by the following " | |
1756 | "extension:") % cmd, {ext: doc}, len(ext), |
|
1758 | "extension:") % cmd, {ext: doc}, len(ext), | |
1757 | indent=4) |
|
1759 | indent=4) | |
1758 | ui.write(minirst.format(msg, textwidth)) |
|
1760 | ui.write(minirst.format(msg, textwidth)) | |
1759 | ui.write('\n\n') |
|
1761 | ui.write('\n\n') | |
1760 | ui.write(_('use "hg help extensions" for information on enabling ' |
|
1762 | ui.write(_('use "hg help extensions" for information on enabling ' | |
1761 | 'extensions\n')) |
|
1763 | 'extensions\n')) | |
1762 |
|
1764 | |||
1763 | if name and name != 'shortlist': |
|
1765 | if name and name != 'shortlist': | |
1764 | i = None |
|
1766 | i = None | |
1765 | if unknowncmd: |
|
1767 | if unknowncmd: | |
1766 | queries = (helpextcmd,) |
|
1768 | queries = (helpextcmd,) | |
1767 | else: |
|
1769 | else: | |
1768 | queries = (helptopic, helpcmd, helpext, helpextcmd) |
|
1770 | queries = (helptopic, helpcmd, helpext, helpextcmd) | |
1769 | for f in queries: |
|
1771 | for f in queries: | |
1770 | try: |
|
1772 | try: | |
1771 | f(name) |
|
1773 | f(name) | |
1772 | i = None |
|
1774 | i = None | |
1773 | break |
|
1775 | break | |
1774 | except error.UnknownCommand, inst: |
|
1776 | except error.UnknownCommand, inst: | |
1775 | i = inst |
|
1777 | i = inst | |
1776 | if i: |
|
1778 | if i: | |
1777 | raise i |
|
1779 | raise i | |
1778 |
|
1780 | |||
1779 | else: |
|
1781 | else: | |
1780 | # program name |
|
1782 | # program name | |
1781 | if ui.verbose or with_version: |
|
1783 | if ui.verbose or with_version: | |
1782 | version_(ui) |
|
1784 | version_(ui) | |
1783 | else: |
|
1785 | else: | |
1784 | ui.status(_("Mercurial Distributed SCM\n")) |
|
1786 | ui.status(_("Mercurial Distributed SCM\n")) | |
1785 | ui.status('\n') |
|
1787 | ui.status('\n') | |
1786 |
|
1788 | |||
1787 | # list of commands |
|
1789 | # list of commands | |
1788 | if name == "shortlist": |
|
1790 | if name == "shortlist": | |
1789 | header = _('basic commands:\n\n') |
|
1791 | header = _('basic commands:\n\n') | |
1790 | else: |
|
1792 | else: | |
1791 | header = _('list of commands:\n\n') |
|
1793 | header = _('list of commands:\n\n') | |
1792 |
|
1794 | |||
1793 | helplist(header) |
|
1795 | helplist(header) | |
1794 | if name != 'shortlist': |
|
1796 | if name != 'shortlist': | |
1795 | exts, maxlength = extensions.enabled() |
|
1797 | exts, maxlength = extensions.enabled() | |
1796 | text = help.listexts(_('enabled extensions:'), exts, maxlength) |
|
1798 | text = help.listexts(_('enabled extensions:'), exts, maxlength) | |
1797 | if text: |
|
1799 | if text: | |
1798 | ui.write("\n%s\n" % minirst.format(text, textwidth)) |
|
1800 | ui.write("\n%s\n" % minirst.format(text, textwidth)) | |
1799 |
|
1801 | |||
1800 | # list all option lists |
|
1802 | # list all option lists | |
1801 | opt_output = [] |
|
1803 | opt_output = [] | |
1802 | for title, options in option_lists: |
|
1804 | for title, options in option_lists: | |
1803 | opt_output.append(("\n%s" % title, None)) |
|
1805 | opt_output.append(("\n%s" % title, None)) | |
1804 | for shortopt, longopt, default, desc in options: |
|
1806 | for shortopt, longopt, default, desc in options: | |
1805 | if _("DEPRECATED") in desc and not ui.verbose: |
|
1807 | if _("DEPRECATED") in desc and not ui.verbose: | |
1806 | continue |
|
1808 | continue | |
1807 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, |
|
1809 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, | |
1808 | longopt and " --%s" % longopt), |
|
1810 | longopt and " --%s" % longopt), | |
1809 | "%s%s" % (desc, |
|
1811 | "%s%s" % (desc, | |
1810 | default |
|
1812 | default | |
1811 | and _(" (default: %s)") % default |
|
1813 | and _(" (default: %s)") % default | |
1812 | or ""))) |
|
1814 | or ""))) | |
1813 |
|
1815 | |||
1814 | if not name: |
|
1816 | if not name: | |
1815 | ui.write(_("\nadditional help topics:\n\n")) |
|
1817 | ui.write(_("\nadditional help topics:\n\n")) | |
1816 | topics = [] |
|
1818 | topics = [] | |
1817 | for names, header, doc in help.helptable: |
|
1819 | for names, header, doc in help.helptable: | |
1818 | topics.append((sorted(names, key=len, reverse=True)[0], header)) |
|
1820 | topics.append((sorted(names, key=len, reverse=True)[0], header)) | |
1819 | topics_len = max([len(s[0]) for s in topics]) |
|
1821 | topics_len = max([len(s[0]) for s in topics]) | |
1820 | for t, desc in topics: |
|
1822 | for t, desc in topics: | |
1821 | ui.write(" %-*s %s\n" % (topics_len, t, desc)) |
|
1823 | ui.write(" %-*s %s\n" % (topics_len, t, desc)) | |
1822 |
|
1824 | |||
1823 | if opt_output: |
|
1825 | if opt_output: | |
1824 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) |
|
1826 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) | |
1825 | for first, second in opt_output: |
|
1827 | for first, second in opt_output: | |
1826 | if second: |
|
1828 | if second: | |
1827 | second = util.wrap(second, opts_len + 3) |
|
1829 | initindent = ' %-*s ' % (opts_len, first) | |
1828 | ui.write(" %-*s %s\n" % (opts_len, first, second)) |
|
1830 | hangindent = ' ' * (opts_len + 3) | |
|
1831 | ui.write('%s\n' % (util.wrap(second, | |||
|
1832 | initindent=initindent, | |||
|
1833 | hangindent=hangindent))) | |||
1829 | else: |
|
1834 | else: | |
1830 | ui.write("%s\n" % first) |
|
1835 | ui.write("%s\n" % first) | |
1831 |
|
1836 | |||
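The hunks above replace fixed-width wrapping of the help output with util.wrap() calls that take an initial and a hanging indent, so continuation lines stay aligned under the description column. The standard library offers the same idea through textwrap; this is a stand-in sketch, not util.wrap's actual implementation::

    import textwrap

    def wrap_option(name, desc, namewidth=14, width=60):
        # first line carries the option name; continuation lines are
        # indented so they line up under the description column
        initindent = ' %-*s  ' % (namewidth, name)
        hangindent = ' ' * (namewidth + 3)
        return textwrap.fill(desc, width=width,
                             initial_indent=initindent,
                             subsequent_indent=hangindent)

    print(wrap_option('-v --verbose',
                      'enable additional output, including details that '
                      'are normally suppressed'))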
1832 | def identify(ui, repo, source=None, |
|
1837 | def identify(ui, repo, source=None, | |
1833 | rev=None, num=None, id=None, branch=None, tags=None): |
|
1838 | rev=None, num=None, id=None, branch=None, tags=None): | |
1834 | """identify the working copy or specified revision |
|
1839 | """identify the working copy or specified revision | |
1835 |
|
1840 | |||
1836 | With no revision, print a summary of the current state of the |
|
1841 | With no revision, print a summary of the current state of the | |
1837 | repository. |
|
1842 | repository. | |
1838 |
|
1843 | |||
1839 | Specifying a path to a repository root or Mercurial bundle will |
|
1844 | Specifying a path to a repository root or Mercurial bundle will | |
1840 | cause lookup to operate on that repository/bundle. |
|
1845 | cause lookup to operate on that repository/bundle. | |
1841 |
|
1846 | |||
1842 | This summary identifies the repository state using one or two |
|
1847 | This summary identifies the repository state using one or two | |
1843 | parent hash identifiers, followed by a "+" if there are |
|
1848 | parent hash identifiers, followed by a "+" if there are | |
1844 | uncommitted changes in the working directory, a list of tags for |
|
1849 | uncommitted changes in the working directory, a list of tags for | |
1845 | this revision and a branch name for non-default branches. |
|
1850 | this revision and a branch name for non-default branches. | |
1846 |
|
1851 | |||
1847 | Returns 0 if successful. |
|
1852 | Returns 0 if successful. | |
1848 | """ |
|
1853 | """ | |
1849 |
|
1854 | |||
1850 | if not repo and not source: |
|
1855 | if not repo and not source: | |
1851 | raise util.Abort(_("There is no Mercurial repository here " |
|
1856 | raise util.Abort(_("There is no Mercurial repository here " | |
1852 | "(.hg not found)")) |
|
1857 | "(.hg not found)")) | |
1853 |
|
1858 | |||
1854 | hexfunc = ui.debugflag and hex or short |
|
1859 | hexfunc = ui.debugflag and hex or short | |
1855 | default = not (num or id or branch or tags) |
|
1860 | default = not (num or id or branch or tags) | |
1856 | output = [] |
|
1861 | output = [] | |
1857 |
|
1862 | |||
1858 | revs = [] |
|
1863 | revs = [] | |
1859 | if source: |
|
1864 | if source: | |
1860 | source, branches = hg.parseurl(ui.expandpath(source)) |
|
1865 | source, branches = hg.parseurl(ui.expandpath(source)) | |
1861 | repo = hg.repository(ui, source) |
|
1866 | repo = hg.repository(ui, source) | |
1862 | revs, checkout = hg.addbranchrevs(repo, repo, branches, None) |
|
1867 | revs, checkout = hg.addbranchrevs(repo, repo, branches, None) | |
1863 |
|
1868 | |||
1864 | if not repo.local(): |
|
1869 | if not repo.local(): | |
1865 | if not rev and revs: |
|
1870 | if not rev and revs: | |
1866 | rev = revs[0] |
|
1871 | rev = revs[0] | |
1867 | if not rev: |
|
1872 | if not rev: | |
1868 | rev = "tip" |
|
1873 | rev = "tip" | |
1869 | if num or branch or tags: |
|
1874 | if num or branch or tags: | |
1870 | raise util.Abort( |
|
1875 | raise util.Abort( | |
1871 | "can't query remote revision number, branch, or tags") |
|
1876 | "can't query remote revision number, branch, or tags") | |
1872 | output = [hexfunc(repo.lookup(rev))] |
|
1877 | output = [hexfunc(repo.lookup(rev))] | |
1873 | elif not rev: |
|
1878 | elif not rev: | |
1874 | ctx = repo[None] |
|
1879 | ctx = repo[None] | |
1875 | parents = ctx.parents() |
|
1880 | parents = ctx.parents() | |
1876 | changed = False |
|
1881 | changed = False | |
1877 | if default or id or num: |
|
1882 | if default or id or num: | |
1878 | changed = util.any(repo.status()) |
|
1883 | changed = util.any(repo.status()) | |
1879 | if default or id: |
|
1884 | if default or id: | |
1880 | output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), |
|
1885 | output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), | |
1881 | (changed) and "+" or "")] |
|
1886 | (changed) and "+" or "")] | |
1882 | if num: |
|
1887 | if num: | |
1883 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), |
|
1888 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), | |
1884 | (changed) and "+" or "")) |
|
1889 | (changed) and "+" or "")) | |
1885 | else: |
|
1890 | else: | |
1886 | ctx = repo[rev] |
|
1891 | ctx = repo[rev] | |
1887 | if default or id: |
|
1892 | if default or id: | |
1888 | output = [hexfunc(ctx.node())] |
|
1893 | output = [hexfunc(ctx.node())] | |
1889 | if num: |
|
1894 | if num: | |
1890 | output.append(str(ctx.rev())) |
|
1895 | output.append(str(ctx.rev())) | |
1891 |
|
1896 | |||
1892 | if repo.local() and default and not ui.quiet: |
|
1897 | if repo.local() and default and not ui.quiet: | |
1893 | b = encoding.tolocal(ctx.branch()) |
|
1898 | b = encoding.tolocal(ctx.branch()) | |
1894 | if b != 'default': |
|
1899 | if b != 'default': | |
1895 | output.append("(%s)" % b) |
|
1900 | output.append("(%s)" % b) | |
1896 |
|
1901 | |||
1897 | # multiple tags for a single parent separated by '/' |
|
1902 | # multiple tags for a single parent separated by '/' | |
1898 | t = "/".join(ctx.tags()) |
|
1903 | t = "/".join(ctx.tags()) | |
1899 | if t: |
|
1904 | if t: | |
1900 | output.append(t) |
|
1905 | output.append(t) | |
1901 |
|
1906 | |||
1902 | if branch: |
|
1907 | if branch: | |
1903 | output.append(encoding.tolocal(ctx.branch())) |
|
1908 | output.append(encoding.tolocal(ctx.branch())) | |
1904 |
|
1909 | |||
1905 | if tags: |
|
1910 | if tags: | |
1906 | output.extend(ctx.tags()) |
|
1911 | output.extend(ctx.tags()) | |
1907 |
|
1912 | |||
1908 | ui.write("%s\n" % ' '.join(output)) |
|
1913 | ui.write("%s\n" % ' '.join(output)) | |
1909 |
|
1914 | |||
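identify's summary is one or two parent hashes joined by '+', a trailing '+' when the working directory has uncommitted changes, then a non-default branch name and any tags. A small sketch assembling that string from plain values (the short hash is made up; no repository is consulted)::

    def identify_line(parent_hashes, dirty=False, branch='default', tags=()):
        # join parent ids with '+', append '+' for local changes, then add
        # the branch (only if non-default) and the '/'-joined tags
        out = ['+'.join(parent_hashes) + ('+' if dirty else '')]
        if branch != 'default':
            out.append('(%s)' % branch)
        if tags:
            out.append('/'.join(tags))
        return ' '.join(out)

    print(identify_line(['c19f5e44add9'], dirty=True, tags=['tip']))
    # c19f5e44add9+ tip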
1910 | def import_(ui, repo, patch1, *patches, **opts): |
|
1915 | def import_(ui, repo, patch1, *patches, **opts): | |
1911 | """import an ordered set of patches |
|
1916 | """import an ordered set of patches | |
1912 |
|
1917 | |||
1913 | Import a list of patches and commit them individually (unless |
|
1918 | Import a list of patches and commit them individually (unless | |
1914 | --no-commit is specified). |
|
1919 | --no-commit is specified). | |
1915 |
|
1920 | |||
1916 | If there are outstanding changes in the working directory, import |
|
1921 | If there are outstanding changes in the working directory, import | |
1917 | will abort unless given the -f/--force flag. |
|
1922 | will abort unless given the -f/--force flag. | |
1918 |
|
1923 | |||
1919 | You can import a patch straight from a mail message. Even patches |
|
1924 | You can import a patch straight from a mail message. Even patches | |
1920 | as attachments work (to use the body part, it must have type |
|
1925 | as attachments work (to use the body part, it must have type | |
1921 | text/plain or text/x-patch). From and Subject headers of the email |
|
1926 | text/plain or text/x-patch). From and Subject headers of the email | |
1922 | message are used as the default committer and commit message. All |
|
1927 | message are used as the default committer and commit message. All | |
1923 | text/plain body parts before the first diff are added to the |
|
1928 | text/plain body parts before the first diff are added to the | |
1924 | commit message. |
|
1929 | commit message. | |
1925 |
|
1930 | |||
1926 | If the imported patch was generated by :hg:`export`, user and |
|
1931 | If the imported patch was generated by :hg:`export`, user and | |
1927 | description from patch override values from message headers and |
|
1932 | description from patch override values from message headers and | |
1928 | body. Values given on command line with -m/--message and -u/--user |
|
1933 | body. Values given on command line with -m/--message and -u/--user | |
1929 | override these. |
|
1934 | override these. | |
1930 |
|
1935 | |||
1931 | If --exact is specified, import will set the working directory to |
|
1936 | If --exact is specified, import will set the working directory to | |
1932 | the parent of each patch before applying it, and will abort if the |
|
1937 | the parent of each patch before applying it, and will abort if the | |
1933 | resulting changeset has a different ID than the one recorded in |
|
1938 | resulting changeset has a different ID than the one recorded in | |
1934 | the patch. This may happen due to character set problems or other |
|
1939 | the patch. This may happen due to character set problems or other | |
1935 | deficiencies in the text patch format. |
|
1940 | deficiencies in the text patch format. | |
1936 |
|
1941 | |||
1937 | With -s/--similarity, hg will attempt to discover renames and |
|
1942 | With -s/--similarity, hg will attempt to discover renames and | |
1938 | copies in the patch in the same way as 'addremove'. |
|
1943 | copies in the patch in the same way as 'addremove'. | |
1939 |
|
1944 | |||
1940 | To read a patch from standard input, use "-" as the patch name. If |
|
1945 | To read a patch from standard input, use "-" as the patch name. If | |
1941 | a URL is specified, the patch will be downloaded from it. |
|
1946 | a URL is specified, the patch will be downloaded from it. | |
1942 | See :hg:`help dates` for a list of formats valid for -d/--date. |
|
1947 | See :hg:`help dates` for a list of formats valid for -d/--date. | |
1943 |
|
1948 | |||
1944 | Returns 0 on success. |
|
1949 | Returns 0 on success. | |
1945 | """ |
|
1950 | """ | |
1946 | patches = (patch1,) + patches |
|
1951 | patches = (patch1,) + patches | |
1947 |
|
1952 | |||
1948 | date = opts.get('date') |
|
1953 | date = opts.get('date') | |
1949 | if date: |
|
1954 | if date: | |
1950 | opts['date'] = util.parsedate(date) |
|
1955 | opts['date'] = util.parsedate(date) | |
1951 |
|
1956 | |||
1952 | try: |
|
1957 | try: | |
1953 | sim = float(opts.get('similarity') or 0) |
|
1958 | sim = float(opts.get('similarity') or 0) | |
1954 | except ValueError: |
|
1959 | except ValueError: | |
1955 | raise util.Abort(_('similarity must be a number')) |
|
1960 | raise util.Abort(_('similarity must be a number')) | |
1956 | if sim < 0 or sim > 100: |
|
1961 | if sim < 0 or sim > 100: | |
1957 | raise util.Abort(_('similarity must be between 0 and 100')) |
|
1962 | raise util.Abort(_('similarity must be between 0 and 100')) | |
1958 |
|
1963 | |||
1959 | if opts.get('exact') or not opts.get('force'): |
|
1964 | if opts.get('exact') or not opts.get('force'): | |
1960 | cmdutil.bail_if_changed(repo) |
|
1965 | cmdutil.bail_if_changed(repo) | |
1961 |
|
1966 | |||
1962 | d = opts["base"] |
|
1967 | d = opts["base"] | |
1963 | strip = opts["strip"] |
|
1968 | strip = opts["strip"] | |
1964 | wlock = lock = None |
|
1969 | wlock = lock = None | |
1965 |
|
1970 | |||
1966 | def tryone(ui, hunk): |
|
1971 | def tryone(ui, hunk): | |
1967 | tmpname, message, user, date, branch, nodeid, p1, p2 = \ |
|
1972 | tmpname, message, user, date, branch, nodeid, p1, p2 = \ | |
1968 | patch.extract(ui, hunk) |
|
1973 | patch.extract(ui, hunk) | |
1969 |
|
1974 | |||
1970 | if not tmpname: |
|
1975 | if not tmpname: | |
1971 | return None |
|
1976 | return None | |
1972 | commitid = _('to working directory') |
|
1977 | commitid = _('to working directory') | |
1973 |
|
1978 | |||
1974 | try: |
|
1979 | try: | |
1975 | cmdline_message = cmdutil.logmessage(opts) |
|
1980 | cmdline_message = cmdutil.logmessage(opts) | |
1976 | if cmdline_message: |
|
1981 | if cmdline_message: | |
1977 | # pick up the cmdline msg |
|
1982 | # pick up the cmdline msg | |
1978 | message = cmdline_message |
|
1983 | message = cmdline_message | |
1979 | elif message: |
|
1984 | elif message: | |
1980 | # pick up the patch msg |
|
1985 | # pick up the patch msg | |
1981 | message = message.strip() |
|
1986 | message = message.strip() | |
1982 | else: |
|
1987 | else: | |
1983 | # launch the editor |
|
1988 | # launch the editor | |
1984 | message = None |
|
1989 | message = None | |
1985 | ui.debug('message:\n%s\n' % message) |
|
1990 | ui.debug('message:\n%s\n' % message) | |
1986 |
|
1991 | |||
1987 | wp = repo.parents() |
|
1992 | wp = repo.parents() | |
1988 | if opts.get('exact'): |
|
1993 | if opts.get('exact'): | |
1989 | if not nodeid or not p1: |
|
1994 | if not nodeid or not p1: | |
1990 | raise util.Abort(_('not a Mercurial patch')) |
|
1995 | raise util.Abort(_('not a Mercurial patch')) | |
1991 | p1 = repo.lookup(p1) |
|
1996 | p1 = repo.lookup(p1) | |
1992 | p2 = repo.lookup(p2 or hex(nullid)) |
|
1997 | p2 = repo.lookup(p2 or hex(nullid)) | |
1993 |
|
1998 | |||
1994 | if p1 != wp[0].node(): |
|
1999 | if p1 != wp[0].node(): | |
1995 | hg.clean(repo, p1) |
|
2000 | hg.clean(repo, p1) | |
1996 | repo.dirstate.setparents(p1, p2) |
|
2001 | repo.dirstate.setparents(p1, p2) | |
1997 | elif p2: |
|
2002 | elif p2: | |
1998 | try: |
|
2003 | try: | |
1999 | p1 = repo.lookup(p1) |
|
2004 | p1 = repo.lookup(p1) | |
2000 | p2 = repo.lookup(p2) |
|
2005 | p2 = repo.lookup(p2) | |
2001 | if p1 == wp[0].node(): |
|
2006 | if p1 == wp[0].node(): | |
2002 | repo.dirstate.setparents(p1, p2) |
|
2007 | repo.dirstate.setparents(p1, p2) | |
2003 | except error.RepoError: |
|
2008 | except error.RepoError: | |
2004 | pass |
|
2009 | pass | |
2005 | if opts.get('exact') or opts.get('import_branch'): |
|
2010 | if opts.get('exact') or opts.get('import_branch'): | |
2006 | repo.dirstate.setbranch(branch or 'default') |
|
2011 | repo.dirstate.setbranch(branch or 'default') | |
2007 |
|
2012 | |||
2008 | files = {} |
|
2013 | files = {} | |
2009 | try: |
|
2014 | try: | |
2010 | patch.patch(tmpname, ui, strip=strip, cwd=repo.root, |
|
2015 | patch.patch(tmpname, ui, strip=strip, cwd=repo.root, | |
2011 | files=files, eolmode=None) |
|
2016 | files=files, eolmode=None) | |
2012 | finally: |
|
2017 | finally: | |
2013 | files = patch.updatedir(ui, repo, files, |
|
2018 | files = patch.updatedir(ui, repo, files, | |
2014 | similarity=sim / 100.0) |
|
2019 | similarity=sim / 100.0) | |
2015 | if not opts.get('no_commit'): |
|
2020 | if not opts.get('no_commit'): | |
2016 | if opts.get('exact'): |
|
2021 | if opts.get('exact'): | |
2017 | m = None |
|
2022 | m = None | |
2018 | else: |
|
2023 | else: | |
2019 | m = cmdutil.matchfiles(repo, files or []) |
|
2024 | m = cmdutil.matchfiles(repo, files or []) | |
2020 | n = repo.commit(message, opts.get('user') or user, |
|
2025 | n = repo.commit(message, opts.get('user') or user, | |
2021 | opts.get('date') or date, match=m, |
|
2026 | opts.get('date') or date, match=m, | |
2022 | editor=cmdutil.commiteditor) |
|
2027 | editor=cmdutil.commiteditor) | |
2023 | if opts.get('exact'): |
|
2028 | if opts.get('exact'): | |
2024 | if hex(n) != nodeid: |
|
2029 | if hex(n) != nodeid: | |
2025 | repo.rollback() |
|
2030 | repo.rollback() | |
2026 | raise util.Abort(_('patch is damaged' |
|
2031 | raise util.Abort(_('patch is damaged' | |
2027 | ' or loses information')) |
|
2032 | ' or loses information')) | |
2028 | # Force a dirstate write so that the next transaction |
|
2033 | # Force a dirstate write so that the next transaction | |
2029 | # backs up an up-to-date file. |
|
2034 | # backs up an up-to-date file. | |
2030 | repo.dirstate.write() |
|
2035 | repo.dirstate.write() | |
2031 | if n: |
|
2036 | if n: | |
2032 | commitid = short(n) |
|
2037 | commitid = short(n) | |
2033 |
|
2038 | |||
2034 | return commitid |
|
2039 | return commitid | |
2035 | finally: |
|
2040 | finally: | |
2036 | os.unlink(tmpname) |
|
2041 | os.unlink(tmpname) | |
2037 |
|
2042 | |||
2038 | try: |
|
2043 | try: | |
2039 | wlock = repo.wlock() |
|
2044 | wlock = repo.wlock() | |
2040 | lock = repo.lock() |
|
2045 | lock = repo.lock() | |
2041 | lastcommit = None |
|
2046 | lastcommit = None | |
2042 | for p in patches: |
|
2047 | for p in patches: | |
2043 | pf = os.path.join(d, p) |
|
2048 | pf = os.path.join(d, p) | |
2044 |
|
2049 | |||
2045 | if pf == '-': |
|
2050 | if pf == '-': | |
2046 | ui.status(_("applying patch from stdin\n")) |
|
2051 | ui.status(_("applying patch from stdin\n")) | |
2047 | pf = sys.stdin |
|
2052 | pf = sys.stdin | |
2048 | else: |
|
2053 | else: | |
2049 | ui.status(_("applying %s\n") % p) |
|
2054 | ui.status(_("applying %s\n") % p) | |
2050 | pf = url.open(ui, pf) |
|
2055 | pf = url.open(ui, pf) | |
2051 |
|
2056 | |||
2052 | haspatch = False |
|
2057 | haspatch = False | |
2053 | for hunk in patch.split(pf): |
|
2058 | for hunk in patch.split(pf): | |
2054 | commitid = tryone(ui, hunk) |
|
2059 | commitid = tryone(ui, hunk) | |
2055 | if commitid: |
|
2060 | if commitid: | |
2056 | haspatch = True |
|
2061 | haspatch = True | |
2057 | if lastcommit: |
|
2062 | if lastcommit: | |
2058 | ui.status(_('applied %s\n') % lastcommit) |
|
2063 | ui.status(_('applied %s\n') % lastcommit) | |
2059 | lastcommit = commitid |
|
2064 | lastcommit = commitid | |
2060 |
|
2065 | |||
2061 | if not haspatch: |
|
2066 | if not haspatch: | |
2062 | raise util.Abort(_('no diffs found')) |
|
2067 | raise util.Abort(_('no diffs found')) | |
2063 |
|
2068 | |||
2064 | finally: |
|
2069 | finally: | |
2065 | release(lock, wlock) |
|
2070 | release(lock, wlock) | |
2066 |
|
2071 | |||
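The import docstring says a patch can be taken straight from a mail message, with the From and Subject headers supplying the default user and the first line of the commit message, and any text/plain content before the first diff folded into the message body. A rough standalone sketch of that extraction using the standard email module; Mercurial's own patch.extract() handles many more cases (MIME attachments, encodings, hg export headers), and the sample mail below is made up::

    import email

    def extract_from_mail(text):
        # Subject becomes the first message line, From the committer; keep
        # text/plain content only up to the first 'diff ' line
        msg = email.message_from_string(text)
        user = msg.get('From', '')
        lines = [msg.get('Subject', '')]
        for part in msg.walk():
            if part.get_content_type() != 'text/plain':
                continue
            body = part.get_payload(decode=True).decode('utf-8', 'replace')
            for line in body.splitlines():
                if line.startswith('diff '):
                    return user, '\n'.join(lines).strip()
                lines.append(line)
        return user, '\n'.join(lines).strip()

    mail = ("From: Jane Doe <jane@example.com>\n"
            "Subject: commands: fix help wrapping\n"
            "\n"
            "Wrap option descriptions with a hanging indent.\n"
            "\n"
            "diff -r 000000000000 -r 111111111111 mercurial/commands.py\n")
    user, message = extract_from_mail(mail)
    print(user)      # Jane Doe <jane@example.com>
    print(message)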
2067 | def incoming(ui, repo, source="default", **opts): |
|
2072 | def incoming(ui, repo, source="default", **opts): | |
2068 | """show new changesets found in source |
|
2073 | """show new changesets found in source | |
2069 |
|
2074 | |||
2070 | Show new changesets found in the specified path/URL or the default |
|
2075 | Show new changesets found in the specified path/URL or the default | |
2071 | pull location. These are the changesets that would have been pulled |
|
2076 | pull location. These are the changesets that would have been pulled | |
2072 | if a pull was requested at the time you issued this command. |
|
2077 | if a pull was requested at the time you issued this command. | |
2073 |
|
2078 | |||
2074 | For a remote repository, using --bundle avoids downloading the |
|
2079 | For a remote repository, using --bundle avoids downloading the | |
2075 | changesets twice if the incoming is followed by a pull. |
|
2080 | changesets twice if the incoming is followed by a pull. | |
2076 |
|
2081 | |||
2077 | See pull for valid source format details. |
|
2082 | See pull for valid source format details. | |
2078 |
|
2083 | |||
2079 | Returns 0 if there are incoming changes, 1 otherwise. |
|
2084 | Returns 0 if there are incoming changes, 1 otherwise. | |
2080 | """ |
|
2085 | """ | |
2081 | limit = cmdutil.loglimit(opts) |
|
2086 | limit = cmdutil.loglimit(opts) | |
2082 | source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) |
|
2087 | source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) | |
2083 | other = hg.repository(hg.remoteui(repo, opts), source) |
|
2088 | other = hg.repository(hg.remoteui(repo, opts), source) | |
2084 | ui.status(_('comparing with %s\n') % url.hidepassword(source)) |
|
2089 | ui.status(_('comparing with %s\n') % url.hidepassword(source)) | |
2085 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) |
|
2090 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) | |
2086 | if revs: |
|
2091 | if revs: | |
2087 | revs = [other.lookup(rev) for rev in revs] |
|
2092 | revs = [other.lookup(rev) for rev in revs] | |
2088 | common, incoming, rheads = repo.findcommonincoming(other, heads=revs, |
|
2093 | common, incoming, rheads = repo.findcommonincoming(other, heads=revs, | |
2089 | force=opts["force"]) |
|
2094 | force=opts["force"]) | |
2090 | if not incoming: |
|
2095 | if not incoming: | |
2091 | try: |
|
2096 | try: | |
2092 | os.unlink(opts["bundle"]) |
|
2097 | os.unlink(opts["bundle"]) | |
2093 | except: |
|
2098 | except: | |
2094 | pass |
|
2099 | pass | |
2095 | ui.status(_("no changes found\n")) |
|
2100 | ui.status(_("no changes found\n")) | |
2096 | return 1 |
|
2101 | return 1 | |
2097 |
|
2102 | |||
2098 | cleanup = None |
|
2103 | cleanup = None | |
2099 | try: |
|
2104 | try: | |
2100 | fname = opts["bundle"] |
|
2105 | fname = opts["bundle"] | |
2101 | if fname or not other.local(): |
|
2106 | if fname or not other.local(): | |
2102 | # create a bundle (uncompressed if other repo is not local) |
|
2107 | # create a bundle (uncompressed if other repo is not local) | |
2103 |
|
2108 | |||
2104 | if revs is None and other.capable('changegroupsubset'): |
|
2109 | if revs is None and other.capable('changegroupsubset'): | |
2105 | revs = rheads |
|
2110 | revs = rheads | |
2106 |
|
2111 | |||
2107 | if revs is None: |
|
2112 | if revs is None: | |
2108 | cg = other.changegroup(incoming, "incoming") |
|
2113 | cg = other.changegroup(incoming, "incoming") | |
2109 | else: |
|
2114 | else: | |
2110 | cg = other.changegroupsubset(incoming, revs, 'incoming') |
|
2115 | cg = other.changegroupsubset(incoming, revs, 'incoming') | |
2111 | bundletype = other.local() and "HG10BZ" or "HG10UN" |
|
2116 | bundletype = other.local() and "HG10BZ" or "HG10UN" | |
2112 | fname = cleanup = changegroup.writebundle(cg, fname, bundletype) |
|
2117 | fname = cleanup = changegroup.writebundle(cg, fname, bundletype) | |
2113 | # keep written bundle? |
|
2118 | # keep written bundle? | |
2114 | if opts["bundle"]: |
|
2119 | if opts["bundle"]: | |
2115 | cleanup = None |
|
2120 | cleanup = None | |
2116 | if not other.local(): |
|
2121 | if not other.local(): | |
2117 | # use the created uncompressed bundlerepo |
|
2122 | # use the created uncompressed bundlerepo | |
2118 | other = bundlerepo.bundlerepository(ui, repo.root, fname) |
|
2123 | other = bundlerepo.bundlerepository(ui, repo.root, fname) | |
2119 |
|
2124 | |||
2120 | o = other.changelog.nodesbetween(incoming, revs)[0] |
|
2125 | o = other.changelog.nodesbetween(incoming, revs)[0] | |
2121 | if opts.get('newest_first'): |
|
2126 | if opts.get('newest_first'): | |
2122 | o.reverse() |
|
2127 | o.reverse() | |
2123 | displayer = cmdutil.show_changeset(ui, other, opts) |
|
2128 | displayer = cmdutil.show_changeset(ui, other, opts) | |
2124 | count = 0 |
|
2129 | count = 0 | |
2125 | for n in o: |
|
2130 | for n in o: | |
2126 | if limit is not None and count >= limit: |
|
2131 | if limit is not None and count >= limit: | |
2127 | break |
|
2132 | break | |
2128 | parents = [p for p in other.changelog.parents(n) if p != nullid] |
|
2133 | parents = [p for p in other.changelog.parents(n) if p != nullid] | |
2129 | if opts.get('no_merges') and len(parents) == 2: |
|
2134 | if opts.get('no_merges') and len(parents) == 2: | |
2130 | continue |
|
2135 | continue | |
2131 | count += 1 |
|
2136 | count += 1 | |
2132 | displayer.show(other[n]) |
|
2137 | displayer.show(other[n]) | |
2133 | displayer.close() |
|
2138 | displayer.close() | |
2134 | finally: |
|
2139 | finally: | |
2135 | if hasattr(other, 'close'): |
|
2140 | if hasattr(other, 'close'): | |
2136 | other.close() |
|
2141 | other.close() | |
2137 | if cleanup: |
|
2142 | if cleanup: | |
2138 | os.unlink(cleanup) |
|
2143 | os.unlink(cleanup) | |
2139 |
|
2144 | |||
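The display loop above walks the incoming changesets, optionally newest first, skips merges (two parents) when --no-merges is given, and stops once --limit entries have been shown. The same filtering over plain tuples, as an illustrative sketch::

    def select_incoming(changesets, limit=None, no_merges=False, newest_first=False):
        # changesets: (rev, parents) pairs, oldest first as nodesbetween returns
        if newest_first:
            changesets = list(reversed(changesets))
        shown = []
        for rev, parents in changesets:
            if limit is not None and len(shown) >= limit:
                break
            if no_merges and len(parents) == 2:
                continue
            shown.append(rev)
        return shown

    incoming = [(10, [9]), (11, [10, 8]), (12, [11])]
    print(select_incoming(incoming, no_merges=True))              # [10, 12]
    print(select_incoming(incoming, limit=1, newest_first=True))  # [12]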
2140 | def init(ui, dest=".", **opts): |
|
2145 | def init(ui, dest=".", **opts): | |
2141 | """create a new repository in the given directory |
|
2146 | """create a new repository in the given directory | |
2142 |
|
2147 | |||
2143 | Initialize a new repository in the given directory. If the given |
|
2148 | Initialize a new repository in the given directory. If the given | |
2144 | directory does not exist, it will be created. |
|
2149 | directory does not exist, it will be created. | |
2145 |
|
2150 | |||
2146 | If no directory is given, the current directory is used. |
|
2151 | If no directory is given, the current directory is used. | |
2147 |
|
2152 | |||
2148 | It is possible to specify an ``ssh://`` URL as the destination. |
|
2153 | It is possible to specify an ``ssh://`` URL as the destination. | |
2149 | See :hg:`help urls` for more information. |
|
2154 | See :hg:`help urls` for more information. | |
2150 |
|
2155 | |||
2151 | Returns 0 on success. |
|
2156 | Returns 0 on success. | |
2152 | """ |
|
2157 | """ | |
2153 | hg.repository(hg.remoteui(ui, opts), dest, create=1) |
|
2158 | hg.repository(hg.remoteui(ui, opts), dest, create=1) | |
2154 |
|
2159 | |||
2155 | def locate(ui, repo, *pats, **opts): |
|
2160 | def locate(ui, repo, *pats, **opts): | |
2156 | """locate files matching specific patterns |
|
2161 | """locate files matching specific patterns | |
2157 |
|
2162 | |||
2158 | Print files under Mercurial control in the working directory whose |
|
2163 | Print files under Mercurial control in the working directory whose | |
2159 | names match the given patterns. |
|
2164 | names match the given patterns. | |
2160 |
|
2165 | |||
2161 | By default, this command searches all directories in the working |
|
2166 | By default, this command searches all directories in the working | |
2162 | directory. To search just the current directory and its |
|
2167 | directory. To search just the current directory and its | |
2163 | subdirectories, use "--include .". |
|
2168 | subdirectories, use "--include .". | |
2164 |
|
2169 | |||
2165 | If no patterns are given to match, this command prints the names |
|
2170 | If no patterns are given to match, this command prints the names | |
2166 | of all files under Mercurial control in the working directory. |
|
2171 | of all files under Mercurial control in the working directory. | |
2167 |
|
2172 | |||
2168 | If you want to feed the output of this command into the "xargs" |
|
2173 | If you want to feed the output of this command into the "xargs" | |
2169 | command, use the -0 option to both this command and "xargs". This |
|
2174 | command, use the -0 option to both this command and "xargs". This | |
2170 | will avoid the problem of "xargs" treating single filenames that |
|
2175 | will avoid the problem of "xargs" treating single filenames that | |
2171 | contain whitespace as multiple filenames. |
|
2176 | contain whitespace as multiple filenames. | |
2172 |
|
2177 | |||
2173 | Returns 0 if a match is found, 1 otherwise. |
|
2178 | Returns 0 if a match is found, 1 otherwise. | |
2174 | """ |
|
2179 | """ | |
2175 | end = opts.get('print0') and '\0' or '\n' |
|
2180 | end = opts.get('print0') and '\0' or '\n' | |
2176 | rev = opts.get('rev') or None |
|
2181 | rev = opts.get('rev') or None | |
2177 |
|
2182 | |||
2178 | ret = 1 |
|
2183 | ret = 1 | |
2179 | m = cmdutil.match(repo, pats, opts, default='relglob') |
|
2184 | m = cmdutil.match(repo, pats, opts, default='relglob') | |
2180 | m.bad = lambda x, y: False |
|
2185 | m.bad = lambda x, y: False | |
2181 | for abs in repo[rev].walk(m): |
|
2186 | for abs in repo[rev].walk(m): | |
2182 | if not rev and abs not in repo.dirstate: |
|
2187 | if not rev and abs not in repo.dirstate: | |
2183 | continue |
|
2188 | continue | |
2184 | if opts.get('fullpath'): |
|
2189 | if opts.get('fullpath'): | |
2185 | ui.write(repo.wjoin(abs), end) |
|
2190 | ui.write(repo.wjoin(abs), end) | |
2186 | else: |
|
2191 | else: | |
2187 | ui.write(((pats and m.rel(abs)) or abs), end) |
|
2192 | ui.write(((pats and m.rel(abs)) or abs), end) | |
2188 | ret = 0 |
|
2193 | ret = 0 | |
2189 |
|
2194 | |||
2190 | return ret |
|
2195 | return ret | |
2191 |
|
2196 | |||
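locate's docstring recommends pairing -0 with xargs so filenames containing whitespace are not split; the only difference is the record terminator, as this sketch of the ``end = opts.get('print0') and '\0' or '\n'`` choice shows::

    import sys

    def write_names(names, print0=False):
        # terminate every name with NUL for `xargs -0`, or with a newline
        # otherwise, mirroring the locate output loop
        end = '\0' if print0 else '\n'
        for name in names:
            sys.stdout.write(name + end)

    write_names(['has space.txt', 'plain.txt'])        # newline-terminated
    write_names(['has space.txt', 'plain.txt'], True)  # NUL-terminated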
2192 | def log(ui, repo, *pats, **opts): |
|
2197 | def log(ui, repo, *pats, **opts): | |
2193 | """show revision history of entire repository or files |
|
2198 | """show revision history of entire repository or files | |
2194 |
|
2199 | |||
2195 | Print the revision history of the specified files or the entire |
|
2200 | Print the revision history of the specified files or the entire | |
2196 | project. |
|
2201 | project. | |
2197 |
|
2202 | |||
2198 | File history is shown without following rename or copy history of |
|
2203 | File history is shown without following rename or copy history of | |
2199 | files. Use -f/--follow with a filename to follow history across |
|
2204 | files. Use -f/--follow with a filename to follow history across | |
2200 | renames and copies. --follow without a filename will only show |
|
2205 | renames and copies. --follow without a filename will only show | |
2201 | ancestors or descendants of the starting revision. --follow-first |
|
2206 | ancestors or descendants of the starting revision. --follow-first | |
2202 | only follows the first parent of merge revisions. |
|
2207 | only follows the first parent of merge revisions. | |
2203 |
|
2208 | |||
2204 | If no revision range is specified, the default is tip:0 unless |
|
2209 | If no revision range is specified, the default is tip:0 unless | |
2205 | --follow is set, in which case the working directory parent is |
|
2210 | --follow is set, in which case the working directory parent is | |
2206 | used as the starting revision. |
|
2211 | used as the starting revision. | |
2207 |
|
2212 | |||
2208 | See :hg:`help dates` for a list of formats valid for -d/--date. |
|
2213 | See :hg:`help dates` for a list of formats valid for -d/--date. | |
2209 |
|
2214 | |||
2210 | By default this command prints revision number and changeset id, |
|
2215 | By default this command prints revision number and changeset id, | |
2211 | tags, non-trivial parents, user, date and time, and a summary for |
|
2216 | tags, non-trivial parents, user, date and time, and a summary for | |
2212 | each commit. When the -v/--verbose switch is used, the list of |
|
2217 | each commit. When the -v/--verbose switch is used, the list of | |
2213 | changed files and full commit message are shown. |
|
2218 | changed files and full commit message are shown. | |
2214 |
|
2219 | |||
2215 | NOTE: log -p/--patch may generate unexpected diff output for merge |
|
2220 | NOTE: log -p/--patch may generate unexpected diff output for merge | |
2216 | changesets, as it will only compare the merge changeset against |
|
2221 | changesets, as it will only compare the merge changeset against | |
2217 | its first parent. Also, only files different from BOTH parents |
|
2222 | its first parent. Also, only files different from BOTH parents | |
2218 | will appear in files:. |
|
2223 | will appear in files:. | |
2219 |
|
2224 | |||
2220 | Returns 0 on success. |
|
2225 | Returns 0 on success. | |
2221 | """ |
|
2226 | """ | |
2222 |
|
2227 | |||
2223 | matchfn = cmdutil.match(repo, pats, opts) |
|
2228 | matchfn = cmdutil.match(repo, pats, opts) | |
2224 | limit = cmdutil.loglimit(opts) |
|
2229 | limit = cmdutil.loglimit(opts) | |
2225 | count = 0 |
|
2230 | count = 0 | |
2226 |
|
2231 | |||
2227 | endrev = None |
|
2232 | endrev = None | |
2228 | if opts.get('copies') and opts.get('rev'): |
|
2233 | if opts.get('copies') and opts.get('rev'): | |
2229 | endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1 |
|
2234 | endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1 | |
2230 |
|
2235 | |||
2231 | df = False |
|
2236 | df = False | |
2232 | if opts["date"]: |
|
2237 | if opts["date"]: | |
2233 | df = util.matchdate(opts["date"]) |
|
2238 | df = util.matchdate(opts["date"]) | |
2234 |
|
2239 | |||
2235 | branches = opts.get('branch', []) + opts.get('only_branch', []) |
|
2240 | branches = opts.get('branch', []) + opts.get('only_branch', []) | |
2236 | opts['branch'] = [repo.lookupbranch(b) for b in branches] |
|
2241 | opts['branch'] = [repo.lookupbranch(b) for b in branches] | |
2237 |
|
2242 | |||
2238 | displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn) |
|
2243 | displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn) | |
2239 | def prep(ctx, fns): |
|
2244 | def prep(ctx, fns): | |
2240 | rev = ctx.rev() |
|
2245 | rev = ctx.rev() | |
2241 | parents = [p for p in repo.changelog.parentrevs(rev) |
|
2246 | parents = [p for p in repo.changelog.parentrevs(rev) | |
2242 | if p != nullrev] |
|
2247 | if p != nullrev] | |
2243 | if opts.get('no_merges') and len(parents) == 2: |
|
2248 | if opts.get('no_merges') and len(parents) == 2: | |
2244 | return |
|
2249 | return | |
2245 | if opts.get('only_merges') and len(parents) != 2: |
|
2250 | if opts.get('only_merges') and len(parents) != 2: | |
2246 | return |
|
2251 | return | |
2247 | if opts.get('branch') and ctx.branch() not in opts['branch']: |
|
2252 | if opts.get('branch') and ctx.branch() not in opts['branch']: | |
2248 | return |
|
2253 | return | |
2249 | if df and not df(ctx.date()[0]): |
|
2254 | if df and not df(ctx.date()[0]): | |
2250 | return |
|
2255 | return | |
2251 | if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]: |
|
2256 | if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]: | |
2252 | return |
|
2257 | return | |
2253 | if opts.get('keyword'): |
|
2258 | if opts.get('keyword'): | |
2254 | for k in [kw.lower() for kw in opts['keyword']]: |
|
2259 | for k in [kw.lower() for kw in opts['keyword']]: | |
2255 | if (k in ctx.user().lower() or |
|
2260 | if (k in ctx.user().lower() or | |
2256 | k in ctx.description().lower() or |
|
2261 | k in ctx.description().lower() or | |
2257 | k in " ".join(ctx.files()).lower()): |
|
2262 | k in " ".join(ctx.files()).lower()): | |
2258 | break |
|
2263 | break | |
2259 | else: |
|
2264 | else: | |
2260 | return |
|
2265 | return | |
2261 |
|
2266 | |||
2262 | copies = None |
|
2267 | copies = None | |
2263 | if opts.get('copies') and rev: |
|
2268 | if opts.get('copies') and rev: | |
2264 | copies = [] |
|
2269 | copies = [] | |
2265 | getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) |
|
2270 | getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) | |
2266 | for fn in ctx.files(): |
|
2271 | for fn in ctx.files(): | |
2267 | rename = getrenamed(fn, rev) |
|
2272 | rename = getrenamed(fn, rev) | |
2268 | if rename: |
|
2273 | if rename: | |
2269 | copies.append((fn, rename[0])) |
|
2274 | copies.append((fn, rename[0])) | |
2270 |
|
2275 | |||
2271 | displayer.show(ctx, copies=copies) |
|
2276 | displayer.show(ctx, copies=copies) | |
2272 |
|
2277 | |||
2273 | for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): |
|
2278 | for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): | |
2274 | if count == limit: |
|
2279 | if count == limit: | |
2275 | break |
|
2280 | break | |
2276 | if displayer.flush(ctx.rev()): |
|
2281 | if displayer.flush(ctx.rev()): | |
2277 | count += 1 |
|
2282 | count += 1 | |
2278 | displayer.close() |
|
2283 | displayer.close() | |
2279 |
|
2284 | |||
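The keyword filter in ``prep()`` above relies on Python's ``for``/``else``: the loop breaks on the first keyword that matches the user, description or file list, and the ``else`` branch (reached only when the loop finishes without ``break``) rejects the revision. A stand-alone sketch of the same pattern on plain strings, with hypothetical data and no Mercurial APIs::

    def matches(keywords, haystacks):
        for k in [kw.lower() for kw in keywords]:
            if any(k in h.lower() for h in haystacks):
                break        # a keyword matched: keep this entry
        else:
            return False     # loop ended without break: filter it out
        return True

    assert matches(['bug'], ['Fix Bug 42', 'user@example.com'])
    assert not matches(['doc'], ['Fix Bug 42'])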
2280 | def manifest(ui, repo, node=None, rev=None): |
|
2285 | def manifest(ui, repo, node=None, rev=None): | |
2281 | """output the current or given revision of the project manifest |
|
2286 | """output the current or given revision of the project manifest | |
2282 |
|
2287 | |||
2283 | Print a list of version controlled files for the given revision. |
|
2288 | Print a list of version controlled files for the given revision. | |
2284 | If no revision is given, the first parent of the working directory |
|
2289 | If no revision is given, the first parent of the working directory | |
2285 | is used, or the null revision if no revision is checked out. |
|
2290 | is used, or the null revision if no revision is checked out. | |
2286 |
|
2291 | |||
2287 | With -v, print file permissions, symlink and executable bits. |
|
2292 | With -v, print file permissions, symlink and executable bits. | |
2288 | With --debug, print file revision hashes. |
|
2293 | With --debug, print file revision hashes. | |
2289 |
|
2294 | |||
2290 | Returns 0 on success. |
|
2295 | Returns 0 on success. | |
2291 | """ |
|
2296 | """ | |
2292 |
|
2297 | |||
2293 | if rev and node: |
|
2298 | if rev and node: | |
2294 | raise util.Abort(_("please specify just one revision")) |
|
2299 | raise util.Abort(_("please specify just one revision")) | |
2295 |
|
2300 | |||
2296 | if not node: |
|
2301 | if not node: | |
2297 | node = rev |
|
2302 | node = rev | |
2298 |
|
2303 | |||
2299 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} |
|
2304 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} | |
2300 | ctx = repo[node] |
|
2305 | ctx = repo[node] | |
2301 | for f in ctx: |
|
2306 | for f in ctx: | |
2302 | if ui.debugflag: |
|
2307 | if ui.debugflag: | |
2303 | ui.write("%40s " % hex(ctx.manifest()[f])) |
|
2308 | ui.write("%40s " % hex(ctx.manifest()[f])) | |
2304 | if ui.verbose: |
|
2309 | if ui.verbose: | |
2305 | ui.write(decor[ctx.flags(f)]) |
|
2310 | ui.write(decor[ctx.flags(f)]) | |
2306 | ui.write("%s\n" % f) |
|
2311 | ui.write("%s\n" % f) | |
2307 |
|
2312 | |||
2308 | def merge(ui, repo, node=None, **opts): |
|
2313 | def merge(ui, repo, node=None, **opts): | |
2309 | """merge working directory with another revision |
|
2314 | """merge working directory with another revision | |
2310 |
|
2315 | |||
2311 | The current working directory is updated with all changes made in |
|
2316 | The current working directory is updated with all changes made in | |
2312 | the requested revision since the last common predecessor revision. |
|
2317 | the requested revision since the last common predecessor revision. | |
2313 |
|
2318 | |||
2314 | Files that changed between either parent are marked as changed for |
|
2319 | Files that changed between either parent are marked as changed for | |
2315 | the next commit and a commit must be performed before any further |
|
2320 | the next commit and a commit must be performed before any further | |
2316 | updates to the repository are allowed. The next commit will have |
|
2321 | updates to the repository are allowed. The next commit will have | |
2317 | two parents. |
|
2322 | two parents. | |
2318 |
|
2323 | |||
2319 | If no revision is specified, the working directory's parent is a |
|
2324 | If no revision is specified, the working directory's parent is a | |
2320 | head revision, and the current branch contains exactly one other |
|
2325 | head revision, and the current branch contains exactly one other | |
2321 | head, the other head is merged with by default. Otherwise, an |
|
2326 | head, the other head is merged with by default. Otherwise, an | |
2322 | explicit revision with which to merge must be provided. |
|
2327 | explicit revision with which to merge must be provided. | |
2323 |
|
2328 | |||
2324 | Returns 0 on success, 1 if there are unresolved files. |
|
2329 | Returns 0 on success, 1 if there are unresolved files. | |
2325 | """ |
|
2330 | """ | |
2326 |
|
2331 | |||
2327 | if opts.get('rev') and node: |
|
2332 | if opts.get('rev') and node: | |
2328 | raise util.Abort(_("please specify just one revision")) |
|
2333 | raise util.Abort(_("please specify just one revision")) | |
2329 | if not node: |
|
2334 | if not node: | |
2330 | node = opts.get('rev') |
|
2335 | node = opts.get('rev') | |
2331 |
|
2336 | |||
2332 | if not node: |
|
2337 | if not node: | |
2333 | branch = repo.changectx(None).branch() |
|
2338 | branch = repo.changectx(None).branch() | |
2334 | bheads = repo.branchheads(branch) |
|
2339 | bheads = repo.branchheads(branch) | |
2335 | if len(bheads) > 2: |
|
2340 | if len(bheads) > 2: | |
2336 | ui.warn(_("abort: branch '%s' has %d heads - " |
|
2341 | ui.warn(_("abort: branch '%s' has %d heads - " | |
2337 | "please merge with an explicit rev\n") |
|
2342 | "please merge with an explicit rev\n") | |
2338 | % (branch, len(bheads))) |
|
2343 | % (branch, len(bheads))) | |
2339 | ui.status(_("(run 'hg heads .' to see heads)\n")) |
|
2344 | ui.status(_("(run 'hg heads .' to see heads)\n")) | |
2340 | return False |
|
2345 | return False | |
2341 |
|
2346 | |||
2342 | parent = repo.dirstate.parents()[0] |
|
2347 | parent = repo.dirstate.parents()[0] | |
2343 | if len(bheads) == 1: |
|
2348 | if len(bheads) == 1: | |
2344 | if len(repo.heads()) > 1: |
|
2349 | if len(repo.heads()) > 1: | |
2345 | ui.warn(_("abort: branch '%s' has one head - " |
|
2350 | ui.warn(_("abort: branch '%s' has one head - " | |
2346 | "please merge with an explicit rev\n") % branch) |
|
2351 | "please merge with an explicit rev\n") % branch) | |
2347 | ui.status(_("(run 'hg heads' to see all heads)\n")) |
|
2352 | ui.status(_("(run 'hg heads' to see all heads)\n")) | |
2348 | return False |
|
2353 | return False | |
2349 | msg = _('there is nothing to merge') |
|
2354 | msg = _('there is nothing to merge') | |
2350 | if parent != repo.lookup(repo[None].branch()): |
|
2355 | if parent != repo.lookup(repo[None].branch()): | |
2351 | msg = _('%s - use "hg update" instead') % msg |
|
2356 | msg = _('%s - use "hg update" instead') % msg | |
2352 | raise util.Abort(msg) |
|
2357 | raise util.Abort(msg) | |
2353 |
|
2358 | |||
2354 | if parent not in bheads: |
|
2359 | if parent not in bheads: | |
2355 | raise util.Abort(_('working dir not at a head rev - ' |
|
2360 | raise util.Abort(_('working dir not at a head rev - ' | |
2356 | 'use "hg update" or merge with an explicit rev')) |
|
2361 | 'use "hg update" or merge with an explicit rev')) | |
2357 | node = parent == bheads[0] and bheads[-1] or bheads[0] |
|
2362 | node = parent == bheads[0] and bheads[-1] or bheads[0] | |
2358 |
|
2363 | |||
2359 | if opts.get('preview'): |
|
2364 | if opts.get('preview'): | |
2360 | # find nodes that are ancestors of p2 but not of p1 |
|
2365 | # find nodes that are ancestors of p2 but not of p1 | |
2361 | p1 = repo.lookup('.') |
|
2366 | p1 = repo.lookup('.') | |
2362 | p2 = repo.lookup(node) |
|
2367 | p2 = repo.lookup(node) | |
2363 | nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) |
|
2368 | nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) | |
2364 |
|
2369 | |||
2365 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
2370 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
2366 | for node in nodes: |
|
2371 | for node in nodes: | |
2367 | displayer.show(repo[node]) |
|
2372 | displayer.show(repo[node]) | |
2368 | displayer.close() |
|
2373 | displayer.close() | |
2369 | return 0 |
|
2374 | return 0 | |
2370 |
|
2375 | |||
2371 | return hg.merge(repo, node, force=opts.get('force')) |
|
2376 | return hg.merge(repo, node, force=opts.get('force')) | |
2372 |
|
2377 | |||
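The head-picking line above, ``node = parent == bheads[0] and bheads[-1] or bheads[0]``, uses the old ``and``/``or`` idiom to select "the other" branch head. A small sketch with hypothetical values showing it is equivalent to a conditional expression; the idiom is safe here only because a node identifier is never falsy::

    bheads = ['head-a', 'head-b']
    parent = 'head-a'

    node_andor = parent == bheads[0] and bheads[-1] or bheads[0]
    node_cond = bheads[-1] if parent == bheads[0] else bheads[0]
    assert node_andor == node_cond == 'head-b'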
2373 | def outgoing(ui, repo, dest=None, **opts): |
|
2378 | def outgoing(ui, repo, dest=None, **opts): | |
2374 | """show changesets not found in the destination |
|
2379 | """show changesets not found in the destination | |
2375 |
|
2380 | |||
2376 | Show changesets not found in the specified destination repository |
|
2381 | Show changesets not found in the specified destination repository | |
2377 | or the default push location. These are the changesets that would |
|
2382 | or the default push location. These are the changesets that would | |
2378 | be pushed if a push was requested. |
|
2383 | be pushed if a push was requested. | |
2379 |
|
2384 | |||
2380 | See pull for details of valid destination formats. |
|
2385 | See pull for details of valid destination formats. | |
2381 |
|
2386 | |||
2382 | Returns 0 if there are outgoing changes, 1 otherwise. |
|
2387 | Returns 0 if there are outgoing changes, 1 otherwise. | |
2383 | """ |
|
2388 | """ | |
2384 | limit = cmdutil.loglimit(opts) |
|
2389 | limit = cmdutil.loglimit(opts) | |
2385 | dest = ui.expandpath(dest or 'default-push', dest or 'default') |
|
2390 | dest = ui.expandpath(dest or 'default-push', dest or 'default') | |
2386 | dest, branches = hg.parseurl(dest, opts.get('branch')) |
|
2391 | dest, branches = hg.parseurl(dest, opts.get('branch')) | |
2387 | revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) |
|
2392 | revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) | |
2388 | if revs: |
|
2393 | if revs: | |
2389 | revs = [repo.lookup(rev) for rev in revs] |
|
2394 | revs = [repo.lookup(rev) for rev in revs] | |
2390 |
|
2395 | |||
2391 | other = hg.repository(hg.remoteui(repo, opts), dest) |
|
2396 | other = hg.repository(hg.remoteui(repo, opts), dest) | |
2392 | ui.status(_('comparing with %s\n') % url.hidepassword(dest)) |
|
2397 | ui.status(_('comparing with %s\n') % url.hidepassword(dest)) | |
2393 | o = repo.findoutgoing(other, force=opts.get('force')) |
|
2398 | o = repo.findoutgoing(other, force=opts.get('force')) | |
2394 | if not o: |
|
2399 | if not o: | |
2395 | ui.status(_("no changes found\n")) |
|
2400 | ui.status(_("no changes found\n")) | |
2396 | return 1 |
|
2401 | return 1 | |
2397 | o = repo.changelog.nodesbetween(o, revs)[0] |
|
2402 | o = repo.changelog.nodesbetween(o, revs)[0] | |
2398 | if opts.get('newest_first'): |
|
2403 | if opts.get('newest_first'): | |
2399 | o.reverse() |
|
2404 | o.reverse() | |
2400 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
2405 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
2401 | count = 0 |
|
2406 | count = 0 | |
2402 | for n in o: |
|
2407 | for n in o: | |
2403 | if limit is not None and count >= limit: |
|
2408 | if limit is not None and count >= limit: | |
2404 | break |
|
2409 | break | |
2405 | parents = [p for p in repo.changelog.parents(n) if p != nullid] |
|
2410 | parents = [p for p in repo.changelog.parents(n) if p != nullid] | |
2406 | if opts.get('no_merges') and len(parents) == 2: |
|
2411 | if opts.get('no_merges') and len(parents) == 2: | |
2407 | continue |
|
2412 | continue | |
2408 | count += 1 |
|
2413 | count += 1 | |
2409 | displayer.show(repo[n]) |
|
2414 | displayer.show(repo[n]) | |
2410 | displayer.close() |
|
2415 | displayer.close() | |
2411 |
|
2416 | |||
2412 | def parents(ui, repo, file_=None, **opts): |
|
2417 | def parents(ui, repo, file_=None, **opts): | |
2413 | """show the parents of the working directory or revision |
|
2418 | """show the parents of the working directory or revision | |
2414 |
|
2419 | |||
2415 | Print the working directory's parent revisions. If a revision is |
|
2420 | Print the working directory's parent revisions. If a revision is | |
2416 | given via -r/--rev, the parent of that revision will be printed. |
|
2421 | given via -r/--rev, the parent of that revision will be printed. | |
2417 | If a file argument is given, the revision in which the file was |
|
2422 | If a file argument is given, the revision in which the file was | |
2418 | last changed (before the working directory revision or the |
|
2423 | last changed (before the working directory revision or the | |
2419 | argument to --rev if given) is printed. |
|
2424 | argument to --rev if given) is printed. | |
2420 |
|
2425 | |||
2421 | Returns 0 on success. |
|
2426 | Returns 0 on success. | |
2422 | """ |
|
2427 | """ | |
2423 | rev = opts.get('rev') |
|
2428 | rev = opts.get('rev') | |
2424 | if rev: |
|
2429 | if rev: | |
2425 | ctx = repo[rev] |
|
2430 | ctx = repo[rev] | |
2426 | else: |
|
2431 | else: | |
2427 | ctx = repo[None] |
|
2432 | ctx = repo[None] | |
2428 |
|
2433 | |||
2429 | if file_: |
|
2434 | if file_: | |
2430 | m = cmdutil.match(repo, (file_,), opts) |
|
2435 | m = cmdutil.match(repo, (file_,), opts) | |
2431 | if m.anypats() or len(m.files()) != 1: |
|
2436 | if m.anypats() or len(m.files()) != 1: | |
2432 | raise util.Abort(_('can only specify an explicit filename')) |
|
2437 | raise util.Abort(_('can only specify an explicit filename')) | |
2433 | file_ = m.files()[0] |
|
2438 | file_ = m.files()[0] | |
2434 | filenodes = [] |
|
2439 | filenodes = [] | |
2435 | for cp in ctx.parents(): |
|
2440 | for cp in ctx.parents(): | |
2436 | if not cp: |
|
2441 | if not cp: | |
2437 | continue |
|
2442 | continue | |
2438 | try: |
|
2443 | try: | |
2439 | filenodes.append(cp.filenode(file_)) |
|
2444 | filenodes.append(cp.filenode(file_)) | |
2440 | except error.LookupError: |
|
2445 | except error.LookupError: | |
2441 | pass |
|
2446 | pass | |
2442 | if not filenodes: |
|
2447 | if not filenodes: | |
2443 | raise util.Abort(_("'%s' not found in manifest!") % file_) |
|
2448 | raise util.Abort(_("'%s' not found in manifest!") % file_) | |
2444 | fl = repo.file(file_) |
|
2449 | fl = repo.file(file_) | |
2445 | p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes] |
|
2450 | p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes] | |
2446 | else: |
|
2451 | else: | |
2447 | p = [cp.node() for cp in ctx.parents()] |
|
2452 | p = [cp.node() for cp in ctx.parents()] | |
2448 |
|
2453 | |||
2449 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
2454 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
2450 | for n in p: |
|
2455 | for n in p: | |
2451 | if n != nullid: |
|
2456 | if n != nullid: | |
2452 | displayer.show(repo[n]) |
|
2457 | displayer.show(repo[n]) | |
2453 | displayer.close() |
|
2458 | displayer.close() | |
2454 |
|
2459 | |||
2455 | def paths(ui, repo, search=None): |
|
2460 | def paths(ui, repo, search=None): | |
2456 | """show aliases for remote repositories |
|
2461 | """show aliases for remote repositories | |
2457 |
|
2462 | |||
2458 | Show definition of symbolic path name NAME. If no name is given, |
|
2463 | Show definition of symbolic path name NAME. If no name is given, | |
2459 | show definition of all available names. |
|
2464 | show definition of all available names. | |
2460 |
|
2465 | |||
2461 | Path names are defined in the [paths] section of |
|
2466 | Path names are defined in the [paths] section of | |
2462 | ``/etc/mercurial/hgrc`` and ``$HOME/.hgrc``. If run inside a |
|
2467 | ``/etc/mercurial/hgrc`` and ``$HOME/.hgrc``. If run inside a | |
2463 | repository, ``.hg/hgrc`` is used, too. |
|
2468 | repository, ``.hg/hgrc`` is used, too. | |
2464 |
|
2469 | |||
2465 | The path names ``default`` and ``default-push`` have a special |
|
2470 | The path names ``default`` and ``default-push`` have a special | |
2466 | meaning. When performing a push or pull operation, they are used |
|
2471 | meaning. When performing a push or pull operation, they are used | |
2467 | as fallbacks if no location is specified on the command-line. |
|
2472 | as fallbacks if no location is specified on the command-line. | |
2468 | When ``default-push`` is set, it will be used for push and |
|
2473 | When ``default-push`` is set, it will be used for push and | |
2469 | ``default`` will be used for pull; otherwise ``default`` is used |
|
2474 | ``default`` will be used for pull; otherwise ``default`` is used | |
2470 | as the fallback for both. When cloning a repository, the clone |
|
2475 | as the fallback for both. When cloning a repository, the clone | |
2471 | source is written as ``default`` in ``.hg/hgrc``. Note that |
|
2476 | source is written as ``default`` in ``.hg/hgrc``. Note that | |
2472 | ``default`` and ``default-push`` apply to all inbound (e.g. |
|
2477 | ``default`` and ``default-push`` apply to all inbound (e.g. | |
2473 | :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and |
|
2478 | :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and | |
2474 | :hg:`bundle`) operations. |
|
2479 | :hg:`bundle`) operations. | |
2475 |
|
2480 | |||
2476 | See :hg:`help urls` for more information. |
|
2481 | See :hg:`help urls` for more information. | |
2477 | """ |
|
2482 | """ | |
2478 | if search: |
|
2483 | if search: | |
2479 | for name, path in ui.configitems("paths"): |
|
2484 | for name, path in ui.configitems("paths"): | |
2480 | if name == search: |
|
2485 | if name == search: | |
2481 | ui.write("%s\n" % url.hidepassword(path)) |
|
2486 | ui.write("%s\n" % url.hidepassword(path)) | |
2482 | return |
|
2487 | return | |
2483 | ui.warn(_("not found!\n")) |
|
2488 | ui.warn(_("not found!\n")) | |
2484 | return 1 |
|
2489 | return 1 | |
2485 | else: |
|
2490 | else: | |
2486 | for name, path in ui.configitems("paths"): |
|
2491 | for name, path in ui.configitems("paths"): | |
2487 | ui.write("%s = %s\n" % (name, url.hidepassword(path))) |
|
2492 | ui.write("%s = %s\n" % (name, url.hidepassword(path))) | |
2488 |
|
2493 | |||
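The ``default``/``default-push`` fallback described in the docstring above amounts to a two-step lookup: push prefers ``default-push`` and falls back to ``default``, while pull only consults ``default``. A simplified stand-alone sketch of that rule with hypothetical values; the real resolution goes through ``ui.expandpath``::

    def pick_path(paths, pushing):
        # push: prefer 'default-push', else 'default'; pull: 'default' only
        if pushing and 'default-push' in paths:
            return paths['default-push']
        return paths.get('default')

    paths = {'default': 'http://hg.example.com/proj',
             'default-push': 'ssh://hg.example.com//proj'}
    assert pick_path(paths, pushing=True).startswith('ssh://')
    assert pick_path(paths, pushing=False).startswith('http://')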
2489 | def postincoming(ui, repo, modheads, optupdate, checkout): |
|
2494 | def postincoming(ui, repo, modheads, optupdate, checkout): | |
2490 | if modheads == 0: |
|
2495 | if modheads == 0: | |
2491 | return |
|
2496 | return | |
2492 | if optupdate: |
|
2497 | if optupdate: | |
2493 | if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout: |
|
2498 | if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout: | |
2494 | return hg.update(repo, checkout) |
|
2499 | return hg.update(repo, checkout) | |
2495 | else: |
|
2500 | else: | |
2496 | ui.status(_("not updating, since new heads added\n")) |
|
2501 | ui.status(_("not updating, since new heads added\n")) | |
2497 | if modheads > 1: |
|
2502 | if modheads > 1: | |
2498 | ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) |
|
2503 | ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) | |
2499 | else: |
|
2504 | else: | |
2500 | ui.status(_("(run 'hg update' to get a working copy)\n")) |
|
2505 | ui.status(_("(run 'hg update' to get a working copy)\n")) | |
2501 |
|
2506 | |||
2502 | def pull(ui, repo, source="default", **opts): |
|
2507 | def pull(ui, repo, source="default", **opts): | |
2503 | """pull changes from the specified source |
|
2508 | """pull changes from the specified source | |
2504 |
|
2509 | |||
2505 | Pull changes from a remote repository to a local one. |
|
2510 | Pull changes from a remote repository to a local one. | |
2506 |
|
2511 | |||
2507 | This finds all changes from the repository at the specified path |
|
2512 | This finds all changes from the repository at the specified path | |
2508 | or URL and adds them to a local repository (the current one unless |
|
2513 | or URL and adds them to a local repository (the current one unless | |
2509 | -R is specified). By default, this does not update the copy of the |
|
2514 | -R is specified). By default, this does not update the copy of the | |
2510 | project in the working directory. |
|
2515 | project in the working directory. | |
2511 |
|
2516 | |||
2512 | Use :hg:`incoming` if you want to see what would have been added |
|
2517 | Use :hg:`incoming` if you want to see what would have been added | |
2513 | by a pull at the time you issued this command. If you then decide |
|
2518 | by a pull at the time you issued this command. If you then decide | |
2514 | to add those changes to the repository, you should use :hg:`pull |
|
2519 | to add those changes to the repository, you should use :hg:`pull | |
2515 | -r X` where ``X`` is the last changeset listed by :hg:`incoming`. |
|
2520 | -r X` where ``X`` is the last changeset listed by :hg:`incoming`. | |
2516 |
|
2521 | |||
2517 | If SOURCE is omitted, the 'default' path will be used. |
|
2522 | If SOURCE is omitted, the 'default' path will be used. | |
2518 | See :hg:`help urls` for more information. |
|
2523 | See :hg:`help urls` for more information. | |
2519 |
|
2524 | |||
2520 | Returns 0 on success, 1 if an update had unresolved files. |
|
2525 | Returns 0 on success, 1 if an update had unresolved files. | |
2521 | """ |
|
2526 | """ | |
2522 | source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) |
|
2527 | source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) | |
2523 | other = hg.repository(hg.remoteui(repo, opts), source) |
|
2528 | other = hg.repository(hg.remoteui(repo, opts), source) | |
2524 | ui.status(_('pulling from %s\n') % url.hidepassword(source)) |
|
2529 | ui.status(_('pulling from %s\n') % url.hidepassword(source)) | |
2525 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) |
|
2530 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) | |
2526 | if revs: |
|
2531 | if revs: | |
2527 | try: |
|
2532 | try: | |
2528 | revs = [other.lookup(rev) for rev in revs] |
|
2533 | revs = [other.lookup(rev) for rev in revs] | |
2529 | except error.CapabilityError: |
|
2534 | except error.CapabilityError: | |
2530 | err = _("Other repository doesn't support revision lookup, " |
|
2535 | err = _("Other repository doesn't support revision lookup, " | |
2531 | "so a rev cannot be specified.") |
|
2536 | "so a rev cannot be specified.") | |
2532 | raise util.Abort(err) |
|
2537 | raise util.Abort(err) | |
2533 |
|
2538 | |||
2534 | modheads = repo.pull(other, heads=revs, force=opts.get('force')) |
|
2539 | modheads = repo.pull(other, heads=revs, force=opts.get('force')) | |
2535 | if checkout: |
|
2540 | if checkout: | |
2536 | checkout = str(repo.changelog.rev(other.lookup(checkout))) |
|
2541 | checkout = str(repo.changelog.rev(other.lookup(checkout))) | |
2537 | return postincoming(ui, repo, modheads, opts.get('update'), checkout) |
|
2542 | return postincoming(ui, repo, modheads, opts.get('update'), checkout) | |
2538 |
|
2543 | |||
2539 | def push(ui, repo, dest=None, **opts): |
|
2544 | def push(ui, repo, dest=None, **opts): | |
2540 | """push changes to the specified destination |
|
2545 | """push changes to the specified destination | |
2541 |
|
2546 | |||
2542 | Push changesets from the local repository to the specified |
|
2547 | Push changesets from the local repository to the specified | |
2543 | destination. |
|
2548 | destination. | |
2544 |
|
2549 | |||
2545 | This operation is symmetrical to pull: it is identical to a pull |
|
2550 | This operation is symmetrical to pull: it is identical to a pull | |
2546 | in the destination repository from the current one. |
|
2551 | in the destination repository from the current one. | |
2547 |
|
2552 | |||
2548 | By default, push will not allow creation of new heads at the |
|
2553 | By default, push will not allow creation of new heads at the | |
2549 | destination, since multiple heads would make it unclear which head |
|
2554 | destination, since multiple heads would make it unclear which head | |
2550 | to use. In this situation, it is recommended to pull and merge |
|
2555 | to use. In this situation, it is recommended to pull and merge | |
2551 | before pushing. |
|
2556 | before pushing. | |
2552 |
|
2557 | |||
2553 | Use --new-branch if you want to allow push to create a new named |
|
2558 | Use --new-branch if you want to allow push to create a new named | |
2554 | branch that is not present at the destination. This allows you to |
|
2559 | branch that is not present at the destination. This allows you to | |
2555 | only create a new branch without forcing other changes. |
|
2560 | only create a new branch without forcing other changes. | |
2556 |
|
2561 | |||
2557 | Use -f/--force to override the default behavior and push all |
|
2562 | Use -f/--force to override the default behavior and push all | |
2558 | changesets on all branches. |
|
2563 | changesets on all branches. | |
2559 |
|
2564 | |||
2560 | If -r/--rev is used, the specified revision and all its ancestors |
|
2565 | If -r/--rev is used, the specified revision and all its ancestors | |
2561 | will be pushed to the remote repository. |
|
2566 | will be pushed to the remote repository. | |
2562 |
|
2567 | |||
2563 | Please see :hg:`help urls` for important details about ``ssh://`` |
|
2568 | Please see :hg:`help urls` for important details about ``ssh://`` | |
2564 | URLs. If DESTINATION is omitted, a default path will be used. |
|
2569 | URLs. If DESTINATION is omitted, a default path will be used. | |
2565 |
|
2570 | |||
2566 | Returns 0 if push was successful, 1 if nothing to push. |
|
2571 | Returns 0 if push was successful, 1 if nothing to push. | |
2567 | """ |
|
2572 | """ | |
2568 | dest = ui.expandpath(dest or 'default-push', dest or 'default') |
|
2573 | dest = ui.expandpath(dest or 'default-push', dest or 'default') | |
2569 | dest, branches = hg.parseurl(dest, opts.get('branch')) |
|
2574 | dest, branches = hg.parseurl(dest, opts.get('branch')) | |
2570 | revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) |
|
2575 | revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) | |
2571 | other = hg.repository(hg.remoteui(repo, opts), dest) |
|
2576 | other = hg.repository(hg.remoteui(repo, opts), dest) | |
2572 | ui.status(_('pushing to %s\n') % url.hidepassword(dest)) |
|
2577 | ui.status(_('pushing to %s\n') % url.hidepassword(dest)) | |
2573 | if revs: |
|
2578 | if revs: | |
2574 | revs = [repo.lookup(rev) for rev in revs] |
|
2579 | revs = [repo.lookup(rev) for rev in revs] | |
2575 |
|
2580 | |||
2576 | # push subrepos depth-first for coherent ordering |
|
2581 | # push subrepos depth-first for coherent ordering | |
2577 | c = repo[''] |
|
2582 | c = repo[''] | |
2578 | subs = c.substate # only repos that are committed |
|
2583 | subs = c.substate # only repos that are committed | |
2579 | for s in sorted(subs): |
|
2584 | for s in sorted(subs): | |
2580 | if not c.sub(s).push(opts.get('force')): |
|
2585 | if not c.sub(s).push(opts.get('force')): | |
2581 | return False |
|
2586 | return False | |
2582 |
|
2587 | |||
2583 | r = repo.push(other, opts.get('force'), revs=revs, |
|
2588 | r = repo.push(other, opts.get('force'), revs=revs, | |
2584 | newbranch=opts.get('new_branch')) |
|
2589 | newbranch=opts.get('new_branch')) | |
2585 | return r == 0 |
|
2590 | return r == 0 | |
2586 |
|
2591 | |||
2587 | def recover(ui, repo): |
|
2592 | def recover(ui, repo): | |
2588 | """roll back an interrupted transaction |
|
2593 | """roll back an interrupted transaction | |
2589 |
|
2594 | |||
2590 | Recover from an interrupted commit or pull. |
|
2595 | Recover from an interrupted commit or pull. | |
2591 |
|
2596 | |||
2592 | This command tries to fix the repository status after an |
|
2597 | This command tries to fix the repository status after an | |
2593 | interrupted operation. It should only be necessary when Mercurial |
|
2598 | interrupted operation. It should only be necessary when Mercurial | |
2594 | suggests it. |
|
2599 | suggests it. | |
2595 |
|
2600 | |||
2596 | Returns 0 if successful, 1 if nothing to recover or verify fails. |
|
2601 | Returns 0 if successful, 1 if nothing to recover or verify fails. | |
2597 | """ |
|
2602 | """ | |
2598 | if repo.recover(): |
|
2603 | if repo.recover(): | |
2599 | return hg.verify(repo) |
|
2604 | return hg.verify(repo) | |
2600 | return 1 |
|
2605 | return 1 | |
2601 |
|
2606 | |||
2602 | def remove(ui, repo, *pats, **opts): |
|
2607 | def remove(ui, repo, *pats, **opts): | |
2603 | """remove the specified files on the next commit |
|
2608 | """remove the specified files on the next commit | |
2604 |
|
2609 | |||
2605 | Schedule the indicated files for removal from the repository. |
|
2610 | Schedule the indicated files for removal from the repository. | |
2606 |
|
2611 | |||
2607 | This only removes files from the current branch, not from the |
|
2612 | This only removes files from the current branch, not from the | |
2608 | entire project history. -A/--after can be used to remove only |
|
2613 | entire project history. -A/--after can be used to remove only | |
2609 | files that have already been deleted, -f/--force can be used to |
|
2614 | files that have already been deleted, -f/--force can be used to | |
2610 | force deletion, and -Af can be used to remove files from the next |
|
2615 | force deletion, and -Af can be used to remove files from the next | |
2611 | revision without deleting them from the working directory. |
|
2616 | revision without deleting them from the working directory. | |
2612 |
|
2617 | |||
2613 | The following table details the behavior of remove for different |
|
2618 | The following table details the behavior of remove for different | |
2614 | file states (columns) and option combinations (rows). The file |
|
2619 | file states (columns) and option combinations (rows). The file | |
2615 | states are Added [A], Clean [C], Modified [M] and Missing [!] (as |
|
2620 | states are Added [A], Clean [C], Modified [M] and Missing [!] (as | |
2616 | reported by :hg:`status`). The actions are Warn, Remove (from |
|
2621 | reported by :hg:`status`). The actions are Warn, Remove (from | |
2617 | branch) and Delete (from disk):: |
|
2622 | branch) and Delete (from disk):: | |
2618 |
|
2623 | |||
2619 | A C M ! |
|
2624 | A C M ! | |
2620 | none W RD W R |
|
2625 | none W RD W R | |
2621 | -f R RD RD R |
|
2626 | -f R RD RD R | |
2622 | -A W W W R |
|
2627 | -A W W W R | |
2623 | -Af R R R R |
|
2628 | -Af R R R R | |
2624 |
|
2629 | |||
2625 | This command schedules the files to be removed at the next commit. |
|
2630 | This command schedules the files to be removed at the next commit. | |
2626 | To undo a remove before that, see :hg:`revert`. |
|
2631 | To undo a remove before that, see :hg:`revert`. | |
2627 |
|
2632 | |||
2628 | Returns 0 on success, 1 if any warnings encountered. |
|
2633 | Returns 0 on success, 1 if any warnings encountered. | |
2629 | """ |
|
2634 | """ | |
2630 |
|
2635 | |||
2631 | ret = 0 |
|
2636 | ret = 0 | |
2632 | after, force = opts.get('after'), opts.get('force') |
|
2637 | after, force = opts.get('after'), opts.get('force') | |
2633 | if not pats and not after: |
|
2638 | if not pats and not after: | |
2634 | raise util.Abort(_('no files specified')) |
|
2639 | raise util.Abort(_('no files specified')) | |
2635 |
|
2640 | |||
2636 | m = cmdutil.match(repo, pats, opts) |
|
2641 | m = cmdutil.match(repo, pats, opts) | |
2637 | s = repo.status(match=m, clean=True) |
|
2642 | s = repo.status(match=m, clean=True) | |
2638 | modified, added, deleted, clean = s[0], s[1], s[3], s[6] |
|
2643 | modified, added, deleted, clean = s[0], s[1], s[3], s[6] | |
2639 |
|
2644 | |||
2640 | for f in m.files(): |
|
2645 | for f in m.files(): | |
2641 | if f not in repo.dirstate and not os.path.isdir(m.rel(f)): |
|
2646 | if f not in repo.dirstate and not os.path.isdir(m.rel(f)): | |
2642 | ui.warn(_('not removing %s: file is untracked\n') % m.rel(f)) |
|
2647 | ui.warn(_('not removing %s: file is untracked\n') % m.rel(f)) | |
2643 | ret = 1 |
|
2648 | ret = 1 | |
2644 |
|
2649 | |||
2645 | def warn(files, reason): |
|
2650 | def warn(files, reason): | |
2646 | for f in files: |
|
2651 | for f in files: | |
2647 | ui.warn(_('not removing %s: file %s (use -f to force removal)\n') |
|
2652 | ui.warn(_('not removing %s: file %s (use -f to force removal)\n') | |
2648 | % (m.rel(f), reason)) |
|
2653 | % (m.rel(f), reason)) | |
2649 | ret = 1 |
|
2654 | ret = 1 | |
2650 |
|
2655 | |||
2651 | if force: |
|
2656 | if force: | |
2652 | remove, forget = modified + deleted + clean, added |
|
2657 | remove, forget = modified + deleted + clean, added | |
2653 | elif after: |
|
2658 | elif after: | |
2654 | remove, forget = deleted, [] |
|
2659 | remove, forget = deleted, [] | |
2655 | warn(modified + added + clean, _('still exists')) |
|
2660 | warn(modified + added + clean, _('still exists')) | |
2656 | else: |
|
2661 | else: | |
2657 | remove, forget = deleted + clean, [] |
|
2662 | remove, forget = deleted + clean, [] | |
2658 | warn(modified, _('is modified')) |
|
2663 | warn(modified, _('is modified')) | |
2659 | warn(added, _('has been marked for add')) |
|
2664 | warn(added, _('has been marked for add')) | |
2660 |
|
2665 | |||
2661 | for f in sorted(remove + forget): |
|
2666 | for f in sorted(remove + forget): | |
2662 | if ui.verbose or not m.exact(f): |
|
2667 | if ui.verbose or not m.exact(f): | |
2663 | ui.status(_('removing %s\n') % m.rel(f)) |
|
2668 | ui.status(_('removing %s\n') % m.rel(f)) | |
2664 |
|
2669 | |||
2665 | repo.forget(forget) |
|
2670 | repo.forget(forget) | |
2666 | repo.remove(remove, unlink=not after) |
|
2671 | repo.remove(remove, unlink=not after) | |
2667 | return ret |
|
2672 | return ret | |
2668 |
|
2673 | |||
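The option/state matrix in the docstring of ``remove`` above can be read as a plain lookup table. A stand-alone sketch encoding it, purely for illustration (W=warn, R=remove from branch, D=delete from disk)::

    #                A     C     M     !
    ACTIONS = {
        'none': ('W',  'RD', 'W',  'R'),
        '-f':   ('R',  'RD', 'RD', 'R'),
        '-A':   ('W',  'W',  'W',  'R'),
        '-Af':  ('R',  'R',  'R',  'R'),
    }
    COLUMN = {'A': 0, 'C': 1, 'M': 2, '!': 3}

    def action(opts, state):
        return ACTIONS[opts][COLUMN[state]]

    assert action('none', 'C') == 'RD'
    assert action('-A', '!') == 'R'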
2669 | def rename(ui, repo, *pats, **opts): |
|
2674 | def rename(ui, repo, *pats, **opts): | |
2670 | """rename files; equivalent of copy + remove |
|
2675 | """rename files; equivalent of copy + remove | |
2671 |
|
2676 | |||
2672 | Mark dest as copies of sources; mark sources for deletion. If dest |
|
2677 | Mark dest as copies of sources; mark sources for deletion. If dest | |
2673 | is a directory, copies are put in that directory. If dest is a |
|
2678 | is a directory, copies are put in that directory. If dest is a | |
2674 | file, there can only be one source. |
|
2679 | file, there can only be one source. | |
2675 |
|
2680 | |||
2676 | By default, this command copies the contents of files as they |
|
2681 | By default, this command copies the contents of files as they | |
2677 | exist in the working directory. If invoked with -A/--after, the |
|
2682 | exist in the working directory. If invoked with -A/--after, the | |
2678 | operation is recorded, but no copying is performed. |
|
2683 | operation is recorded, but no copying is performed. | |
2679 |
|
2684 | |||
2680 | This command takes effect at the next commit. To undo a rename |
|
2685 | This command takes effect at the next commit. To undo a rename | |
2681 | before that, see :hg:`revert`. |
|
2686 | before that, see :hg:`revert`. | |
2682 |
|
2687 | |||
2683 | Returns 0 on success, 1 if errors are encountered. |
|
2688 | Returns 0 on success, 1 if errors are encountered. | |
2684 | """ |
|
2689 | """ | |
2685 | wlock = repo.wlock(False) |
|
2690 | wlock = repo.wlock(False) | |
2686 | try: |
|
2691 | try: | |
2687 | return cmdutil.copy(ui, repo, pats, opts, rename=True) |
|
2692 | return cmdutil.copy(ui, repo, pats, opts, rename=True) | |
2688 | finally: |
|
2693 | finally: | |
2689 | wlock.release() |
|
2694 | wlock.release() | |
2690 |
|
2695 | |||
2691 | def resolve(ui, repo, *pats, **opts): |
|
2696 | def resolve(ui, repo, *pats, **opts): | |
2692 | """various operations to help finish a merge |
|
2697 | """various operations to help finish a merge | |
2693 |
|
2698 | |||
2694 | This command includes several actions that are often useful while |
|
2699 | This command includes several actions that are often useful while | |
2695 | performing a merge, after running ``merge`` but before running |
|
2700 | performing a merge, after running ``merge`` but before running | |
2696 | ``commit``. (It is only meaningful if your working directory has |
|
2701 | ``commit``. (It is only meaningful if your working directory has | |
2697 | two parents.) It is most relevant for merges with unresolved |
|
2702 | two parents.) It is most relevant for merges with unresolved | |
2698 | conflicts, which are typically a result of non-interactive merging with |
|
2703 | conflicts, which are typically a result of non-interactive merging with | |
2699 | ``internal:merge`` or a command-line merge tool like ``diff3``. |
|
2704 | ``internal:merge`` or a command-line merge tool like ``diff3``. | |
2700 |
|
2705 | |||
2701 | The available actions are: |
|
2706 | The available actions are: | |
2702 |
|
2707 | |||
2703 | 1) list files that were merged with conflicts (U, for unresolved) |
|
2708 | 1) list files that were merged with conflicts (U, for unresolved) | |
2704 | and without conflicts (R, for resolved): ``hg resolve -l`` |
|
2709 | and without conflicts (R, for resolved): ``hg resolve -l`` | |
2705 | (this is like ``status`` for merges) |
|
2710 | (this is like ``status`` for merges) | |
2706 | 2) record that you have resolved conflicts in certain files: |
|
2711 | 2) record that you have resolved conflicts in certain files: | |
2707 | ``hg resolve -m [file ...]`` (default: mark all unresolved files) |
|
2712 | ``hg resolve -m [file ...]`` (default: mark all unresolved files) | |
2708 | 3) forget that you have resolved conflicts in certain files: |
|
2713 | 3) forget that you have resolved conflicts in certain files: | |
2709 | ``hg resolve -u [file ...]`` (default: unmark all resolved files) |
|
2714 | ``hg resolve -u [file ...]`` (default: unmark all resolved files) | |
2710 | 4) discard your current attempt(s) at resolving conflicts and |
|
2715 | 4) discard your current attempt(s) at resolving conflicts and | |
2711 | restart the merge from scratch: ``hg resolve file...`` |
|
2716 | restart the merge from scratch: ``hg resolve file...`` | |
2712 | (or ``-a`` for all unresolved files) |
|
2717 | (or ``-a`` for all unresolved files) | |
2713 |
|
2718 | |||
2714 | Note that Mercurial will not let you commit files with unresolved merge |
|
2719 | Note that Mercurial will not let you commit files with unresolved merge | |
2715 | conflicts. You must use ``hg resolve -m ...`` before you can commit |
|
2720 | conflicts. You must use ``hg resolve -m ...`` before you can commit | |
2716 | after a conflicting merge. |
|
2721 | after a conflicting merge. | |
2717 |
|
2722 | |||
2718 | Returns 0 on success, 1 if any files fail a resolve attempt. |
|
2723 | Returns 0 on success, 1 if any files fail a resolve attempt. | |
2719 | """ |
|
2724 | """ | |
2720 |
|
2725 | |||
2721 | all, mark, unmark, show, nostatus = \ |
|
2726 | all, mark, unmark, show, nostatus = \ | |
2722 | [opts.get(o) for o in 'all mark unmark list no_status'.split()] |
|
2727 | [opts.get(o) for o in 'all mark unmark list no_status'.split()] | |
2723 |
|
2728 | |||
2724 | if (show and (mark or unmark)) or (mark and unmark): |
|
2729 | if (show and (mark or unmark)) or (mark and unmark): | |
2725 | raise util.Abort(_("too many options specified")) |
|
2730 | raise util.Abort(_("too many options specified")) | |
2726 | if pats and all: |
|
2731 | if pats and all: | |
2727 | raise util.Abort(_("can't specify --all and patterns")) |
|
2732 | raise util.Abort(_("can't specify --all and patterns")) | |
2728 | if not (all or pats or show or mark or unmark): |
|
2733 | if not (all or pats or show or mark or unmark): | |
2729 | raise util.Abort(_('no files or directories specified; ' |
|
2734 | raise util.Abort(_('no files or directories specified; ' | |
2730 | 'use --all to remerge all files')) |
|
2735 | 'use --all to remerge all files')) | |
2731 |
|
2736 | |||
2732 | ms = mergemod.mergestate(repo) |
|
2737 | ms = mergemod.mergestate(repo) | |
2733 | m = cmdutil.match(repo, pats, opts) |
|
2738 | m = cmdutil.match(repo, pats, opts) | |
2734 | ret = 0 |
|
2739 | ret = 0 | |
2735 |
|
2740 | |||
2736 | for f in ms: |
|
2741 | for f in ms: | |
2737 | if m(f): |
|
2742 | if m(f): | |
2738 | if show: |
|
2743 | if show: | |
2739 | if nostatus: |
|
2744 | if nostatus: | |
2740 | ui.write("%s\n" % f) |
|
2745 | ui.write("%s\n" % f) | |
2741 | else: |
|
2746 | else: | |
2742 | ui.write("%s %s\n" % (ms[f].upper(), f), |
|
2747 | ui.write("%s %s\n" % (ms[f].upper(), f), | |
2743 | label='resolve.' + |
|
2748 | label='resolve.' + | |
2744 | {'u': 'unresolved', 'r': 'resolved'}[ms[f]]) |
|
2749 | {'u': 'unresolved', 'r': 'resolved'}[ms[f]]) | |
2745 | elif mark: |
|
2750 | elif mark: | |
2746 | ms.mark(f, "r") |
|
2751 | ms.mark(f, "r") | |
2747 | elif unmark: |
|
2752 | elif unmark: | |
2748 | ms.mark(f, "u") |
|
2753 | ms.mark(f, "u") | |
2749 | else: |
|
2754 | else: | |
2750 | wctx = repo[None] |
|
2755 | wctx = repo[None] | |
2751 | mctx = wctx.parents()[-1] |
|
2756 | mctx = wctx.parents()[-1] | |
2752 |
|
2757 | |||
2753 | # backup pre-resolve (merge uses .orig for its own purposes) |
|
2758 | # backup pre-resolve (merge uses .orig for its own purposes) | |
2754 | a = repo.wjoin(f) |
|
2759 | a = repo.wjoin(f) | |
2755 | util.copyfile(a, a + ".resolve") |
|
2760 | util.copyfile(a, a + ".resolve") | |
2756 |
|
2761 | |||
2757 | # resolve file |
|
2762 | # resolve file | |
2758 | if ms.resolve(f, wctx, mctx): |
|
2763 | if ms.resolve(f, wctx, mctx): | |
2759 | ret = 1 |
|
2764 | ret = 1 | |
2760 |
|
2765 | |||
2761 | # replace filemerge's .orig file with our resolve file |
|
2766 | # replace filemerge's .orig file with our resolve file | |
2762 | util.rename(a + ".resolve", a + ".orig") |
|
2767 | util.rename(a + ".resolve", a + ".orig") | |
2763 | return ret |
|
2768 | return ret | |
2764 |
|
2769 | |||
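The resolve loop above snapshots each file to ``<file>.resolve`` before re-merging and then renames that snapshot over filemerge's ``.orig``, so the pre-resolve contents end up as the ``.orig`` backup. A minimal stand-alone sketch of that save/re-run/restore pattern on plain files, using a hypothetical ``remerge`` callable and no Mercurial APIs::

    import os, shutil

    def remerge_with_backup(path, remerge):
        backup = path + '.resolve'
        shutil.copyfile(path, backup)       # keep the pre-resolve contents
        failed = remerge(path)              # may rewrite path and path + '.orig'
        os.rename(backup, path + '.orig')   # our snapshot becomes the .orig
        return failed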
2765 | def revert(ui, repo, *pats, **opts): |
|
2770 | def revert(ui, repo, *pats, **opts): | |
2766 | """restore individual files or directories to an earlier state |
|
2771 | """restore individual files or directories to an earlier state | |
2767 |
|
2772 | |||
2768 | (Use update -r to check out earlier revisions; revert does not |
|
2773 | (Use update -r to check out earlier revisions; revert does not | |
2769 | change the working directory parents.) |
|
2774 | change the working directory parents.) | |
2770 |
|
2775 | |||
2771 | With no revision specified, revert the named files or directories |
|
2776 | With no revision specified, revert the named files or directories | |
2772 | to the contents they had in the parent of the working directory. |
|
2777 | to the contents they had in the parent of the working directory. | |
2773 | This restores the contents of the affected files to an unmodified |
|
2778 | This restores the contents of the affected files to an unmodified | |
2774 | state and unschedules adds, removes, copies, and renames. If the |
|
2779 | state and unschedules adds, removes, copies, and renames. If the | |
2775 | working directory has two parents, you must explicitly specify a |
|
2780 | working directory has two parents, you must explicitly specify a | |
2776 | revision. |
|
2781 | revision. | |
2777 |
|
2782 | |||
2778 | Using the -r/--rev option, revert the given files or directories |
|
2783 | Using the -r/--rev option, revert the given files or directories | |
2779 | to their contents as of a specific revision. This can be helpful |
|
2784 | to their contents as of a specific revision. This can be helpful | |
2780 | to "roll back" some or all of an earlier change. See :hg:`help |
|
2785 | to "roll back" some or all of an earlier change. See :hg:`help | |
2781 | dates` for a list of formats valid for -d/--date. |
|
2786 | dates` for a list of formats valid for -d/--date. | |
2782 |
|
2787 | |||
2783 | Revert modifies the working directory. It does not commit any |
|
2788 | Revert modifies the working directory. It does not commit any | |
2784 | changes, or change the parent of the working directory. If you |
|
2789 | changes, or change the parent of the working directory. If you | |
2785 | revert to a revision other than the parent of the working |
|
2790 | revert to a revision other than the parent of the working | |
2786 | directory, the reverted files will thus appear modified |
|
2791 | directory, the reverted files will thus appear modified | |
2787 | afterwards. |
|
2792 | afterwards. | |
2788 |
|
2793 | |||
2789 | If a file has been deleted, it is restored. If the executable mode |
|
2794 | If a file has been deleted, it is restored. If the executable mode | |
2790 | of a file was changed, it is reset. |
|
2795 | of a file was changed, it is reset. | |
2791 |
|
2796 | |||
2792 | If names are given, all files matching the names are reverted. |
|
2797 | If names are given, all files matching the names are reverted. | |
2793 | If no arguments are given, no files are reverted. |
|
2798 | If no arguments are given, no files are reverted. | |
2794 |
|
2799 | |||
2795 | Modified files are saved with a .orig suffix before reverting. |
|
2800 | Modified files are saved with a .orig suffix before reverting. | |
2796 | To disable these backups, use --no-backup. |
|
2801 | To disable these backups, use --no-backup. | |
2797 |
|
2802 | |||
2798 | Returns 0 on success. |
|
2803 | Returns 0 on success. | |
2799 | """ |
|
2804 | """ | |
2800 |
|
2805 | |||
2801 | if opts["date"]: |
|
2806 | if opts["date"]: | |
2802 | if opts["rev"]: |
|
2807 | if opts["rev"]: | |
2803 | raise util.Abort(_("you can't specify a revision and a date")) |
|
2808 | raise util.Abort(_("you can't specify a revision and a date")) | |
2804 | opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) |
|
2809 | opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) | |
2805 |
|
2810 | |||
2806 | if not pats and not opts.get('all'): |
|
2811 | if not pats and not opts.get('all'): | |
2807 | raise util.Abort(_('no files or directories specified; ' |
|
2812 | raise util.Abort(_('no files or directories specified; ' | |
2808 | 'use --all to revert the whole repo')) |
|
2813 | 'use --all to revert the whole repo')) | |
2809 |
|
2814 | |||
2810 | parent, p2 = repo.dirstate.parents() |
|
2815 | parent, p2 = repo.dirstate.parents() | |
2811 | if not opts.get('rev') and p2 != nullid: |
|
2816 | if not opts.get('rev') and p2 != nullid: | |
2812 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
2817 | raise util.Abort(_('uncommitted merge - please provide a ' | |
2813 | 'specific revision')) |
|
2818 | 'specific revision')) | |
2814 | ctx = repo[opts.get('rev')] |
|
2819 | ctx = repo[opts.get('rev')] | |
2815 | node = ctx.node() |
|
2820 | node = ctx.node() | |
2816 | mf = ctx.manifest() |
|
2821 | mf = ctx.manifest() | |
2817 | if node == parent: |
|
2822 | if node == parent: | |
2818 | pmf = mf |
|
2823 | pmf = mf | |
2819 | else: |
|
2824 | else: | |
2820 | pmf = None |
|
2825 | pmf = None | |
2821 |
|
2826 | |||
2822 | # need all matching names in dirstate and manifest of target rev, |
|
2827 | # need all matching names in dirstate and manifest of target rev, | |
2823 | # so have to walk both. do not print errors if files exist in one |
|
2828 | # so have to walk both. do not print errors if files exist in one | |
2824 | # but not other. |
|
2829 | # but not other. | |
2825 |
|
2830 | |||
2826 | names = {} |
|
2831 | names = {} | |
2827 |
|
2832 | |||
2828 | wlock = repo.wlock() |
|
2833 | wlock = repo.wlock() | |
2829 | try: |
|
2834 | try: | |
2830 | # walk dirstate. |
|
2835 | # walk dirstate. | |
2831 |
|
2836 | |||
2832 | m = cmdutil.match(repo, pats, opts) |
|
2837 | m = cmdutil.match(repo, pats, opts) | |
2833 | m.bad = lambda x, y: False |
|
2838 | m.bad = lambda x, y: False | |
2834 | for abs in repo.walk(m): |
|
2839 | for abs in repo.walk(m): | |
2835 | names[abs] = m.rel(abs), m.exact(abs) |
|
2840 | names[abs] = m.rel(abs), m.exact(abs) | |
2836 |
|
2841 | |||
2837 | # walk target manifest. |
|
2842 | # walk target manifest. | |
2838 |
|
2843 | |||
2839 | def badfn(path, msg): |
|
2844 | def badfn(path, msg): | |
2840 | if path in names: |
|
2845 | if path in names: | |
2841 | return |
|
2846 | return | |
2842 | path_ = path + '/' |
|
2847 | path_ = path + '/' | |
2843 | for f in names: |
|
2848 | for f in names: | |
2844 | if f.startswith(path_): |
|
2849 | if f.startswith(path_): | |
2845 | return |
|
2850 | return | |
2846 | ui.warn("%s: %s\n" % (m.rel(path), msg)) |
|
2851 | ui.warn("%s: %s\n" % (m.rel(path), msg)) | |
2847 |
|
2852 | |||
2848 | m = cmdutil.match(repo, pats, opts) |
|
2853 | m = cmdutil.match(repo, pats, opts) | |
2849 | m.bad = badfn |
|
2854 | m.bad = badfn | |
2850 | for abs in repo[node].walk(m): |
|
2855 | for abs in repo[node].walk(m): | |
2851 | if abs not in names: |
|
2856 | if abs not in names: | |
2852 | names[abs] = m.rel(abs), m.exact(abs) |
|
2857 | names[abs] = m.rel(abs), m.exact(abs) | |
2853 |
|
2858 | |||
2854 | m = cmdutil.matchfiles(repo, names) |
|
2859 | m = cmdutil.matchfiles(repo, names) | |
2855 | changes = repo.status(match=m)[:4] |
|
2860 | changes = repo.status(match=m)[:4] | |
2856 | modified, added, removed, deleted = map(set, changes) |
|
2861 | modified, added, removed, deleted = map(set, changes) | |
2857 |
|
2862 | |||
2858 | # if f is a rename, also revert the source |
|
2863 | # if f is a rename, also revert the source | |
2859 | cwd = repo.getcwd() |
|
2864 | cwd = repo.getcwd() | |
2860 | for f in added: |
|
2865 | for f in added: | |
2861 | src = repo.dirstate.copied(f) |
|
2866 | src = repo.dirstate.copied(f) | |
2862 | if src and src not in names and repo.dirstate[src] == 'r': |
|
2867 | if src and src not in names and repo.dirstate[src] == 'r': | |
2863 | removed.add(src) |
|
2868 | removed.add(src) | |
2864 | names[src] = (repo.pathto(src, cwd), True) |
|
2869 | names[src] = (repo.pathto(src, cwd), True) | |
2865 |
|
2870 | |||
2866 | def removeforget(abs): |
|
2871 | def removeforget(abs): | |
2867 | if repo.dirstate[abs] == 'a': |
|
2872 | if repo.dirstate[abs] == 'a': | |
2868 | return _('forgetting %s\n') |
|
2873 | return _('forgetting %s\n') | |
2869 | return _('removing %s\n') |
|
2874 | return _('removing %s\n') | |
2870 |
|
2875 | |||
2871 | revert = ([], _('reverting %s\n')) |
|
2876 | revert = ([], _('reverting %s\n')) | |
2872 | add = ([], _('adding %s\n')) |
|
2877 | add = ([], _('adding %s\n')) | |
2873 | remove = ([], removeforget) |
|
2878 | remove = ([], removeforget) | |
2874 | undelete = ([], _('undeleting %s\n')) |
|
2879 | undelete = ([], _('undeleting %s\n')) | |
2875 |
|
2880 | |||
2876 | disptable = ( |
|
2881 | disptable = ( | |
2877 | # dispatch table: |
|
2882 | # dispatch table: | |
2878 | # file state |
|
2883 | # file state | |
2879 | # action if in target manifest |
|
2884 | # action if in target manifest | |
2880 | # action if not in target manifest |
|
2885 | # action if not in target manifest | |
2881 | # make backup if in target manifest |
|
2886 | # make backup if in target manifest | |
2882 | # make backup if not in target manifest |
|
2887 | # make backup if not in target manifest | |
2883 | (modified, revert, remove, True, True), |
|
2888 | (modified, revert, remove, True, True), | |
2884 | (added, revert, remove, True, False), |
|
2889 | (added, revert, remove, True, False), | |
2885 | (removed, undelete, None, False, False), |
|
2890 | (removed, undelete, None, False, False), | |
2886 | (deleted, revert, remove, False, False), |
|
2891 | (deleted, revert, remove, False, False), | |
2887 | ) |
|
2892 | ) | |
2888 |
|
2893 | |||
2889 | for abs, (rel, exact) in sorted(names.items()): |
|
2894 | for abs, (rel, exact) in sorted(names.items()): | |
2890 | mfentry = mf.get(abs) |
|
2895 | mfentry = mf.get(abs) | |
2891 | target = repo.wjoin(abs) |
|
2896 | target = repo.wjoin(abs) | |
2892 | def handle(xlist, dobackup): |
|
2897 | def handle(xlist, dobackup): | |
2893 | xlist[0].append(abs) |
|
2898 | xlist[0].append(abs) | |
2894 | if dobackup and not opts.get('no_backup') and util.lexists(target): |
|
2899 | if dobackup and not opts.get('no_backup') and util.lexists(target): | |
2895 | bakname = "%s.orig" % rel |
|
2900 | bakname = "%s.orig" % rel | |
2896 | ui.note(_('saving current version of %s as %s\n') % |
|
2901 | ui.note(_('saving current version of %s as %s\n') % | |
2897 | (rel, bakname)) |
|
2902 | (rel, bakname)) | |
2898 | if not opts.get('dry_run'): |
|
2903 | if not opts.get('dry_run'): | |
2899 | util.copyfile(target, bakname) |
|
2904 | util.copyfile(target, bakname) | |
2900 | if ui.verbose or not exact: |
|
2905 | if ui.verbose or not exact: | |
2901 | msg = xlist[1] |
|
2906 | msg = xlist[1] | |
2902 | if not isinstance(msg, basestring): |
|
2907 | if not isinstance(msg, basestring): | |
2903 | msg = msg(abs) |
|
2908 | msg = msg(abs) | |
2904 | ui.status(msg % rel) |
|
2909 | ui.status(msg % rel) | |
2905 | for table, hitlist, misslist, backuphit, backupmiss in disptable: |
|
2910 | for table, hitlist, misslist, backuphit, backupmiss in disptable: | |
2906 | if abs not in table: |
|
2911 | if abs not in table: | |
2907 | continue |
|
2912 | continue | |
2908 | # file has changed in dirstate |
|
2913 | # file has changed in dirstate | |
2909 | if mfentry: |
|
2914 | if mfentry: | |
2910 | handle(hitlist, backuphit) |
|
2915 | handle(hitlist, backuphit) | |
2911 | elif misslist is not None: |
|
2916 | elif misslist is not None: | |
2912 | handle(misslist, backupmiss) |
|
2917 | handle(misslist, backupmiss) | |
2913 | break |
|
2918 | break | |
2914 | else: |
|
2919 | else: | |
2915 | if abs not in repo.dirstate: |
|
2920 | if abs not in repo.dirstate: | |
2916 | if mfentry: |
|
2921 | if mfentry: | |
2917 | handle(add, True) |
|
2922 | handle(add, True) | |
2918 | elif exact: |
|
2923 | elif exact: | |
2919 | ui.warn(_('file not managed: %s\n') % rel) |
|
2924 | ui.warn(_('file not managed: %s\n') % rel) | |
2920 | continue |
|
2925 | continue | |
2921 | # file has not changed in dirstate |
|
2926 | # file has not changed in dirstate | |
2922 | if node == parent: |
|
2927 | if node == parent: | |
2923 | if exact: |
|
2928 | if exact: | |
2924 | ui.warn(_('no changes needed to %s\n') % rel) |
|
2929 | ui.warn(_('no changes needed to %s\n') % rel) | |
2925 | continue |
|
2930 | continue | |
2926 | if pmf is None: |
|
2931 | if pmf is None: | |
2927 | # only need parent manifest in this unlikely case, |
|
2932 | # only need parent manifest in this unlikely case, | |
2928 | # so do not read by default |
|
2933 | # so do not read by default | |
2929 | pmf = repo[parent].manifest() |
|
2934 | pmf = repo[parent].manifest() | |
2930 | if abs in pmf: |
|
2935 | if abs in pmf: | |
2931 | if mfentry: |
|
2936 | if mfentry: | |
2932 | # if version of file is same in parent and target |
|
2937 | # if version of file is same in parent and target | |
2933 | # manifests, do nothing |
|
2938 | # manifests, do nothing | |
2934 | if (pmf[abs] != mfentry or |
|
2939 | if (pmf[abs] != mfentry or | |
2935 | pmf.flags(abs) != mf.flags(abs)): |
|
2940 | pmf.flags(abs) != mf.flags(abs)): | |
2936 | handle(revert, False) |
|
2941 | handle(revert, False) | |
2937 | else: |
|
2942 | else: | |
2938 | handle(remove, False) |
|
2943 | handle(remove, False) | |
2939 |
|
2944 | |||
2940 | if not opts.get('dry_run'): |
|
2945 | if not opts.get('dry_run'): | |
2941 | def checkout(f): |
|
2946 | def checkout(f): | |
2942 | fc = ctx[f] |
|
2947 | fc = ctx[f] | |
2943 | repo.wwrite(f, fc.data(), fc.flags()) |
|
2948 | repo.wwrite(f, fc.data(), fc.flags()) | |
2944 |
|
2949 | |||
2945 | audit_path = util.path_auditor(repo.root) |
|
2950 | audit_path = util.path_auditor(repo.root) | |
2946 | for f in remove[0]: |
|
2951 | for f in remove[0]: | |
2947 | if repo.dirstate[f] == 'a': |
|
2952 | if repo.dirstate[f] == 'a': | |
2948 | repo.dirstate.forget(f) |
|
2953 | repo.dirstate.forget(f) | |
2949 | continue |
|
2954 | continue | |
2950 | audit_path(f) |
|
2955 | audit_path(f) | |
2951 | try: |
|
2956 | try: | |
2952 | util.unlink(repo.wjoin(f)) |
|
2957 | util.unlink(repo.wjoin(f)) | |
2953 | except OSError: |
|
2958 | except OSError: | |
2954 | pass |
|
2959 | pass | |
2955 | repo.dirstate.remove(f) |
|
2960 | repo.dirstate.remove(f) | |
2956 |
|
2961 | |||
2957 | normal = None |
|
2962 | normal = None | |
2958 | if node == parent: |
|
2963 | if node == parent: | |
2959 | # We're reverting to our parent. If possible, we'd like status |
|
2964 | # We're reverting to our parent. If possible, we'd like status | |
2960 | # to report the file as clean. We have to use normallookup for |
|
2965 | # to report the file as clean. We have to use normallookup for | |
2961 | # merges to avoid losing information about merged/dirty files. |
|
2966 | # merges to avoid losing information about merged/dirty files. | |
2962 | if p2 != nullid: |
|
2967 | if p2 != nullid: | |
2963 | normal = repo.dirstate.normallookup |
|
2968 | normal = repo.dirstate.normallookup | |
2964 | else: |
|
2969 | else: | |
2965 | normal = repo.dirstate.normal |
|
2970 | normal = repo.dirstate.normal | |
2966 | for f in revert[0]: |
|
2971 | for f in revert[0]: | |
2967 | checkout(f) |
|
2972 | checkout(f) | |
2968 | if normal: |
|
2973 | if normal: | |
2969 | normal(f) |
|
2974 | normal(f) | |
2970 |
|
2975 | |||
2971 | for f in add[0]: |
|
2976 | for f in add[0]: | |
2972 | checkout(f) |
|
2977 | checkout(f) | |
2973 | repo.dirstate.add(f) |
|
2978 | repo.dirstate.add(f) | |
2974 |
|
2979 | |||
2975 | normal = repo.dirstate.normallookup |
|
2980 | normal = repo.dirstate.normallookup | |
2976 | if node == parent and p2 == nullid: |
|
2981 | if node == parent and p2 == nullid: | |
2977 | normal = repo.dirstate.normal |
|
2982 | normal = repo.dirstate.normal | |
2978 | for f in undelete[0]: |
|
2983 | for f in undelete[0]: | |
2979 | checkout(f) |
|
2984 | checkout(f) | |
2980 | normal(f) |
|
2985 | normal(f) | |
2981 |
|
2986 | |||
2982 | finally: |
|
2987 | finally: | |
2983 | wlock.release() |
|
2988 | wlock.release() | |
2984 |
|
2989 | |||
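The revert code above funnels every per-file decision through its dispatch table: the file's status picks a row, and membership in the target manifest picks the action list. A minimal standalone sketch of that pattern (plain Python, no Mercurial imports; the file names, statuses, and manifest below are invented for illustration)::

    # Each row: (status set, action list if the file is in the target
    # manifest, action list if it is not). The first matching row wins.
    modified, added, removed, deleted = {'a.txt'}, {'new.txt'}, set(), set()
    target_manifest = {'a.txt': 'node-in-target'}   # hypothetical target rev

    revert, add, remove, undelete = [], [], [], []
    disptable = (
        (modified, revert,   remove),
        (added,    revert,   remove),
        (removed,  undelete, None),
        (deleted,  revert,   remove),
    )

    for f in ('a.txt', 'new.txt'):
        for state, hitlist, misslist in disptable:
            if f not in state:
                continue
            if f in target_manifest:
                hitlist.append(f)
            elif misslist is not None:
                misslist.append(f)
            break

    print(revert)   # ['a.txt']  - checked out again from the target revision
    print(remove)   # ['new.txt'] - not in the target, so removed/forgotten
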
2985 | def rollback(ui, repo, **opts): |
|
2990 | def rollback(ui, repo, **opts): | |
2986 | """roll back the last transaction (dangerous) |
|
2991 | """roll back the last transaction (dangerous) | |
2987 |
|
2992 | |||
2988 | This command should be used with care. There is only one level of |
|
2993 | This command should be used with care. There is only one level of | |
2989 | rollback, and there is no way to undo a rollback. It will also |
|
2994 | rollback, and there is no way to undo a rollback. It will also | |
2990 | restore the dirstate at the time of the last transaction, losing |
|
2995 | restore the dirstate at the time of the last transaction, losing | |
2991 | any dirstate changes since that time. This command does not alter |
|
2996 | any dirstate changes since that time. This command does not alter | |
2992 | the working directory. |
|
2997 | the working directory. | |
2993 |
|
2998 | |||
2994 | Transactions are used to encapsulate the effects of all commands |
|
2999 | Transactions are used to encapsulate the effects of all commands | |
2995 | that create new changesets or propagate existing changesets into a |
|
3000 | that create new changesets or propagate existing changesets into a | |
2996 | repository. For example, the following commands are transactional, |
|
3001 | repository. For example, the following commands are transactional, | |
2997 | and their effects can be rolled back: |
|
3002 | and their effects can be rolled back: | |
2998 |
|
3003 | |||
2999 | - commit |
|
3004 | - commit | |
3000 | - import |
|
3005 | - import | |
3001 | - pull |
|
3006 | - pull | |
3002 | - push (with this repository as the destination) |
|
3007 | - push (with this repository as the destination) | |
3003 | - unbundle |
|
3008 | - unbundle | |
3004 |
|
3009 | |||
3005 | This command is not intended for use on public repositories. Once |
|
3010 | This command is not intended for use on public repositories. Once | |
3006 | changes are visible for pull by other users, rolling a transaction |
|
3011 | changes are visible for pull by other users, rolling a transaction | |
3007 | back locally is ineffective (someone else may already have pulled |
|
3012 | back locally is ineffective (someone else may already have pulled | |
3008 | the changes). Furthermore, a race is possible with readers of the |
|
3013 | the changes). Furthermore, a race is possible with readers of the | |
3009 | repository; for example an in-progress pull from the repository |
|
3014 | repository; for example an in-progress pull from the repository | |
3010 | may fail if a rollback is performed. |
|
3015 | may fail if a rollback is performed. | |
3011 |
|
3016 | |||
3012 | Returns 0 on success, 1 if no rollback data is available. |
|
3017 | Returns 0 on success, 1 if no rollback data is available. | |
3013 | """ |
|
3018 | """ | |
3014 | return repo.rollback(opts.get('dry_run')) |
|
3019 | return repo.rollback(opts.get('dry_run')) | |
3015 |
|
3020 | |||
3016 | def root(ui, repo): |
|
3021 | def root(ui, repo): | |
3017 | """print the root (top) of the current working directory |
|
3022 | """print the root (top) of the current working directory | |
3018 |
|
3023 | |||
3019 | Print the root directory of the current repository. |
|
3024 | Print the root directory of the current repository. | |
3020 |
|
3025 | |||
3021 | Returns 0 on success. |
|
3026 | Returns 0 on success. | |
3022 | """ |
|
3027 | """ | |
3023 | ui.write(repo.root + "\n") |
|
3028 | ui.write(repo.root + "\n") | |
3024 |
|
3029 | |||
3025 | def serve(ui, repo, **opts): |
|
3030 | def serve(ui, repo, **opts): | |
3026 | """start stand-alone webserver |
|
3031 | """start stand-alone webserver | |
3027 |
|
3032 | |||
3028 | Start a local HTTP repository browser and pull server. You can use |
|
3033 | Start a local HTTP repository browser and pull server. You can use | |
3029 | this for ad-hoc sharing and browsing of repositories. It is |
|
3034 | this for ad-hoc sharing and browsing of repositories. It is | |
3030 | recommended to use a real web server to serve a repository for |
|
3035 | recommended to use a real web server to serve a repository for | |
3031 | longer periods of time. |
|
3036 | longer periods of time. | |
3032 |
|
3037 | |||
3033 | Please note that the server does not implement access control. |
|
3038 | Please note that the server does not implement access control. | |
3034 | This means that, by default, anybody can read from the server and |
|
3039 | This means that, by default, anybody can read from the server and | |
3035 | nobody can write to it. Set the ``web.allow_push`` |
|
3040 | nobody can write to it. Set the ``web.allow_push`` | |
3036 | option to ``*`` to allow everybody to push to the server. You |
|
3041 | option to ``*`` to allow everybody to push to the server. You | |
3037 | should use a real web server if you need to authenticate users. |
|
3042 | should use a real web server if you need to authenticate users. | |
3038 |
|
3043 | |||
3039 | By default, the server logs accesses to stdout and errors to |
|
3044 | By default, the server logs accesses to stdout and errors to | |
3040 | stderr. Use the -A/--accesslog and -E/--errorlog options to log to |
|
3045 | stderr. Use the -A/--accesslog and -E/--errorlog options to log to | |
3041 | files. |
|
3046 | files. | |
3042 |
|
3047 | |||
3043 | To have the server choose a free port number to listen on, specify |
|
3048 | To have the server choose a free port number to listen on, specify | |
3044 | a port number of 0; in this case, the server will print the port |
|
3049 | a port number of 0; in this case, the server will print the port | |
3045 | number it uses. |
|
3050 | number it uses. | |
3046 |
|
3051 | |||
3047 | Returns 0 on success. |
|
3052 | Returns 0 on success. | |
3048 | """ |
|
3053 | """ | |
3049 |
|
3054 | |||
3050 | if opts["stdio"]: |
|
3055 | if opts["stdio"]: | |
3051 | if repo is None: |
|
3056 | if repo is None: | |
3052 | raise error.RepoError(_("There is no Mercurial repository here" |
|
3057 | raise error.RepoError(_("There is no Mercurial repository here" | |
3053 | " (.hg not found)")) |
|
3058 | " (.hg not found)")) | |
3054 | s = sshserver.sshserver(ui, repo) |
|
3059 | s = sshserver.sshserver(ui, repo) | |
3055 | s.serve_forever() |
|
3060 | s.serve_forever() | |
3056 |
|
3061 | |||
3057 | # this way we can check if something was given in the command-line |
|
3062 | # this way we can check if something was given in the command-line | |
3058 | if opts.get('port'): |
|
3063 | if opts.get('port'): | |
3059 | opts['port'] = int(opts.get('port')) |
|
3064 | opts['port'] = int(opts.get('port')) | |
3060 |
|
3065 | |||
3061 | baseui = repo and repo.baseui or ui |
|
3066 | baseui = repo and repo.baseui or ui | |
3062 | optlist = ("name templates style address port prefix ipv6" |
|
3067 | optlist = ("name templates style address port prefix ipv6" | |
3063 | " accesslog errorlog certificate encoding") |
|
3068 | " accesslog errorlog certificate encoding") | |
3064 | for o in optlist.split(): |
|
3069 | for o in optlist.split(): | |
3065 | val = opts.get(o, '') |
|
3070 | val = opts.get(o, '') | |
3066 | if val in (None, ''): # should check against default options instead |
|
3071 | if val in (None, ''): # should check against default options instead | |
3067 | continue |
|
3072 | continue | |
3068 | baseui.setconfig("web", o, val) |
|
3073 | baseui.setconfig("web", o, val) | |
3069 | if repo and repo.ui != baseui: |
|
3074 | if repo and repo.ui != baseui: | |
3070 | repo.ui.setconfig("web", o, val) |
|
3075 | repo.ui.setconfig("web", o, val) | |
3071 |
|
3076 | |||
3072 | o = opts.get('web_conf') or opts.get('webdir_conf') |
|
3077 | o = opts.get('web_conf') or opts.get('webdir_conf') | |
3073 | if not o: |
|
3078 | if not o: | |
3074 | if not repo: |
|
3079 | if not repo: | |
3075 | raise error.RepoError(_("There is no Mercurial repository" |
|
3080 | raise error.RepoError(_("There is no Mercurial repository" | |
3076 | " here (.hg not found)")) |
|
3081 | " here (.hg not found)")) | |
3077 | o = repo.root |
|
3082 | o = repo.root | |
3078 |
|
3083 | |||
3079 | app = hgweb.hgweb(o, baseui=ui) |
|
3084 | app = hgweb.hgweb(o, baseui=ui) | |
3080 |
|
3085 | |||
3081 | class service(object): |
|
3086 | class service(object): | |
3082 | def init(self): |
|
3087 | def init(self): | |
3083 | util.set_signal_handler() |
|
3088 | util.set_signal_handler() | |
3084 | self.httpd = hgweb.server.create_server(ui, app) |
|
3089 | self.httpd = hgweb.server.create_server(ui, app) | |
3085 |
|
3090 | |||
3086 | if opts['port'] and not ui.verbose: |
|
3091 | if opts['port'] and not ui.verbose: | |
3087 | return |
|
3092 | return | |
3088 |
|
3093 | |||
3089 | if self.httpd.prefix: |
|
3094 | if self.httpd.prefix: | |
3090 | prefix = self.httpd.prefix.strip('/') + '/' |
|
3095 | prefix = self.httpd.prefix.strip('/') + '/' | |
3091 | else: |
|
3096 | else: | |
3092 | prefix = '' |
|
3097 | prefix = '' | |
3093 |
|
3098 | |||
3094 | port = ':%d' % self.httpd.port |
|
3099 | port = ':%d' % self.httpd.port | |
3095 | if port == ':80': |
|
3100 | if port == ':80': | |
3096 | port = '' |
|
3101 | port = '' | |
3097 |
|
3102 | |||
3098 | bindaddr = self.httpd.addr |
|
3103 | bindaddr = self.httpd.addr | |
3099 | if bindaddr == '0.0.0.0': |
|
3104 | if bindaddr == '0.0.0.0': | |
3100 | bindaddr = '*' |
|
3105 | bindaddr = '*' | |
3101 | elif ':' in bindaddr: # IPv6 |
|
3106 | elif ':' in bindaddr: # IPv6 | |
3102 | bindaddr = '[%s]' % bindaddr |
|
3107 | bindaddr = '[%s]' % bindaddr | |
3103 |
|
3108 | |||
3104 | fqaddr = self.httpd.fqaddr |
|
3109 | fqaddr = self.httpd.fqaddr | |
3105 | if ':' in fqaddr: |
|
3110 | if ':' in fqaddr: | |
3106 | fqaddr = '[%s]' % fqaddr |
|
3111 | fqaddr = '[%s]' % fqaddr | |
3107 | if opts['port']: |
|
3112 | if opts['port']: | |
3108 | write = ui.status |
|
3113 | write = ui.status | |
3109 | else: |
|
3114 | else: | |
3110 | write = ui.write |
|
3115 | write = ui.write | |
3111 | write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % |
|
3116 | write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % | |
3112 | (fqaddr, port, prefix, bindaddr, self.httpd.port)) |
|
3117 | (fqaddr, port, prefix, bindaddr, self.httpd.port)) | |
3113 |
|
3118 | |||
3114 | def run(self): |
|
3119 | def run(self): | |
3115 | self.httpd.serve_forever() |
|
3120 | self.httpd.serve_forever() | |
3116 |
|
3121 | |||
3117 | service = service() |
|
3122 | service = service() | |
3118 |
|
3123 | |||
3119 | cmdutil.service(opts, initfn=service.init, runfn=service.run) |
|
3124 | cmdutil.service(opts, initfn=service.init, runfn=service.run) | |
3120 |
|
3125 | |||
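serve forwards a fixed list of command-line options into the ``web`` configuration section before building the hgweb application. A rough standalone sketch of that forwarding loop (a plain dict stands in for ``ui.setconfig``; the option values are made up)::

    opts = {'port': 8000, 'name': 'demo repo', 'accesslog': '', 'ipv6': None}
    webconfig = {}

    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''):   # unset options keep their config-file values
            continue
        webconfig[('web', o)] = val

    print(webconfig)   # {('web', 'name'): 'demo repo', ('web', 'port'): 8000}
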
3121 | def status(ui, repo, *pats, **opts): |
|
3126 | def status(ui, repo, *pats, **opts): | |
3122 | """show changed files in the working directory |
|
3127 | """show changed files in the working directory | |
3123 |
|
3128 | |||
3124 | Show status of files in the repository. If names are given, only |
|
3129 | Show status of files in the repository. If names are given, only | |
3125 | files that match are shown. Files that are clean or ignored or |
|
3130 | files that match are shown. Files that are clean or ignored or | |
3126 | the source of a copy/move operation are not listed unless |
|
3131 | the source of a copy/move operation are not listed unless | |
3127 | -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. |
|
3132 | -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. | |
3128 | Unless options described with "show only ..." are given, the |
|
3133 | Unless options described with "show only ..." are given, the | |
3129 | options -mardu are used. |
|
3134 | options -mardu are used. | |
3130 |
|
3135 | |||
3131 | Option -q/--quiet hides untracked (unknown and ignored) files |
|
3136 | Option -q/--quiet hides untracked (unknown and ignored) files | |
3132 | unless explicitly requested with -u/--unknown or -i/--ignored. |
|
3137 | unless explicitly requested with -u/--unknown or -i/--ignored. | |
3133 |
|
3138 | |||
3134 | NOTE: status may appear to disagree with diff if permissions have |
|
3139 | NOTE: status may appear to disagree with diff if permissions have | |
3135 | changed or a merge has occurred. The standard diff format does not |
|
3140 | changed or a merge has occurred. The standard diff format does not | |
3136 | report permission changes and diff only reports changes relative |
|
3141 | report permission changes and diff only reports changes relative | |
3137 | to one merge parent. |
|
3142 | to one merge parent. | |
3138 |
|
3143 | |||
3139 | If one revision is given, it is used as the base revision. |
|
3144 | If one revision is given, it is used as the base revision. | |
3140 | If two revisions are given, the differences between them are |
|
3145 | If two revisions are given, the differences between them are | |
3141 | shown. The --change option can also be used as a shortcut to list |
|
3146 | shown. The --change option can also be used as a shortcut to list | |
3142 | the changed files of a revision from its first parent. |
|
3147 | the changed files of a revision from its first parent. | |
3143 |
|
3148 | |||
3144 | The codes used to show the status of files are:: |
|
3149 | The codes used to show the status of files are:: | |
3145 |
|
3150 | |||
3146 | M = modified |
|
3151 | M = modified | |
3147 | A = added |
|
3152 | A = added | |
3148 | R = removed |
|
3153 | R = removed | |
3149 | C = clean |
|
3154 | C = clean | |
3150 | ! = missing (deleted by non-hg command, but still tracked) |
|
3155 | ! = missing (deleted by non-hg command, but still tracked) | |
3151 | ? = not tracked |
|
3156 | ? = not tracked | |
3152 | I = ignored |
|
3157 | I = ignored | |
3153 | = origin of the previous file listed as A (added) |
|
3158 | = origin of the previous file listed as A (added) | |
3154 |
|
3159 | |||
3155 | Returns 0 on success. |
|
3160 | Returns 0 on success. | |
3156 | """ |
|
3161 | """ | |
3157 |
|
3162 | |||
3158 | revs = opts.get('rev') |
|
3163 | revs = opts.get('rev') | |
3159 | change = opts.get('change') |
|
3164 | change = opts.get('change') | |
3160 |
|
3165 | |||
3161 | if revs and change: |
|
3166 | if revs and change: | |
3162 | msg = _('cannot specify --rev and --change at the same time') |
|
3167 | msg = _('cannot specify --rev and --change at the same time') | |
3163 | raise util.Abort(msg) |
|
3168 | raise util.Abort(msg) | |
3164 | elif change: |
|
3169 | elif change: | |
3165 | node2 = repo.lookup(change) |
|
3170 | node2 = repo.lookup(change) | |
3166 | node1 = repo[node2].parents()[0].node() |
|
3171 | node1 = repo[node2].parents()[0].node() | |
3167 | else: |
|
3172 | else: | |
3168 | node1, node2 = cmdutil.revpair(repo, revs) |
|
3173 | node1, node2 = cmdutil.revpair(repo, revs) | |
3169 |
|
3174 | |||
3170 | cwd = (pats and repo.getcwd()) or '' |
|
3175 | cwd = (pats and repo.getcwd()) or '' | |
3171 | end = opts.get('print0') and '\0' or '\n' |
|
3176 | end = opts.get('print0') and '\0' or '\n' | |
3172 | copy = {} |
|
3177 | copy = {} | |
3173 | states = 'modified added removed deleted unknown ignored clean'.split() |
|
3178 | states = 'modified added removed deleted unknown ignored clean'.split() | |
3174 | show = [k for k in states if opts.get(k)] |
|
3179 | show = [k for k in states if opts.get(k)] | |
3175 | if opts.get('all'): |
|
3180 | if opts.get('all'): | |
3176 | show += ui.quiet and (states[:4] + ['clean']) or states |
|
3181 | show += ui.quiet and (states[:4] + ['clean']) or states | |
3177 | if not show: |
|
3182 | if not show: | |
3178 | show = ui.quiet and states[:4] or states[:5] |
|
3183 | show = ui.quiet and states[:4] or states[:5] | |
3179 |
|
3184 | |||
3180 | stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), |
|
3185 | stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), | |
3181 | 'ignored' in show, 'clean' in show, 'unknown' in show) |
|
3186 | 'ignored' in show, 'clean' in show, 'unknown' in show) | |
3182 | changestates = zip(states, 'MAR!?IC', stat) |
|
3187 | changestates = zip(states, 'MAR!?IC', stat) | |
3183 |
|
3188 | |||
3184 | if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'): |
|
3189 | if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'): | |
3185 | ctxn = repo[nullid] |
|
3190 | ctxn = repo[nullid] | |
3186 | ctx1 = repo[node1] |
|
3191 | ctx1 = repo[node1] | |
3187 | ctx2 = repo[node2] |
|
3192 | ctx2 = repo[node2] | |
3188 | added = stat[1] |
|
3193 | added = stat[1] | |
3189 | if node2 is None: |
|
3194 | if node2 is None: | |
3190 | added = stat[0] + stat[1] # merged? |
|
3195 | added = stat[0] + stat[1] # merged? | |
3191 |
|
3196 | |||
3192 | for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems(): |
|
3197 | for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems(): | |
3193 | if k in added: |
|
3198 | if k in added: | |
3194 | copy[k] = v |
|
3199 | copy[k] = v | |
3195 | elif v in added: |
|
3200 | elif v in added: | |
3196 | copy[v] = k |
|
3201 | copy[v] = k | |
3197 |
|
3202 | |||
3198 | for state, char, files in changestates: |
|
3203 | for state, char, files in changestates: | |
3199 | if state in show: |
|
3204 | if state in show: | |
3200 | format = "%s %%s%s" % (char, end) |
|
3205 | format = "%s %%s%s" % (char, end) | |
3201 | if opts.get('no_status'): |
|
3206 | if opts.get('no_status'): | |
3202 | format = "%%s%s" % end |
|
3207 | format = "%%s%s" % end | |
3203 |
|
3208 | |||
3204 | for f in files: |
|
3209 | for f in files: | |
3205 | ui.write(format % repo.pathto(f, cwd), |
|
3210 | ui.write(format % repo.pathto(f, cwd), | |
3206 | label='status.' + state) |
|
3211 | label='status.' + state) | |
3207 | if f in copy: |
|
3212 | if f in copy: | |
3208 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end), |
|
3213 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end), | |
3209 | label='status.copied') |
|
3214 | label='status.copied') | |
3210 |
|
3215 | |||
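The per-file output of status is driven by a small format string: the status letter and the line terminator are baked in, leaving one ``%s`` for the file name. A standalone sketch showing how the two variants above expand (plain Python; the file name is invented)::

    import sys

    char, end = 'M', '\n'              # end would be '\0' with --print0
    fmt = "%s %%s%s" % (char, end)
    print(repr(fmt))                   # 'M %s\n'
    sys.stdout.write(fmt % 'src/commands.py')        # M src/commands.py

    no_status_fmt = "%%s%s" % end      # --no-status drops the status letter
    sys.stdout.write(no_status_fmt % 'src/commands.py')
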
3211 | def summary(ui, repo, **opts): |
|
3216 | def summary(ui, repo, **opts): | |
3212 | """summarize working directory state |
|
3217 | """summarize working directory state | |
3213 |
|
3218 | |||
3214 | This generates a brief summary of the working directory state, |
|
3219 | This generates a brief summary of the working directory state, | |
3215 | including parents, branch, commit status, and available updates. |
|
3220 | including parents, branch, commit status, and available updates. | |
3216 |
|
3221 | |||
3217 | With the --remote option, this will check the default paths for |
|
3222 | With the --remote option, this will check the default paths for | |
3218 | incoming and outgoing changes. This can be time-consuming. |
|
3223 | incoming and outgoing changes. This can be time-consuming. | |
3219 |
|
3224 | |||
3220 | Returns 0 on success. |
|
3225 | Returns 0 on success. | |
3221 | """ |
|
3226 | """ | |
3222 |
|
3227 | |||
3223 | ctx = repo[None] |
|
3228 | ctx = repo[None] | |
3224 | parents = ctx.parents() |
|
3229 | parents = ctx.parents() | |
3225 | pnode = parents[0].node() |
|
3230 | pnode = parents[0].node() | |
3226 |
|
3231 | |||
3227 | for p in parents: |
|
3232 | for p in parents: | |
3228 | # label with log.changeset (instead of log.parent) since this |
|
3233 | # label with log.changeset (instead of log.parent) since this | |
3229 | # shows a working directory parent *changeset*: |
|
3234 | # shows a working directory parent *changeset*: | |
3230 | ui.write(_('parent: %d:%s ') % (p.rev(), str(p)), |
|
3235 | ui.write(_('parent: %d:%s ') % (p.rev(), str(p)), | |
3231 | label='log.changeset') |
|
3236 | label='log.changeset') | |
3232 | ui.write(' '.join(p.tags()), label='log.tag') |
|
3237 | ui.write(' '.join(p.tags()), label='log.tag') | |
3233 | if p.rev() == -1: |
|
3238 | if p.rev() == -1: | |
3234 | if not len(repo): |
|
3239 | if not len(repo): | |
3235 | ui.write(_(' (empty repository)')) |
|
3240 | ui.write(_(' (empty repository)')) | |
3236 | else: |
|
3241 | else: | |
3237 | ui.write(_(' (no revision checked out)')) |
|
3242 | ui.write(_(' (no revision checked out)')) | |
3238 | ui.write('\n') |
|
3243 | ui.write('\n') | |
3239 | if p.description(): |
|
3244 | if p.description(): | |
3240 | ui.status(' ' + p.description().splitlines()[0].strip() + '\n', |
|
3245 | ui.status(' ' + p.description().splitlines()[0].strip() + '\n', | |
3241 | label='log.summary') |
|
3246 | label='log.summary') | |
3242 |
|
3247 | |||
3243 | branch = ctx.branch() |
|
3248 | branch = ctx.branch() | |
3244 | bheads = repo.branchheads(branch) |
|
3249 | bheads = repo.branchheads(branch) | |
3245 | m = _('branch: %s\n') % branch |
|
3250 | m = _('branch: %s\n') % branch | |
3246 | if branch != 'default': |
|
3251 | if branch != 'default': | |
3247 | ui.write(m, label='log.branch') |
|
3252 | ui.write(m, label='log.branch') | |
3248 | else: |
|
3253 | else: | |
3249 | ui.status(m, label='log.branch') |
|
3254 | ui.status(m, label='log.branch') | |
3250 |
|
3255 | |||
3251 | st = list(repo.status(unknown=True))[:6] |
|
3256 | st = list(repo.status(unknown=True))[:6] | |
3252 |
|
3257 | |||
3253 | ms = mergemod.mergestate(repo) |
|
3258 | ms = mergemod.mergestate(repo) | |
3254 | st.append([f for f in ms if ms[f] == 'u']) |
|
3259 | st.append([f for f in ms if ms[f] == 'u']) | |
3255 |
|
3260 | |||
3256 | subs = [s for s in ctx.substate if ctx.sub(s).dirty()] |
|
3261 | subs = [s for s in ctx.substate if ctx.sub(s).dirty()] | |
3257 | st.append(subs) |
|
3262 | st.append(subs) | |
3258 |
|
3263 | |||
3259 | labels = [ui.label(_('%d modified'), 'status.modified'), |
|
3264 | labels = [ui.label(_('%d modified'), 'status.modified'), | |
3260 | ui.label(_('%d added'), 'status.added'), |
|
3265 | ui.label(_('%d added'), 'status.added'), | |
3261 | ui.label(_('%d removed'), 'status.removed'), |
|
3266 | ui.label(_('%d removed'), 'status.removed'), | |
3262 | ui.label(_('%d deleted'), 'status.deleted'), |
|
3267 | ui.label(_('%d deleted'), 'status.deleted'), | |
3263 | ui.label(_('%d unknown'), 'status.unknown'), |
|
3268 | ui.label(_('%d unknown'), 'status.unknown'), | |
3264 | ui.label(_('%d ignored'), 'status.ignored'), |
|
3269 | ui.label(_('%d ignored'), 'status.ignored'), | |
3265 | ui.label(_('%d unresolved'), 'resolve.unresolved'), |
|
3270 | ui.label(_('%d unresolved'), 'resolve.unresolved'), | |
3266 | ui.label(_('%d subrepos'), 'status.modified')] |
|
3271 | ui.label(_('%d subrepos'), 'status.modified')] | |
3267 | t = [] |
|
3272 | t = [] | |
3268 | for s, l in zip(st, labels): |
|
3273 | for s, l in zip(st, labels): | |
3269 | if s: |
|
3274 | if s: | |
3270 | t.append(l % len(s)) |
|
3275 | t.append(l % len(s)) | |
3271 |
|
3276 | |||
3272 | t = ', '.join(t) |
|
3277 | t = ', '.join(t) | |
3273 | cleanworkdir = False |
|
3278 | cleanworkdir = False | |
3274 |
|
3279 | |||
3275 | if len(parents) > 1: |
|
3280 | if len(parents) > 1: | |
3276 | t += _(' (merge)') |
|
3281 | t += _(' (merge)') | |
3277 | elif branch != parents[0].branch(): |
|
3282 | elif branch != parents[0].branch(): | |
3278 | t += _(' (new branch)') |
|
3283 | t += _(' (new branch)') | |
3279 | elif (parents[0].extra().get('close') and |
|
3284 | elif (parents[0].extra().get('close') and | |
3280 | pnode in repo.branchheads(branch, closed=True)): |
|
3285 | pnode in repo.branchheads(branch, closed=True)): | |
3281 | t += _(' (head closed)') |
|
3286 | t += _(' (head closed)') | |
3282 | elif (not st[0] and not st[1] and not st[2] and not st[7]): |
|
3287 | elif (not st[0] and not st[1] and not st[2] and not st[7]): | |
3283 | t += _(' (clean)') |
|
3288 | t += _(' (clean)') | |
3284 | cleanworkdir = True |
|
3289 | cleanworkdir = True | |
3285 | elif pnode not in bheads: |
|
3290 | elif pnode not in bheads: | |
3286 | t += _(' (new branch head)') |
|
3291 | t += _(' (new branch head)') | |
3287 |
|
3292 | |||
3288 | if cleanworkdir: |
|
3293 | if cleanworkdir: | |
3289 | ui.status(_('commit: %s\n') % t.strip()) |
|
3294 | ui.status(_('commit: %s\n') % t.strip()) | |
3290 | else: |
|
3295 | else: | |
3291 | ui.write(_('commit: %s\n') % t.strip()) |
|
3296 | ui.write(_('commit: %s\n') % t.strip()) | |
3292 |
|
3297 | |||
3293 | # all ancestors of branch heads - all ancestors of parent = new csets |
|
3298 | # all ancestors of branch heads - all ancestors of parent = new csets | |
3294 | new = [0] * len(repo) |
|
3299 | new = [0] * len(repo) | |
3295 | cl = repo.changelog |
|
3300 | cl = repo.changelog | |
3296 | for a in [cl.rev(n) for n in bheads]: |
|
3301 | for a in [cl.rev(n) for n in bheads]: | |
3297 | new[a] = 1 |
|
3302 | new[a] = 1 | |
3298 | for a in cl.ancestors(*[cl.rev(n) for n in bheads]): |
|
3303 | for a in cl.ancestors(*[cl.rev(n) for n in bheads]): | |
3299 | new[a] = 1 |
|
3304 | new[a] = 1 | |
3300 | for a in [p.rev() for p in parents]: |
|
3305 | for a in [p.rev() for p in parents]: | |
3301 | if a >= 0: |
|
3306 | if a >= 0: | |
3302 | new[a] = 0 |
|
3307 | new[a] = 0 | |
3303 | for a in cl.ancestors(*[p.rev() for p in parents]): |
|
3308 | for a in cl.ancestors(*[p.rev() for p in parents]): | |
3304 | new[a] = 0 |
|
3309 | new[a] = 0 | |
3305 | new = sum(new) |
|
3310 | new = sum(new) | |
3306 |
|
3311 | |||
3307 | if new == 0: |
|
3312 | if new == 0: | |
3308 | ui.status(_('update: (current)\n')) |
|
3313 | ui.status(_('update: (current)\n')) | |
3309 | elif pnode not in bheads: |
|
3314 | elif pnode not in bheads: | |
3310 | ui.write(_('update: %d new changesets (update)\n') % new) |
|
3315 | ui.write(_('update: %d new changesets (update)\n') % new) | |
3311 | else: |
|
3316 | else: | |
3312 | ui.write(_('update: %d new changesets, %d branch heads (merge)\n') % |
|
3317 | ui.write(_('update: %d new changesets, %d branch heads (merge)\n') % | |
3313 | (new, len(bheads))) |
|
3318 | (new, len(bheads))) | |
3314 |
|
3319 | |||
3315 | if opts.get('remote'): |
|
3320 | if opts.get('remote'): | |
3316 | t = [] |
|
3321 | t = [] | |
3317 | source, branches = hg.parseurl(ui.expandpath('default')) |
|
3322 | source, branches = hg.parseurl(ui.expandpath('default')) | |
3318 | other = hg.repository(hg.remoteui(repo, {}), source) |
|
3323 | other = hg.repository(hg.remoteui(repo, {}), source) | |
3319 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) |
|
3324 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) | |
3320 | ui.debug('comparing with %s\n' % url.hidepassword(source)) |
|
3325 | ui.debug('comparing with %s\n' % url.hidepassword(source)) | |
3321 | repo.ui.pushbuffer() |
|
3326 | repo.ui.pushbuffer() | |
3322 | common, incoming, rheads = repo.findcommonincoming(other) |
|
3327 | common, incoming, rheads = repo.findcommonincoming(other) | |
3323 | repo.ui.popbuffer() |
|
3328 | repo.ui.popbuffer() | |
3324 | if incoming: |
|
3329 | if incoming: | |
3325 | t.append(_('1 or more incoming')) |
|
3330 | t.append(_('1 or more incoming')) | |
3326 |
|
3331 | |||
3327 | dest, branches = hg.parseurl(ui.expandpath('default-push', 'default')) |
|
3332 | dest, branches = hg.parseurl(ui.expandpath('default-push', 'default')) | |
3328 | revs, checkout = hg.addbranchrevs(repo, repo, branches, None) |
|
3333 | revs, checkout = hg.addbranchrevs(repo, repo, branches, None) | |
3329 | other = hg.repository(hg.remoteui(repo, {}), dest) |
|
3334 | other = hg.repository(hg.remoteui(repo, {}), dest) | |
3330 | ui.debug('comparing with %s\n' % url.hidepassword(dest)) |
|
3335 | ui.debug('comparing with %s\n' % url.hidepassword(dest)) | |
3331 | repo.ui.pushbuffer() |
|
3336 | repo.ui.pushbuffer() | |
3332 | o = repo.findoutgoing(other) |
|
3337 | o = repo.findoutgoing(other) | |
3333 | repo.ui.popbuffer() |
|
3338 | repo.ui.popbuffer() | |
3334 | o = repo.changelog.nodesbetween(o, None)[0] |
|
3339 | o = repo.changelog.nodesbetween(o, None)[0] | |
3335 | if o: |
|
3340 | if o: | |
3336 | t.append(_('%d outgoing') % len(o)) |
|
3341 | t.append(_('%d outgoing') % len(o)) | |
3337 |
|
3342 | |||
3338 | if t: |
|
3343 | if t: | |
3339 | ui.write(_('remote: %s\n') % (', '.join(t))) |
|
3344 | ui.write(_('remote: %s\n') % (', '.join(t))) | |
3340 | else: |
|
3345 | else: | |
3341 | ui.status(_('remote: (synced)\n')) |
|
3346 | ui.status(_('remote: (synced)\n')) | |
3342 |
|
3347 | |||
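The ``update:`` line in summary counts changesets that are ancestors of the branch heads but not ancestors of the working directory parents. The code above does this with a 0/1 array indexed by revision; the same computation expressed with sets over a toy revision DAG (invented for illustration, plain Python)::

    dag = {0: [], 1: [0], 2: [1], 3: [1]}   # two heads on the branch: 2 and 3

    def ancestors(revs):
        # walk parent links; the starting revisions are included
        seen, stack = set(), list(revs)
        while stack:
            r = stack.pop()
            if r not in seen:
                seen.add(r)
                stack.extend(dag[r])
        return seen

    bheads = [2, 3]
    parents = [2]                            # working directory parent
    new = ancestors(bheads) - ancestors(parents)
    print(len(new))                          # 1 -> only revision 3 is "new"
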
3343 | def tag(ui, repo, name1, *names, **opts): |
|
3348 | def tag(ui, repo, name1, *names, **opts): | |
3344 | """add one or more tags for the current or given revision |
|
3349 | """add one or more tags for the current or given revision | |
3345 |
|
3350 | |||
3346 | Name a particular revision using <name>. |
|
3351 | Name a particular revision using <name>. | |
3347 |
|
3352 | |||
3348 | Tags are used to name particular revisions of the repository and are |
|
3353 | Tags are used to name particular revisions of the repository and are | |
3349 | very useful to compare different revisions, to go back to significant |
|
3354 | very useful to compare different revisions, to go back to significant | |
3350 | earlier versions or to mark branch points as releases, etc. |
|
3355 | earlier versions or to mark branch points as releases, etc. | |
3351 |
|
3356 | |||
3352 | If no revision is given, the parent of the working directory is |
|
3357 | If no revision is given, the parent of the working directory is | |
3353 | used, or tip if no revision is checked out. |
|
3358 | used, or tip if no revision is checked out. | |
3354 |
|
3359 | |||
3355 | To facilitate version control, distribution, and merging of tags, |
|
3360 | To facilitate version control, distribution, and merging of tags, | |
3356 | they are stored as a file named ".hgtags" which is managed |
|
3361 | they are stored as a file named ".hgtags" which is managed | |
3357 | similarly to other project files and can be hand-edited if |
|
3362 | similarly to other project files and can be hand-edited if | |
3358 | necessary. The file '.hg/localtags' is used for local tags (not |
|
3363 | necessary. The file '.hg/localtags' is used for local tags (not | |
3359 | shared among repositories). |
|
3364 | shared among repositories). | |
3360 |
|
3365 | |||
3361 | See :hg:`help dates` for a list of formats valid for -d/--date. |
|
3366 | See :hg:`help dates` for a list of formats valid for -d/--date. | |
3362 |
|
3367 | |||
3363 | Since tag names have priority over branch names during revision |
|
3368 | Since tag names have priority over branch names during revision | |
3364 | lookup, using an existing branch name as a tag name is discouraged. |
|
3369 | lookup, using an existing branch name as a tag name is discouraged. | |
3365 |
|
3370 | |||
3366 | Returns 0 on success. |
|
3371 | Returns 0 on success. | |
3367 | """ |
|
3372 | """ | |
3368 |
|
3373 | |||
3369 | rev_ = "." |
|
3374 | rev_ = "." | |
3370 | names = [t.strip() for t in (name1,) + names] |
|
3375 | names = [t.strip() for t in (name1,) + names] | |
3371 | if len(names) != len(set(names)): |
|
3376 | if len(names) != len(set(names)): | |
3372 | raise util.Abort(_('tag names must be unique')) |
|
3377 | raise util.Abort(_('tag names must be unique')) | |
3373 | for n in names: |
|
3378 | for n in names: | |
3374 | if n in ['tip', '.', 'null']: |
|
3379 | if n in ['tip', '.', 'null']: | |
3375 | raise util.Abort(_('the name \'%s\' is reserved') % n) |
|
3380 | raise util.Abort(_('the name \'%s\' is reserved') % n) | |
3376 | if opts.get('rev') and opts.get('remove'): |
|
3381 | if opts.get('rev') and opts.get('remove'): | |
3377 | raise util.Abort(_("--rev and --remove are incompatible")) |
|
3382 | raise util.Abort(_("--rev and --remove are incompatible")) | |
3378 | if opts.get('rev'): |
|
3383 | if opts.get('rev'): | |
3379 | rev_ = opts['rev'] |
|
3384 | rev_ = opts['rev'] | |
3380 | message = opts.get('message') |
|
3385 | message = opts.get('message') | |
3381 | if opts.get('remove'): |
|
3386 | if opts.get('remove'): | |
3382 | expectedtype = opts.get('local') and 'local' or 'global' |
|
3387 | expectedtype = opts.get('local') and 'local' or 'global' | |
3383 | for n in names: |
|
3388 | for n in names: | |
3384 | if not repo.tagtype(n): |
|
3389 | if not repo.tagtype(n): | |
3385 | raise util.Abort(_('tag \'%s\' does not exist') % n) |
|
3390 | raise util.Abort(_('tag \'%s\' does not exist') % n) | |
3386 | if repo.tagtype(n) != expectedtype: |
|
3391 | if repo.tagtype(n) != expectedtype: | |
3387 | if expectedtype == 'global': |
|
3392 | if expectedtype == 'global': | |
3388 | raise util.Abort(_('tag \'%s\' is not a global tag') % n) |
|
3393 | raise util.Abort(_('tag \'%s\' is not a global tag') % n) | |
3389 | else: |
|
3394 | else: | |
3390 | raise util.Abort(_('tag \'%s\' is not a local tag') % n) |
|
3395 | raise util.Abort(_('tag \'%s\' is not a local tag') % n) | |
3391 | rev_ = nullid |
|
3396 | rev_ = nullid | |
3392 | if not message: |
|
3397 | if not message: | |
3393 | # we don't translate commit messages |
|
3398 | # we don't translate commit messages | |
3394 | message = 'Removed tag %s' % ', '.join(names) |
|
3399 | message = 'Removed tag %s' % ', '.join(names) | |
3395 | elif not opts.get('force'): |
|
3400 | elif not opts.get('force'): | |
3396 | for n in names: |
|
3401 | for n in names: | |
3397 | if n in repo.tags(): |
|
3402 | if n in repo.tags(): | |
3398 | raise util.Abort(_('tag \'%s\' already exists ' |
|
3403 | raise util.Abort(_('tag \'%s\' already exists ' | |
3399 | '(use -f to force)') % n) |
|
3404 | '(use -f to force)') % n) | |
3400 | if not rev_ and repo.dirstate.parents()[1] != nullid: |
|
3405 | if not rev_ and repo.dirstate.parents()[1] != nullid: | |
3401 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
3406 | raise util.Abort(_('uncommitted merge - please provide a ' | |
3402 | 'specific revision')) |
|
3407 | 'specific revision')) | |
3403 | r = repo[rev_].node() |
|
3408 | r = repo[rev_].node() | |
3404 |
|
3409 | |||
3405 | if not message: |
|
3410 | if not message: | |
3406 | # we don't translate commit messages |
|
3411 | # we don't translate commit messages | |
3407 | message = ('Added tag %s for changeset %s' % |
|
3412 | message = ('Added tag %s for changeset %s' % | |
3408 | (', '.join(names), short(r))) |
|
3413 | (', '.join(names), short(r))) | |
3409 |
|
3414 | |||
3410 | date = opts.get('date') |
|
3415 | date = opts.get('date') | |
3411 | if date: |
|
3416 | if date: | |
3412 | date = util.parsedate(date) |
|
3417 | date = util.parsedate(date) | |
3413 |
|
3418 | |||
3414 | if opts.get('edit'): |
|
3419 | if opts.get('edit'): | |
3415 | message = ui.edit(message, ui.username()) |
|
3420 | message = ui.edit(message, ui.username()) | |
3416 |
|
3421 | |||
3417 | repo.tag(names, r, message, opts.get('local'), opts.get('user'), date) |
|
3422 | repo.tag(names, r, message, opts.get('local'), opts.get('user'), date) | |
3418 |
|
3423 | |||
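Before touching ``.hgtags``, the tag command normalizes and validates the requested names: strip whitespace, reject duplicates, and reject the names the revision parser reserves. A minimal standalone sketch of those checks (plain Python; ``ValueError`` stands in for ``util.Abort``)::

    def checktags(names):
        names = [t.strip() for t in names]
        if len(names) != len(set(names)):
            raise ValueError('tag names must be unique')
        for n in names:
            if n in ('tip', '.', 'null'):
                raise ValueError("the name '%s' is reserved" % n)
        return names

    print(checktags(['v1.0 ', 'v1.0-rc1']))   # ['v1.0', 'v1.0-rc1']
    try:
        checktags(['tip'])
    except ValueError as err:
        print(err)                            # the name 'tip' is reserved
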
3419 | def tags(ui, repo): |
|
3424 | def tags(ui, repo): | |
3420 | """list repository tags |
|
3425 | """list repository tags | |
3421 |
|
3426 | |||
3422 | This lists both regular and local tags. When the -v/--verbose |
|
3427 | This lists both regular and local tags. When the -v/--verbose | |
3423 | switch is used, a third column "local" is printed for local tags. |
|
3428 | switch is used, a third column "local" is printed for local tags. | |
3424 |
|
3429 | |||
3425 | Returns 0 on success. |
|
3430 | Returns 0 on success. | |
3426 | """ |
|
3431 | """ | |
3427 |
|
3432 | |||
3428 | hexfunc = ui.debugflag and hex or short |
|
3433 | hexfunc = ui.debugflag and hex or short | |
3429 | tagtype = "" |
|
3434 | tagtype = "" | |
3430 |
|
3435 | |||
3431 | for t, n in reversed(repo.tagslist()): |
|
3436 | for t, n in reversed(repo.tagslist()): | |
3432 | if ui.quiet: |
|
3437 | if ui.quiet: | |
3433 | ui.write("%s\n" % t) |
|
3438 | ui.write("%s\n" % t) | |
3434 | continue |
|
3439 | continue | |
3435 |
|
3440 | |||
3436 | try: |
|
3441 | try: | |
3437 | hn = hexfunc(n) |
|
3442 | hn = hexfunc(n) | |
3438 | r = "%5d:%s" % (repo.changelog.rev(n), hn) |
|
3443 | r = "%5d:%s" % (repo.changelog.rev(n), hn) | |
3439 | except error.LookupError: |
|
3444 | except error.LookupError: | |
3440 | r = " ?:%s" % hn |
|
3445 | r = " ?:%s" % hn | |
3441 | else: |
|
3446 | else: | |
3442 | spaces = " " * (30 - encoding.colwidth(t)) |
|
3447 | spaces = " " * (30 - encoding.colwidth(t)) | |
3443 | if ui.verbose: |
|
3448 | if ui.verbose: | |
3444 | if repo.tagtype(t) == 'local': |
|
3449 | if repo.tagtype(t) == 'local': | |
3445 | tagtype = " local" |
|
3450 | tagtype = " local" | |
3446 | else: |
|
3451 | else: | |
3447 | tagtype = "" |
|
3452 | tagtype = "" | |
3448 | ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) |
|
3453 | ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) | |
3449 |
|
3454 | |||
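In the non-quiet output of tags, each name is padded out to 30 display columns before the ``rev:node`` field; ``encoding.colwidth`` is used so that wide (e.g. East Asian) glyphs count correctly. A standalone sketch with ``len`` standing in for ``colwidth`` (the tag names and hashes below are invented)::

    entries = [('tip', '   34:fa090a935fbb'), ('v1.0', '   12:1e8d0f3ba8a5')]
    for t, r in entries:
        spaces = " " * (30 - len(t))     # colwidth(t) in the real code
        print("%s%s %s" % (t, spaces, r))
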
3450 | def tip(ui, repo, **opts): |
|
3455 | def tip(ui, repo, **opts): | |
3451 | """show the tip revision |
|
3456 | """show the tip revision | |
3452 |
|
3457 | |||
3453 | The tip revision (usually just called the tip) is the changeset |
|
3458 | The tip revision (usually just called the tip) is the changeset | |
3454 | most recently added to the repository (and therefore the most |
|
3459 | most recently added to the repository (and therefore the most | |
3455 | recently changed head). |
|
3460 | recently changed head). | |
3456 |
|
3461 | |||
3457 | If you have just made a commit, that commit will be the tip. If |
|
3462 | If you have just made a commit, that commit will be the tip. If | |
3458 | you have just pulled changes from another repository, the tip of |
|
3463 | you have just pulled changes from another repository, the tip of | |
3459 | that repository becomes the current tip. The "tip" tag is special |
|
3464 | that repository becomes the current tip. The "tip" tag is special | |
3460 | and cannot be renamed or assigned to a different changeset. |
|
3465 | and cannot be renamed or assigned to a different changeset. | |
3461 |
|
3466 | |||
3462 | Returns 0 on success. |
|
3467 | Returns 0 on success. | |
3463 | """ |
|
3468 | """ | |
3464 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
3469 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
3465 | displayer.show(repo[len(repo) - 1]) |
|
3470 | displayer.show(repo[len(repo) - 1]) | |
3466 | displayer.close() |
|
3471 | displayer.close() | |
3467 |
|
3472 | |||
3468 | def unbundle(ui, repo, fname1, *fnames, **opts): |
|
3473 | def unbundle(ui, repo, fname1, *fnames, **opts): | |
3469 | """apply one or more changegroup files |
|
3474 | """apply one or more changegroup files | |
3470 |
|
3475 | |||
3471 | Apply one or more compressed changegroup files generated by the |
|
3476 | Apply one or more compressed changegroup files generated by the | |
3472 | bundle command. |
|
3477 | bundle command. | |
3473 |
|
3478 | |||
3474 | Returns 0 on success, 1 if an update has unresolved files. |
|
3479 | Returns 0 on success, 1 if an update has unresolved files. | |
3475 | """ |
|
3480 | """ | |
3476 | fnames = (fname1,) + fnames |
|
3481 | fnames = (fname1,) + fnames | |
3477 |
|
3482 | |||
3478 | lock = repo.lock() |
|
3483 | lock = repo.lock() | |
3479 | try: |
|
3484 | try: | |
3480 | for fname in fnames: |
|
3485 | for fname in fnames: | |
3481 | f = url.open(ui, fname) |
|
3486 | f = url.open(ui, fname) | |
3482 | gen = changegroup.readbundle(f, fname) |
|
3487 | gen = changegroup.readbundle(f, fname) | |
3483 | modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname) |
|
3488 | modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname) | |
3484 | finally: |
|
3489 | finally: | |
3485 | lock.release() |
|
3490 | lock.release() | |
3486 |
|
3491 | |||
3487 | return postincoming(ui, repo, modheads, opts.get('update'), None) |
|
3492 | return postincoming(ui, repo, modheads, opts.get('update'), None) | |
3488 |
|
3493 | |||
3489 | def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False): |
|
3494 | def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False): | |
3490 | """update working directory (or switch revisions) |
|
3495 | """update working directory (or switch revisions) | |
3491 |
|
3496 | |||
3492 | Update the repository's working directory to the specified |
|
3497 | Update the repository's working directory to the specified | |
3493 | changeset. |
|
3498 | changeset. | |
3494 |
|
3499 | |||
3495 | If no changeset is specified, attempt to update to the head of the |
|
3500 | If no changeset is specified, attempt to update to the head of the | |
3496 | current branch. If this head is a descendant of the working |
|
3501 | current branch. If this head is a descendant of the working | |
3497 | directory's parent, update to it, otherwise abort. |
|
3502 | directory's parent, update to it, otherwise abort. | |
3498 |
|
3503 | |||
3499 | The following rules apply when the working directory contains |
|
3504 | The following rules apply when the working directory contains | |
3500 | uncommitted changes: |
|
3505 | uncommitted changes: | |
3501 |
|
3506 | |||
3502 | 1. If neither -c/--check nor -C/--clean is specified, and if |
|
3507 | 1. If neither -c/--check nor -C/--clean is specified, and if | |
3503 | the requested changeset is an ancestor or descendant of |
|
3508 | the requested changeset is an ancestor or descendant of | |
3504 | the working directory's parent, the uncommitted changes |
|
3509 | the working directory's parent, the uncommitted changes | |
3505 | are merged into the requested changeset and the merged |
|
3510 | are merged into the requested changeset and the merged | |
3506 | result is left uncommitted. If the requested changeset is |
|
3511 | result is left uncommitted. If the requested changeset is | |
3507 | not an ancestor or descendant (that is, it is on another |
|
3512 | not an ancestor or descendant (that is, it is on another | |
3508 | branch), the update is aborted and the uncommitted changes |
|
3513 | branch), the update is aborted and the uncommitted changes | |
3509 | are preserved. |
|
3514 | are preserved. | |
3510 |
|
3515 | |||
3511 | 2. With the -c/--check option, the update is aborted and the |
|
3516 | 2. With the -c/--check option, the update is aborted and the | |
3512 | uncommitted changes are preserved. |
|
3517 | uncommitted changes are preserved. | |
3513 |
|
3518 | |||
3514 | 3. With the -C/--clean option, uncommitted changes are discarded and |
|
3519 | 3. With the -C/--clean option, uncommitted changes are discarded and | |
3515 | the working directory is updated to the requested changeset. |
|
3520 | the working directory is updated to the requested changeset. | |
3516 |
|
3521 | |||
3517 | Use null as the changeset to remove the working directory (like |
|
3522 | Use null as the changeset to remove the working directory (like | |
3518 | :hg:`clone -U`). |
|
3523 | :hg:`clone -U`). | |
3519 |
|
3524 | |||
3520 | If you want to update just one file to an older changeset, use :hg:`revert`. |
|
3525 | If you want to update just one file to an older changeset, use :hg:`revert`. | |
3521 |
|
3526 | |||
3522 | See :hg:`help dates` for a list of formats valid for -d/--date. |
|
3527 | See :hg:`help dates` for a list of formats valid for -d/--date. | |
3523 |
|
3528 | |||
3524 | Returns 0 on success, 1 if there are unresolved files. |
|
3529 | Returns 0 on success, 1 if there are unresolved files. | |
3525 | """ |
|
3530 | """ | |
3526 | if rev and node: |
|
3531 | if rev and node: | |
3527 | raise util.Abort(_("please specify just one revision")) |
|
3532 | raise util.Abort(_("please specify just one revision")) | |
3528 |
|
3533 | |||
3529 | if not rev: |
|
3534 | if not rev: | |
3530 | rev = node |
|
3535 | rev = node | |
3531 |
|
3536 | |||
3532 | if check and clean: |
|
3537 | if check and clean: | |
3533 | raise util.Abort(_("cannot specify both -c/--check and -C/--clean")) |
|
3538 | raise util.Abort(_("cannot specify both -c/--check and -C/--clean")) | |
3534 |
|
3539 | |||
3535 | if check: |
|
3540 | if check: | |
3536 | # we could use dirty() but we can ignore merge and branch trivia |
|
3541 | # we could use dirty() but we can ignore merge and branch trivia | |
3537 | c = repo[None] |
|
3542 | c = repo[None] | |
3538 | if c.modified() or c.added() or c.removed(): |
|
3543 | if c.modified() or c.added() or c.removed(): | |
3539 | raise util.Abort(_("uncommitted local changes")) |
|
3544 | raise util.Abort(_("uncommitted local changes")) | |
3540 |
|
3545 | |||
3541 | if date: |
|
3546 | if date: | |
3542 | if rev: |
|
3547 | if rev: | |
3543 | raise util.Abort(_("you can't specify a revision and a date")) |
|
3548 | raise util.Abort(_("you can't specify a revision and a date")) | |
3544 | rev = cmdutil.finddate(ui, repo, date) |
|
3549 | rev = cmdutil.finddate(ui, repo, date) | |
3545 |
|
3550 | |||
3546 | if clean or check: |
|
3551 | if clean or check: | |
3547 | return hg.clean(repo, rev) |
|
3552 | return hg.clean(repo, rev) | |
3548 | else: |
|
3553 | else: | |
3549 | return hg.update(repo, rev) |
|
3554 | return hg.update(repo, rev) | |
3550 |
|
3555 | |||
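update accepts the target revision either positionally or via --rev, refuses contradictory combinations, and ends in one of two code paths: a discarding update for -C/--clean (and for -c/--check once the working directory has been verified clean) or a merging update otherwise. A simplified standalone sketch of that decision (plain Python; ``ValueError`` stands in for ``util.Abort`` and the dirty-working-directory check is omitted)::

    def pick_update(node=None, rev=None, clean=False, check=False, date=None):
        if rev and node:
            raise ValueError('please specify just one revision')
        if not rev:
            rev = node
        if check and clean:
            raise ValueError('cannot specify both -c/--check and -C/--clean')
        if date:
            if rev:
                raise ValueError("you can't specify a revision and a date")
            rev = 'rev-matching-%s' % date   # placeholder for finddate()
        if clean or check:
            return 'clean-update', rev
        return 'merge-update', rev

    print(pick_update(rev='1.0'))               # ('merge-update', '1.0')
    print(pick_update(node='tip', clean=True))  # ('clean-update', 'tip')
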
3551 | def verify(ui, repo): |
|
3556 | def verify(ui, repo): | |
3552 | """verify the integrity of the repository |
|
3557 | """verify the integrity of the repository | |
3553 |
|
3558 | |||
3554 | Verify the integrity of the current repository. |
|
3559 | Verify the integrity of the current repository. | |
3555 |
|
3560 | |||
3556 | This will perform an extensive check of the repository's |
|
3561 | This will perform an extensive check of the repository's | |
3557 | integrity, validating the hashes and checksums of each entry in |
|
3562 | integrity, validating the hashes and checksums of each entry in | |
3558 | the changelog, manifest, and tracked files, as well as the |
|
3563 | the changelog, manifest, and tracked files, as well as the | |
3559 | integrity of their crosslinks and indices. |
|
3564 | integrity of their crosslinks and indices. | |
3560 |
|
3565 | |||
3561 | Returns 0 on success, 1 if errors are encountered. |
|
3566 | Returns 0 on success, 1 if errors are encountered. | |
3562 | """ |
|
3567 | """ | |
3563 | return hg.verify(repo) |
|
3568 | return hg.verify(repo) | |
3564 |
|
3569 | |||
3565 | def version_(ui): |
|
3570 | def version_(ui): | |
3566 | """output version and copyright information""" |
|
3571 | """output version and copyright information""" | |
3567 | ui.write(_("Mercurial Distributed SCM (version %s)\n") |
|
3572 | ui.write(_("Mercurial Distributed SCM (version %s)\n") | |
3568 | % util.version()) |
|
3573 | % util.version()) | |
3569 | ui.status(_( |
|
3574 | ui.status(_( | |
3570 | "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" |
|
3575 | "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" | |
3571 | "This is free software; see the source for copying conditions. " |
|
3576 | "This is free software; see the source for copying conditions. " | |
3572 | "There is NO\nwarranty; " |
|
3577 | "There is NO\nwarranty; " | |
3573 | "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" |
|
3578 | "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" | |
3574 | )) |
|
3579 | )) | |
3575 |
|
3580 | |||
3576 | # Command options and aliases are listed here, alphabetically |
|
3581 | # Command options and aliases are listed here, alphabetically | |
3577 |
|
3582 | |||
3578 | globalopts = [ |
|
3583 | globalopts = [ | |
3579 | ('R', 'repository', '', |
|
3584 | ('R', 'repository', '', | |
3580 | _('repository root directory or name of overlay bundle file')), |
|
3585 | _('repository root directory or name of overlay bundle file')), | |
3581 | ('', 'cwd', '', _('change working directory')), |
|
3586 | ('', 'cwd', '', _('change working directory')), | |
3582 | ('y', 'noninteractive', None, |
|
3587 | ('y', 'noninteractive', None, | |
3583 | _('do not prompt, assume \'yes\' for any required answers')), |
|
3588 | _('do not prompt, assume \'yes\' for any required answers')), | |
3584 | ('q', 'quiet', None, _('suppress output')), |
|
3589 | ('q', 'quiet', None, _('suppress output')), | |
3585 | ('v', 'verbose', None, _('enable additional output')), |
|
3590 | ('v', 'verbose', None, _('enable additional output')), | |
3586 | ('', 'config', [], |
|
3591 | ('', 'config', [], | |
3587 | _('set/override config option (use \'section.name=value\')')), |
|
3592 | _('set/override config option (use \'section.name=value\')')), | |
3588 | ('', 'debug', None, _('enable debugging output')), |
|
3593 | ('', 'debug', None, _('enable debugging output')), | |
3589 | ('', 'debugger', None, _('start debugger')), |
|
3594 | ('', 'debugger', None, _('start debugger')), | |
3590 | ('', 'encoding', encoding.encoding, _('set the charset encoding')), |
|
3595 | ('', 'encoding', encoding.encoding, _('set the charset encoding')), | |
3591 | ('', 'encodingmode', encoding.encodingmode, |
|
3596 | ('', 'encodingmode', encoding.encodingmode, | |
3592 | _('set the charset encoding mode')), |
|
3597 | _('set the charset encoding mode')), | |
3593 | ('', 'traceback', None, _('always print a traceback on exception')), |
|
3598 | ('', 'traceback', None, _('always print a traceback on exception')), | |
3594 | ('', 'time', None, _('time how long the command takes')), |
|
3599 | ('', 'time', None, _('time how long the command takes')), | |
3595 | ('', 'profile', None, _('print command execution profile')), |
|
3600 | ('', 'profile', None, _('print command execution profile')), | |
3596 | ('', 'version', None, _('output version information and exit')), |
|
3601 | ('', 'version', None, _('output version information and exit')), | |
3597 | ('h', 'help', None, _('display help and exit')), |
|
3602 | ('h', 'help', None, _('display help and exit')), | |
3598 | ] |
|
3603 | ] | |
3599 |
|
3604 | |||
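The option tables that follow all share one shape: each entry is ``(short flag, long flag, default value, help text)``, and per-command option lists are composed by concatenating the shared tables (as ``logopts`` does with ``templateopts``). A small standalone sketch of reading such a table (plain Python; the composed ``exampleopts`` list is invented)::

    dryrunopts = [('n', 'dry-run', None,
                   'do not perform actions, just print output')]
    walkopts = [
        ('I', 'include', [], 'include names matching the given patterns'),
        ('X', 'exclude', [], 'exclude names matching the given patterns'),
    ]
    exampleopts = dryrunopts + walkopts          # composed like logopts

    for short, long_, default, helptext in exampleopts:
        flag = '-%s/--%s' % (short, long_) if short else '--%s' % long_
        print('%-20s default=%r  %s' % (flag, default, helptext))
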
3600 | dryrunopts = [('n', 'dry-run', None, |
|
3605 | dryrunopts = [('n', 'dry-run', None, | |
3601 | _('do not perform actions, just print output'))] |
|
3606 | _('do not perform actions, just print output'))] | |
3602 |
|
3607 | |||
3603 | remoteopts = [ |
|
3608 | remoteopts = [ | |
3604 | ('e', 'ssh', '', _('specify ssh command to use')), |
|
3609 | ('e', 'ssh', '', _('specify ssh command to use')), | |
3605 | ('', 'remotecmd', '', _('specify hg command to run on the remote side')), |
|
3610 | ('', 'remotecmd', '', _('specify hg command to run on the remote side')), | |
3606 | ] |
|
3611 | ] | |
3607 |
|
3612 | |||
3608 | walkopts = [ |
|
3613 | walkopts = [ | |
3609 | ('I', 'include', [], _('include names matching the given patterns')), |
|
3614 | ('I', 'include', [], _('include names matching the given patterns')), | |
3610 | ('X', 'exclude', [], _('exclude names matching the given patterns')), |
|
3615 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
3611 | ] |
|
3616 | ] | |
3612 |
|
3617 | |||
3613 | commitopts = [ |
|
3618 | commitopts = [ | |
3614 | ('m', 'message', '', _('use <text> as commit message')), |
|
3619 | ('m', 'message', '', _('use <text> as commit message')), | |
3615 | ('l', 'logfile', '', _('read commit message from <file>')), |
|
3620 | ('l', 'logfile', '', _('read commit message from <file>')), | |
3616 | ] |
|
3621 | ] | |
3617 |
|
3622 | |||
3618 | commitopts2 = [ |
|
3623 | commitopts2 = [ | |
3619 | ('d', 'date', '', _('record datecode as commit date')), |
|
3624 | ('d', 'date', '', _('record datecode as commit date')), | |
3620 | ('u', 'user', '', _('record the specified user as committer')), |
|
3625 | ('u', 'user', '', _('record the specified user as committer')), | |
3621 | ] |
|
3626 | ] | |
3622 |
|
3627 | |||
3623 | templateopts = [ |
|
3628 | templateopts = [ | |
3624 | ('', 'style', '', _('display using template map file')), |
|
3629 | ('', 'style', '', _('display using template map file')), | |
3625 | ('', 'template', '', _('display with template')), |
|
3630 | ('', 'template', '', _('display with template')), | |
3626 | ] |
|
3631 | ] | |
3627 |
|
3632 | |||
3628 | logopts = [ |
|
3633 | logopts = [ | |
3629 | ('p', 'patch', None, _('show patch')), |
|
3634 | ('p', 'patch', None, _('show patch')), | |
3630 | ('g', 'git', None, _('use git extended diff format')), |
|
3635 | ('g', 'git', None, _('use git extended diff format')), | |
3631 | ('l', 'limit', '', _('limit number of changes displayed')), |
|
3636 | ('l', 'limit', '', _('limit number of changes displayed')), | |
3632 | ('M', 'no-merges', None, _('do not show merges')), |
|
3637 | ('M', 'no-merges', None, _('do not show merges')), | |
3633 | ('', 'stat', None, _('output diffstat-style summary of changes')), |
|
3638 | ('', 'stat', None, _('output diffstat-style summary of changes')), | |
3634 | ] + templateopts |
|
3639 | ] + templateopts | |
3635 |
|
3640 | |||
3636 | diffopts = [ |
|
3641 | diffopts = [ | |
3637 | ('a', 'text', None, _('treat all files as text')), |
|
3642 | ('a', 'text', None, _('treat all files as text')), | |
3638 | ('g', 'git', None, _('use git extended diff format')), |
|
3643 | ('g', 'git', None, _('use git extended diff format')), | |
3639 | ('', 'nodates', None, _('omit dates from diff headers')) |
|
3644 | ('', 'nodates', None, _('omit dates from diff headers')) | |
3640 | ] |
|
3645 | ] | |
3641 |
|
3646 | |||
3642 | diffopts2 = [ |
|
3647 | diffopts2 = [ | |
3643 | ('p', 'show-function', None, _('show which function each change is in')), |
|
3648 | ('p', 'show-function', None, _('show which function each change is in')), | |
3644 | ('', 'reverse', None, _('produce a diff that undoes the changes')), |
|
3649 | ('', 'reverse', None, _('produce a diff that undoes the changes')), | |
3645 | ('w', 'ignore-all-space', None, |
|
3650 | ('w', 'ignore-all-space', None, | |
3646 | _('ignore white space when comparing lines')), |
|
3651 | _('ignore white space when comparing lines')), | |
3647 | ('b', 'ignore-space-change', None, |
|
3652 | ('b', 'ignore-space-change', None, | |
3648 | _('ignore changes in the amount of white space')), |
|
3653 | _('ignore changes in the amount of white space')), | |
3649 | ('B', 'ignore-blank-lines', None, |
|
3654 | ('B', 'ignore-blank-lines', None, | |
3650 | _('ignore changes whose lines are all blank')), |
|
3655 | _('ignore changes whose lines are all blank')), | |
3651 | ('U', 'unified', '', _('number of lines of context to show')), |
|
3656 | ('U', 'unified', '', _('number of lines of context to show')), | |
3652 | ('', 'stat', None, _('output diffstat-style summary of changes')), |
|
3657 | ('', 'stat', None, _('output diffstat-style summary of changes')), | |
3653 | ] |
|
3658 | ] | |
3654 |
|
3659 | |||
3655 | similarityopts = [ |
|
3660 | similarityopts = [ | |
3656 | ('s', 'similarity', '', |
|
3661 | ('s', 'similarity', '', | |
3657 | _('guess renamed files by similarity (0<=s<=100)')) |
|
3662 | _('guess renamed files by similarity (0<=s<=100)')) | |
3658 | ] |
|
3663 | ] | |
3659 |
|
3664 | |||
3660 | table = { |
|
3665 | table = { | |
3661 | "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')), |
|
3666 | "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')), | |
3662 | "addremove": |
|
3667 | "addremove": | |
3663 | (addremove, similarityopts + walkopts + dryrunopts, |
|
3668 | (addremove, similarityopts + walkopts + dryrunopts, | |
3664 | _('[OPTION]... [FILE]...')), |
|
3669 | _('[OPTION]... [FILE]...')), | |
3665 | "^annotate|blame": |
|
3670 | "^annotate|blame": | |
3666 | (annotate, |
|
3671 | (annotate, | |
3667 | [('r', 'rev', '', _('annotate the specified revision')), |
|
3672 | [('r', 'rev', '', _('annotate the specified revision')), | |
3668 | ('', 'follow', None, |
|
3673 | ('', 'follow', None, | |
3669 | _('follow copies/renames and list the filename (DEPRECATED)')), |
|
3674 | _('follow copies/renames and list the filename (DEPRECATED)')), | |
3670 | ('', 'no-follow', None, _("don't follow copies and renames")), |
|
3675 | ('', 'no-follow', None, _("don't follow copies and renames")), | |
3671 | ('a', 'text', None, _('treat all files as text')), |
|
3676 | ('a', 'text', None, _('treat all files as text')), | |
3672 | ('u', 'user', None, _('list the author (long with -v)')), |
|
3677 | ('u', 'user', None, _('list the author (long with -v)')), | |
3673 | ('f', 'file', None, _('list the filename')), |
|
3678 | ('f', 'file', None, _('list the filename')), | |
3674 | ('d', 'date', None, _('list the date (short with -q)')), |
|
3679 | ('d', 'date', None, _('list the date (short with -q)')), | |
3675 | ('n', 'number', None, _('list the revision number (default)')), |
|
3680 | ('n', 'number', None, _('list the revision number (default)')), | |
3676 | ('c', 'changeset', None, _('list the changeset')), |
|
3681 | ('c', 'changeset', None, _('list the changeset')), | |
3677 | ('l', 'line-number', None, |
|
3682 | ('l', 'line-number', None, | |
3678 | _('show line number at the first appearance')) |
|
3683 | _('show line number at the first appearance')) | |
3679 | ] + walkopts, |
|
3684 | ] + walkopts, | |
3680 | _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')), |
|
3685 | _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')), | |
3681 | "archive": |
|
3686 | "archive": | |
3682 | (archive, |
|
3687 | (archive, | |
3683 | [('', 'no-decode', None, _('do not pass files through decoders')), |
|
3688 | [('', 'no-decode', None, _('do not pass files through decoders')), | |
3684 | ('p', 'prefix', '', _('directory prefix for files in archive')), |
|
3689 | ('p', 'prefix', '', _('directory prefix for files in archive')), | |
3685 | ('r', 'rev', '', _('revision to distribute')), |
|
3690 | ('r', 'rev', '', _('revision to distribute')), | |
3686 | ('t', 'type', '', _('type of distribution to create')), |
|
3691 | ('t', 'type', '', _('type of distribution to create')), | |
3687 | ] + walkopts, |
|
3692 | ] + walkopts, | |
3688 | _('[OPTION]... DEST')), |
|
3693 | _('[OPTION]... DEST')), | |
3689 | "backout": |
|
3694 | "backout": | |
3690 | (backout, |
|
3695 | (backout, | |
3691 | [('', 'merge', None, |
|
3696 | [('', 'merge', None, | |
3692 | _('merge with old dirstate parent after backout')), |
|
3697 | _('merge with old dirstate parent after backout')), | |
3693 | ('', 'parent', '', _('parent to choose when backing out merge')), |
|
3698 | ('', 'parent', '', _('parent to choose when backing out merge')), | |
3694 | ('r', 'rev', '', _('revision to backout')), |
|
3699 | ('r', 'rev', '', _('revision to backout')), | |
3695 | ] + walkopts + commitopts + commitopts2, |
|
3700 | ] + walkopts + commitopts + commitopts2, | |
3696 | _('[OPTION]... [-r] REV')), |
|
3701 | _('[OPTION]... [-r] REV')), | |
3697 | "bisect": |
|
3702 | "bisect": | |
3698 | (bisect, |
|
3703 | (bisect, | |
3699 | [('r', 'reset', False, _('reset bisect state')), |
|
3704 | [('r', 'reset', False, _('reset bisect state')), | |
3700 | ('g', 'good', False, _('mark changeset good')), |
|
3705 | ('g', 'good', False, _('mark changeset good')), | |
3701 | ('b', 'bad', False, _('mark changeset bad')), |
|
3706 | ('b', 'bad', False, _('mark changeset bad')), | |
3702 | ('s', 'skip', False, _('skip testing changeset')), |
|
3707 | ('s', 'skip', False, _('skip testing changeset')), | |
3703 | ('c', 'command', '', _('use command to check changeset state')), |
|
3708 | ('c', 'command', '', _('use command to check changeset state')), | |
3704 | ('U', 'noupdate', False, _('do not update to target'))], |
|
3709 | ('U', 'noupdate', False, _('do not update to target'))], | |
3705 | _("[-gbsr] [-U] [-c CMD] [REV]")), |
|
3710 | _("[-gbsr] [-U] [-c CMD] [REV]")), | |
3706 | "branch": |
|
3711 | "branch": | |
3707 | (branch, |
|
3712 | (branch, | |
3708 | [('f', 'force', None, |
|
3713 | [('f', 'force', None, | |
3709 | _('set branch name even if it shadows an existing branch')), |
|
3714 | _('set branch name even if it shadows an existing branch')), | |
3710 | ('C', 'clean', None, _('reset branch name to parent branch name'))], |
|
3715 | ('C', 'clean', None, _('reset branch name to parent branch name'))], | |
3711 | _('[-fC] [NAME]')), |
|
3716 | _('[-fC] [NAME]')), | |
3712 | "branches": |
|
3717 | "branches": | |
3713 | (branches, |
|
3718 | (branches, | |
3714 | [('a', 'active', False, |
|
3719 | [('a', 'active', False, | |
3715 | _('show only branches that have unmerged heads')), |
|
3720 | _('show only branches that have unmerged heads')), | |
3716 | ('c', 'closed', False, |
|
3721 | ('c', 'closed', False, | |
3717 | _('show normal and closed branches'))], |
|
3722 | _('show normal and closed branches'))], | |
3718 | _('[-ac]')), |
|
3723 | _('[-ac]')), | |
3719 | "bundle": |
|
3724 | "bundle": | |
3720 | (bundle, |
|
3725 | (bundle, | |
3721 | [('f', 'force', None, |
|
3726 | [('f', 'force', None, | |
3722 | _('run even when the destination is unrelated')), |
|
3727 | _('run even when the destination is unrelated')), | |
3723 | ('r', 'rev', [], |
|
3728 | ('r', 'rev', [], | |
3724 | _('a changeset intended to be added to the destination')), |
|
3729 | _('a changeset intended to be added to the destination')), | |
3725 | ('b', 'branch', [], |
|
3730 | ('b', 'branch', [], | |
3726 | _('a specific branch you would like to bundle')), |
|
3731 | _('a specific branch you would like to bundle')), | |
3727 | ('', 'base', [], |
|
3732 | ('', 'base', [], | |
3728 | _('a base changeset assumed to be available at the destination')), |
|
3733 | _('a base changeset assumed to be available at the destination')), | |
3729 | ('a', 'all', None, _('bundle all changesets in the repository')), |
|
3734 | ('a', 'all', None, _('bundle all changesets in the repository')), | |
3730 | ('t', 'type', 'bzip2', _('bundle compression type to use')), |
|
3735 | ('t', 'type', 'bzip2', _('bundle compression type to use')), | |
3731 | ] + remoteopts, |
|
3736 | ] + remoteopts, | |
3732 | _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')), |
|
3737 | _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')), | |
3733 | "cat": |
|
3738 | "cat": | |
3734 | (cat, |
|
3739 | (cat, | |
3735 | [('o', 'output', '', _('print output to file with formatted name')), |
|
3740 | [('o', 'output', '', _('print output to file with formatted name')), | |
3736 | ('r', 'rev', '', _('print the given revision')), |
|
3741 | ('r', 'rev', '', _('print the given revision')), | |
3737 | ('', 'decode', None, _('apply any matching decode filter')), |
|
3742 | ('', 'decode', None, _('apply any matching decode filter')), | |
3738 | ] + walkopts, |
|
3743 | ] + walkopts, | |
3739 | _('[OPTION]... FILE...')), |
|
3744 | _('[OPTION]... FILE...')), | |
3740 | "^clone": |
|
3745 | "^clone": | |
3741 | (clone, |
|
3746 | (clone, | |
3742 | [('U', 'noupdate', None, |
|
3747 | [('U', 'noupdate', None, | |
3743 | _('the clone will include an empty working copy (only a repository)')), |
|
3748 | _('the clone will include an empty working copy (only a repository)')), | |
3744 | ('u', 'updaterev', '', |
|
3749 | ('u', 'updaterev', '', | |
3745 | _('revision, tag or branch to check out')), |
|
3750 | _('revision, tag or branch to check out')), | |
3746 | ('r', 'rev', [], |
|
3751 | ('r', 'rev', [], | |
3747 | _('include the specified changeset')), |
|
3752 | _('include the specified changeset')), | |
3748 | ('b', 'branch', [], |
|
3753 | ('b', 'branch', [], | |
3749 | _('clone only the specified branch')), |
|
3754 | _('clone only the specified branch')), | |
3750 | ('', 'pull', None, _('use pull protocol to copy metadata')), |
|
3755 | ('', 'pull', None, _('use pull protocol to copy metadata')), | |
3751 | ('', 'uncompressed', None, |
|
3756 | ('', 'uncompressed', None, | |
3752 | _('use uncompressed transfer (fast over LAN)')), |
|
3757 | _('use uncompressed transfer (fast over LAN)')), | |
3753 | ] + remoteopts, |
|
3758 | ] + remoteopts, | |
3754 | _('[OPTION]... SOURCE [DEST]')), |
|
3759 | _('[OPTION]... SOURCE [DEST]')), | |
3755 | "^commit|ci": |
|
3760 | "^commit|ci": | |
3756 | (commit, |
|
3761 | (commit, | |
3757 | [('A', 'addremove', None, |
|
3762 | [('A', 'addremove', None, | |
3758 | _('mark new/missing files as added/removed before committing')), |
|
3763 | _('mark new/missing files as added/removed before committing')), | |
3759 | ('', 'close-branch', None, |
|
3764 | ('', 'close-branch', None, | |
3760 | _('mark a branch as closed, hiding it from the branch list')), |
|
3765 | _('mark a branch as closed, hiding it from the branch list')), | |
3761 | ] + walkopts + commitopts + commitopts2, |
|
3766 | ] + walkopts + commitopts + commitopts2, | |
3762 | _('[OPTION]... [FILE]...')), |
|
3767 | _('[OPTION]... [FILE]...')), | |
3763 | "copy|cp": |
|
3768 | "copy|cp": | |
3764 | (copy, |
|
3769 | (copy, | |
3765 | [('A', 'after', None, _('record a copy that has already occurred')), |
|
3770 | [('A', 'after', None, _('record a copy that has already occurred')), | |
3766 | ('f', 'force', None, |
|
3771 | ('f', 'force', None, | |
3767 | _('forcibly copy over an existing managed file')), |
|
3772 | _('forcibly copy over an existing managed file')), | |
3768 | ] + walkopts + dryrunopts, |
|
3773 | ] + walkopts + dryrunopts, | |
3769 | _('[OPTION]... [SOURCE]... DEST')), |
|
3774 | _('[OPTION]... [SOURCE]... DEST')), | |
3770 | "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')), |
|
3775 | "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')), | |
3771 | "debugcheckstate": (debugcheckstate, [], ''), |
|
3776 | "debugcheckstate": (debugcheckstate, [], ''), | |
3772 | "debugcommands": (debugcommands, [], _('[COMMAND]')), |
|
3777 | "debugcommands": (debugcommands, [], _('[COMMAND]')), | |
3773 | "debugcomplete": |
|
3778 | "debugcomplete": | |
3774 | (debugcomplete, |
|
3779 | (debugcomplete, | |
3775 | [('o', 'options', None, _('show the command options'))], |
|
3780 | [('o', 'options', None, _('show the command options'))], | |
3776 | _('[-o] CMD')), |
|
3781 | _('[-o] CMD')), | |
3777 | "debugdate": |
|
3782 | "debugdate": | |
3778 | (debugdate, |
|
3783 | (debugdate, | |
3779 | [('e', 'extended', None, _('try extended date formats'))], |
|
3784 | [('e', 'extended', None, _('try extended date formats'))], | |
3780 | _('[-e] DATE [RANGE]')), |
|
3785 | _('[-e] DATE [RANGE]')), | |
3781 | "debugdata": (debugdata, [], _('FILE REV')), |
|
3786 | "debugdata": (debugdata, [], _('FILE REV')), | |
3782 | "debugfsinfo": (debugfsinfo, [], _('[PATH]')), |
|
3787 | "debugfsinfo": (debugfsinfo, [], _('[PATH]')), | |
3783 | "debugindex": (debugindex, [], _('FILE')), |
|
3788 | "debugindex": (debugindex, [], _('FILE')), | |
3784 | "debugindexdot": (debugindexdot, [], _('FILE')), |
|
3789 | "debugindexdot": (debugindexdot, [], _('FILE')), | |
3785 | "debuginstall": (debuginstall, [], ''), |
|
3790 | "debuginstall": (debuginstall, [], ''), | |
3786 | "debugrebuildstate": |
|
3791 | "debugrebuildstate": | |
3787 | (debugrebuildstate, |
|
3792 | (debugrebuildstate, | |
3788 | [('r', 'rev', '', _('revision to rebuild to'))], |
|
3793 | [('r', 'rev', '', _('revision to rebuild to'))], | |
3789 | _('[-r REV] [REV]')), |
|
3794 | _('[-r REV] [REV]')), | |
3790 | "debugrename": |
|
3795 | "debugrename": | |
3791 | (debugrename, |
|
3796 | (debugrename, | |
3792 | [('r', 'rev', '', _('revision to debug'))], |
|
3797 | [('r', 'rev', '', _('revision to debug'))], | |
3793 | _('[-r REV] FILE')), |
|
3798 | _('[-r REV] FILE')), | |
3794 | "debugrevspec": |
|
3799 | "debugrevspec": | |
3795 | (debugrevspec, [], ('REVSPEC')), |
|
3800 | (debugrevspec, [], ('REVSPEC')), | |
3796 | "debugsetparents": |
|
3801 | "debugsetparents": | |
3797 | (debugsetparents, [], _('REV1 [REV2]')), |
|
3802 | (debugsetparents, [], _('REV1 [REV2]')), | |
3798 | "debugstate": |
|
3803 | "debugstate": | |
3799 | (debugstate, |
|
3804 | (debugstate, | |
3800 | [('', 'nodates', None, _('do not display the saved mtime'))], |
|
3805 | [('', 'nodates', None, _('do not display the saved mtime'))], | |
3801 | _('[OPTION]...')), |
|
3806 | _('[OPTION]...')), | |
3802 | "debugsub": |
|
3807 | "debugsub": | |
3803 | (debugsub, |
|
3808 | (debugsub, | |
3804 | [('r', 'rev', '', _('revision to check'))], |
|
3809 | [('r', 'rev', '', _('revision to check'))], | |
3805 | _('[-r REV] [REV]')), |
|
3810 | _('[-r REV] [REV]')), | |
3806 | "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')), |
|
3811 | "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')), | |
3807 | "^diff": |
|
3812 | "^diff": | |
3808 | (diff, |
|
3813 | (diff, | |
3809 | [('r', 'rev', [], _('revision')), |
|
3814 | [('r', 'rev', [], _('revision')), | |
3810 | ('c', 'change', '', _('change made by revision')) |
|
3815 | ('c', 'change', '', _('change made by revision')) | |
3811 | ] + diffopts + diffopts2 + walkopts, |
|
3816 | ] + diffopts + diffopts2 + walkopts, | |
3812 | _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')), |
|
3817 | _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')), | |
3813 | "^export": |
|
3818 | "^export": | |
3814 | (export, |
|
3819 | (export, | |
3815 | [('o', 'output', '', _('print output to file with formatted name')), |
|
3820 | [('o', 'output', '', _('print output to file with formatted name')), | |
3816 | ('', 'switch-parent', None, _('diff against the second parent')), |
|
3821 | ('', 'switch-parent', None, _('diff against the second parent')), | |
3817 | ('r', 'rev', [], _('revisions to export')), |
|
3822 | ('r', 'rev', [], _('revisions to export')), | |
3818 | ] + diffopts, |
|
3823 | ] + diffopts, | |
3819 | _('[OPTION]... [-o OUTFILESPEC] REV...')), |
|
3824 | _('[OPTION]... [-o OUTFILESPEC] REV...')), | |
3820 | "^forget": |
|
3825 | "^forget": | |
3821 | (forget, |
|
3826 | (forget, | |
3822 | [] + walkopts, |
|
3827 | [] + walkopts, | |
3823 | _('[OPTION]... FILE...')), |
|
3828 | _('[OPTION]... FILE...')), | |
3824 | "grep": |
|
3829 | "grep": | |
3825 | (grep, |
|
3830 | (grep, | |
3826 | [('0', 'print0', None, _('end fields with NUL')), |
|
3831 | [('0', 'print0', None, _('end fields with NUL')), | |
3827 | ('', 'all', None, _('print all revisions that match')), |
|
3832 | ('', 'all', None, _('print all revisions that match')), | |
3828 | ('f', 'follow', None, |
|
3833 | ('f', 'follow', None, | |
3829 | _('follow changeset history,' |
|
3834 | _('follow changeset history,' | |
3830 | ' or file history across copies and renames')), |
|
3835 | ' or file history across copies and renames')), | |
3831 | ('i', 'ignore-case', None, _('ignore case when matching')), |
|
3836 | ('i', 'ignore-case', None, _('ignore case when matching')), | |
3832 | ('l', 'files-with-matches', None, |
|
3837 | ('l', 'files-with-matches', None, | |
3833 | _('print only filenames and revisions that match')), |
|
3838 | _('print only filenames and revisions that match')), | |
3834 | ('n', 'line-number', None, _('print matching line numbers')), |
|
3839 | ('n', 'line-number', None, _('print matching line numbers')), | |
3835 | ('r', 'rev', [], _('only search files changed within revision range')), |
|
3840 | ('r', 'rev', [], _('only search files changed within revision range')), | |
3836 | ('u', 'user', None, _('list the author (long with -v)')), |
|
3841 | ('u', 'user', None, _('list the author (long with -v)')), | |
3837 | ('d', 'date', None, _('list the date (short with -q)')), |
|
3842 | ('d', 'date', None, _('list the date (short with -q)')), | |
3838 | ] + walkopts, |
|
3843 | ] + walkopts, | |
3839 | _('[OPTION]... PATTERN [FILE]...')), |
|
3844 | _('[OPTION]... PATTERN [FILE]...')), | |
3840 | "heads": |
|
3845 | "heads": | |
3841 | (heads, |
|
3846 | (heads, | |
3842 | [('r', 'rev', '', _('show only heads which are descendants of REV')), |
|
3847 | [('r', 'rev', '', _('show only heads which are descendants of REV')), | |
3843 | ('t', 'topo', False, _('show topological heads only')), |
|
3848 | ('t', 'topo', False, _('show topological heads only')), | |
3844 | ('a', 'active', False, |
|
3849 | ('a', 'active', False, | |
3845 | _('show active branchheads only [DEPRECATED]')), |
|
3850 | _('show active branchheads only [DEPRECATED]')), | |
3846 | ('c', 'closed', False, |
|
3851 | ('c', 'closed', False, | |
3847 | _('show normal and closed branch heads')), |
|
3852 | _('show normal and closed branch heads')), | |
3848 | ] + templateopts, |
|
3853 | ] + templateopts, | |
3849 | _('[-ac] [-r REV] [REV]...')), |
|
3854 | _('[-ac] [-r REV] [REV]...')), | |
3850 | "help": (help_, [], _('[TOPIC]')), |
|
3855 | "help": (help_, [], _('[TOPIC]')), | |
3851 | "identify|id": |
|
3856 | "identify|id": | |
3852 | (identify, |
|
3857 | (identify, | |
3853 | [('r', 'rev', '', _('identify the specified revision')), |
|
3858 | [('r', 'rev', '', _('identify the specified revision')), | |
3854 | ('n', 'num', None, _('show local revision number')), |
|
3859 | ('n', 'num', None, _('show local revision number')), | |
3855 | ('i', 'id', None, _('show global revision id')), |
|
3860 | ('i', 'id', None, _('show global revision id')), | |
3856 | ('b', 'branch', None, _('show branch')), |
|
3861 | ('b', 'branch', None, _('show branch')), | |
3857 | ('t', 'tags', None, _('show tags'))], |
|
3862 | ('t', 'tags', None, _('show tags'))], | |
3858 | _('[-nibt] [-r REV] [SOURCE]')), |
|
3863 | _('[-nibt] [-r REV] [SOURCE]')), | |
3859 | "import|patch": |
|
3864 | "import|patch": | |
3860 | (import_, |
|
3865 | (import_, | |
3861 | [('p', 'strip', 1, |
|
3866 | [('p', 'strip', 1, | |
3862 | _('directory strip option for patch. This has the same ' |
|
3867 | _('directory strip option for patch. This has the same ' | |
3863 | 'meaning as the corresponding patch option')), |
|
3868 | 'meaning as the corresponding patch option')), | |
3864 | ('b', 'base', '', _('base path')), |
|
3869 | ('b', 'base', '', _('base path')), | |
3865 | ('f', 'force', None, |
|
3870 | ('f', 'force', None, | |
3866 | _('skip check for outstanding uncommitted changes')), |
|
3871 | _('skip check for outstanding uncommitted changes')), | |
3867 | ('', 'no-commit', None, |
|
3872 | ('', 'no-commit', None, | |
3868 | _("don't commit, just update the working directory")), |
|
3873 | _("don't commit, just update the working directory")), | |
3869 | ('', 'exact', None, |
|
3874 | ('', 'exact', None, | |
3870 | _('apply patch to the nodes from which it was generated')), |
|
3875 | _('apply patch to the nodes from which it was generated')), | |
3871 | ('', 'import-branch', None, |
|
3876 | ('', 'import-branch', None, | |
3872 | _('use any branch information in patch (implied by --exact)'))] + |
|
3877 | _('use any branch information in patch (implied by --exact)'))] + | |
3873 | commitopts + commitopts2 + similarityopts, |
|
3878 | commitopts + commitopts2 + similarityopts, | |
3874 | _('[OPTION]... PATCH...')), |
|
3879 | _('[OPTION]... PATCH...')), | |
3875 | "incoming|in": |
|
3880 | "incoming|in": | |
3876 | (incoming, |
|
3881 | (incoming, | |
3877 | [('f', 'force', None, |
|
3882 | [('f', 'force', None, | |
3878 | _('run even if remote repository is unrelated')), |
|
3883 | _('run even if remote repository is unrelated')), | |
3879 | ('n', 'newest-first', None, _('show newest record first')), |
|
3884 | ('n', 'newest-first', None, _('show newest record first')), | |
3880 | ('', 'bundle', '', _('file to store the bundles into')), |
|
3885 | ('', 'bundle', '', _('file to store the bundles into')), | |
3881 | ('r', 'rev', [], |
|
3886 | ('r', 'rev', [], | |
3882 | _('a remote changeset intended to be added')), |
|
3887 | _('a remote changeset intended to be added')), | |
3883 | ('b', 'branch', [], |
|
3888 | ('b', 'branch', [], | |
3884 | _('a specific branch you would like to pull')), |
|
3889 | _('a specific branch you would like to pull')), | |
3885 | ] + logopts + remoteopts, |
|
3890 | ] + logopts + remoteopts, | |
3886 | _('[-p] [-n] [-M] [-f] [-r REV]...' |
|
3891 | _('[-p] [-n] [-M] [-f] [-r REV]...' | |
3887 | ' [--bundle FILENAME] [SOURCE]')), |
|
3892 | ' [--bundle FILENAME] [SOURCE]')), | |
3888 | "^init": |
|
3893 | "^init": | |
3889 | (init, |
|
3894 | (init, | |
3890 | remoteopts, |
|
3895 | remoteopts, | |
3891 | _('[-e CMD] [--remotecmd CMD] [DEST]')), |
|
3896 | _('[-e CMD] [--remotecmd CMD] [DEST]')), | |
3892 | "locate": |
|
3897 | "locate": | |
3893 | (locate, |
|
3898 | (locate, | |
3894 | [('r', 'rev', '', _('search the repository as it is in REV')), |
|
3899 | [('r', 'rev', '', _('search the repository as it is in REV')), | |
3895 | ('0', 'print0', None, |
|
3900 | ('0', 'print0', None, | |
3896 | _('end filenames with NUL, for use with xargs')), |
|
3901 | _('end filenames with NUL, for use with xargs')), | |
3897 | ('f', 'fullpath', None, |
|
3902 | ('f', 'fullpath', None, | |
3898 | _('print complete paths from the filesystem root')), |
|
3903 | _('print complete paths from the filesystem root')), | |
3899 | ] + walkopts, |
|
3904 | ] + walkopts, | |
3900 | _('[OPTION]... [PATTERN]...')), |
|
3905 | _('[OPTION]... [PATTERN]...')), | |
3901 | "^log|history": |
|
3906 | "^log|history": | |
3902 | (log, |
|
3907 | (log, | |
3903 | [('f', 'follow', None, |
|
3908 | [('f', 'follow', None, | |
3904 | _('follow changeset history,' |
|
3909 | _('follow changeset history,' | |
3905 | ' or file history across copies and renames')), |
|
3910 | ' or file history across copies and renames')), | |
3906 | ('', 'follow-first', None, |
|
3911 | ('', 'follow-first', None, | |
3907 | _('only follow the first parent of merge changesets')), |
|
3912 | _('only follow the first parent of merge changesets')), | |
3908 | ('d', 'date', '', _('show revisions matching date spec')), |
|
3913 | ('d', 'date', '', _('show revisions matching date spec')), | |
3909 | ('C', 'copies', None, _('show copied files')), |
|
3914 | ('C', 'copies', None, _('show copied files')), | |
3910 | ('k', 'keyword', [], _('do case-insensitive search for a keyword')), |
|
3915 | ('k', 'keyword', [], _('do case-insensitive search for a keyword')), | |
3911 | ('r', 'rev', [], _('show the specified revision or range')), |
|
3916 | ('r', 'rev', [], _('show the specified revision or range')), | |
3912 | ('', 'removed', None, _('include revisions where files were removed')), |
|
3917 | ('', 'removed', None, _('include revisions where files were removed')), | |
3913 | ('m', 'only-merges', None, _('show only merges')), |
|
3918 | ('m', 'only-merges', None, _('show only merges')), | |
3914 | ('u', 'user', [], _('revisions committed by user')), |
|
3919 | ('u', 'user', [], _('revisions committed by user')), | |
3915 | ('', 'only-branch', [], |
|
3920 | ('', 'only-branch', [], | |
3916 | _('show only changesets within the given named branch (DEPRECATED)')), |
|
3921 | _('show only changesets within the given named branch (DEPRECATED)')), | |
3917 | ('b', 'branch', [], |
|
3922 | ('b', 'branch', [], | |
3918 | _('show changesets within the given named branch')), |
|
3923 | _('show changesets within the given named branch')), | |
3919 | ('P', 'prune', [], |
|
3924 | ('P', 'prune', [], | |
3920 | _('do not display revision or any of its ancestors')), |
|
3925 | _('do not display revision or any of its ancestors')), | |
3921 | ] + logopts + walkopts, |
|
3926 | ] + logopts + walkopts, | |
3922 | _('[OPTION]... [FILE]')), |
|
3927 | _('[OPTION]... [FILE]')), | |
3923 | "manifest": |
|
3928 | "manifest": | |
3924 | (manifest, |
|
3929 | (manifest, | |
3925 | [('r', 'rev', '', _('revision to display'))], |
|
3930 | [('r', 'rev', '', _('revision to display'))], | |
3926 | _('[-r REV]')), |
|
3931 | _('[-r REV]')), | |
3927 | "^merge": |
|
3932 | "^merge": | |
3928 | (merge, |
|
3933 | (merge, | |
3929 | [('f', 'force', None, _('force a merge with outstanding changes')), |
|
3934 | [('f', 'force', None, _('force a merge with outstanding changes')), | |
3930 | ('r', 'rev', '', _('revision to merge')), |
|
3935 | ('r', 'rev', '', _('revision to merge')), | |
3931 | ('P', 'preview', None, |
|
3936 | ('P', 'preview', None, | |
3932 | _('review revisions to merge (no merge is performed)'))], |
|
3937 | _('review revisions to merge (no merge is performed)'))], | |
3933 | _('[-P] [-f] [[-r] REV]')), |
|
3938 | _('[-P] [-f] [[-r] REV]')), | |
3934 | "outgoing|out": |
|
3939 | "outgoing|out": | |
3935 | (outgoing, |
|
3940 | (outgoing, | |
3936 | [('f', 'force', None, |
|
3941 | [('f', 'force', None, | |
3937 | _('run even when the destination is unrelated')), |
|
3942 | _('run even when the destination is unrelated')), | |
3938 | ('r', 'rev', [], |
|
3943 | ('r', 'rev', [], | |
3939 | _('a changeset intended to be included in the destination')), |
|
3944 | _('a changeset intended to be included in the destination')), | |
3940 | ('n', 'newest-first', None, _('show newest record first')), |
|
3945 | ('n', 'newest-first', None, _('show newest record first')), | |
3941 | ('b', 'branch', [], |
|
3946 | ('b', 'branch', [], | |
3942 | _('a specific branch you would like to push')), |
|
3947 | _('a specific branch you would like to push')), | |
3943 | ] + logopts + remoteopts, |
|
3948 | ] + logopts + remoteopts, | |
3944 | _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')), |
|
3949 | _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')), | |
3945 | "parents": |
|
3950 | "parents": | |
3946 | (parents, |
|
3951 | (parents, | |
3947 | [('r', 'rev', '', _('show parents of the specified revision')), |
|
3952 | [('r', 'rev', '', _('show parents of the specified revision')), | |
3948 | ] + templateopts, |
|
3953 | ] + templateopts, | |
3949 | _('[-r REV] [FILE]')), |
|
3954 | _('[-r REV] [FILE]')), | |
3950 | "paths": (paths, [], _('[NAME]')), |
|
3955 | "paths": (paths, [], _('[NAME]')), | |
3951 | "^pull": |
|
3956 | "^pull": | |
3952 | (pull, |
|
3957 | (pull, | |
3953 | [('u', 'update', None, |
|
3958 | [('u', 'update', None, | |
3954 | _('update to new branch head if changesets were pulled')), |
|
3959 | _('update to new branch head if changesets were pulled')), | |
3955 | ('f', 'force', None, |
|
3960 | ('f', 'force', None, | |
3956 | _('run even when remote repository is unrelated')), |
|
3961 | _('run even when remote repository is unrelated')), | |
3957 | ('r', 'rev', [], |
|
3962 | ('r', 'rev', [], | |
3958 | _('a remote changeset intended to be added')), |
|
3963 | _('a remote changeset intended to be added')), | |
3959 | ('b', 'branch', [], |
|
3964 | ('b', 'branch', [], | |
3960 | _('a specific branch you would like to pull')), |
|
3965 | _('a specific branch you would like to pull')), | |
3961 | ] + remoteopts, |
|
3966 | ] + remoteopts, | |
3962 | _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')), |
|
3967 | _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')), | |
3963 | "^push": |
|
3968 | "^push": | |
3964 | (push, |
|
3969 | (push, | |
3965 | [('f', 'force', None, _('force push')), |
|
3970 | [('f', 'force', None, _('force push')), | |
3966 | ('r', 'rev', [], |
|
3971 | ('r', 'rev', [], | |
3967 | _('a changeset intended to be included in the destination')), |
|
3972 | _('a changeset intended to be included in the destination')), | |
3968 | ('b', 'branch', [], |
|
3973 | ('b', 'branch', [], | |
3969 | _('a specific branch you would like to push')), |
|
3974 | _('a specific branch you would like to push')), | |
3970 | ('', 'new-branch', False, _('allow pushing a new branch')), |
|
3975 | ('', 'new-branch', False, _('allow pushing a new branch')), | |
3971 | ] + remoteopts, |
|
3976 | ] + remoteopts, | |
3972 | _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), |
|
3977 | _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), | |
3973 | "recover": (recover, []), |
|
3978 | "recover": (recover, []), | |
3974 | "^remove|rm": |
|
3979 | "^remove|rm": | |
3975 | (remove, |
|
3980 | (remove, | |
3976 | [('A', 'after', None, _('record delete for missing files')), |
|
3981 | [('A', 'after', None, _('record delete for missing files')), | |
3977 | ('f', 'force', None, |
|
3982 | ('f', 'force', None, | |
3978 | _('remove (and delete) file even if added or modified')), |
|
3983 | _('remove (and delete) file even if added or modified')), | |
3979 | ] + walkopts, |
|
3984 | ] + walkopts, | |
3980 | _('[OPTION]... FILE...')), |
|
3985 | _('[OPTION]... FILE...')), | |
3981 | "rename|mv": |
|
3986 | "rename|mv": | |
3982 | (rename, |
|
3987 | (rename, | |
3983 | [('A', 'after', None, _('record a rename that has already occurred')), |
|
3988 | [('A', 'after', None, _('record a rename that has already occurred')), | |
3984 | ('f', 'force', None, |
|
3989 | ('f', 'force', None, | |
3985 | _('forcibly copy over an existing managed file')), |
|
3990 | _('forcibly copy over an existing managed file')), | |
3986 | ] + walkopts + dryrunopts, |
|
3991 | ] + walkopts + dryrunopts, | |
3987 | _('[OPTION]... SOURCE... DEST')), |
|
3992 | _('[OPTION]... SOURCE... DEST')), | |
3988 | "resolve": |
|
3993 | "resolve": | |
3989 | (resolve, |
|
3994 | (resolve, | |
3990 | [('a', 'all', None, _('select all unresolved files')), |
|
3995 | [('a', 'all', None, _('select all unresolved files')), | |
3991 | ('l', 'list', None, _('list state of files needing merge')), |
|
3996 | ('l', 'list', None, _('list state of files needing merge')), | |
3992 | ('m', 'mark', None, _('mark files as resolved')), |
|
3997 | ('m', 'mark', None, _('mark files as resolved')), | |
3993 | ('u', 'unmark', None, _('unmark files as resolved')), |
|
3998 | ('u', 'unmark', None, _('unmark files as resolved')), | |
3994 | ('n', 'no-status', None, _('hide status prefix'))] |
|
3999 | ('n', 'no-status', None, _('hide status prefix'))] | |
3995 | + walkopts, |
|
4000 | + walkopts, | |
3996 | _('[OPTION]... [FILE]...')), |
|
4001 | _('[OPTION]... [FILE]...')), | |
3997 | "revert": |
|
4002 | "revert": | |
3998 | (revert, |
|
4003 | (revert, | |
3999 | [('a', 'all', None, _('revert all changes when no arguments given')), |
|
4004 | [('a', 'all', None, _('revert all changes when no arguments given')), | |
4000 | ('d', 'date', '', _('tipmost revision matching date')), |
|
4005 | ('d', 'date', '', _('tipmost revision matching date')), | |
4001 | ('r', 'rev', '', _('revert to the specified revision')), |
|
4006 | ('r', 'rev', '', _('revert to the specified revision')), | |
4002 | ('', 'no-backup', None, _('do not save backup copies of files')), |
|
4007 | ('', 'no-backup', None, _('do not save backup copies of files')), | |
4003 | ] + walkopts + dryrunopts, |
|
4008 | ] + walkopts + dryrunopts, | |
4004 | _('[OPTION]... [-r REV] [NAME]...')), |
|
4009 | _('[OPTION]... [-r REV] [NAME]...')), | |
4005 | "rollback": (rollback, dryrunopts), |
|
4010 | "rollback": (rollback, dryrunopts), | |
4006 | "root": (root, []), |
|
4011 | "root": (root, []), | |
4007 | "^serve": |
|
4012 | "^serve": | |
4008 | (serve, |
|
4013 | (serve, | |
4009 | [('A', 'accesslog', '', _('name of access log file to write to')), |
|
4014 | [('A', 'accesslog', '', _('name of access log file to write to')), | |
4010 | ('d', 'daemon', None, _('run server in background')), |
|
4015 | ('d', 'daemon', None, _('run server in background')), | |
4011 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), |
|
4016 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), | |
4012 | ('E', 'errorlog', '', _('name of error log file to write to')), |
|
4017 | ('E', 'errorlog', '', _('name of error log file to write to')), | |
 4013 | # use string type so we can check whether something was passed |
|
 4018 | # use string type so we can check whether something was passed | |
4014 | ('p', 'port', '', _('port to listen on (default: 8000)')), |
|
4019 | ('p', 'port', '', _('port to listen on (default: 8000)')), | |
4015 | ('a', 'address', '', |
|
4020 | ('a', 'address', '', | |
4016 | _('address to listen on (default: all interfaces)')), |
|
4021 | _('address to listen on (default: all interfaces)')), | |
4017 | ('', 'prefix', '', |
|
4022 | ('', 'prefix', '', | |
4018 | _('prefix path to serve from (default: server root)')), |
|
4023 | _('prefix path to serve from (default: server root)')), | |
4019 | ('n', 'name', '', |
|
4024 | ('n', 'name', '', | |
4020 | _('name to show in web pages (default: working directory)')), |
|
4025 | _('name to show in web pages (default: working directory)')), | |
4021 | ('', 'web-conf', '', _('name of the hgweb config file' |
|
4026 | ('', 'web-conf', '', _('name of the hgweb config file' | |
4022 | ' (serve more than one repository)')), |
|
4027 | ' (serve more than one repository)')), | |
4023 | ('', 'webdir-conf', '', _('name of the hgweb config file' |
|
4028 | ('', 'webdir-conf', '', _('name of the hgweb config file' | |
4024 | ' (DEPRECATED)')), |
|
4029 | ' (DEPRECATED)')), | |
4025 | ('', 'pid-file', '', _('name of file to write process ID to')), |
|
4030 | ('', 'pid-file', '', _('name of file to write process ID to')), | |
4026 | ('', 'stdio', None, _('for remote clients')), |
|
4031 | ('', 'stdio', None, _('for remote clients')), | |
4027 | ('t', 'templates', '', _('web templates to use')), |
|
4032 | ('t', 'templates', '', _('web templates to use')), | |
4028 | ('', 'style', '', _('template style to use')), |
|
4033 | ('', 'style', '', _('template style to use')), | |
4029 | ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), |
|
4034 | ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), | |
4030 | ('', 'certificate', '', _('SSL certificate file'))], |
|
4035 | ('', 'certificate', '', _('SSL certificate file'))], | |
4031 | _('[OPTION]...')), |
|
4036 | _('[OPTION]...')), | |
4032 | "showconfig|debugconfig": |
|
4037 | "showconfig|debugconfig": | |
4033 | (showconfig, |
|
4038 | (showconfig, | |
4034 | [('u', 'untrusted', None, _('show untrusted configuration options'))], |
|
4039 | [('u', 'untrusted', None, _('show untrusted configuration options'))], | |
4035 | _('[-u] [NAME]...')), |
|
4040 | _('[-u] [NAME]...')), | |
4036 | "^summary|sum": |
|
4041 | "^summary|sum": | |
4037 | (summary, |
|
4042 | (summary, | |
4038 | [('', 'remote', None, _('check for push and pull'))], '[--remote]'), |
|
4043 | [('', 'remote', None, _('check for push and pull'))], '[--remote]'), | |
4039 | "^status|st": |
|
4044 | "^status|st": | |
4040 | (status, |
|
4045 | (status, | |
4041 | [('A', 'all', None, _('show status of all files')), |
|
4046 | [('A', 'all', None, _('show status of all files')), | |
4042 | ('m', 'modified', None, _('show only modified files')), |
|
4047 | ('m', 'modified', None, _('show only modified files')), | |
4043 | ('a', 'added', None, _('show only added files')), |
|
4048 | ('a', 'added', None, _('show only added files')), | |
4044 | ('r', 'removed', None, _('show only removed files')), |
|
4049 | ('r', 'removed', None, _('show only removed files')), | |
4045 | ('d', 'deleted', None, _('show only deleted (but tracked) files')), |
|
4050 | ('d', 'deleted', None, _('show only deleted (but tracked) files')), | |
4046 | ('c', 'clean', None, _('show only files without changes')), |
|
4051 | ('c', 'clean', None, _('show only files without changes')), | |
4047 | ('u', 'unknown', None, _('show only unknown (not tracked) files')), |
|
4052 | ('u', 'unknown', None, _('show only unknown (not tracked) files')), | |
4048 | ('i', 'ignored', None, _('show only ignored files')), |
|
4053 | ('i', 'ignored', None, _('show only ignored files')), | |
4049 | ('n', 'no-status', None, _('hide status prefix')), |
|
4054 | ('n', 'no-status', None, _('hide status prefix')), | |
4050 | ('C', 'copies', None, _('show source of copied files')), |
|
4055 | ('C', 'copies', None, _('show source of copied files')), | |
4051 | ('0', 'print0', None, |
|
4056 | ('0', 'print0', None, | |
4052 | _('end filenames with NUL, for use with xargs')), |
|
4057 | _('end filenames with NUL, for use with xargs')), | |
4053 | ('', 'rev', [], _('show difference from revision')), |
|
4058 | ('', 'rev', [], _('show difference from revision')), | |
4054 | ('', 'change', '', _('list the changed files of a revision')), |
|
4059 | ('', 'change', '', _('list the changed files of a revision')), | |
4055 | ] + walkopts, |
|
4060 | ] + walkopts, | |
4056 | _('[OPTION]... [FILE]...')), |
|
4061 | _('[OPTION]... [FILE]...')), | |
4057 | "tag": |
|
4062 | "tag": | |
4058 | (tag, |
|
4063 | (tag, | |
4059 | [('f', 'force', None, _('replace existing tag')), |
|
4064 | [('f', 'force', None, _('replace existing tag')), | |
4060 | ('l', 'local', None, _('make the tag local')), |
|
4065 | ('l', 'local', None, _('make the tag local')), | |
4061 | ('r', 'rev', '', _('revision to tag')), |
|
4066 | ('r', 'rev', '', _('revision to tag')), | |
4062 | ('', 'remove', None, _('remove a tag')), |
|
4067 | ('', 'remove', None, _('remove a tag')), | |
4063 | # -l/--local is already there, commitopts cannot be used |
|
4068 | # -l/--local is already there, commitopts cannot be used | |
4064 | ('e', 'edit', None, _('edit commit message')), |
|
4069 | ('e', 'edit', None, _('edit commit message')), | |
4065 | ('m', 'message', '', _('use <text> as commit message')), |
|
4070 | ('m', 'message', '', _('use <text> as commit message')), | |
4066 | ] + commitopts2, |
|
4071 | ] + commitopts2, | |
4067 | _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')), |
|
4072 | _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')), | |
4068 | "tags": (tags, [], ''), |
|
4073 | "tags": (tags, [], ''), | |
4069 | "tip": |
|
4074 | "tip": | |
4070 | (tip, |
|
4075 | (tip, | |
4071 | [('p', 'patch', None, _('show patch')), |
|
4076 | [('p', 'patch', None, _('show patch')), | |
4072 | ('g', 'git', None, _('use git extended diff format')), |
|
4077 | ('g', 'git', None, _('use git extended diff format')), | |
4073 | ] + templateopts, |
|
4078 | ] + templateopts, | |
4074 | _('[-p] [-g]')), |
|
4079 | _('[-p] [-g]')), | |
4075 | "unbundle": |
|
4080 | "unbundle": | |
4076 | (unbundle, |
|
4081 | (unbundle, | |
4077 | [('u', 'update', None, |
|
4082 | [('u', 'update', None, | |
4078 | _('update to new branch head if changesets were unbundled'))], |
|
4083 | _('update to new branch head if changesets were unbundled'))], | |
4079 | _('[-u] FILE...')), |
|
4084 | _('[-u] FILE...')), | |
4080 | "^update|up|checkout|co": |
|
4085 | "^update|up|checkout|co": | |
4081 | (update, |
|
4086 | (update, | |
4082 | [('C', 'clean', None, _('discard uncommitted changes (no backup)')), |
|
4087 | [('C', 'clean', None, _('discard uncommitted changes (no backup)')), | |
4083 | ('c', 'check', None, _('check for uncommitted changes')), |
|
4088 | ('c', 'check', None, _('check for uncommitted changes')), | |
4084 | ('d', 'date', '', _('tipmost revision matching date')), |
|
4089 | ('d', 'date', '', _('tipmost revision matching date')), | |
4085 | ('r', 'rev', '', _('revision'))], |
|
4090 | ('r', 'rev', '', _('revision'))], | |
4086 | _('[-c] [-C] [-d DATE] [[-r] REV]')), |
|
4091 | _('[-c] [-C] [-d DATE] [[-r] REV]')), | |
4087 | "verify": (verify, []), |
|
4092 | "verify": (verify, []), | |
4088 | "version": (version_, []), |
|
4093 | "version": (version_, []), | |
4089 | } |
|
4094 | } | |
4090 |
|
4095 | |||
4091 | norepo = ("clone init version help debugcommands debugcomplete debugdata" |
|
4096 | norepo = ("clone init version help debugcommands debugcomplete debugdata" | |
4092 | " debugindex debugindexdot debugdate debuginstall debugfsinfo") |
|
4097 | " debugindex debugindexdot debugdate debuginstall debugfsinfo") | |
4093 | optionalrepo = ("identify paths serve showconfig debugancestor") |
|
4098 | optionalrepo = ("identify paths serve showconfig debugancestor") |
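Each entry in ``table`` above maps a command name to a tuple of callback, option list and synopsis string; aliases are separated by ``|`` in the key, and a leading ``^`` marks commands shown in the short help listing. Every option is a ``(shortflag, longname, default, helptext)`` tuple, and the shared option lists (``walkopts``, ``remoteopts`` and friends) are simply concatenated in. A minimal sketch of what one more entry could look like (the ``hello`` command, its callback and its option are hypothetical, not part of Mercurial)::

    def hello(ui, repo, **opts):
        # a command callback receives the ui and the repository, plus the
        # parsed options as keyword arguments
        greeting = opts.get('greeting') or 'hello'
        ui.write("%s from %s\n" % (greeting, repo.root))

    table["hello"] = (hello,
        [('g', 'greeting', '', _('text to greet with'))] + walkopts,
        _('[-g TEXT]'))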
@@ -1,77 +1,77 b'' | |||||
1 | # encoding.py - character transcoding support for Mercurial |
|
1 | # encoding.py - character transcoding support for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import error |
|
8 | import error | |
9 | import sys, unicodedata, locale, os |
|
9 | import sys, unicodedata, locale, os | |
10 |
|
10 | |||
11 | _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'} |
|
11 | _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'} | |
12 |
|
12 | |||
13 | try: |
|
13 | try: | |
14 | encoding = os.environ.get("HGENCODING") |
|
14 | encoding = os.environ.get("HGENCODING") | |
15 | if sys.platform == 'darwin' and not encoding: |
|
15 | if sys.platform == 'darwin' and not encoding: | |
16 | # On darwin, getpreferredencoding ignores the locale environment and |
|
16 | # On darwin, getpreferredencoding ignores the locale environment and | |
17 | # always returns mac-roman. We override this if the environment is |
|
17 | # always returns mac-roman. We override this if the environment is | |
18 | # not C (has been customized by the user). |
|
18 | # not C (has been customized by the user). | |
19 | lc = locale.setlocale(locale.LC_CTYPE, '') |
|
19 | lc = locale.setlocale(locale.LC_CTYPE, '') | |
20 | if lc == 'UTF-8': |
|
20 | if lc == 'UTF-8': | |
21 | locale.setlocale(locale.LC_CTYPE, 'en_US.UTF-8') |
|
21 | locale.setlocale(locale.LC_CTYPE, 'en_US.UTF-8') | |
22 | encoding = locale.getlocale()[1] |
|
22 | encoding = locale.getlocale()[1] | |
23 | if not encoding: |
|
23 | if not encoding: | |
24 | encoding = locale.getpreferredencoding() or 'ascii' |
|
24 | encoding = locale.getpreferredencoding() or 'ascii' | |
25 | encoding = _encodingfixup.get(encoding, encoding) |
|
25 | encoding = _encodingfixup.get(encoding, encoding) | |
26 | except locale.Error: |
|
26 | except locale.Error: | |
27 | encoding = 'ascii' |
|
27 | encoding = 'ascii' | |
28 | encodingmode = os.environ.get("HGENCODINGMODE", "strict") |
|
28 | encodingmode = os.environ.get("HGENCODINGMODE", "strict") | |
29 | fallbackencoding = 'ISO-8859-1' |
|
29 | fallbackencoding = 'ISO-8859-1' | |
30 |
|
30 | |||
31 | def tolocal(s): |
|
31 | def tolocal(s): | |
32 | """ |
|
32 | """ | |
33 | Convert a string from internal UTF-8 to local encoding |
|
33 | Convert a string from internal UTF-8 to local encoding | |
34 |
|
34 | |||
35 | All internal strings should be UTF-8 but some repos before the |
|
35 | All internal strings should be UTF-8 but some repos before the | |
36 | implementation of locale support may contain latin1 or possibly |
|
36 | implementation of locale support may contain latin1 or possibly | |
37 | other character sets. We attempt to decode everything strictly |
|
37 | other character sets. We attempt to decode everything strictly | |
38 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and |
|
38 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and | |
39 | replace unknown characters. |
|
39 | replace unknown characters. | |
40 | """ |
|
40 | """ | |
41 | for e in ('UTF-8', fallbackencoding): |
|
41 | for e in ('UTF-8', fallbackencoding): | |
42 | try: |
|
42 | try: | |
43 | u = s.decode(e) # attempt strict decoding |
|
43 | u = s.decode(e) # attempt strict decoding | |
44 | return u.encode(encoding, "replace") |
|
44 | return u.encode(encoding, "replace") | |
45 | except LookupError, k: |
|
45 | except LookupError, k: | |
46 | raise error.Abort("%s, please check your locale settings" % k) |
|
46 | raise error.Abort("%s, please check your locale settings" % k) | |
47 | except UnicodeDecodeError: |
|
47 | except UnicodeDecodeError: | |
48 | pass |
|
48 | pass | |
49 | u = s.decode("utf-8", "replace") # last ditch |
|
49 | u = s.decode("utf-8", "replace") # last ditch | |
50 | return u.encode(encoding, "replace") |
|
50 | return u.encode(encoding, "replace") | |
51 |
|
51 | |||
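A concrete walk through the fallback chain in ``tolocal`` (hypothetical values, assuming the local encoding is UTF-8)::

    s = 'caf\xe9'   # latin1 bytes from an old repo, not valid UTF-8
    # 1. s.decode('UTF-8') raises UnicodeDecodeError
    # 2. s.decode('ISO-8859-1') succeeds and yields u'caf\xe9'
    # 3. the result is re-encoded for display:
    #    u'caf\xe9'.encode('UTF-8') == 'caf\xc3\xa9'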
52 | def fromlocal(s): |
|
52 | def fromlocal(s): | |
53 | """ |
|
53 | """ | |
54 | Convert a string from the local character encoding to UTF-8 |
|
54 | Convert a string from the local character encoding to UTF-8 | |
55 |
|
55 | |||
56 | We attempt to decode strings using the encoding mode set by |
|
56 | We attempt to decode strings using the encoding mode set by | |
57 | HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown |
|
57 | HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown | |
58 | characters will cause an error message. Other modes include |
|
58 | characters will cause an error message. Other modes include | |
59 | 'replace', which replaces unknown characters with a special |
|
59 | 'replace', which replaces unknown characters with a special | |
60 | Unicode character, and 'ignore', which drops the character. |
|
60 | Unicode character, and 'ignore', which drops the character. | |
61 | """ |
|
61 | """ | |
62 | try: |
|
62 | try: | |
63 | return s.decode(encoding, encodingmode).encode("utf-8") |
|
63 | return s.decode(encoding, encodingmode).encode("utf-8") | |
64 | except UnicodeDecodeError, inst: |
|
64 | except UnicodeDecodeError, inst: | |
65 | sub = s[max(0, inst.start - 10):inst.start + 10] |
|
65 | sub = s[max(0, inst.start - 10):inst.start + 10] | |
66 | raise error.Abort("decoding near '%s': %s!" % (sub, inst)) |
|
66 | raise error.Abort("decoding near '%s': %s!" % (sub, inst)) | |
67 | except LookupError, k: |
|
67 | except LookupError, k: | |
68 | raise error.Abort("%s, please check your locale settings" % k) |
|
68 | raise error.Abort("%s, please check your locale settings" % k) | |
69 |
|
69 | |||
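The modes described in the docstring are plain codec error handlers; a standalone sketch of how they differ on a single non-ASCII byte (not Mercurial code)::

    '\xe9'.decode('ascii', 'strict')     # raises UnicodeDecodeError
    '\xe9'.decode('ascii', 'replace')    # u'\ufffd', the Unicode replacement character
    '\xe9'.decode('ascii', 'ignore')     # u'', the offending byte is dropped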
70 | def colwidth(s): |
|
70 | def colwidth(s): | |
71 | "Find the column width of a UTF-8 string for display" |
|
71 | "Find the column width of a UTF-8 string for display" | |
72 | d = s.decode(encoding, 'replace') |
|
72 | d = s.decode(encoding, 'replace') | |
73 | if hasattr(unicodedata, 'east_asian_width'): |
|
73 | if hasattr(unicodedata, 'east_asian_width'): | |
74 | w = unicodedata.east_asian_width |
|
74 | w = unicodedata.east_asian_width | |
75 | return sum([w(c) in 'WF' and 2 or 1 for c in d]) |
|
75 | return sum([w(c) in 'WFA' and 2 or 1 for c in d]) | |
76 | return len(d) |
|
76 | return len(d) | |
77 |
|
77 |
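The ``colwidth`` change above ('WF' to 'WFA') starts counting East Asian Ambiguous characters as two columns, alongside Wide and Fullwidth ones. A standalone sketch of the same width rule (not Mercurial code)::

    import unicodedata

    def display_width(u):
        # Wide ('W'), Fullwidth ('F') and Ambiguous ('A') characters take
        # two terminal columns, everything else takes one
        w = unicodedata.east_asian_width
        return sum(w(c) in 'WFA' and 2 or 1 for c in u)

    display_width(u'abc')       # 3
    display_width(u'\u30c6')    # 2, katakana TE is Wide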
@@ -1,385 +1,385 b'' | |||||
1 | # minirst.py - minimal reStructuredText parser |
|
1 | # minirst.py - minimal reStructuredText parser | |
2 | # |
|
2 | # | |
3 | # Copyright 2009, 2010 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2009, 2010 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """simplified reStructuredText parser. |
|
8 | """simplified reStructuredText parser. | |
9 |
|
9 | |||
10 | This parser knows just enough about reStructuredText to parse the |
|
10 | This parser knows just enough about reStructuredText to parse the | |
11 | Mercurial docstrings. |
|
11 | Mercurial docstrings. | |
12 |
|
12 | |||
13 | It cheats in a major way: nested blocks are not really nested. They |
|
13 | It cheats in a major way: nested blocks are not really nested. They | |
14 | are just indented blocks that look like they are nested. This relies |
|
14 | are just indented blocks that look like they are nested. This relies | |
15 | on the user to keep the right indentation for the blocks. |
|
15 | on the user to keep the right indentation for the blocks. | |
16 |
|
16 | |||
17 | It only supports a small subset of reStructuredText: |
|
17 | It only supports a small subset of reStructuredText: | |
18 |
|
18 | |||
19 | - sections |
|
19 | - sections | |
20 |
|
20 | |||
21 | - paragraphs |
|
21 | - paragraphs | |
22 |
|
22 | |||
23 | - literal blocks |
|
23 | - literal blocks | |
24 |
|
24 | |||
25 | - definition lists |
|
25 | - definition lists | |
26 |
|
26 | |||
27 | - bullet lists (items must start with '-') |
|
27 | - bullet lists (items must start with '-') | |
28 |
|
28 | |||
29 | - enumerated lists (no autonumbering) |
|
29 | - enumerated lists (no autonumbering) | |
30 |
|
30 | |||
31 | - field lists (colons cannot be escaped) |
|
31 | - field lists (colons cannot be escaped) | |
32 |
|
32 | |||
33 | - option lists (supports only long options without arguments) |
|
33 | - option lists (supports only long options without arguments) | |
34 |
|
34 | |||
 35 | - inline literals (no other inline markup is recognized) |
|
 35 | - inline literals (no other inline markup is recognized) | |
36 | """ |
|
36 | """ | |
37 |
|
37 | |||
 38 | import re, sys |
|
 38 | import re, sys | |
 39 |
|
 39 | import util | ||
40 |
|
40 | |||
41 | def findblocks(text): |
|
41 | def findblocks(text): | |
42 | """Find continuous blocks of lines in text. |
|
42 | """Find continuous blocks of lines in text. | |
43 |
|
43 | |||
44 | Returns a list of dictionaries representing the blocks. Each block |
|
44 | Returns a list of dictionaries representing the blocks. Each block | |
45 | has an 'indent' field and a 'lines' field. |
|
45 | has an 'indent' field and a 'lines' field. | |
46 | """ |
|
46 | """ | |
47 | blocks = [[]] |
|
47 | blocks = [[]] | |
48 | lines = text.splitlines() |
|
48 | lines = text.splitlines() | |
49 | for line in lines: |
|
49 | for line in lines: | |
50 | if line.strip(): |
|
50 | if line.strip(): | |
51 | blocks[-1].append(line) |
|
51 | blocks[-1].append(line) | |
52 | elif blocks[-1]: |
|
52 | elif blocks[-1]: | |
53 | blocks.append([]) |
|
53 | blocks.append([]) | |
54 | if not blocks[-1]: |
|
54 | if not blocks[-1]: | |
55 | del blocks[-1] |
|
55 | del blocks[-1] | |
56 |
|
56 | |||
57 | for i, block in enumerate(blocks): |
|
57 | for i, block in enumerate(blocks): | |
58 | indent = min((len(l) - len(l.lstrip())) for l in block) |
|
58 | indent = min((len(l) - len(l.lstrip())) for l in block) | |
59 | blocks[i] = dict(indent=indent, lines=[l[indent:] for l in block]) |
|
59 | blocks[i] = dict(indent=indent, lines=[l[indent:] for l in block]) | |
60 | return blocks |
|
60 | return blocks | |
61 |
|
61 | |||
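A rough illustration of the dictionaries ``findblocks`` produces (hand-written, not captured from a test run)::

    text = "some paragraph\n\n  indented block\n  second line\n"
    findblocks(text)
    # -> [{'indent': 0, 'lines': ['some paragraph']},
    #     {'indent': 2, 'lines': ['indented block', 'second line']}]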
62 |
|
62 | |||
63 | def findliteralblocks(blocks): |
|
63 | def findliteralblocks(blocks): | |
64 | """Finds literal blocks and adds a 'type' field to the blocks. |
|
64 | """Finds literal blocks and adds a 'type' field to the blocks. | |
65 |
|
65 | |||
 66 | Literal blocks are given the type 'literal'; all other blocks are |
|
 66 | Literal blocks are given the type 'literal'; all other blocks are | |
 67 | given the type 'paragraph'. |
|
 67 | given the type 'paragraph'. | |
68 | """ |
|
68 | """ | |
69 | i = 0 |
|
69 | i = 0 | |
70 | while i < len(blocks): |
|
70 | while i < len(blocks): | |
71 | # Searching for a block that looks like this: |
|
71 | # Searching for a block that looks like this: | |
72 | # |
|
72 | # | |
73 | # +------------------------------+ |
|
73 | # +------------------------------+ | |
74 | # | paragraph | |
|
74 | # | paragraph | | |
75 | # | (ends with "::") | |
|
75 | # | (ends with "::") | | |
76 | # +------------------------------+ |
|
76 | # +------------------------------+ | |
77 | # +---------------------------+ |
|
77 | # +---------------------------+ | |
78 | # | indented literal block | |
|
78 | # | indented literal block | | |
79 | # +---------------------------+ |
|
79 | # +---------------------------+ | |
80 | blocks[i]['type'] = 'paragraph' |
|
80 | blocks[i]['type'] = 'paragraph' | |
81 | if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks): |
|
81 | if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks): | |
82 | indent = blocks[i]['indent'] |
|
82 | indent = blocks[i]['indent'] | |
83 | adjustment = blocks[i + 1]['indent'] - indent |
|
83 | adjustment = blocks[i + 1]['indent'] - indent | |
84 |
|
84 | |||
85 | if blocks[i]['lines'] == ['::']: |
|
85 | if blocks[i]['lines'] == ['::']: | |
86 | # Expanded form: remove block |
|
86 | # Expanded form: remove block | |
87 | del blocks[i] |
|
87 | del blocks[i] | |
88 | i -= 1 |
|
88 | i -= 1 | |
89 | elif blocks[i]['lines'][-1].endswith(' ::'): |
|
89 | elif blocks[i]['lines'][-1].endswith(' ::'): | |
90 | # Partially minimized form: remove space and both |
|
90 | # Partially minimized form: remove space and both | |
91 | # colons. |
|
91 | # colons. | |
92 | blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3] |
|
92 | blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3] | |
93 | else: |
|
93 | else: | |
94 | # Fully minimized form: remove just one colon. |
|
94 | # Fully minimized form: remove just one colon. | |
95 | blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-1] |
|
95 | blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-1] | |
96 |
|
96 | |||
97 | # List items are formatted with a hanging indent. We must |
|
97 | # List items are formatted with a hanging indent. We must | |
98 | # correct for this here while we still have the original |
|
98 | # correct for this here while we still have the original | |
99 | # information on the indentation of the subsequent literal |
|
99 | # information on the indentation of the subsequent literal | |
100 | # blocks available. |
|
100 | # blocks available. | |
101 | m = _bulletre.match(blocks[i]['lines'][0]) |
|
101 | m = _bulletre.match(blocks[i]['lines'][0]) | |
102 | if m: |
|
102 | if m: | |
103 | indent += m.end() |
|
103 | indent += m.end() | |
104 | adjustment -= m.end() |
|
104 | adjustment -= m.end() | |
105 |
|
105 | |||
106 | # Mark the following indented blocks. |
|
106 | # Mark the following indented blocks. | |
107 | while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent: |
|
107 | while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent: | |
108 | blocks[i + 1]['type'] = 'literal' |
|
108 | blocks[i + 1]['type'] = 'literal' | |
109 | blocks[i + 1]['indent'] -= adjustment |
|
109 | blocks[i + 1]['indent'] -= adjustment | |
110 | i += 1 |
|
110 | i += 1 | |
111 | i += 1 |
|
111 | i += 1 | |
112 | return blocks |
|
112 | return blocks | |
113 |
|
113 | |||
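As a sketch of the literal-block handling above (not part of the changeset; sample text assumed): a paragraph ending in '::' loses one colon, and the indented block after it is re-typed and de-indented:

    from mercurial import minirst
    blocks = minirst.findblocks("Example::\n\n    print 1")
    minirst.findliteralblocks(blocks)
    # -> [{'type': 'paragraph', 'indent': 0, 'lines': ['Example:']},
    #     {'type': 'literal',   'indent': 0, 'lines': ['print 1']}]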
114 | _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') |
|
114 | _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') | |
115 | _optionre = re.compile(r'^(--[a-z-]+)((?:[ =][a-zA-Z][\w-]*)? +)(.*)$') |
|
115 | _optionre = re.compile(r'^(--[a-z-]+)((?:[ =][a-zA-Z][\w-]*)? +)(.*)$') | |
116 | _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') |
|
116 | _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') | |
117 | _definitionre = re.compile(r'[^ ]') |
|
117 | _definitionre = re.compile(r'[^ ]') | |
118 |
|
118 | |||
119 | def splitparagraphs(blocks): |
|
119 | def splitparagraphs(blocks): | |
120 | """Split paragraphs into lists.""" |
|
120 | """Split paragraphs into lists.""" | |
121 | # Tuples with (list type, item regexp, single line items?). Order |
|
121 | # Tuples with (list type, item regexp, single line items?). Order | |
122 | # matters: definition lists have the least specific regexp and must |
|
122 | # matters: definition lists have the least specific regexp and must | |
123 | # come last. |
|
123 | # come last. | |
124 | listtypes = [('bullet', _bulletre, True), |
|
124 | listtypes = [('bullet', _bulletre, True), | |
125 | ('option', _optionre, True), |
|
125 | ('option', _optionre, True), | |
126 | ('field', _fieldre, True), |
|
126 | ('field', _fieldre, True), | |
127 | ('definition', _definitionre, False)] |
|
127 | ('definition', _definitionre, False)] | |
128 |
|
128 | |||
129 | def match(lines, i, itemre, singleline): |
|
129 | def match(lines, i, itemre, singleline): | |
130 | """Does itemre match an item at line i? |
|
130 | """Does itemre match an item at line i? | |
131 |
|
131 | |||
132 | A list item can be followed by an indented line or another list |
|
132 | A list item can be followed by an indented line or another list | |
133 | item (but only if singleline is True). |
|
133 | item (but only if singleline is True). | |
134 | """ |
|
134 | """ | |
135 | line1 = lines[i] |
|
135 | line1 = lines[i] | |
136 | line2 = i + 1 < len(lines) and lines[i + 1] or '' |
|
136 | line2 = i + 1 < len(lines) and lines[i + 1] or '' | |
137 | if not itemre.match(line1): |
|
137 | if not itemre.match(line1): | |
138 | return False |
|
138 | return False | |
139 | if singleline: |
|
139 | if singleline: | |
140 | return line2 == '' or line2[0] == ' ' or itemre.match(line2) |
|
140 | return line2 == '' or line2[0] == ' ' or itemre.match(line2) | |
141 | else: |
|
141 | else: | |
142 | return line2.startswith(' ') |
|
142 | return line2.startswith(' ') | |
143 |
|
143 | |||
144 | i = 0 |
|
144 | i = 0 | |
145 | while i < len(blocks): |
|
145 | while i < len(blocks): | |
146 | if blocks[i]['type'] == 'paragraph': |
|
146 | if blocks[i]['type'] == 'paragraph': | |
147 | lines = blocks[i]['lines'] |
|
147 | lines = blocks[i]['lines'] | |
148 | for type, itemre, singleline in listtypes: |
|
148 | for type, itemre, singleline in listtypes: | |
149 | if match(lines, 0, itemre, singleline): |
|
149 | if match(lines, 0, itemre, singleline): | |
150 | items = [] |
|
150 | items = [] | |
151 | for j, line in enumerate(lines): |
|
151 | for j, line in enumerate(lines): | |
152 | if match(lines, j, itemre, singleline): |
|
152 | if match(lines, j, itemre, singleline): | |
153 | items.append(dict(type=type, lines=[], |
|
153 | items.append(dict(type=type, lines=[], | |
154 | indent=blocks[i]['indent'])) |
|
154 | indent=blocks[i]['indent'])) | |
155 | items[-1]['lines'].append(line) |
|
155 | items[-1]['lines'].append(line) | |
156 | blocks[i:i + 1] = items |
|
156 | blocks[i:i + 1] = items | |
157 | break |
|
157 | break | |
158 | i += 1 |
|
158 | i += 1 | |
159 | return blocks |
|
159 | return blocks | |
160 |
|
160 | |||
161 |
|
161 | |||
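For illustration (hypothetical data, not from the changeset), a paragraph whose lines match the bullet regexp is split into one block per item:

    from mercurial import minirst
    blocks = [dict(type='paragraph', indent=0, lines=['- foo', '- bar'])]
    minirst.splitparagraphs(blocks)
    # -> [{'type': 'bullet', 'indent': 0, 'lines': ['- foo']},
    #     {'type': 'bullet', 'indent': 0, 'lines': ['- bar']}]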
162 | _fieldwidth = 12 |
|
162 | _fieldwidth = 12 | |
163 |
|
163 | |||
164 | def updatefieldlists(blocks): |
|
164 | def updatefieldlists(blocks): | |
165 | """Find key and maximum key width for field lists.""" |
|
165 | """Find key and maximum key width for field lists.""" | |
166 | i = 0 |
|
166 | i = 0 | |
167 | while i < len(blocks): |
|
167 | while i < len(blocks): | |
168 | if blocks[i]['type'] != 'field': |
|
168 | if blocks[i]['type'] != 'field': | |
169 | i += 1 |
|
169 | i += 1 | |
170 | continue |
|
170 | continue | |
171 |
|
171 | |||
172 | keywidth = 0 |
|
172 | keywidth = 0 | |
173 | j = i |
|
173 | j = i | |
174 | while j < len(blocks) and blocks[j]['type'] == 'field': |
|
174 | while j < len(blocks) and blocks[j]['type'] == 'field': | |
175 | m = _fieldre.match(blocks[j]['lines'][0]) |
|
175 | m = _fieldre.match(blocks[j]['lines'][0]) | |
176 | key, rest = m.groups() |
|
176 | key, rest = m.groups() | |
177 | blocks[j]['lines'][0] = rest |
|
177 | blocks[j]['lines'][0] = rest | |
178 | blocks[j]['key'] = key |
|
178 | blocks[j]['key'] = key | |
179 | keywidth = max(keywidth, len(key)) |
|
179 | keywidth = max(keywidth, len(key)) | |
180 | j += 1 |
|
180 | j += 1 | |
181 |
|
181 | |||
182 | for block in blocks[i:j]: |
|
182 | for block in blocks[i:j]: | |
183 | block['keywidth'] = keywidth |
|
183 | block['keywidth'] = keywidth | |
184 | i = j + 1 |
|
184 | i = j + 1 | |
185 |
|
185 | |||
186 | return blocks |
|
186 | return blocks | |
187 |
|
187 | |||
188 |
|
188 | |||
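A sketch of what updatefieldlists() records for a run of field blocks (hypothetical data, not part of the changeset):

    from mercurial import minirst
    blocks = [dict(type='field', indent=0, lines=[':verbose: show more output']),
              dict(type='field', indent=0, lines=[':quiet: show less output'])]
    minirst.updatefieldlists(blocks)
    # Both blocks now carry keywidth=7 (the longest key, 'verbose');
    # blocks[0] gets key='verbose' and lines=['show more output'],
    # blocks[1] gets key='quiet' and lines=['show less output'].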
189 | def prunecontainers(blocks, keep): |
|
189 | def prunecontainers(blocks, keep): | |
190 | """Prune unwanted containers. |
|
190 | """Prune unwanted containers. | |
191 |
|
191 | |||
192 | The blocks must have a 'type' field, i.e., they should have been |
|
192 | The blocks must have a 'type' field, i.e., they should have been | |
193 | run through findliteralblocks first. |
|
193 | run through findliteralblocks first. | |
194 | """ |
|
194 | """ | |
195 | pruned = [] |
|
195 | pruned = [] | |
196 | i = 0 |
|
196 | i = 0 | |
197 | while i + 1 < len(blocks): |
|
197 | while i + 1 < len(blocks): | |
198 | # Searching for a block that looks like this: |
|
198 | # Searching for a block that looks like this: | |
199 | # |
|
199 | # | |
200 | # +-------+---------------------------+ |
|
200 | # +-------+---------------------------+ | |
201 | # | ".. container::" type | |
|
201 | # | ".. container::" type | | |
202 | # +---+ | |
|
202 | # +---+ | | |
203 | # | blocks | |
|
203 | # | blocks | | |
204 | # +-------------------------------+ |
|
204 | # +-------------------------------+ | |
205 | if (blocks[i]['type'] == 'paragraph' and |
|
205 | if (blocks[i]['type'] == 'paragraph' and | |
206 | blocks[i]['lines'][0].startswith('.. container::')): |
|
206 | blocks[i]['lines'][0].startswith('.. container::')): | |
207 | indent = blocks[i]['indent'] |
|
207 | indent = blocks[i]['indent'] | |
208 | adjustment = blocks[i + 1]['indent'] - indent |
|
208 | adjustment = blocks[i + 1]['indent'] - indent | |
209 | containertype = blocks[i]['lines'][0][15:] |
|
209 | containertype = blocks[i]['lines'][0][15:] | |
210 | prune = containertype not in keep |
|
210 | prune = containertype not in keep | |
211 | if prune: |
|
211 | if prune: | |
212 | pruned.append(containertype) |
|
212 | pruned.append(containertype) | |
213 |
|
213 | |||
214 | # Always delete ".. container:: type" block |
|
214 | # Always delete ".. container:: type" block | |
215 | del blocks[i] |
|
215 | del blocks[i] | |
216 | j = i |
|
216 | j = i | |
217 | while j < len(blocks) and blocks[j]['indent'] > indent: |
|
217 | while j < len(blocks) and blocks[j]['indent'] > indent: | |
218 | if prune: |
|
218 | if prune: | |
219 | del blocks[j] |
|
219 | del blocks[j] | |
220 | i -= 1 # adjust outer index |
|
220 | i -= 1 # adjust outer index | |
221 | else: |
|
221 | else: | |
222 | blocks[j]['indent'] -= adjustment |
|
222 | blocks[j]['indent'] -= adjustment | |
223 | j += 1 |
|
223 | j += 1 | |
224 | i += 1 |
|
224 | i += 1 | |
225 | return blocks, pruned |
|
225 | return blocks, pruned | |
226 |
|
226 | |||
227 |
|
227 | |||
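Intended use, sketched with made-up input (not from the changeset): containers whose type is not in keep disappear along with their nested blocks, and the pruned types are reported back:

    from mercurial import minirst
    text = ".. container:: verbose\n\n   Only with --verbose.\n\nAlways shown."
    blocks = minirst.findliteralblocks(minirst.findblocks(text))
    blocks, pruned = minirst.prunecontainers(blocks, [])
    # pruned == ['verbose']; only the "Always shown." paragraph remains.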
228 | _sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""") |
|
228 | _sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""") | |
229 |
|
229 | |||
230 | def findsections(blocks): |
|
230 | def findsections(blocks): | |
231 | """Finds sections. |
|
231 | """Finds sections. | |
232 |
|
232 | |||
233 | The blocks must have a 'type' field, i.e., they should have been |
|
233 | The blocks must have a 'type' field, i.e., they should have been | |
234 | run through findliteralblocks first. |
|
234 | run through findliteralblocks first. | |
235 | """ |
|
235 | """ | |
236 | for block in blocks: |
|
236 | for block in blocks: | |
237 | # Searching for a block that looks like this: |
|
237 | # Searching for a block that looks like this: | |
238 | # |
|
238 | # | |
239 | # +------------------------------+ |
|
239 | # +------------------------------+ | |
240 | # | Section title | |
|
240 | # | Section title | | |
241 | # | ------------- | |
|
241 | # | ------------- | | |
242 | # +------------------------------+ |
|
242 | # +------------------------------+ | |
243 | if (block['type'] == 'paragraph' and |
|
243 | if (block['type'] == 'paragraph' and | |
244 | len(block['lines']) == 2 and |
|
244 | len(block['lines']) == 2 and | |
245 | len(block['lines'][0]) == len(block['lines'][1]) and |
|
245 | len(block['lines'][0]) == len(block['lines'][1]) and | |
246 | _sectionre.match(block['lines'][1])): |
|
246 | _sectionre.match(block['lines'][1])): | |
247 | block['underline'] = block['lines'][1][0] |
|
247 | block['underline'] = block['lines'][1][0] | |
248 | block['type'] = 'section' |
|
248 | block['type'] = 'section' | |
249 | del block['lines'][1] |
|
249 | del block['lines'][1] | |
250 | return blocks |
|
250 | return blocks | |
251 |
|
251 | |||
252 |
|
252 | |||
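For example (hypothetical data, not part of the changeset), a two-line paragraph whose second line is a matching underline becomes a section block:

    from mercurial import minirst
    blocks = [dict(type='paragraph', indent=0, lines=['Commands', '--------'])]
    minirst.findsections(blocks)
    # -> [{'type': 'section', 'indent': 0, 'lines': ['Commands'], 'underline': '-'}]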
253 | def inlineliterals(blocks): |
|
253 | def inlineliterals(blocks): | |
254 | for b in blocks: |
|
254 | for b in blocks: | |
255 | if b['type'] in ('paragraph', 'section'): |
|
255 | if b['type'] in ('paragraph', 'section'): | |
256 | b['lines'] = [l.replace('``', '"') for l in b['lines']] |
|
256 | b['lines'] = [l.replace('``', '"') for l in b['lines']] | |
257 | return blocks |
|
257 | return blocks | |
258 |
|
258 | |||
259 |
|
259 | |||
260 | def hgrole(blocks): |
|
260 | def hgrole(blocks): | |
261 | for b in blocks: |
|
261 | for b in blocks: | |
262 | if b['type'] in ('paragraph', 'section'): |
|
262 | if b['type'] in ('paragraph', 'section'): | |
263 | # Turn :hg:`command` into "hg command". This also works |
|
263 | # Turn :hg:`command` into "hg command". This also works | |
264 | # when there is a line break in the command and relies on |
|
264 | # when there is a line break in the command and relies on | |
265 | # the fact that we have no stray back-quotes in the input |
|
265 | # the fact that we have no stray back-quotes in the input | |
266 | # (run the blocks through inlineliterals first). |
|
266 | # (run the blocks through inlineliterals first). | |
267 | b['lines'] = [l.replace(':hg:`', '"hg ').replace('`', '"') |
|
267 | b['lines'] = [l.replace(':hg:`', '"hg ').replace('`', '"') | |
268 | for l in b['lines']] |
|
268 | for l in b['lines']] | |
269 | return blocks |
|
269 | return blocks | |
270 |
|
270 | |||
271 |
|
271 | |||
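The two inline substitutions combine like this (sketch with made-up text, not part of the changeset):

    from mercurial import minirst
    blocks = [dict(type='paragraph', indent=0,
                   lines=['See :hg:`help config` and the ``ui`` section.'])]
    minirst.hgrole(minirst.inlineliterals(blocks))
    # -> lines become ['See "hg help config" and the "ui" section.']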
272 | def addmargins(blocks): |
|
272 | def addmargins(blocks): | |
273 | """Adds empty blocks for vertical spacing. |
|
273 | """Adds empty blocks for vertical spacing. | |
274 |
|
274 | |||
275 | This groups bullets, options, and definitions together with no vertical |
|
275 | This groups bullets, options, and definitions together with no vertical | |
276 | space between them, and adds an empty block between all other blocks. |
|
276 | space between them, and adds an empty block between all other blocks. | |
277 | """ |
|
277 | """ | |
278 | i = 1 |
|
278 | i = 1 | |
279 | while i < len(blocks): |
|
279 | while i < len(blocks): | |
280 | if (blocks[i]['type'] == blocks[i - 1]['type'] and |
|
280 | if (blocks[i]['type'] == blocks[i - 1]['type'] and | |
281 | blocks[i]['type'] in ('bullet', 'option', 'field')): |
|
281 | blocks[i]['type'] in ('bullet', 'option', 'field')): | |
282 | i += 1 |
|
282 | i += 1 | |
283 | else: |
|
283 | else: | |
284 | blocks.insert(i, dict(lines=[''], indent=0, type='margin')) |
|
284 | blocks.insert(i, dict(lines=[''], indent=0, type='margin')) | |
285 | i += 2 |
|
285 | i += 2 | |
286 | return blocks |
|
286 | return blocks | |
287 |
|
287 | |||
288 |
|
288 | |||
289 | def formatblock(block, width): |
|
289 | def formatblock(block, width): | |
290 | """Format a block according to width.""" |
|
290 | """Format a block according to width.""" | |
291 | if width <= 0: |
|
291 | if width <= 0: | |
292 | width = 78 |
|
292 | width = 78 | |
293 | indent = ' ' * block['indent'] |
|
293 | indent = ' ' * block['indent'] | |
294 | if block['type'] == 'margin': |
|
294 | if block['type'] == 'margin': | |
295 | return '' |
|
295 | return '' | |
296 | if block['type'] == 'literal': |
|
296 | if block['type'] == 'literal': | |
297 | indent += ' ' |
|
297 | indent += ' ' | |
298 | return indent + ('\n' + indent).join(block['lines']) |
|
298 | return indent + ('\n' + indent).join(block['lines']) | |
299 | if block['type'] == 'section': |
|
299 | if block['type'] == 'section': | |
300 | underline = len(block['lines'][0]) * block['underline'] |
|
300 | underline = len(block['lines'][0]) * block['underline'] | |
301 | return "%s%s\n%s%s" % (indent, block['lines'][0], indent, underline) |
|
301 | return "%s%s\n%s%s" % (indent, block['lines'][0], indent, underline) | |
302 | if block['type'] == 'definition': |
|
302 | if block['type'] == 'definition': | |
303 | term = indent + block['lines'][0] |
|
303 | term = indent + block['lines'][0] | |
304 | hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip()) |
|
304 | hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip()) | |
305 | defindent = indent + hang * ' ' |
|
305 | defindent = indent + hang * ' ' | |
306 | text = ' '.join(map(str.strip, block['lines'][1:])) |
|
306 | text = ' '.join(map(str.strip, block['lines'][1:])) | |
307 | return |
|
307 | return '%s\n%s' % (term, util.wrap(text, width=width, | |
308 |
|
|
308 | initindent=defindent, | |
309 |
|
|
309 | hangindent=defindent)) | |
310 | subindent = indent |
|
310 | subindent = indent | |
311 | if block['type'] == 'bullet': |
|
311 | if block['type'] == 'bullet': | |
312 | if block['lines'][0].startswith('| '): |
|
312 | if block['lines'][0].startswith('| '): | |
313 | # Remove bullet for line blocks and add no extra |
|
313 | # Remove bullet for line blocks and add no extra | |
314 | # indention. |
|
314 | # indention. | |
315 | block['lines'][0] = block['lines'][0][2:] |
|
315 | block['lines'][0] = block['lines'][0][2:] | |
316 | else: |
|
316 | else: | |
317 | m = _bulletre.match(block['lines'][0]) |
|
317 | m = _bulletre.match(block['lines'][0]) | |
318 | subindent = indent + m.end() * ' ' |
|
318 | subindent = indent + m.end() * ' ' | |
319 | elif block['type'] == 'field': |
|
319 | elif block['type'] == 'field': | |
320 | keywidth = block['keywidth'] |
|
320 | keywidth = block['keywidth'] | |
321 | key = block['key'] |
|
321 | key = block['key'] | |
322 |
|
322 | |||
323 | subindent = indent + _fieldwidth * ' ' |
|
323 | subindent = indent + _fieldwidth * ' ' | |
324 | if len(key) + 2 > _fieldwidth: |
|
324 | if len(key) + 2 > _fieldwidth: | |
325 | # key too large, use full line width |
|
325 | # key too large, use full line width | |
326 | key = key.ljust(width) |
|
326 | key = key.ljust(width) | |
327 | elif keywidth + 2 < _fieldwidth: |
|
327 | elif keywidth + 2 < _fieldwidth: | |
328 | # all keys are small, add only two spaces |
|
328 | # all keys are small, add only two spaces | |
329 | key = key.ljust(keywidth + 2) |
|
329 | key = key.ljust(keywidth + 2) | |
330 | subindent = indent + (keywidth + 2) * ' ' |
|
330 | subindent = indent + (keywidth + 2) * ' ' | |
331 | else: |
|
331 | else: | |
332 | # mixed sizes, use fieldwidth for this one |
|
332 | # mixed sizes, use fieldwidth for this one | |
333 | key = key.ljust(_fieldwidth) |
|
333 | key = key.ljust(_fieldwidth) | |
334 | block['lines'][0] = key + block['lines'][0] |
|
334 | block['lines'][0] = key + block['lines'][0] | |
335 | elif block['type'] == 'option': |
|
335 | elif block['type'] == 'option': | |
336 | m = _optionre.match(block['lines'][0]) |
|
336 | m = _optionre.match(block['lines'][0]) | |
337 | option, arg, rest = m.groups() |
|
337 | option, arg, rest = m.groups() | |
338 | subindent = indent + (len(option) + len(arg)) * ' ' |
|
338 | subindent = indent + (len(option) + len(arg)) * ' ' | |
339 |
|
339 | |||
340 | text = ' '.join(map(str.strip, block['lines'])) |
|
340 | text = ' '.join(map(str.strip, block['lines'])) | |
341 | return |
|
341 | return util.wrap(text, width=width, | |
342 |
|
|
342 | initindent=indent, | |
343 |
|
|
343 | hangindent=subindent) | |
344 |
|
344 | |||
345 |
|
345 | |||
346 | def format(text, width, indent=0, keep=None): |
|
346 | def format(text, width, indent=0, keep=None): | |
347 | """Parse and format the text according to width.""" |
|
347 | """Parse and format the text according to width.""" | |
348 | blocks = findblocks(text) |
|
348 | blocks = findblocks(text) | |
349 | for b in blocks: |
|
349 | for b in blocks: | |
350 | b['indent'] += indent |
|
350 | b['indent'] += indent | |
351 | blocks = findliteralblocks(blocks) |
|
351 | blocks = findliteralblocks(blocks) | |
352 | blocks, pruned = prunecontainers(blocks, keep or []) |
|
352 | blocks, pruned = prunecontainers(blocks, keep or []) | |
353 | blocks = findsections(blocks) |
|
353 | blocks = findsections(blocks) | |
354 | blocks = inlineliterals(blocks) |
|
354 | blocks = inlineliterals(blocks) | |
355 | blocks = hgrole(blocks) |
|
355 | blocks = hgrole(blocks) | |
356 | blocks = splitparagraphs(blocks) |
|
356 | blocks = splitparagraphs(blocks) | |
357 | blocks = updatefieldlists(blocks) |
|
357 | blocks = updatefieldlists(blocks) | |
358 | blocks = addmargins(blocks) |
|
358 | blocks = addmargins(blocks) | |
359 | text = '\n'.join(formatblock(b, width) for b in blocks) |
|
359 | text = '\n'.join(formatblock(b, width) for b in blocks) | |
360 | if keep is None: |
|
360 | if keep is None: | |
361 | return text |
|
361 | return text | |
362 | else: |
|
362 | else: | |
363 | return text, pruned |
|
363 | return text, pruned | |
364 |
|
364 | |||
365 |
|
365 | |||
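Typical use of the whole pipeline, sketched (the input text, width and container name here are assumptions, not from the changeset):

    from mercurial import minirst
    text = "Intro paragraph.\n\n.. container:: verbose\n\n   Extra detail.\n"
    formatted = minirst.format(text, 70)                   # keep=None: text only
    formatted, pruned = minirst.format(text, 70, keep=['verbose'])
    # With keep given, the pruned container types come back as well.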
366 | if __name__ == "__main__": |
|
366 | if __name__ == "__main__": | |
367 | from pprint import pprint |
|
367 | from pprint import pprint | |
368 |
|
368 | |||
369 | def debug(func, *args): |
|
369 | def debug(func, *args): | |
370 | blocks = func(*args) |
|
370 | blocks = func(*args) | |
371 | print "*** after %s:" % func.__name__ |
|
371 | print "*** after %s:" % func.__name__ | |
372 | pprint(blocks) |
|
372 | pprint(blocks) | |
373 |
|
373 | |||
374 | return blocks |
|
374 | return blocks | |
375 |
|
375 | |||
376 | text = open(sys.argv[1]).read() |
|
376 | text = open(sys.argv[1]).read() | |
377 | blocks = debug(findblocks, text) |
|
377 | blocks = debug(findblocks, text) | |
378 | blocks = debug(findliteralblocks, blocks) |
|
378 | blocks = debug(findliteralblocks, blocks) | |
379 | blocks, pruned = debug(prunecontainers, blocks, sys.argv[2:]) |
|
379 | blocks, pruned = debug(prunecontainers, blocks, sys.argv[2:]) | |
380 | blocks = debug(inlineliterals, blocks) |
|
380 | blocks = debug(inlineliterals, blocks) | |
381 | blocks = debug(splitparagraphs, blocks) |
|
381 | blocks = debug(splitparagraphs, blocks) | |
382 | blocks = debug(updatefieldlists, blocks) |
|
382 | blocks = debug(updatefieldlists, blocks) | |
383 | blocks = debug(findsections, blocks) |
|
383 | blocks = debug(findsections, blocks) | |
384 | blocks = debug(addmargins, blocks) |
|
384 | blocks = debug(addmargins, blocks) | |
385 | print '\n'.join(formatblock(b, 30) for b in blocks) |
|
385 | print '\n'.join(formatblock(b, 30) for b in blocks) |
@@ -1,218 +1,220 b'' | |||||
1 | # templatefilters.py - common template expansion filters |
|
1 | # templatefilters.py - common template expansion filters | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2008 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import cgi, re, os, time, urllib |
|
8 | import cgi, re, os, time, urllib | |
9 | import util, encoding |
|
9 | import util, encoding | |
10 |
|
10 | |||
11 | def stringify(thing): |
|
11 | def stringify(thing): | |
12 | '''turn nested template iterator into string.''' |
|
12 | '''turn nested template iterator into string.''' | |
13 | if hasattr(thing, '__iter__') and not isinstance(thing, str): |
|
13 | if hasattr(thing, '__iter__') and not isinstance(thing, str): | |
14 | return "".join([stringify(t) for t in thing if t is not None]) |
|
14 | return "".join([stringify(t) for t in thing if t is not None]) | |
15 | return str(thing) |
|
15 | return str(thing) | |
16 |
|
16 | |||
17 | agescales = [("year", 3600 * 24 * 365), |
|
17 | agescales = [("year", 3600 * 24 * 365), | |
18 | ("month", 3600 * 24 * 30), |
|
18 | ("month", 3600 * 24 * 30), | |
19 | ("week", 3600 * 24 * 7), |
|
19 | ("week", 3600 * 24 * 7), | |
20 | ("day", 3600 * 24), |
|
20 | ("day", 3600 * 24), | |
21 | ("hour", 3600), |
|
21 | ("hour", 3600), | |
22 | ("minute", 60), |
|
22 | ("minute", 60), | |
23 | ("second", 1)] |
|
23 | ("second", 1)] | |
24 |
|
24 | |||
25 | def age(date): |
|
25 | def age(date): | |
26 | '''turn a (timestamp, tzoff) tuple into an age string.''' |
|
26 | '''turn a (timestamp, tzoff) tuple into an age string.''' | |
27 |
|
27 | |||
28 | def plural(t, c): |
|
28 | def plural(t, c): | |
29 | if c == 1: |
|
29 | if c == 1: | |
30 | return t |
|
30 | return t | |
31 | return t + "s" |
|
31 | return t + "s" | |
32 | def fmt(t, c): |
|
32 | def fmt(t, c): | |
33 | return "%d %s" % (c, plural(t, c)) |
|
33 | return "%d %s" % (c, plural(t, c)) | |
34 |
|
34 | |||
35 | now = time.time() |
|
35 | now = time.time() | |
36 | then = date[0] |
|
36 | then = date[0] | |
37 | if then > now: |
|
37 | if then > now: | |
38 | return 'in the future' |
|
38 | return 'in the future' | |
39 |
|
39 | |||
40 | delta = max(1, int(now - then)) |
|
40 | delta = max(1, int(now - then)) | |
41 | if delta > agescales[0][1] * 2: |
|
41 | if delta > agescales[0][1] * 2: | |
42 | return util.shortdate(date) |
|
42 | return util.shortdate(date) | |
43 |
|
43 | |||
44 | for t, s in agescales: |
|
44 | for t, s in agescales: | |
45 | n = delta // s |
|
45 | n = delta // s | |
46 | if n >= 2 or s == 1: |
|
46 | if n >= 2 or s == 1: | |
47 | return '%s ago' % fmt(t, n) |
|
47 | return '%s ago' % fmt(t, n) | |
48 |
|
48 | |||
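Because a unit is only used once the count reaches two, an age just over an hour is still reported in minutes (illustrative numbers, not from the changeset; the import path is an assumption):

    import time
    from mercurial import templatefilters
    now = time.time()
    templatefilters.age((now - 3700, 0))   # -> '61 minutes ago'
    templatefilters.age((now - 7500, 0))   # -> '2 hours ago'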
49 | para_re = None |
|
49 | para_re = None | |
50 | space_re = None |
|
50 | space_re = None | |
51 |
|
51 | |||
52 | def fill(text, width): |
|
52 | def fill(text, width): | |
53 | '''fill many paragraphs.''' |
|
53 | '''fill many paragraphs.''' | |
54 | global para_re, space_re |
|
54 | global para_re, space_re | |
55 | if para_re is None: |
|
55 | if para_re is None: | |
56 | para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) |
|
56 | para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) | |
57 | space_re = re.compile(r' +') |
|
57 | space_re = re.compile(r' +') | |
58 |
|
58 | |||
59 | def findparas(): |
|
59 | def findparas(): | |
60 | start = 0 |
|
60 | start = 0 | |
61 | while True: |
|
61 | while True: | |
62 | m = para_re.search(text, start) |
|
62 | m = para_re.search(text, start) | |
63 | if not m: |
|
63 | if not m: | |
64 | w = len(text) |
|
64 | uctext = unicode(text[start:], encoding.encoding) | |
65 | while w > start and text[w - 1].isspace(): |
|
65 | w = len(uctext) | |
|
66 | while 0 < w and uctext[w - 1].isspace(): | |||
66 | w -= 1 |
|
67 | w -= 1 | |
67 | yield text[start:w], text[w:] |
|
68 | yield (uctext[:w].encode(encoding.encoding), | |
|
69 | uctext[w:].encode(encoding.encoding)) | |||
68 | break |
|
70 | break | |
69 | yield text[start:m.start(0)], m.group(1) |
|
71 | yield text[start:m.start(0)], m.group(1) | |
70 | start = m.end(1) |
|
72 | start = m.end(1) | |
71 |
|
73 | |||
72 | return "".join([space_re.sub(' ', |
|
74 | return "".join([space_re.sub(' ', util.wrap(para, width=width)) + rest | |
73 | for para, rest in findparas()]) |
|
75 | for para, rest in findparas()]) | |
74 |
|
76 | |||
75 | def firstline(text): |
|
77 | def firstline(text): | |
76 | '''return the first line of text''' |
|
78 | '''return the first line of text''' | |
77 | try: |
|
79 | try: | |
78 | return text.splitlines(True)[0].rstrip('\r\n') |
|
80 | return text.splitlines(True)[0].rstrip('\r\n') | |
79 | except IndexError: |
|
81 | except IndexError: | |
80 | return '' |
|
82 | return '' | |
81 |
|
83 | |||
82 | def nl2br(text): |
|
84 | def nl2br(text): | |
83 | '''replace raw newlines with xhtml line breaks.''' |
|
85 | '''replace raw newlines with xhtml line breaks.''' | |
84 | return text.replace('\n', '<br/>\n') |
|
86 | return text.replace('\n', '<br/>\n') | |
85 |
|
87 | |||
86 | def obfuscate(text): |
|
88 | def obfuscate(text): | |
87 | text = unicode(text, encoding.encoding, 'replace') |
|
89 | text = unicode(text, encoding.encoding, 'replace') | |
88 | return ''.join(['&#%d;' % ord(c) for c in text]) |
|
90 | return ''.join(['&#%d;' % ord(c) for c in text]) | |
89 |
|
91 | |||
90 | def domain(author): |
|
92 | def domain(author): | |
91 | '''get domain of author, or empty string if none.''' |
|
93 | '''get domain of author, or empty string if none.''' | |
92 | f = author.find('@') |
|
94 | f = author.find('@') | |
93 | if f == -1: |
|
95 | if f == -1: | |
94 | return '' |
|
96 | return '' | |
95 | author = author[f + 1:] |
|
97 | author = author[f + 1:] | |
96 | f = author.find('>') |
|
98 | f = author.find('>') | |
97 | if f >= 0: |
|
99 | if f >= 0: | |
98 | author = author[:f] |
|
100 | author = author[:f] | |
99 | return author |
|
101 | return author | |
100 |
|
102 | |||
101 | def person(author): |
|
103 | def person(author): | |
102 | '''get name of author, or else username.''' |
|
104 | '''get name of author, or else username.''' | |
103 | if not '@' in author: |
|
105 | if not '@' in author: | |
104 | return author |
|
106 | return author | |
105 | f = author.find('<') |
|
107 | f = author.find('<') | |
106 | if f == -1: |
|
108 | if f == -1: | |
107 | return util.shortuser(author) |
|
109 | return util.shortuser(author) | |
108 | return author[:f].rstrip() |
|
110 | return author[:f].rstrip() | |
109 |
|
111 | |||
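For a conventional author string the two filters split as follows (illustrative values, not part of the changeset):

    from mercurial import templatefilters
    author = 'John Doe <jdoe@example.com>'
    templatefilters.person(author)    # -> 'John Doe'
    templatefilters.domain(author)    # -> 'example.com'
    templatefilters.person('jdoe')    # -> 'jdoe' (no '@': returned unchanged)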
110 | def indent(text, prefix): |
|
112 | def indent(text, prefix): | |
111 | '''indent each non-empty line of text after first with prefix.''' |
|
113 | '''indent each non-empty line of text after first with prefix.''' | |
112 | lines = text.splitlines() |
|
114 | lines = text.splitlines() | |
113 | num_lines = len(lines) |
|
115 | num_lines = len(lines) | |
114 | endswithnewline = text[-1:] == '\n' |
|
116 | endswithnewline = text[-1:] == '\n' | |
115 | def indenter(): |
|
117 | def indenter(): | |
116 | for i in xrange(num_lines): |
|
118 | for i in xrange(num_lines): | |
117 | l = lines[i] |
|
119 | l = lines[i] | |
118 | if i and l.strip(): |
|
120 | if i and l.strip(): | |
119 | yield prefix |
|
121 | yield prefix | |
120 | yield l |
|
122 | yield l | |
121 | if i < num_lines - 1 or endswithnewline: |
|
123 | if i < num_lines - 1 or endswithnewline: | |
122 | yield '\n' |
|
124 | yield '\n' | |
123 | return "".join(indenter()) |
|
125 | return "".join(indenter()) | |
124 |
|
126 | |||
125 | def permissions(flags): |
|
127 | def permissions(flags): | |
126 | if "l" in flags: |
|
128 | if "l" in flags: | |
127 | return "lrwxrwxrwx" |
|
129 | return "lrwxrwxrwx" | |
128 | if "x" in flags: |
|
130 | if "x" in flags: | |
129 | return "-rwxr-xr-x" |
|
131 | return "-rwxr-xr-x" | |
130 | return "-rw-r--r--" |
|
132 | return "-rw-r--r--" | |
131 |
|
133 | |||
132 | def xmlescape(text): |
|
134 | def xmlescape(text): | |
133 | text = (text |
|
135 | text = (text | |
134 | .replace('&', '&') |
|
136 | .replace('&', '&') | |
135 | .replace('<', '<') |
|
137 | .replace('<', '<') | |
136 | .replace('>', '>') |
|
138 | .replace('>', '>') | |
137 | .replace('"', '"') |
|
139 | .replace('"', '"') | |
138 | .replace("'", ''')) # ' invalid in HTML |
|
140 | .replace("'", ''')) # ' invalid in HTML | |
139 | return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) |
|
141 | return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) | |
140 |
|
142 | |||
141 | _escapes = [ |
|
143 | _escapes = [ | |
142 | ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), |
|
144 | ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), | |
143 | ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), |
|
145 | ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), | |
144 | ] |
|
146 | ] | |
145 |
|
147 | |||
146 | def jsonescape(s): |
|
148 | def jsonescape(s): | |
147 | for k, v in _escapes: |
|
149 | for k, v in _escapes: | |
148 | s = s.replace(k, v) |
|
150 | s = s.replace(k, v) | |
149 | return s |
|
151 | return s | |
150 |
|
152 | |||
151 | def json(obj): |
|
153 | def json(obj): | |
152 | if obj is None or obj is False or obj is True: |
|
154 | if obj is None or obj is False or obj is True: | |
153 | return {None: 'null', False: 'false', True: 'true'}[obj] |
|
155 | return {None: 'null', False: 'false', True: 'true'}[obj] | |
154 | elif isinstance(obj, int) or isinstance(obj, float): |
|
156 | elif isinstance(obj, int) or isinstance(obj, float): | |
155 | return str(obj) |
|
157 | return str(obj) | |
156 | elif isinstance(obj, str): |
|
158 | elif isinstance(obj, str): | |
157 | return '"%s"' % jsonescape(obj) |
|
159 | return '"%s"' % jsonescape(obj) | |
158 | elif isinstance(obj, unicode): |
|
160 | elif isinstance(obj, unicode): | |
159 | return json(obj.encode('utf-8')) |
|
161 | return json(obj.encode('utf-8')) | |
160 | elif hasattr(obj, 'keys'): |
|
162 | elif hasattr(obj, 'keys'): | |
161 | out = [] |
|
163 | out = [] | |
162 | for k, v in obj.iteritems(): |
|
164 | for k, v in obj.iteritems(): | |
163 | s = '%s: %s' % (json(k), json(v)) |
|
165 | s = '%s: %s' % (json(k), json(v)) | |
164 | out.append(s) |
|
166 | out.append(s) | |
165 | return '{' + ', '.join(out) + '}' |
|
167 | return '{' + ', '.join(out) + '}' | |
166 | elif hasattr(obj, '__iter__'): |
|
168 | elif hasattr(obj, '__iter__'): | |
167 | out = [] |
|
169 | out = [] | |
168 | for i in obj: |
|
170 | for i in obj: | |
169 | out.append(json(i)) |
|
171 | out.append(json(i)) | |
170 | return '[' + ', '.join(out) + ']' |
|
172 | return '[' + ', '.join(out) + ']' | |
171 | else: |
|
173 | else: | |
172 | raise TypeError('cannot encode type %s' % obj.__class__.__name__) |
|
174 | raise TypeError('cannot encode type %s' % obj.__class__.__name__) | |
173 |
|
175 | |||
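A small sanity check of the encoder (hypothetical values, not from the changeset); note that dict iteration order, and hence key order in the output, is not guaranteed:

    from mercurial import templatefilters
    templatefilters.json({'rev': 42, 'tip': True, 'files': ['a.txt', 'b.txt']})
    # -> e.g. '{"rev": 42, "tip": true, "files": ["a.txt", "b.txt"]}'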
174 | def stripdir(text): |
|
176 | def stripdir(text): | |
175 | '''Treat the text as path and strip a directory level, if possible.''' |
|
177 | '''Treat the text as path and strip a directory level, if possible.''' | |
176 | dir = os.path.dirname(text) |
|
178 | dir = os.path.dirname(text) | |
177 | if dir == "": |
|
179 | if dir == "": | |
178 | return os.path.basename(text) |
|
180 | return os.path.basename(text) | |
179 | else: |
|
181 | else: | |
180 | return dir |
|
182 | return dir | |
181 |
|
183 | |||
182 | def nonempty(str): |
|
184 | def nonempty(str): | |
183 | return str or "(none)" |
|
185 | return str or "(none)" | |
184 |
|
186 | |||
185 | filters = { |
|
187 | filters = { | |
186 | "addbreaks": nl2br, |
|
188 | "addbreaks": nl2br, | |
187 | "basename": os.path.basename, |
|
189 | "basename": os.path.basename, | |
188 | "stripdir": stripdir, |
|
190 | "stripdir": stripdir, | |
189 | "age": age, |
|
191 | "age": age, | |
190 | "date": lambda x: util.datestr(x), |
|
192 | "date": lambda x: util.datestr(x), | |
191 | "domain": domain, |
|
193 | "domain": domain, | |
192 | "email": util.email, |
|
194 | "email": util.email, | |
193 | "escape": lambda x: cgi.escape(x, True), |
|
195 | "escape": lambda x: cgi.escape(x, True), | |
194 | "fill68": lambda x: fill(x, width=68), |
|
196 | "fill68": lambda x: fill(x, width=68), | |
195 | "fill76": lambda x: fill(x, width=76), |
|
197 | "fill76": lambda x: fill(x, width=76), | |
196 | "firstline": firstline, |
|
198 | "firstline": firstline, | |
197 | "tabindent": lambda x: indent(x, '\t'), |
|
199 | "tabindent": lambda x: indent(x, '\t'), | |
198 | "hgdate": lambda x: "%d %d" % x, |
|
200 | "hgdate": lambda x: "%d %d" % x, | |
199 | "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'), |
|
201 | "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'), | |
200 | "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'), |
|
202 | "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'), | |
201 | "json": json, |
|
203 | "json": json, | |
202 | "jsonescape": jsonescape, |
|
204 | "jsonescape": jsonescape, | |
203 | "localdate": lambda x: (x[0], util.makedate()[1]), |
|
205 | "localdate": lambda x: (x[0], util.makedate()[1]), | |
204 | "nonempty": nonempty, |
|
206 | "nonempty": nonempty, | |
205 | "obfuscate": obfuscate, |
|
207 | "obfuscate": obfuscate, | |
206 | "permissions": permissions, |
|
208 | "permissions": permissions, | |
207 | "person": person, |
|
209 | "person": person, | |
208 | "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"), |
|
210 | "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"), | |
209 | "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"), |
|
211 | "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"), | |
210 | "short": lambda x: x[:12], |
|
212 | "short": lambda x: x[:12], | |
211 | "shortdate": util.shortdate, |
|
213 | "shortdate": util.shortdate, | |
212 | "stringify": stringify, |
|
214 | "stringify": stringify, | |
213 | "strip": lambda x: x.strip(), |
|
215 | "strip": lambda x: x.strip(), | |
214 | "urlescape": lambda x: urllib.quote(x), |
|
216 | "urlescape": lambda x: urllib.quote(x), | |
215 | "user": lambda x: util.shortuser(x), |
|
217 | "user": lambda x: util.shortuser(x), | |
216 | "stringescape": lambda x: x.encode('string_escape'), |
|
218 | "stringescape": lambda x: x.encode('string_escape'), | |
217 | "xmlescape": xmlescape, |
|
219 | "xmlescape": xmlescape, | |
218 | } |
|
220 | } |
@@ -1,1350 +1,1378 b'' | |||||
1 | # util.py - Mercurial utility functions and platform specific implementations |
|
1 | # util.py - Mercurial utility functions and platform specific implementations | |
2 | # |
|
2 | # | |
3 | # Copyright 2005 K. Thananchayan <thananck@yahoo.com> |
|
3 | # Copyright 2005 K. Thananchayan <thananck@yahoo.com> | |
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
6 | # |
|
6 | # | |
7 | # This software may be used and distributed according to the terms of the |
|
7 | # This software may be used and distributed according to the terms of the | |
8 | # GNU General Public License version 2 or any later version. |
|
8 | # GNU General Public License version 2 or any later version. | |
9 |
|
9 | |||
10 | """Mercurial utility functions and platform specific implementations. |
|
10 | """Mercurial utility functions and platform specific implementations. | |
11 |
|
11 | |||
12 | This contains helper routines that are independent of the SCM core and |
|
12 | This contains helper routines that are independent of the SCM core and | |
13 | hide platform-specific details from the core. |
|
13 | hide platform-specific details from the core. | |
14 | """ |
|
14 | """ | |
15 |
|
15 | |||
16 | from i18n import _ |
|
16 | from i18n import _ | |
17 | import error, osutil, encoding |
|
17 | import error, osutil, encoding | |
18 | import cStringIO, errno, re, shutil, sys, tempfile, traceback |
|
18 | import cStringIO, errno, re, shutil, sys, tempfile, traceback | |
19 | import os, stat, time, calendar, textwrap, signal |
|
19 | import os, stat, time, calendar, textwrap, unicodedata, signal | |
20 | import imp |
|
20 | import imp | |
21 |
|
21 | |||
22 | # Python compatibility |
|
22 | # Python compatibility | |
23 |
|
23 | |||
24 | def sha1(s): |
|
24 | def sha1(s): | |
25 | return _fastsha1(s) |
|
25 | return _fastsha1(s) | |
26 |
|
26 | |||
27 | def _fastsha1(s): |
|
27 | def _fastsha1(s): | |
28 | # This function will import sha1 from hashlib or sha (whichever is |
|
28 | # This function will import sha1 from hashlib or sha (whichever is | |
29 | # available) and overwrite itself with it on the first call. |
|
29 | # available) and overwrite itself with it on the first call. | |
30 | # Subsequent calls will go directly to the imported function. |
|
30 | # Subsequent calls will go directly to the imported function. | |
31 | try: |
|
31 | try: | |
32 | from hashlib import sha1 as _sha1 |
|
32 | from hashlib import sha1 as _sha1 | |
33 | except ImportError: |
|
33 | except ImportError: | |
34 | from sha import sha as _sha1 |
|
34 | from sha import sha as _sha1 | |
35 | global _fastsha1, sha1 |
|
35 | global _fastsha1, sha1 | |
36 | _fastsha1 = sha1 = _sha1 |
|
36 | _fastsha1 = sha1 = _sha1 | |
37 | return _sha1(s) |
|
37 | return _sha1(s) | |
38 |
|
38 | |||
39 | import __builtin__ |
|
39 | import __builtin__ | |
40 |
|
40 | |||
41 | def fakebuffer(sliceable, offset=0): |
|
41 | def fakebuffer(sliceable, offset=0): | |
42 | return sliceable[offset:] |
|
42 | return sliceable[offset:] | |
43 | if not hasattr(__builtin__, 'buffer'): |
|
43 | if not hasattr(__builtin__, 'buffer'): | |
44 | __builtin__.buffer = fakebuffer |
|
44 | __builtin__.buffer = fakebuffer | |
45 |
|
45 | |||
46 | import subprocess |
|
46 | import subprocess | |
47 | closefds = os.name == 'posix' |
|
47 | closefds = os.name == 'posix' | |
48 |
|
48 | |||
49 | def popen2(cmd, env=None, newlines=False): |
|
49 | def popen2(cmd, env=None, newlines=False): | |
50 | # Setting bufsize to -1 lets the system decide the buffer size. |
|
50 | # Setting bufsize to -1 lets the system decide the buffer size. | |
51 | # The default for bufsize is 0, meaning unbuffered. This leads to |
|
51 | # The default for bufsize is 0, meaning unbuffered. This leads to | |
52 | # poor performance on Mac OS X: http://bugs.python.org/issue4194 |
|
52 | # poor performance on Mac OS X: http://bugs.python.org/issue4194 | |
53 | p = subprocess.Popen(cmd, shell=True, bufsize=-1, |
|
53 | p = subprocess.Popen(cmd, shell=True, bufsize=-1, | |
54 | close_fds=closefds, |
|
54 | close_fds=closefds, | |
55 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
55 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | |
56 | universal_newlines=newlines, |
|
56 | universal_newlines=newlines, | |
57 | env=env) |
|
57 | env=env) | |
58 | return p.stdin, p.stdout |
|
58 | return p.stdin, p.stdout | |
59 |
|
59 | |||
60 | def popen3(cmd, env=None, newlines=False): |
|
60 | def popen3(cmd, env=None, newlines=False): | |
61 | p = subprocess.Popen(cmd, shell=True, bufsize=-1, |
|
61 | p = subprocess.Popen(cmd, shell=True, bufsize=-1, | |
62 | close_fds=closefds, |
|
62 | close_fds=closefds, | |
63 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
63 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | |
64 | stderr=subprocess.PIPE, |
|
64 | stderr=subprocess.PIPE, | |
65 | universal_newlines=newlines, |
|
65 | universal_newlines=newlines, | |
66 | env=env) |
|
66 | env=env) | |
67 | return p.stdin, p.stdout, p.stderr |
|
67 | return p.stdin, p.stdout, p.stderr | |
68 |
|
68 | |||
69 | def version(): |
|
69 | def version(): | |
70 | """Return version information if available.""" |
|
70 | """Return version information if available.""" | |
71 | try: |
|
71 | try: | |
72 | import __version__ |
|
72 | import __version__ | |
73 | return __version__.version |
|
73 | return __version__.version | |
74 | except ImportError: |
|
74 | except ImportError: | |
75 | return 'unknown' |
|
75 | return 'unknown' | |
76 |
|
76 | |||
77 | # used by parsedate |
|
77 | # used by parsedate | |
78 | defaultdateformats = ( |
|
78 | defaultdateformats = ( | |
79 | '%Y-%m-%d %H:%M:%S', |
|
79 | '%Y-%m-%d %H:%M:%S', | |
80 | '%Y-%m-%d %I:%M:%S%p', |
|
80 | '%Y-%m-%d %I:%M:%S%p', | |
81 | '%Y-%m-%d %H:%M', |
|
81 | '%Y-%m-%d %H:%M', | |
82 | '%Y-%m-%d %I:%M%p', |
|
82 | '%Y-%m-%d %I:%M%p', | |
83 | '%Y-%m-%d', |
|
83 | '%Y-%m-%d', | |
84 | '%m-%d', |
|
84 | '%m-%d', | |
85 | '%m/%d', |
|
85 | '%m/%d', | |
86 | '%m/%d/%y', |
|
86 | '%m/%d/%y', | |
87 | '%m/%d/%Y', |
|
87 | '%m/%d/%Y', | |
88 | '%a %b %d %H:%M:%S %Y', |
|
88 | '%a %b %d %H:%M:%S %Y', | |
89 | '%a %b %d %I:%M:%S%p %Y', |
|
89 | '%a %b %d %I:%M:%S%p %Y', | |
90 | '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" |
|
90 | '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" | |
91 | '%b %d %H:%M:%S %Y', |
|
91 | '%b %d %H:%M:%S %Y', | |
92 | '%b %d %I:%M:%S%p %Y', |
|
92 | '%b %d %I:%M:%S%p %Y', | |
93 | '%b %d %H:%M:%S', |
|
93 | '%b %d %H:%M:%S', | |
94 | '%b %d %I:%M:%S%p', |
|
94 | '%b %d %I:%M:%S%p', | |
95 | '%b %d %H:%M', |
|
95 | '%b %d %H:%M', | |
96 | '%b %d %I:%M%p', |
|
96 | '%b %d %I:%M%p', | |
97 | '%b %d %Y', |
|
97 | '%b %d %Y', | |
98 | '%b %d', |
|
98 | '%b %d', | |
99 | '%H:%M:%S', |
|
99 | '%H:%M:%S', | |
100 | '%I:%M:%S%p', |
|
100 | '%I:%M:%S%p', | |
101 | '%H:%M', |
|
101 | '%H:%M', | |
102 | '%I:%M%p', |
|
102 | '%I:%M%p', | |
103 | ) |
|
103 | ) | |
104 |
|
104 | |||
105 | extendeddateformats = defaultdateformats + ( |
|
105 | extendeddateformats = defaultdateformats + ( | |
106 | "%Y", |
|
106 | "%Y", | |
107 | "%Y-%m", |
|
107 | "%Y-%m", | |
108 | "%b", |
|
108 | "%b", | |
109 | "%b %Y", |
|
109 | "%b %Y", | |
110 | ) |
|
110 | ) | |
111 |
|
111 | |||
112 | def cachefunc(func): |
|
112 | def cachefunc(func): | |
113 | '''cache the result of function calls''' |
|
113 | '''cache the result of function calls''' | |
114 | # XXX doesn't handle keyword args |
|
114 | # XXX doesn't handle keyword args | |
115 | cache = {} |
|
115 | cache = {} | |
116 | if func.func_code.co_argcount == 1: |
|
116 | if func.func_code.co_argcount == 1: | |
117 | # we gain a small amount of time because |
|
117 | # we gain a small amount of time because | |
118 | # we don't need to pack/unpack the list |
|
118 | # we don't need to pack/unpack the list | |
119 | def f(arg): |
|
119 | def f(arg): | |
120 | if arg not in cache: |
|
120 | if arg not in cache: | |
121 | cache[arg] = func(arg) |
|
121 | cache[arg] = func(arg) | |
122 | return cache[arg] |
|
122 | return cache[arg] | |
123 | else: |
|
123 | else: | |
124 | def f(*args): |
|
124 | def f(*args): | |
125 | if args not in cache: |
|
125 | if args not in cache: | |
126 | cache[args] = func(*args) |
|
126 | cache[args] = func(*args) | |
127 | return cache[args] |
|
127 | return cache[args] | |
128 |
|
128 | |||
129 | return f |
|
129 | return f | |
130 |
|
130 | |||
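Usage is the usual memoization pattern (a sketch; the wrapped function below is made up):

    from mercurial import util

    def _square(rev):
        # stand-in for an expensive computation
        return rev * rev

    cached = util.cachefunc(_square)
    cached(10)   # computes and stores 100
    cached(10)   # answered from the cache; _square is not called again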
131 | def lrucachefunc(func): |
|
131 | def lrucachefunc(func): | |
132 | '''cache most recent results of function calls''' |
|
132 | '''cache most recent results of function calls''' | |
133 | cache = {} |
|
133 | cache = {} | |
134 | order = [] |
|
134 | order = [] | |
135 | if func.func_code.co_argcount == 1: |
|
135 | if func.func_code.co_argcount == 1: | |
136 | def f(arg): |
|
136 | def f(arg): | |
137 | if arg not in cache: |
|
137 | if arg not in cache: | |
138 | if len(cache) > 20: |
|
138 | if len(cache) > 20: | |
139 | del cache[order.pop(0)] |
|
139 | del cache[order.pop(0)] | |
140 | cache[arg] = func(arg) |
|
140 | cache[arg] = func(arg) | |
141 | else: |
|
141 | else: | |
142 | order.remove(arg) |
|
142 | order.remove(arg) | |
143 | order.append(arg) |
|
143 | order.append(arg) | |
144 | return cache[arg] |
|
144 | return cache[arg] | |
145 | else: |
|
145 | else: | |
146 | def f(*args): |
|
146 | def f(*args): | |
147 | if args not in cache: |
|
147 | if args not in cache: | |
148 | if len(cache) > 20: |
|
148 | if len(cache) > 20: | |
149 | del cache[order.pop(0)] |
|
149 | del cache[order.pop(0)] | |
150 | cache[args] = func(*args) |
|
150 | cache[args] = func(*args) | |
151 | else: |
|
151 | else: | |
152 | order.remove(args) |
|
152 | order.remove(args) | |
153 | order.append(args) |
|
153 | order.append(args) | |
154 | return cache[args] |
|
154 | return cache[args] | |
155 |
|
155 | |||
156 | return f |
|
156 | return f | |
157 |
|
157 | |||
158 | class propertycache(object): |
|
158 | class propertycache(object): | |
159 | def __init__(self, func): |
|
159 | def __init__(self, func): | |
160 | self.func = func |
|
160 | self.func = func | |
161 | self.name = func.__name__ |
|
161 | self.name = func.__name__ | |
162 | def __get__(self, obj, type=None): |
|
162 | def __get__(self, obj, type=None): | |
163 | result = self.func(obj) |
|
163 | result = self.func(obj) | |
164 | setattr(obj, self.name, result) |
|
164 | setattr(obj, self.name, result) | |
165 | return result |
|
165 | return result | |
166 |
|
166 | |||
167 | def pipefilter(s, cmd): |
|
167 | def pipefilter(s, cmd): | |
168 | '''filter string S through command CMD, returning its output''' |
|
168 | '''filter string S through command CMD, returning its output''' | |
169 | p = subprocess.Popen(cmd, shell=True, close_fds=closefds, |
|
169 | p = subprocess.Popen(cmd, shell=True, close_fds=closefds, | |
170 | stdin=subprocess.PIPE, stdout=subprocess.PIPE) |
|
170 | stdin=subprocess.PIPE, stdout=subprocess.PIPE) | |
171 | pout, perr = p.communicate(s) |
|
171 | pout, perr = p.communicate(s) | |
172 | return pout |
|
172 | return pout | |
173 |
|
173 | |||
174 | def tempfilter(s, cmd): |
|
174 | def tempfilter(s, cmd): | |
175 | '''filter string S through a pair of temporary files with CMD. |
|
175 | '''filter string S through a pair of temporary files with CMD. | |
176 | CMD is used as a template to create the real command to be run, |
|
176 | CMD is used as a template to create the real command to be run, | |
177 | with the strings INFILE and OUTFILE replaced by the real names of |
|
177 | with the strings INFILE and OUTFILE replaced by the real names of | |
178 | the temporary files generated.''' |
|
178 | the temporary files generated.''' | |
179 | inname, outname = None, None |
|
179 | inname, outname = None, None | |
180 | try: |
|
180 | try: | |
181 | infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') |
|
181 | infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') | |
182 | fp = os.fdopen(infd, 'wb') |
|
182 | fp = os.fdopen(infd, 'wb') | |
183 | fp.write(s) |
|
183 | fp.write(s) | |
184 | fp.close() |
|
184 | fp.close() | |
185 | outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') |
|
185 | outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') | |
186 | os.close(outfd) |
|
186 | os.close(outfd) | |
187 | cmd = cmd.replace('INFILE', inname) |
|
187 | cmd = cmd.replace('INFILE', inname) | |
188 | cmd = cmd.replace('OUTFILE', outname) |
|
188 | cmd = cmd.replace('OUTFILE', outname) | |
189 | code = os.system(cmd) |
|
189 | code = os.system(cmd) | |
190 | if sys.platform == 'OpenVMS' and code & 1: |
|
190 | if sys.platform == 'OpenVMS' and code & 1: | |
191 | code = 0 |
|
191 | code = 0 | |
192 | if code: |
|
192 | if code: | |
193 | raise Abort(_("command '%s' failed: %s") % |
|
193 | raise Abort(_("command '%s' failed: %s") % | |
194 | (cmd, explain_exit(code))) |
|
194 | (cmd, explain_exit(code))) | |
195 | return open(outname, 'rb').read() |
|
195 | return open(outname, 'rb').read() | |
196 | finally: |
|
196 | finally: | |
197 | try: |
|
197 | try: | |
198 | if inname: |
|
198 | if inname: | |
199 | os.unlink(inname) |
|
199 | os.unlink(inname) | |
200 | except: |
|
200 | except: | |
201 | pass |
|
201 | pass | |
202 | try: |
|
202 | try: | |
203 | if outname: |
|
203 | if outname: | |
204 | os.unlink(outname) |
|
204 | os.unlink(outname) | |
205 | except: |
|
205 | except: | |
206 | pass |
|
206 | pass | |
207 |
|
207 | |||
208 | filtertable = { |
|
208 | filtertable = { | |
209 | 'tempfile:': tempfilter, |
|
209 | 'tempfile:': tempfilter, | |
210 | 'pipe:': pipefilter, |
|
210 | 'pipe:': pipefilter, | |
211 | } |
|
211 | } | |
212 |
|
212 | |||
213 | def filter(s, cmd): |
|
213 | def filter(s, cmd): | |
214 | "filter a string through a command that transforms its input to its output" |
|
214 | "filter a string through a command that transforms its input to its output" | |
215 | for name, fn in filtertable.iteritems(): |
|
215 | for name, fn in filtertable.iteritems(): | |
216 | if cmd.startswith(name): |
|
216 | if cmd.startswith(name): | |
217 | return fn(s, cmd[len(name):].lstrip()) |
|
217 | return fn(s, cmd[len(name):].lstrip()) | |
218 | return pipefilter(s, cmd) |
|
218 | return pipefilter(s, cmd) | |
219 |
|
219 | |||
220 | def binary(s): |
|
220 | def binary(s): | |
221 | """return true if a string is binary data""" |
|
221 | """return true if a string is binary data""" | |
222 | return bool(s and '\0' in s) |
|
222 | return bool(s and '\0' in s) | |
223 |
|
223 | |||
224 | def increasingchunks(source, min=1024, max=65536): |
|
224 | def increasingchunks(source, min=1024, max=65536): | |
225 | '''return no less than min bytes per chunk while data remains, |
|
225 | '''return no less than min bytes per chunk while data remains, | |
226 | doubling min after each chunk until it reaches max''' |
|
226 | doubling min after each chunk until it reaches max''' | |
227 | def log2(x): |
|
227 | def log2(x): | |
228 | if not x: |
|
228 | if not x: | |
229 | return 0 |
|
229 | return 0 | |
230 | i = 0 |
|
230 | i = 0 | |
231 | while x: |
|
231 | while x: | |
232 | x >>= 1 |
|
232 | x >>= 1 | |
233 | i += 1 |
|
233 | i += 1 | |
234 | return i - 1 |
|
234 | return i - 1 | |
235 |
|
235 | |||
236 | buf = [] |
|
236 | buf = [] | |
237 | blen = 0 |
|
237 | blen = 0 | |
238 | for chunk in source: |
|
238 | for chunk in source: | |
239 | buf.append(chunk) |
|
239 | buf.append(chunk) | |
240 | blen += len(chunk) |
|
240 | blen += len(chunk) | |
241 | if blen >= min: |
|
241 | if blen >= min: | |
242 | if min < max: |
|
242 | if min < max: | |
243 | min = min << 1 |
|
243 | min = min << 1 | |
244 | nmin = 1 << log2(blen) |
|
244 | nmin = 1 << log2(blen) | |
245 | if nmin > min: |
|
245 | if nmin > min: | |
246 | min = nmin |
|
246 | min = nmin | |
247 | if min > max: |
|
247 | if min > max: | |
248 | min = max |
|
248 | min = max | |
249 | yield ''.join(buf) |
|
249 | yield ''.join(buf) | |
250 | blen = 0 |
|
250 | blen = 0 | |
251 | buf = [] |
|
251 | buf = [] | |
252 | if buf: |
|
252 | if buf: | |
253 | yield ''.join(buf) |
|
253 | yield ''.join(buf) | |
254 |
|
254 | |||
255 | Abort = error.Abort |
|
255 | Abort = error.Abort | |
256 |
|
256 | |||
257 | def always(fn): |
|
257 | def always(fn): | |
258 | return True |
|
258 | return True | |
259 |
|
259 | |||
260 | def never(fn): |
|
260 | def never(fn): | |
261 | return False |
|
261 | return False | |
262 |
|
262 | |||
263 | def pathto(root, n1, n2): |
|
263 | def pathto(root, n1, n2): | |
264 | '''return the relative path from one place to another. |
|
264 | '''return the relative path from one place to another. | |
265 | root should use os.sep to separate directories |
|
265 | root should use os.sep to separate directories | |
266 | n1 should use os.sep to separate directories |
|
266 | n1 should use os.sep to separate directories | |
267 | n2 should use "/" to separate directories |
|
267 | n2 should use "/" to separate directories | |
268 | returns an os.sep-separated path. |
|
268 | returns an os.sep-separated path. | |
269 |
|
269 | |||
270 | If n1 is a relative path, it's assumed it's |
|
270 | If n1 is a relative path, it's assumed it's | |
271 | relative to root. |
|
271 | relative to root. | |
272 | n2 should always be relative to root. |
|
272 | n2 should always be relative to root. | |
273 | ''' |
|
273 | ''' | |
274 | if not n1: |
|
274 | if not n1: | |
275 | return localpath(n2) |
|
275 | return localpath(n2) | |
276 | if os.path.isabs(n1): |
|
276 | if os.path.isabs(n1): | |
277 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: |
|
277 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: | |
278 | return os.path.join(root, localpath(n2)) |
|
278 | return os.path.join(root, localpath(n2)) | |
279 | n2 = '/'.join((pconvert(root), n2)) |
|
279 | n2 = '/'.join((pconvert(root), n2)) | |
280 | a, b = splitpath(n1), n2.split('/') |
|
280 | a, b = splitpath(n1), n2.split('/') | |
281 | a.reverse() |
|
281 | a.reverse() | |
282 | b.reverse() |
|
282 | b.reverse() | |
283 | while a and b and a[-1] == b[-1]: |
|
283 | while a and b and a[-1] == b[-1]: | |
284 | a.pop() |
|
284 | a.pop() | |
285 | b.pop() |
|
285 | b.pop() | |
286 | b.reverse() |
|
286 | b.reverse() | |
287 | return os.sep.join((['..'] * len(a)) + b) or '.' |
|
287 | return os.sep.join((['..'] * len(a)) + b) or '.' | |
288 |
|
288 | |||
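An example of the relative-path computation on a POSIX layout (illustrative paths, not from the changeset):

    from mercurial import util
    util.pathto('/repo', 'a/b', 'a/c/d')
    # -> '../c/d': the path from directory a/b to a/c/d, both relative to /repo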
289 | def canonpath(root, cwd, myname): |
|
289 | def canonpath(root, cwd, myname): | |
290 | """return the canonical path of myname, given cwd and root""" |
|
290 | """return the canonical path of myname, given cwd and root""" | |
291 | if endswithsep(root): |
|
291 | if endswithsep(root): | |
292 | rootsep = root |
|
292 | rootsep = root | |
293 | else: |
|
293 | else: | |
294 | rootsep = root + os.sep |
|
294 | rootsep = root + os.sep | |
295 | name = myname |
|
295 | name = myname | |
296 | if not os.path.isabs(name): |
|
296 | if not os.path.isabs(name): | |
297 | name = os.path.join(root, cwd, name) |
|
297 | name = os.path.join(root, cwd, name) | |
298 | name = os.path.normpath(name) |
|
298 | name = os.path.normpath(name) | |
299 | audit_path = path_auditor(root) |
|
299 | audit_path = path_auditor(root) | |
300 | if name != rootsep and name.startswith(rootsep): |
|
300 | if name != rootsep and name.startswith(rootsep): | |
301 | name = name[len(rootsep):] |
|
301 | name = name[len(rootsep):] | |
302 | audit_path(name) |
|
302 | audit_path(name) | |
303 | return pconvert(name) |
|
303 | return pconvert(name) | |
304 | elif name == root: |
|
304 | elif name == root: | |
305 | return '' |
|
305 | return '' | |
306 | else: |
|
306 | else: | |
307 | # Determine whether `name' is in the hierarchy at or beneath `root', |
|
307 | # Determine whether `name' is in the hierarchy at or beneath `root', | |
308 | # by iterating name=dirname(name) until that causes no change (can't |
|
308 | # by iterating name=dirname(name) until that causes no change (can't | |
309 | # check name == '/', because that doesn't work on windows). For each |
|
309 | # check name == '/', because that doesn't work on windows). For each | |
310 | # `name', compare dev/inode numbers. If they match, the list `rel' |
|
310 | # `name', compare dev/inode numbers. If they match, the list `rel' | |
311 | # holds the reversed list of components making up the relative file |
|
311 | # holds the reversed list of components making up the relative file | |
312 | # name we want. |
|
312 | # name we want. | |
313 | root_st = os.stat(root) |
|
313 | root_st = os.stat(root) | |
314 | rel = [] |
|
314 | rel = [] | |
315 | while True: |
|
315 | while True: | |
316 | try: |
|
316 | try: | |
317 | name_st = os.stat(name) |
|
317 | name_st = os.stat(name) | |
318 | except OSError: |
|
318 | except OSError: | |
319 | break |
|
319 | break | |
320 | if samestat(name_st, root_st): |
|
320 | if samestat(name_st, root_st): | |
321 | if not rel: |
|
321 | if not rel: | |
322 | # name was actually the same as root (maybe a symlink) |
|
322 | # name was actually the same as root (maybe a symlink) | |
323 | return '' |
|
323 | return '' | |
324 | rel.reverse() |
|
324 | rel.reverse() | |
325 | name = os.path.join(*rel) |
|
325 | name = os.path.join(*rel) | |
326 | audit_path(name) |
|
326 | audit_path(name) | |
327 | return pconvert(name) |
|
327 | return pconvert(name) | |
328 | dirname, basename = os.path.split(name) |
|
328 | dirname, basename = os.path.split(name) | |
329 | rel.append(basename) |
|
329 | rel.append(basename) | |
330 | if dirname == name: |
|
330 | if dirname == name: | |
331 | break |
|
331 | break | |
332 | name = dirname |
|
332 | name = dirname | |
333 |
|
333 | |||
334 | raise Abort('%s not under root' % myname) |
|
334 | raise Abort('%s not under root' % myname) | |
335 |
|
335 | |||
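A simplified sketch of the normalization canonpath performs, assuming a POSIX layout and skipping the path auditing and the dev/inode fallback used for symlinked roots (names and paths here are illustrative):

    import os

    def canon_sketch(root, cwd, name):
        # resolve name against root/cwd, then require it to stay under root
        if not os.path.isabs(name):
            name = os.path.join(root, cwd, name)
        name = os.path.normpath(name)
        rootsep = root if root.endswith(os.sep) else root + os.sep
        if name == os.path.normpath(root):
            return ''
        if not name.startswith(rootsep):
            raise ValueError('%s not under root' % name)
        return name[len(rootsep):].replace(os.sep, '/')

    print(canon_sketch('/repo', 'src', '../README'))  # 'README'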
336 | _hgexecutable = None |
|
336 | _hgexecutable = None | |
337 |
|
337 | |||
338 | def main_is_frozen(): |
|
338 | def main_is_frozen(): | |
339 | """return True if we are a frozen executable. |
|
339 | """return True if we are a frozen executable. | |
340 |
|
340 | |||
341 | The code supports py2exe (most common, Windows only) and tools/freeze |
|
341 | The code supports py2exe (most common, Windows only) and tools/freeze | |
342 | (portable, not much used). |
|
342 | (portable, not much used). | |
343 | """ |
|
343 | """ | |
344 | return (hasattr(sys, "frozen") or # new py2exe |
|
344 | return (hasattr(sys, "frozen") or # new py2exe | |
345 | hasattr(sys, "importers") or # old py2exe |
|
345 | hasattr(sys, "importers") or # old py2exe | |
346 | imp.is_frozen("__main__")) # tools/freeze |
|
346 | imp.is_frozen("__main__")) # tools/freeze | |
347 |
|
347 | |||
348 | def hgexecutable(): |
|
348 | def hgexecutable(): | |
349 | """return location of the 'hg' executable. |
|
349 | """return location of the 'hg' executable. | |
350 |
|
350 | |||
351 | Defaults to $HG or 'hg' in the search path. |
|
351 | Defaults to $HG or 'hg' in the search path. | |
352 | """ |
|
352 | """ | |
353 | if _hgexecutable is None: |
|
353 | if _hgexecutable is None: | |
354 | hg = os.environ.get('HG') |
|
354 | hg = os.environ.get('HG') | |
355 | if hg: |
|
355 | if hg: | |
356 | set_hgexecutable(hg) |
|
356 | set_hgexecutable(hg) | |
357 | elif main_is_frozen(): |
|
357 | elif main_is_frozen(): | |
358 | set_hgexecutable(sys.executable) |
|
358 | set_hgexecutable(sys.executable) | |
359 | else: |
|
359 | else: | |
360 | exe = find_exe('hg') or os.path.basename(sys.argv[0]) |
|
360 | exe = find_exe('hg') or os.path.basename(sys.argv[0]) | |
361 | set_hgexecutable(exe) |
|
361 | set_hgexecutable(exe) | |
362 | return _hgexecutable |
|
362 | return _hgexecutable | |
363 |
|
363 | |||
364 | def set_hgexecutable(path): |
|
364 | def set_hgexecutable(path): | |
365 | """set location of the 'hg' executable""" |
|
365 | """set location of the 'hg' executable""" | |
366 | global _hgexecutable |
|
366 | global _hgexecutable | |
367 | _hgexecutable = path |
|
367 | _hgexecutable = path | |
368 |
|
368 | |||
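The lookup order above (explicit $HG, then a frozen interpreter, then the search path) amounts to a lazily cached fallback chain; a minimal sketch of that pattern, with illustrative names:

    import os, sys

    _cached_exe = None

    def executable_sketch():
        # $HG wins, then a frozen interpreter, then whatever 'hg' resolves to on PATH
        global _cached_exe
        if _cached_exe is None:
            _cached_exe = (os.environ.get('HG')
                           or (sys.executable if getattr(sys, 'frozen', False) else None)
                           or 'hg')
        return _cached_exe

    print(executable_sketch())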
369 | def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None): |
|
369 | def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None): | |
370 | '''enhanced shell command execution. |
|
370 | '''enhanced shell command execution. | |
371 | run with environment maybe modified, maybe in different dir. |
|
371 | run with environment maybe modified, maybe in different dir. | |
372 |
|
372 | |||
373 | if the command fails and onerr is None, return the exit status. if onerr

373 | if the command fails and onerr is None, return the exit status. if onerr | |
374 | is a ui object, print an error message and return the status, else raise onerr as an

374 | is a ui object, print an error message and return the status, else raise onerr as an | |
375 | exception.''' |
|
375 | exception.''' | |
376 | def py2shell(val): |
|
376 | def py2shell(val): | |
377 | 'convert python object into string that is useful to shell' |
|
377 | 'convert python object into string that is useful to shell' | |
378 | if val is None or val is False: |
|
378 | if val is None or val is False: | |
379 | return '0' |
|
379 | return '0' | |
380 | if val is True: |
|
380 | if val is True: | |
381 | return '1' |
|
381 | return '1' | |
382 | return str(val) |
|
382 | return str(val) | |
383 | origcmd = cmd |
|
383 | origcmd = cmd | |
384 | if os.name == 'nt': |
|
384 | if os.name == 'nt': | |
385 | cmd = '"%s"' % cmd |
|
385 | cmd = '"%s"' % cmd | |
386 | env = dict(os.environ) |
|
386 | env = dict(os.environ) | |
387 | env.update((k, py2shell(v)) for k, v in environ.iteritems()) |
|
387 | env.update((k, py2shell(v)) for k, v in environ.iteritems()) | |
388 | env['HG'] = hgexecutable() |
|
388 | env['HG'] = hgexecutable() | |
389 | rc = subprocess.call(cmd, shell=True, close_fds=closefds, |
|
389 | rc = subprocess.call(cmd, shell=True, close_fds=closefds, | |
390 | env=env, cwd=cwd) |
|
390 | env=env, cwd=cwd) | |
391 | if sys.platform == 'OpenVMS' and rc & 1: |
|
391 | if sys.platform == 'OpenVMS' and rc & 1: | |
392 | rc = 0 |
|
392 | rc = 0 | |
393 | if rc and onerr: |
|
393 | if rc and onerr: | |
394 | errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), |
|
394 | errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), | |
395 | explain_exit(rc)[0]) |
|
395 | explain_exit(rc)[0]) | |
396 | if errprefix: |
|
396 | if errprefix: | |
397 | errmsg = '%s: %s' % (errprefix, errmsg) |
|
397 | errmsg = '%s: %s' % (errprefix, errmsg) | |
398 | try: |
|
398 | try: | |
399 | onerr.warn(errmsg + '\n') |
|
399 | onerr.warn(errmsg + '\n') | |
400 | except AttributeError: |
|
400 | except AttributeError: | |
401 | raise onerr(errmsg) |
|
401 | raise onerr(errmsg) | |
402 | return rc |
|
402 | return rc | |
403 |
|
403 | |||
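The key detail in system() is that Python values are flattened to shell-friendly strings before the merged environment reaches the child process. A self-contained sketch of that conversion (the variable names are made up; a POSIX shell is assumed):

    import os, subprocess

    def py2shell(val):
        # None/False become "0" and True becomes "1" so hooks can test them numerically
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)

    env = dict(os.environ)
    env.update((k, py2shell(v)) for k, v in {'EXAMPLE_FLAG': True, 'EXAMPLE_NODE': None}.items())
    rc = subprocess.call('echo flag=$EXAMPLE_FLAG node=$EXAMPLE_NODE', shell=True, env=env)
    print('exit status %d' % rc)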
404 | def checksignature(func): |
|
404 | def checksignature(func): | |
405 | '''wrap a function with code to check for calling errors''' |
|
405 | '''wrap a function with code to check for calling errors''' | |
406 | def check(*args, **kwargs): |
|
406 | def check(*args, **kwargs): | |
407 | try: |
|
407 | try: | |
408 | return func(*args, **kwargs) |
|
408 | return func(*args, **kwargs) | |
409 | except TypeError: |
|
409 | except TypeError: | |
410 | if len(traceback.extract_tb(sys.exc_info()[2])) == 1: |
|
410 | if len(traceback.extract_tb(sys.exc_info()[2])) == 1: | |
411 | raise error.SignatureError |
|
411 | raise error.SignatureError | |
412 | raise |
|
412 | raise | |
413 |
|
413 | |||
414 | return check |
|
414 | return check | |
415 |
|
415 | |||
416 | # os.path.lexists is not available on python2.3 |
|
416 | # os.path.lexists is not available on python2.3 | |
417 | def lexists(filename): |
|
417 | def lexists(filename): | |
418 | "test whether a file with this name exists. does not follow symlinks" |
|
418 | "test whether a file with this name exists. does not follow symlinks" | |
419 | try: |
|
419 | try: | |
420 | os.lstat(filename) |
|
420 | os.lstat(filename) | |
421 | except: |
|
421 | except: | |
422 | return False |
|
422 | return False | |
423 | return True |
|
423 | return True | |
424 |
|
424 | |||
425 | def unlink(f): |
|
425 | def unlink(f): | |
426 | """unlink and remove the directory if it is empty""" |
|
426 | """unlink and remove the directory if it is empty""" | |
427 | os.unlink(f) |
|
427 | os.unlink(f) | |
428 | # try removing directories that might now be empty |
|
428 | # try removing directories that might now be empty | |
429 | try: |
|
429 | try: | |
430 | os.removedirs(os.path.dirname(f)) |
|
430 | os.removedirs(os.path.dirname(f)) | |
431 | except OSError: |
|
431 | except OSError: | |
432 | pass |
|
432 | pass | |
433 |
|
433 | |||
434 | def copyfile(src, dest): |
|
434 | def copyfile(src, dest): | |
435 | "copy a file, preserving mode and atime/mtime" |
|
435 | "copy a file, preserving mode and atime/mtime" | |
436 | if os.path.islink(src): |
|
436 | if os.path.islink(src): | |
437 | try: |
|
437 | try: | |
438 | os.unlink(dest) |
|
438 | os.unlink(dest) | |
439 | except: |
|
439 | except: | |
440 | pass |
|
440 | pass | |
441 | os.symlink(os.readlink(src), dest) |
|
441 | os.symlink(os.readlink(src), dest) | |
442 | else: |
|
442 | else: | |
443 | try: |
|
443 | try: | |
444 | shutil.copyfile(src, dest) |
|
444 | shutil.copyfile(src, dest) | |
445 | shutil.copystat(src, dest) |
|
445 | shutil.copystat(src, dest) | |
446 | except shutil.Error, inst: |
|
446 | except shutil.Error, inst: | |
447 | raise Abort(str(inst)) |
|
447 | raise Abort(str(inst)) | |
448 |
|
448 | |||
449 | def copyfiles(src, dst, hardlink=None): |
|
449 | def copyfiles(src, dst, hardlink=None): | |
450 | """Copy a directory tree using hardlinks if possible""" |
|
450 | """Copy a directory tree using hardlinks if possible""" | |
451 |
|
451 | |||
452 | if hardlink is None: |
|
452 | if hardlink is None: | |
453 | hardlink = (os.stat(src).st_dev == |
|
453 | hardlink = (os.stat(src).st_dev == | |
454 | os.stat(os.path.dirname(dst)).st_dev) |
|
454 | os.stat(os.path.dirname(dst)).st_dev) | |
455 |
|
455 | |||
456 | num = 0 |
|
456 | num = 0 | |
457 | if os.path.isdir(src): |
|
457 | if os.path.isdir(src): | |
458 | os.mkdir(dst) |
|
458 | os.mkdir(dst) | |
459 | for name, kind in osutil.listdir(src): |
|
459 | for name, kind in osutil.listdir(src): | |
460 | srcname = os.path.join(src, name) |
|
460 | srcname = os.path.join(src, name) | |
461 | dstname = os.path.join(dst, name) |
|
461 | dstname = os.path.join(dst, name) | |
462 | hardlink, n = copyfiles(srcname, dstname, hardlink) |
|
462 | hardlink, n = copyfiles(srcname, dstname, hardlink) | |
463 | num += n |
|
463 | num += n | |
464 | else: |
|
464 | else: | |
465 | if hardlink: |
|
465 | if hardlink: | |
466 | try: |
|
466 | try: | |
467 | os_link(src, dst) |
|
467 | os_link(src, dst) | |
468 | except (IOError, OSError): |
|
468 | except (IOError, OSError): | |
469 | hardlink = False |
|
469 | hardlink = False | |
470 | shutil.copy(src, dst) |
|
470 | shutil.copy(src, dst) | |
471 | else: |
|
471 | else: | |
472 | shutil.copy(src, dst) |
|
472 | shutil.copy(src, dst) | |
473 | num += 1 |
|
473 | num += 1 | |
474 |
|
474 | |||
475 | return hardlink, num |
|
475 | return hardlink, num | |
476 |
|
476 | |||
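copyfiles defaults to hardlinking only when source and destination live on the same device, and silently downgrades to a copy when linking fails. A single-file sketch of that decision (illustrative helper; error handling trimmed):

    import os, shutil

    def link_or_copy(src, dst):
        # hardlink only when src and the destination directory share a device;
        # otherwise, or if linking fails, fall back to a plain copy
        dstdir = os.path.dirname(dst) or '.'
        if hasattr(os, 'link') and os.stat(src).st_dev == os.stat(dstdir).st_dev:
            try:
                os.link(src, dst)
                return 'hardlinked'
            except (IOError, OSError):
                pass
        shutil.copy(src, dst)
        return 'copied'

    # link_or_copy('data.bin', 'backup/data.bin')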
477 | class path_auditor(object): |
|
477 | class path_auditor(object): | |
478 | '''ensure that a filesystem path contains no banned components. |
|
478 | '''ensure that a filesystem path contains no banned components. | |
479 | the following properties of a path are checked: |
|
479 | the following properties of a path are checked: | |
480 |
|
480 | |||
481 | - under top-level .hg |
|
481 | - under top-level .hg | |
482 | - starts at the root of a windows drive |
|
482 | - starts at the root of a windows drive | |
483 | - contains ".." |
|
483 | - contains ".." | |
484 | - traverses a symlink (e.g. a/symlink_here/b) |
|
484 | - traverses a symlink (e.g. a/symlink_here/b) | |
485 | - inside a nested repository''' |
|
485 | - inside a nested repository''' | |
486 |
|
486 | |||
487 | def __init__(self, root): |
|
487 | def __init__(self, root): | |
488 | self.audited = set() |
|
488 | self.audited = set() | |
489 | self.auditeddir = set() |
|
489 | self.auditeddir = set() | |
490 | self.root = root |
|
490 | self.root = root | |
491 |
|
491 | |||
492 | def __call__(self, path): |
|
492 | def __call__(self, path): | |
493 | if path in self.audited: |
|
493 | if path in self.audited: | |
494 | return |
|
494 | return | |
495 | normpath = os.path.normcase(path) |
|
495 | normpath = os.path.normcase(path) | |
496 | parts = splitpath(normpath) |
|
496 | parts = splitpath(normpath) | |
497 | if (os.path.splitdrive(path)[0] |
|
497 | if (os.path.splitdrive(path)[0] | |
498 | or parts[0].lower() in ('.hg', '.hg.', '') |
|
498 | or parts[0].lower() in ('.hg', '.hg.', '') | |
499 | or os.pardir in parts): |
|
499 | or os.pardir in parts): | |
500 | raise Abort(_("path contains illegal component: %s") % path) |
|
500 | raise Abort(_("path contains illegal component: %s") % path) | |
501 | if '.hg' in path.lower(): |
|
501 | if '.hg' in path.lower(): | |
502 | lparts = [p.lower() for p in parts] |
|
502 | lparts = [p.lower() for p in parts] | |
503 | for p in '.hg', '.hg.': |
|
503 | for p in '.hg', '.hg.': | |
504 | if p in lparts[1:]: |
|
504 | if p in lparts[1:]: | |
505 | pos = lparts.index(p) |
|
505 | pos = lparts.index(p) | |
506 | base = os.path.join(*parts[:pos]) |
|
506 | base = os.path.join(*parts[:pos]) | |
507 | raise Abort(_('path %r is inside repo %r') % (path, base)) |
|
507 | raise Abort(_('path %r is inside repo %r') % (path, base)) | |
508 | def check(prefix): |
|
508 | def check(prefix): | |
509 | curpath = os.path.join(self.root, prefix) |
|
509 | curpath = os.path.join(self.root, prefix) | |
510 | try: |
|
510 | try: | |
511 | st = os.lstat(curpath) |
|
511 | st = os.lstat(curpath) | |
512 | except OSError, err: |
|
512 | except OSError, err: | |
513 | # EINVAL can be raised as invalid path syntax under win32. |
|
513 | # EINVAL can be raised as invalid path syntax under win32. | |
514 | # They must be ignored because patterns, not just real files, can be checked too.

514 | # They must be ignored because patterns, not just real files, can be checked too. | |
515 | if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): |
|
515 | if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): | |
516 | raise |
|
516 | raise | |
517 | else: |
|
517 | else: | |
518 | if stat.S_ISLNK(st.st_mode): |
|
518 | if stat.S_ISLNK(st.st_mode): | |
519 | raise Abort(_('path %r traverses symbolic link %r') % |
|
519 | raise Abort(_('path %r traverses symbolic link %r') % | |
520 | (path, prefix)) |
|
520 | (path, prefix)) | |
521 | elif (stat.S_ISDIR(st.st_mode) and |
|
521 | elif (stat.S_ISDIR(st.st_mode) and | |
522 | os.path.isdir(os.path.join(curpath, '.hg'))): |
|
522 | os.path.isdir(os.path.join(curpath, '.hg'))): | |
523 | raise Abort(_('path %r is inside repo %r') % |
|
523 | raise Abort(_('path %r is inside repo %r') % | |
524 | (path, prefix)) |
|
524 | (path, prefix)) | |
525 | parts.pop() |
|
525 | parts.pop() | |
526 | prefixes = [] |
|
526 | prefixes = [] | |
527 | while parts: |
|
527 | while parts: | |
528 | prefix = os.sep.join(parts) |
|
528 | prefix = os.sep.join(parts) | |
529 | if prefix in self.auditeddir: |
|
529 | if prefix in self.auditeddir: | |
530 | break |
|
530 | break | |
531 | check(prefix) |
|
531 | check(prefix) | |
532 | prefixes.append(prefix) |
|
532 | prefixes.append(prefix) | |
533 | parts.pop() |
|
533 | parts.pop() | |
534 |
|
534 | |||
535 | self.audited.add(path) |
|
535 | self.audited.add(path) | |
536 | # only add prefixes to the cache after checking everything: we don't |
|
536 | # only add prefixes to the cache after checking everything: we don't | |
537 | # want to add "foo/bar/baz" before checking if there's a "foo/.hg" |
|
537 | # want to add "foo/bar/baz" before checking if there's a "foo/.hg" | |
538 | self.auditeddir.update(prefixes) |
|
538 | self.auditeddir.update(prefixes) | |
539 |
|
539 | |||
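A reduced sketch of the purely lexical part of the audit (rejecting absolute paths, '..' and '.hg' components); the real auditor additionally stats each prefix to catch symlink traversal and nested repositories. Names are illustrative:

    import os

    def audit_sketch(path):
        parts = os.path.normcase(path).split('/')
        if (os.path.splitdrive(path)[0]
            or parts[0] in ('', '.hg', '.hg.')
            or os.pardir in parts):
            raise ValueError('path contains illegal component: %s' % path)
        if '.hg' in (p.lower() for p in parts[1:]):
            raise ValueError('path %r is inside a nested repo' % path)
        return path

    print(audit_sketch('src/module.py'))   # accepted
    # audit_sketch('../outside') or audit_sketch('vendor/.hg/store') would raise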
540 | def nlinks(pathname): |
|
540 | def nlinks(pathname): | |
541 | """Return number of hardlinks for the given file.""" |
|
541 | """Return number of hardlinks for the given file.""" | |
542 | return os.lstat(pathname).st_nlink |
|
542 | return os.lstat(pathname).st_nlink | |
543 |
|
543 | |||
544 | if hasattr(os, 'link'): |
|
544 | if hasattr(os, 'link'): | |
545 | os_link = os.link |
|
545 | os_link = os.link | |
546 | else: |
|
546 | else: | |
547 | def os_link(src, dst): |
|
547 | def os_link(src, dst): | |
548 | raise OSError(0, _("Hardlinks not supported")) |
|
548 | raise OSError(0, _("Hardlinks not supported")) | |
549 |
|
549 | |||
550 | def lookup_reg(key, name=None, scope=None): |
|
550 | def lookup_reg(key, name=None, scope=None): | |
551 | return None |
|
551 | return None | |
552 |
|
552 | |||
553 | def hidewindow(): |
|
553 | def hidewindow(): | |
554 | """Hide current shell window. |
|
554 | """Hide current shell window. | |
555 |
|
555 | |||
556 | Used to hide the window opened when starting an asynchronous

556 | Used to hide the window opened when starting an asynchronous | |

557 | child process under Windows, unneeded on other systems. |
|
557 | child process under Windows, unneeded on other systems. | |
558 | """ |
|
558 | """ | |
559 | pass |
|
559 | pass | |
560 |
|
560 | |||
561 | if os.name == 'nt': |
|
561 | if os.name == 'nt': | |
562 | from windows import * |
|
562 | from windows import * | |
563 | else: |
|
563 | else: | |
564 | from posix import * |
|
564 | from posix import * | |
565 |
|
565 | |||
566 | def makelock(info, pathname): |
|
566 | def makelock(info, pathname): | |
567 | try: |
|
567 | try: | |
568 | return os.symlink(info, pathname) |
|
568 | return os.symlink(info, pathname) | |
569 | except OSError, why: |
|
569 | except OSError, why: | |
570 | if why.errno == errno.EEXIST: |
|
570 | if why.errno == errno.EEXIST: | |
571 | raise |
|
571 | raise | |
572 | except AttributeError: # no symlink in os |
|
572 | except AttributeError: # no symlink in os | |
573 | pass |
|
573 | pass | |
574 |
|
574 | |||
575 | ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) |
|
575 | ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) | |
576 | os.write(ld, info) |
|
576 | os.write(ld, info) | |
577 | os.close(ld) |
|
577 | os.close(ld) | |
578 |
|
578 | |||
579 | def readlock(pathname): |
|
579 | def readlock(pathname): | |
580 | try: |
|
580 | try: | |
581 | return os.readlink(pathname) |
|
581 | return os.readlink(pathname) | |
582 | except OSError, why: |
|
582 | except OSError, why: | |
583 | if why.errno not in (errno.EINVAL, errno.ENOSYS): |
|
583 | if why.errno not in (errno.EINVAL, errno.ENOSYS): | |
584 | raise |
|
584 | raise | |
585 | except AttributeError: # no symlink in os |
|
585 | except AttributeError: # no symlink in os | |
586 | pass |
|
586 | pass | |
587 | return posixfile(pathname).read() |
|
587 | return posixfile(pathname).read() | |
588 |
|
588 | |||
589 | def fstat(fp): |
|
589 | def fstat(fp): | |
590 | '''stat file object that may not have fileno method.''' |
|
590 | '''stat file object that may not have fileno method.''' | |
591 | try: |
|
591 | try: | |
592 | return os.fstat(fp.fileno()) |
|
592 | return os.fstat(fp.fileno()) | |
593 | except AttributeError: |
|
593 | except AttributeError: | |
594 | return os.stat(fp.name) |
|
594 | return os.stat(fp.name) | |
595 |
|
595 | |||
596 | # File system features |
|
596 | # File system features | |
597 |
|
597 | |||
598 | def checkcase(path): |
|
598 | def checkcase(path): | |
599 | """ |
|
599 | """ | |
600 | Check whether the given path is on a case-sensitive filesystem |
|
600 | Check whether the given path is on a case-sensitive filesystem | |
601 |
|
601 | |||
602 | Requires a path (like /foo/.hg) ending with a foldable final |
|
602 | Requires a path (like /foo/.hg) ending with a foldable final | |
603 | directory component. |
|
603 | directory component. | |
604 | """ |
|
604 | """ | |
605 | s1 = os.stat(path) |
|
605 | s1 = os.stat(path) | |
606 | d, b = os.path.split(path) |
|
606 | d, b = os.path.split(path) | |
607 | p2 = os.path.join(d, b.upper()) |
|
607 | p2 = os.path.join(d, b.upper()) | |
608 | if path == p2: |
|
608 | if path == p2: | |
609 | p2 = os.path.join(d, b.lower()) |
|
609 | p2 = os.path.join(d, b.lower()) | |
610 | try: |
|
610 | try: | |
611 | s2 = os.stat(p2) |
|
611 | s2 = os.stat(p2) | |
612 | if s2 == s1: |
|
612 | if s2 == s1: | |
613 | return False |
|
613 | return False | |
614 | return True |
|
614 | return True | |
615 | except: |
|
615 | except: | |
616 | return True |
|
616 | return True | |
617 |
|
617 | |||
618 | _fspathcache = {} |
|
618 | _fspathcache = {} | |
619 | def fspath(name, root): |
|
619 | def fspath(name, root): | |
620 | '''Get name in the case stored in the filesystem |
|
620 | '''Get name in the case stored in the filesystem | |
621 |
|
621 | |||
622 | The name is either relative to root, or it is an absolute path starting |
|
622 | The name is either relative to root, or it is an absolute path starting | |
623 | with root. Note that this function is unnecessary, and should not be |
|
623 | with root. Note that this function is unnecessary, and should not be | |
624 | called, for case-sensitive filesystems (simply because it's expensive). |
|
624 | called, for case-sensitive filesystems (simply because it's expensive). | |
625 | ''' |
|
625 | ''' | |
626 | # If name is absolute, make it relative |
|
626 | # If name is absolute, make it relative | |
627 | if name.lower().startswith(root.lower()): |
|
627 | if name.lower().startswith(root.lower()): | |
628 | l = len(root) |
|
628 | l = len(root) | |
629 | if name[l] == os.sep or name[l] == os.altsep: |
|
629 | if name[l] == os.sep or name[l] == os.altsep: | |
630 | l = l + 1 |
|
630 | l = l + 1 | |
631 | name = name[l:] |
|
631 | name = name[l:] | |
632 |
|
632 | |||
633 | if not os.path.exists(os.path.join(root, name)): |
|
633 | if not os.path.exists(os.path.join(root, name)): | |
634 | return None |
|
634 | return None | |
635 |
|
635 | |||
636 | seps = os.sep |
|
636 | seps = os.sep | |
637 | if os.altsep: |
|
637 | if os.altsep: | |
638 | seps = seps + os.altsep |
|
638 | seps = seps + os.altsep | |
639 | # Protect backslashes. This gets silly very quickly. |
|
639 | # Protect backslashes. This gets silly very quickly. | |
640 | seps = seps.replace('\\', '\\\\')

640 | seps = seps.replace('\\', '\\\\') | |
641 | pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) |
|
641 | pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) | |
642 | dir = os.path.normcase(os.path.normpath(root)) |
|
642 | dir = os.path.normcase(os.path.normpath(root)) | |
643 | result = [] |
|
643 | result = [] | |
644 | for part, sep in pattern.findall(name): |
|
644 | for part, sep in pattern.findall(name): | |
645 | if sep: |
|
645 | if sep: | |
646 | result.append(sep) |
|
646 | result.append(sep) | |
647 | continue |
|
647 | continue | |
648 |
|
648 | |||
649 | if dir not in _fspathcache: |
|
649 | if dir not in _fspathcache: | |
650 | _fspathcache[dir] = os.listdir(dir) |
|
650 | _fspathcache[dir] = os.listdir(dir) | |
651 | contents = _fspathcache[dir] |
|
651 | contents = _fspathcache[dir] | |
652 |
|
652 | |||
653 | lpart = part.lower() |
|
653 | lpart = part.lower() | |
654 | lenp = len(part) |
|
654 | lenp = len(part) | |
655 | for n in contents: |
|
655 | for n in contents: | |
656 | if lenp == len(n) and n.lower() == lpart: |
|
656 | if lenp == len(n) and n.lower() == lpart: | |
657 | result.append(n) |
|
657 | result.append(n) | |
658 | break |
|
658 | break | |
659 | else: |
|
659 | else: | |
660 | # Cannot happen, as the file exists! |
|
660 | # Cannot happen, as the file exists! | |
661 | result.append(part) |
|
661 | result.append(part) | |
662 | dir = os.path.join(dir, lpart) |
|
662 | dir = os.path.join(dir, lpart) | |
663 |
|
663 | |||
664 | return ''.join(result) |
|
664 | return ''.join(result) | |
665 |
|
665 | |||
666 | def checkexec(path): |
|
666 | def checkexec(path): | |
667 | """ |
|
667 | """ | |
668 | Check whether the given path is on a filesystem with UNIX-like exec flags |
|
668 | Check whether the given path is on a filesystem with UNIX-like exec flags | |
669 |
|
669 | |||
670 | Requires a directory (like /foo/.hg) |
|
670 | Requires a directory (like /foo/.hg) | |
671 | """ |
|
671 | """ | |
672 |
|
672 | |||
673 | # VFAT on some Linux versions can flip the mode but it doesn't persist
 
673 | # VFAT on some Linux versions can flip the mode but it doesn't persist | |
674 | # across a FS remount. Frequently we can detect it if files are created
 
674 | # across a FS remount. Frequently we can detect it if files are created | |
675 | # with exec bit on. |
|
675 | # with exec bit on. | |
676 |
|
676 | |||
677 | try: |
|
677 | try: | |
678 | EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
|
678 | EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | |
679 | fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-') |
|
679 | fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-') | |
680 | try: |
|
680 | try: | |
681 | os.close(fh) |
|
681 | os.close(fh) | |
682 | m = os.stat(fn).st_mode & 0777 |
|
682 | m = os.stat(fn).st_mode & 0777 | |
683 | new_file_has_exec = m & EXECFLAGS |
|
683 | new_file_has_exec = m & EXECFLAGS | |
684 | os.chmod(fn, m ^ EXECFLAGS) |
|
684 | os.chmod(fn, m ^ EXECFLAGS) | |
685 | exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) |
|
685 | exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) | |
686 | finally: |
|
686 | finally: | |
687 | os.unlink(fn) |
|
687 | os.unlink(fn) | |
688 | except (IOError, OSError): |
|
688 | except (IOError, OSError): | |
689 | # we don't care, the user probably won't be able to commit anyway |
|
689 | # we don't care, the user probably won't be able to commit anyway | |
690 | return False |
|
690 | return False | |
691 | return not (new_file_has_exec or exec_flags_cannot_flip) |
|
691 | return not (new_file_has_exec or exec_flags_cannot_flip) | |
692 |
|
692 | |||
693 | def checklink(path): |
|
693 | def checklink(path): | |
694 | """check whether the given path is on a symlink-capable filesystem""" |
|
694 | """check whether the given path is on a symlink-capable filesystem""" | |
695 | # mktemp is not racy because symlink creation will fail if the |
|
695 | # mktemp is not racy because symlink creation will fail if the | |
696 | # file already exists |
|
696 | # file already exists | |
697 | name = tempfile.mktemp(dir=path, prefix='hg-checklink-') |
|
697 | name = tempfile.mktemp(dir=path, prefix='hg-checklink-') | |
698 | try: |
|
698 | try: | |
699 | os.symlink(".", name) |
|
699 | os.symlink(".", name) | |
700 | os.unlink(name) |
|
700 | os.unlink(name) | |
701 | return True |
|
701 | return True | |
702 | except (OSError, AttributeError): |
|
702 | except (OSError, AttributeError): | |
703 | return False |
|
703 | return False | |
704 |
|
704 | |||
705 | def needbinarypatch(): |
|
705 | def needbinarypatch(): | |
706 | """return True if patches should be applied in binary mode by default.""" |
|
706 | """return True if patches should be applied in binary mode by default.""" | |
707 | return os.name == 'nt' |
|
707 | return os.name == 'nt' | |
708 |
|
708 | |||
709 | def endswithsep(path): |
|
709 | def endswithsep(path): | |
710 | '''Check path ends with os.sep or os.altsep.''' |
|
710 | '''Check path ends with os.sep or os.altsep.''' | |
711 | return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) |
|
711 | return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) | |
712 |
|
712 | |||
713 | def splitpath(path): |
|
713 | def splitpath(path): | |
714 | '''Split path by os.sep. |
|
714 | '''Split path by os.sep. | |
715 | Note that this function does not use os.altsep because this is |
|
715 | Note that this function does not use os.altsep because this is | |
716 | an alternative to a simple "xxx.split(os.sep)".

716 | an alternative to a simple "xxx.split(os.sep)". | |
717 | It is recommended to use os.path.normpath() before using this |
|
717 | It is recommended to use os.path.normpath() before using this | |
718 | function if needed.'''

718 | function if needed.''' | |
719 | return path.split(os.sep) |
|
719 | return path.split(os.sep) | |
720 |
|
720 | |||
721 | def gui(): |
|
721 | def gui(): | |
722 | '''Are we running in a GUI?''' |
|
722 | '''Are we running in a GUI?''' | |
723 | return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") |
|
723 | return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") | |
724 |
|
724 | |||
725 | def mktempcopy(name, emptyok=False, createmode=None): |
|
725 | def mktempcopy(name, emptyok=False, createmode=None): | |
726 | """Create a temporary file with the same contents from name |
|
726 | """Create a temporary file with the same contents from name | |
727 |
|
727 | |||
728 | The permission bits are copied from the original file. |
|
728 | The permission bits are copied from the original file. | |
729 |
|
729 | |||
730 | If the temporary file is going to be truncated immediately, you |
|
730 | If the temporary file is going to be truncated immediately, you | |
731 | can use emptyok=True as an optimization. |
|
731 | can use emptyok=True as an optimization. | |
732 |
|
732 | |||
733 | Returns the name of the temporary file. |
|
733 | Returns the name of the temporary file. | |
734 | """ |
|
734 | """ | |
735 | d, fn = os.path.split(name) |
|
735 | d, fn = os.path.split(name) | |
736 | fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) |
|
736 | fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) | |
737 | os.close(fd) |
|
737 | os.close(fd) | |
738 | # Temporary files are created with mode 0600, which is usually not |
|
738 | # Temporary files are created with mode 0600, which is usually not | |
739 | # what we want. If the original file already exists, just copy |
|
739 | # what we want. If the original file already exists, just copy | |
740 | # its mode. Otherwise, manually obey umask. |
|
740 | # its mode. Otherwise, manually obey umask. | |
741 | try: |
|
741 | try: | |
742 | st_mode = os.lstat(name).st_mode & 0777 |
|
742 | st_mode = os.lstat(name).st_mode & 0777 | |
743 | except OSError, inst: |
|
743 | except OSError, inst: | |
744 | if inst.errno != errno.ENOENT: |
|
744 | if inst.errno != errno.ENOENT: | |
745 | raise |
|
745 | raise | |
746 | st_mode = createmode |
|
746 | st_mode = createmode | |
747 | if st_mode is None: |
|
747 | if st_mode is None: | |
748 | st_mode = ~umask |
|
748 | st_mode = ~umask | |
749 | st_mode &= 0666 |
|
749 | st_mode &= 0666 | |
750 | os.chmod(temp, st_mode) |
|
750 | os.chmod(temp, st_mode) | |
751 | if emptyok: |
|
751 | if emptyok: | |
752 | return temp |
|
752 | return temp | |
753 | try: |
|
753 | try: | |
754 | try: |
|
754 | try: | |
755 | ifp = posixfile(name, "rb") |
|
755 | ifp = posixfile(name, "rb") | |
756 | except IOError, inst: |
|
756 | except IOError, inst: | |
757 | if inst.errno == errno.ENOENT: |
|
757 | if inst.errno == errno.ENOENT: | |
758 | return temp |
|
758 | return temp | |
759 | if not getattr(inst, 'filename', None): |
|
759 | if not getattr(inst, 'filename', None): | |
760 | inst.filename = name |
|
760 | inst.filename = name | |
761 | raise |
|
761 | raise | |
762 | ofp = posixfile(temp, "wb") |
|
762 | ofp = posixfile(temp, "wb") | |
763 | for chunk in filechunkiter(ifp): |
|
763 | for chunk in filechunkiter(ifp): | |
764 | ofp.write(chunk) |
|
764 | ofp.write(chunk) | |
765 | ifp.close() |
|
765 | ifp.close() | |
766 | ofp.close() |
|
766 | ofp.close() | |
767 | except: |
|
767 | except: | |
768 | try: os.unlink(temp) |
|
768 | try: os.unlink(temp) | |
769 | except: pass |
|
769 | except: pass | |
770 | raise |
|
770 | raise | |
771 | return temp |
|
771 | return temp | |
772 |
|
772 | |||
773 | class atomictempfile(object): |
|
773 | class atomictempfile(object): | |
774 | """file-like object that atomically updates a file |
|
774 | """file-like object that atomically updates a file | |
775 |
|
775 | |||
776 | All writes will be redirected to a temporary copy of the original |
|
776 | All writes will be redirected to a temporary copy of the original | |
777 | file. When rename is called, the copy is renamed to the original |
|
777 | file. When rename is called, the copy is renamed to the original | |
778 | name, making the changes visible. |
|
778 | name, making the changes visible. | |
779 | """ |
|
779 | """ | |
780 | def __init__(self, name, mode='w+b', createmode=None): |
|
780 | def __init__(self, name, mode='w+b', createmode=None): | |
781 | self.__name = name |
|
781 | self.__name = name | |
782 | self._fp = None |
|
782 | self._fp = None | |
783 | self.temp = mktempcopy(name, emptyok=('w' in mode), |
|
783 | self.temp = mktempcopy(name, emptyok=('w' in mode), | |
784 | createmode=createmode) |
|
784 | createmode=createmode) | |
785 | self._fp = posixfile(self.temp, mode) |
|
785 | self._fp = posixfile(self.temp, mode) | |
786 |
|
786 | |||
787 | def __getattr__(self, name): |
|
787 | def __getattr__(self, name): | |
788 | return getattr(self._fp, name) |
|
788 | return getattr(self._fp, name) | |
789 |
|
789 | |||
790 | def rename(self): |
|
790 | def rename(self): | |
791 | if not self._fp.closed: |
|
791 | if not self._fp.closed: | |
792 | self._fp.close() |
|
792 | self._fp.close() | |
793 | rename(self.temp, localpath(self.__name)) |
|
793 | rename(self.temp, localpath(self.__name)) | |
794 |
|
794 | |||
795 | def __del__(self): |
|
795 | def __del__(self): | |
796 | if not self._fp: |
|
796 | if not self._fp: | |
797 | return |
|
797 | return | |
798 | if not self._fp.closed: |
|
798 | if not self._fp.closed: | |
799 | try: |
|
799 | try: | |
800 | os.unlink(self.temp) |
|
800 | os.unlink(self.temp) | |
801 | except: pass |
|
801 | except: pass | |
802 | self._fp.close() |
|
802 | self._fp.close() | |
803 |
|
803 | |||
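mktempcopy plus atomictempfile implement the classic write-to-a-sibling-temp-file-then-rename pattern. A bare-bones sketch of the same idea (POSIX semantics assumed; on Windows the final step needs the platform-specific rename imported above, since os.rename will not replace an existing file there):

    import os, tempfile

    def atomic_write(name, data):
        # the temporary file lives next to the target so the final rename
        # stays on one filesystem and is therefore atomic
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d or '.')
        try:
            os.write(fd, data)
        finally:
            os.close(fd)
        os.rename(temp, name)  # readers see the old or the new contents, never a mix

    atomic_write('example.txt', b'hello\n')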
804 | def makedirs(name, mode=None): |
|
804 | def makedirs(name, mode=None): | |
805 | """recursive directory creation with parent mode inheritance""" |
|
805 | """recursive directory creation with parent mode inheritance""" | |
806 | try: |
|
806 | try: | |
807 | os.mkdir(name) |
|
807 | os.mkdir(name) | |
808 | if mode is not None: |
|
808 | if mode is not None: | |
809 | os.chmod(name, mode) |
|
809 | os.chmod(name, mode) | |
810 | return |
|
810 | return | |
811 | except OSError, err: |
|
811 | except OSError, err: | |
812 | if err.errno == errno.EEXIST: |
|
812 | if err.errno == errno.EEXIST: | |
813 | return |
|
813 | return | |
814 | if err.errno != errno.ENOENT: |
|
814 | if err.errno != errno.ENOENT: | |
815 | raise |
|
815 | raise | |
816 | parent = os.path.abspath(os.path.dirname(name)) |
|
816 | parent = os.path.abspath(os.path.dirname(name)) | |
817 | makedirs(parent, mode) |
|
817 | makedirs(parent, mode) | |
818 | makedirs(name, mode) |
|
818 | makedirs(name, mode) | |
819 |
|
819 | |||
820 | class opener(object): |
|
820 | class opener(object): | |
821 | """Open files relative to a base directory |
|
821 | """Open files relative to a base directory | |
822 |
|
822 | |||
823 | This class is used to hide the details of COW semantics and |
|
823 | This class is used to hide the details of COW semantics and | |
824 | remote file access from higher level code. |
|
824 | remote file access from higher level code. | |
825 | """ |
|
825 | """ | |
826 | def __init__(self, base, audit=True): |
|
826 | def __init__(self, base, audit=True): | |
827 | self.base = base |
|
827 | self.base = base | |
828 | if audit: |
|
828 | if audit: | |
829 | self.audit_path = path_auditor(base) |
|
829 | self.audit_path = path_auditor(base) | |
830 | else: |
|
830 | else: | |
831 | self.audit_path = always |
|
831 | self.audit_path = always | |
832 | self.createmode = None |
|
832 | self.createmode = None | |
833 |
|
833 | |||
834 | @propertycache |
|
834 | @propertycache | |
835 | def _can_symlink(self): |
|
835 | def _can_symlink(self): | |
836 | return checklink(self.base) |
|
836 | return checklink(self.base) | |
837 |
|
837 | |||
838 | def _fixfilemode(self, name): |
|
838 | def _fixfilemode(self, name): | |
839 | if self.createmode is None: |
|
839 | if self.createmode is None: | |
840 | return |
|
840 | return | |
841 | os.chmod(name, self.createmode & 0666) |
|
841 | os.chmod(name, self.createmode & 0666) | |
842 |
|
842 | |||
843 | def __call__(self, path, mode="r", text=False, atomictemp=False): |
|
843 | def __call__(self, path, mode="r", text=False, atomictemp=False): | |
844 | self.audit_path(path) |
|
844 | self.audit_path(path) | |
845 | f = os.path.join(self.base, path) |
|
845 | f = os.path.join(self.base, path) | |
846 |
|
846 | |||
847 | if not text and "b" not in mode: |
|
847 | if not text and "b" not in mode: | |
848 | mode += "b" # for that other OS |
|
848 | mode += "b" # for that other OS | |
849 |
|
849 | |||
850 | nlink = -1 |
|
850 | nlink = -1 | |
851 | if mode not in ("r", "rb"): |
|
851 | if mode not in ("r", "rb"): | |
852 | try: |
|
852 | try: | |
853 | nlink = nlinks(f) |
|
853 | nlink = nlinks(f) | |
854 | except OSError: |
|
854 | except OSError: | |
855 | nlink = 0 |
|
855 | nlink = 0 | |
856 | d = os.path.dirname(f) |
|
856 | d = os.path.dirname(f) | |
857 | if not os.path.isdir(d): |
|
857 | if not os.path.isdir(d): | |
858 | makedirs(d, self.createmode) |
|
858 | makedirs(d, self.createmode) | |
859 | if atomictemp: |
|
859 | if atomictemp: | |
860 | return atomictempfile(f, mode, self.createmode) |
|
860 | return atomictempfile(f, mode, self.createmode) | |
861 | if nlink > 1: |
|
861 | if nlink > 1: | |
862 | rename(mktempcopy(f), f) |
|
862 | rename(mktempcopy(f), f) | |
863 | fp = posixfile(f, mode) |
|
863 | fp = posixfile(f, mode) | |
864 | if nlink == 0: |
|
864 | if nlink == 0: | |
865 | self._fixfilemode(f) |
|
865 | self._fixfilemode(f) | |
866 | return fp |
|
866 | return fp | |
867 |
|
867 | |||
868 | def symlink(self, src, dst): |
|
868 | def symlink(self, src, dst): | |
869 | self.audit_path(dst) |
|
869 | self.audit_path(dst) | |
870 | linkname = os.path.join(self.base, dst) |
|
870 | linkname = os.path.join(self.base, dst) | |
871 | try: |
|
871 | try: | |
872 | os.unlink(linkname) |
|
872 | os.unlink(linkname) | |
873 | except OSError: |
|
873 | except OSError: | |
874 | pass |
|
874 | pass | |
875 |
|
875 | |||
876 | dirname = os.path.dirname(linkname) |
|
876 | dirname = os.path.dirname(linkname) | |
877 | if not os.path.exists(dirname): |
|
877 | if not os.path.exists(dirname): | |
878 | makedirs(dirname, self.createmode) |
|
878 | makedirs(dirname, self.createmode) | |
879 |
|
879 | |||
880 | if self._can_symlink: |
|
880 | if self._can_symlink: | |
881 | try: |
|
881 | try: | |
882 | os.symlink(src, linkname) |
|
882 | os.symlink(src, linkname) | |
883 | except OSError, err: |
|
883 | except OSError, err: | |
884 | raise OSError(err.errno, _('could not symlink to %r: %s') % |
|
884 | raise OSError(err.errno, _('could not symlink to %r: %s') % | |
885 | (src, err.strerror), linkname) |
|
885 | (src, err.strerror), linkname) | |
886 | else: |
|
886 | else: | |
887 | f = self(dst, "w") |
|
887 | f = self(dst, "w") | |
888 | f.write(src) |
|
888 | f.write(src) | |
889 | f.close() |
|
889 | f.close() | |
890 | self._fixfilemode(dst) |
|
890 | self._fixfilemode(dst) | |
891 |
|
891 | |||
892 | class chunkbuffer(object): |
|
892 | class chunkbuffer(object): | |
893 | """Allow arbitrary sized chunks of data to be efficiently read from an |
|
893 | """Allow arbitrary sized chunks of data to be efficiently read from an | |
894 | iterator over chunks of arbitrary size.""" |
|
894 | iterator over chunks of arbitrary size.""" | |
895 |
|
895 | |||
896 | def __init__(self, in_iter): |
|
896 | def __init__(self, in_iter): | |
897 | """in_iter is the iterator that's iterating over the input chunks. |
|
897 | """in_iter is the iterator that's iterating over the input chunks. | |
898 | the internal buffer is refilled in batches of at least self.targetsize (64k) bytes."""

898 | the internal buffer is refilled in batches of at least self.targetsize (64k) bytes.""" | |
899 | self.iter = iter(in_iter) |
|
899 | self.iter = iter(in_iter) | |
900 | self.buf = '' |
|
900 | self.buf = '' | |
901 | self.targetsize = 2**16 |
|
901 | self.targetsize = 2**16 | |
902 |
|
902 | |||
903 | def read(self, l): |
|
903 | def read(self, l): | |
904 | """Read L bytes of data from the iterator of chunks of data. |
|
904 | """Read L bytes of data from the iterator of chunks of data. | |
905 | Returns less than L bytes if the iterator runs dry.""" |
|
905 | Returns less than L bytes if the iterator runs dry.""" | |
906 | if l > len(self.buf) and self.iter: |
|
906 | if l > len(self.buf) and self.iter: | |
907 | # Clamp to a multiple of self.targetsize |
|
907 | # Clamp to a multiple of self.targetsize | |
908 | targetsize = max(l, self.targetsize) |
|
908 | targetsize = max(l, self.targetsize) | |
909 | collector = cStringIO.StringIO() |
|
909 | collector = cStringIO.StringIO() | |
910 | collector.write(self.buf) |
|
910 | collector.write(self.buf) | |
911 | collected = len(self.buf) |
|
911 | collected = len(self.buf) | |
912 | for chunk in self.iter: |
|
912 | for chunk in self.iter: | |
913 | collector.write(chunk) |
|
913 | collector.write(chunk) | |
914 | collected += len(chunk) |
|
914 | collected += len(chunk) | |
915 | if collected >= targetsize: |
|
915 | if collected >= targetsize: | |
916 | break |
|
916 | break | |
917 | if collected < targetsize: |
|
917 | if collected < targetsize: | |
918 | self.iter = False |
|
918 | self.iter = False | |
919 | self.buf = collector.getvalue() |
|
919 | self.buf = collector.getvalue() | |
920 | if len(self.buf) == l: |
|
920 | if len(self.buf) == l: | |
921 | s, self.buf = str(self.buf), '' |
|
921 | s, self.buf = str(self.buf), '' | |
922 | else: |
|
922 | else: | |
923 | s, self.buf = self.buf[:l], buffer(self.buf, l) |
|
923 | s, self.buf = self.buf[:l], buffer(self.buf, l) | |
924 | return s |
|
924 | return s | |
925 |
|
925 | |||
926 | def filechunkiter(f, size=65536, limit=None): |
|
926 | def filechunkiter(f, size=65536, limit=None): | |
927 | """Create a generator that produces the data in the file size |
|
927 | """Create a generator that produces the data in the file size | |
928 | (default 65536) bytes at a time, up to optional limit (default is |
|
928 | (default 65536) bytes at a time, up to optional limit (default is | |
929 | to read all data). Chunks may be less than size bytes if the |
|
929 | to read all data). Chunks may be less than size bytes if the | |
930 | chunk is the last chunk in the file, or the file is a socket or |
|
930 | chunk is the last chunk in the file, or the file is a socket or | |
931 | some other type of file that sometimes reads less data than is |
|
931 | some other type of file that sometimes reads less data than is | |
932 | requested.""" |
|
932 | requested.""" | |
933 | assert size >= 0 |
|
933 | assert size >= 0 | |
934 | assert limit is None or limit >= 0 |
|
934 | assert limit is None or limit >= 0 | |
935 | while True: |
|
935 | while True: | |
936 | if limit is None: |
|
936 | if limit is None: | |
937 | nbytes = size |
|
937 | nbytes = size | |
938 | else: |
|
938 | else: | |
939 | nbytes = min(limit, size) |
|
939 | nbytes = min(limit, size) | |
940 | s = nbytes and f.read(nbytes) |
|
940 | s = nbytes and f.read(nbytes) | |
941 | if not s: |
|
941 | if not s: | |
942 | break |
|
942 | break | |
943 | if limit: |
|
943 | if limit: | |
944 | limit -= len(s) |
|
944 | limit -= len(s) | |
945 | yield s |
|
945 | yield s | |
946 |
|
946 | |||
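chunkbuffer's job is to re-slice an iterator of arbitrarily sized chunks into reads of the size the caller asked for. The sketch below shows the same idea with a plain generator (illustrative name; the real class also batches refills by targetsize):

    def fixed_reads(chunks, n):
        buf = b''
        for chunk in chunks:
            buf += chunk
            while len(buf) >= n:
                yield buf[:n]
                buf = buf[n:]
        if buf:
            yield buf

    pieces = iter([b'abc', b'defgh', b'i'])
    print(list(fixed_reads(pieces, 4)))   # [b'abcd', b'efgh', b'i'] (bytes)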
947 | def makedate(): |
|
947 | def makedate(): | |
948 | lt = time.localtime() |
|
948 | lt = time.localtime() | |
949 | if lt[8] == 1 and time.daylight: |
|
949 | if lt[8] == 1 and time.daylight: | |
950 | tz = time.altzone |
|
950 | tz = time.altzone | |
951 | else: |
|
951 | else: | |
952 | tz = time.timezone |
|
952 | tz = time.timezone | |
953 | return time.mktime(lt), tz |
|
953 | return time.mktime(lt), tz | |
954 |
|
954 | |||
955 | def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): |
|
955 | def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): | |
956 | """represent a (unixtime, offset) tuple as a localized time. |
|
956 | """represent a (unixtime, offset) tuple as a localized time. | |
957 | unixtime is seconds since the epoch, and offset is the time zone's |
|
957 | unixtime is seconds since the epoch, and offset is the time zone's | |
958 | number of seconds away from UTC. "%1" in the format expands to the

958 | number of seconds away from UTC. "%1" in the format expands to the | |
959 | signed hours of the offset and "%2" to its minutes."""

959 | signed hours of the offset and "%2" to its minutes.""" | |
960 | t, tz = date or makedate() |
|
960 | t, tz = date or makedate() | |
961 | if "%1" in format or "%2" in format: |
|
961 | if "%1" in format or "%2" in format: | |
962 | sign = (tz > 0) and "-" or "+" |
|
962 | sign = (tz > 0) and "-" or "+" | |
963 | minutes = abs(tz) // 60 |
|
963 | minutes = abs(tz) // 60 | |
964 | format = format.replace("%1", "%c%02d" % (sign, minutes // 60)) |
|
964 | format = format.replace("%1", "%c%02d" % (sign, minutes // 60)) | |
965 | format = format.replace("%2", "%02d" % (minutes % 60)) |
|
965 | format = format.replace("%2", "%02d" % (minutes % 60)) | |
966 | s = time.strftime(format, time.gmtime(float(t) - tz)) |
|
966 | s = time.strftime(format, time.gmtime(float(t) - tz)) | |
967 | return s |
|
967 | return s | |
968 |
|
968 | |||
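The offset stored with a date is the number of seconds the zone lies behind UTC, which is why the sign is flipped when it is rendered. A small sketch of the "%1%2" arithmetic (values are hypothetical):

    import time

    def offset_str(tz):
        # tz is seconds *behind* UTC, so a negative value means east of Greenwich
        sign = '-' if tz > 0 else '+'
        minutes = abs(tz) // 60
        return '%s%02d%02d' % (sign, minutes // 60, minutes % 60)

    when, tz = 1270000000, -7200     # a hypothetical timestamp in a UTC+02:00 zone
    print(time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(float(when) - tz)) + ' ' + offset_str(tz))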
969 | def shortdate(date=None): |
|
969 | def shortdate(date=None): | |
970 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" |
|
970 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" | |
971 | return datestr(date, format='%Y-%m-%d') |
|
971 | return datestr(date, format='%Y-%m-%d') | |
972 |
|
972 | |||
973 | def strdate(string, format, defaults=[]): |
|
973 | def strdate(string, format, defaults=[]): | |
974 | """parse a localized time string and return a (unixtime, offset) tuple. |
|
974 | """parse a localized time string and return a (unixtime, offset) tuple. | |
975 | if the string cannot be parsed, ValueError is raised.""" |
|
975 | if the string cannot be parsed, ValueError is raised.""" | |
976 | def timezone(string): |
|
976 | def timezone(string): | |
977 | tz = string.split()[-1] |
|
977 | tz = string.split()[-1] | |
978 | if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): |
|
978 | if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): | |
979 | sign = (tz[0] == "+") and 1 or -1 |
|
979 | sign = (tz[0] == "+") and 1 or -1 | |
980 | hours = int(tz[1:3]) |
|
980 | hours = int(tz[1:3]) | |
981 | minutes = int(tz[3:5]) |
|
981 | minutes = int(tz[3:5]) | |
982 | return -sign * (hours * 60 + minutes) * 60 |
|
982 | return -sign * (hours * 60 + minutes) * 60 | |
983 | if tz == "GMT" or tz == "UTC": |
|
983 | if tz == "GMT" or tz == "UTC": | |
984 | return 0 |
|
984 | return 0 | |
985 | return None |
|
985 | return None | |
986 |
|
986 | |||
987 | # NOTE: unixtime = localunixtime + offset |
|
987 | # NOTE: unixtime = localunixtime + offset | |
988 | offset, date = timezone(string), string |
|
988 | offset, date = timezone(string), string | |
989 | if offset != None: |
|
989 | if offset != None: | |
990 | date = " ".join(string.split()[:-1]) |
|
990 | date = " ".join(string.split()[:-1]) | |
991 |
|
991 | |||
992 | # add missing elements from defaults |
|
992 | # add missing elements from defaults | |
993 | for part in defaults: |
|
993 | for part in defaults: | |
994 | found = [True for p in part if ("%"+p) in format] |
|
994 | found = [True for p in part if ("%"+p) in format] | |
995 | if not found: |
|
995 | if not found: | |
996 | date += "@" + defaults[part] |
|
996 | date += "@" + defaults[part] | |
997 | format += "@%" + part[0] |
|
997 | format += "@%" + part[0] | |
998 |
|
998 | |||
999 | timetuple = time.strptime(date, format) |
|
999 | timetuple = time.strptime(date, format) | |
1000 | localunixtime = int(calendar.timegm(timetuple)) |
|
1000 | localunixtime = int(calendar.timegm(timetuple)) | |
1001 | if offset is None: |
|
1001 | if offset is None: | |
1002 | # local timezone |
|
1002 | # local timezone | |
1003 | unixtime = int(time.mktime(timetuple)) |
|
1003 | unixtime = int(time.mktime(timetuple)) | |
1004 | offset = unixtime - localunixtime |
|
1004 | offset = unixtime - localunixtime | |
1005 | else: |
|
1005 | else: | |
1006 | unixtime = localunixtime + offset |
|
1006 | unixtime = localunixtime + offset | |
1007 | return unixtime, offset |
|
1007 | return unixtime, offset | |
1008 |
|
1008 | |||
1009 | def parsedate(date, formats=None, defaults=None): |
|
1009 | def parsedate(date, formats=None, defaults=None): | |
1010 | """parse a localized date/time string and return a (unixtime, offset) tuple. |
|
1010 | """parse a localized date/time string and return a (unixtime, offset) tuple. | |
1011 |
|
1011 | |||
1012 | The date may be a "unixtime offset" string or in one of the specified |
|
1012 | The date may be a "unixtime offset" string or in one of the specified | |
1013 | formats. If the date already is a (unixtime, offset) tuple, it is returned. |
|
1013 | formats. If the date already is a (unixtime, offset) tuple, it is returned. | |
1014 | """ |
|
1014 | """ | |
1015 | if not date: |
|
1015 | if not date: | |
1016 | return 0, 0 |
|
1016 | return 0, 0 | |
1017 | if isinstance(date, tuple) and len(date) == 2: |
|
1017 | if isinstance(date, tuple) and len(date) == 2: | |
1018 | return date |
|
1018 | return date | |
1019 | if not formats: |
|
1019 | if not formats: | |
1020 | formats = defaultdateformats |
|
1020 | formats = defaultdateformats | |
1021 | date = date.strip() |
|
1021 | date = date.strip() | |
1022 | try: |
|
1022 | try: | |
1023 | when, offset = map(int, date.split(' ')) |
|
1023 | when, offset = map(int, date.split(' ')) | |
1024 | except ValueError: |
|
1024 | except ValueError: | |
1025 | # fill out defaults |
|
1025 | # fill out defaults | |
1026 | if not defaults: |
|
1026 | if not defaults: | |
1027 | defaults = {} |
|
1027 | defaults = {} | |
1028 | now = makedate() |
|
1028 | now = makedate() | |
1029 | for part in "d mb yY HI M S".split(): |
|
1029 | for part in "d mb yY HI M S".split(): | |
1030 | if part not in defaults: |
|
1030 | if part not in defaults: | |
1031 | if part[0] in "HMS": |
|
1031 | if part[0] in "HMS": | |
1032 | defaults[part] = "00" |
|
1032 | defaults[part] = "00" | |
1033 | else: |
|
1033 | else: | |
1034 | defaults[part] = datestr(now, "%" + part[0]) |
|
1034 | defaults[part] = datestr(now, "%" + part[0]) | |
1035 |
|
1035 | |||
1036 | for format in formats: |
|
1036 | for format in formats: | |
1037 | try: |
|
1037 | try: | |
1038 | when, offset = strdate(date, format, defaults) |
|
1038 | when, offset = strdate(date, format, defaults) | |
1039 | except (ValueError, OverflowError): |
|
1039 | except (ValueError, OverflowError): | |
1040 | pass |
|
1040 | pass | |
1041 | else: |
|
1041 | else: | |
1042 | break |
|
1042 | break | |
1043 | else: |
|
1043 | else: | |
1044 | raise Abort(_('invalid date: %r ') % date) |
|
1044 | raise Abort(_('invalid date: %r ') % date) | |
1045 | # validate explicit (probably user-specified) date and |
|
1045 | # validate explicit (probably user-specified) date and | |
1046 | # time zone offset. values must fit in signed 32 bits for |
|
1046 | # time zone offset. values must fit in signed 32 bits for | |
1047 | # current 32-bit linux runtimes. timezones go from UTC-12 |
|
1047 | # current 32-bit linux runtimes. timezones go from UTC-12 | |
1048 | # to UTC+14 |
|
1048 | # to UTC+14 | |
1049 | if abs(when) > 0x7fffffff: |
|
1049 | if abs(when) > 0x7fffffff: | |
1050 | raise Abort(_('date exceeds 32 bits: %d') % when) |
|
1050 | raise Abort(_('date exceeds 32 bits: %d') % when) | |
1051 | if offset < -50400 or offset > 43200: |
|
1051 | if offset < -50400 or offset > 43200: | |
1052 | raise Abort(_('impossible time zone offset: %d') % offset) |
|
1052 | raise Abort(_('impossible time zone offset: %d') % offset) | |
1053 | return when, offset |
|
1053 | return when, offset | |
1054 |
|
1054 | |||
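Internally, dates are most often carried around as the "unixtime offset" pair that parsedate's fast path accepts. A sketch of just that path, including the same sanity bounds (the function name is illustrative):

    def parse_unixtime_offset(date):
        when, offset = map(int, date.split(' '))
        if abs(when) > 0x7fffffff:
            raise ValueError('date exceeds 32 bits: %d' % when)
        if offset < -50400 or offset > 43200:   # UTC+14 .. UTC-12
            raise ValueError('impossible time zone offset: %d' % offset)
        return when, offset

    print(parse_unixtime_offset('1270000000 -7200'))   # (1270000000, -7200)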
1055 | def matchdate(date): |
|
1055 | def matchdate(date): | |
1056 | """Return a function that matches a given date match specifier |
|
1056 | """Return a function that matches a given date match specifier | |
1057 |
|
1057 | |||
1058 | Formats include: |
|
1058 | Formats include: | |
1059 |
|
1059 | |||
1060 | '{date}' match a given date to the accuracy provided |
|
1060 | '{date}' match a given date to the accuracy provided | |
1061 |
|
1061 | |||
1062 | '<{date}' on or before a given date |
|
1062 | '<{date}' on or before a given date | |
1063 |
|
1063 | |||
1064 | '>{date}' on or after a given date |
|
1064 | '>{date}' on or after a given date | |
1065 |
|
1065 | |||
1066 | """ |
|
1066 | """ | |
1067 |
|
1067 | |||
1068 | def lower(date): |
|
1068 | def lower(date): | |
1069 | d = dict(mb="1", d="1") |
|
1069 | d = dict(mb="1", d="1") | |
1070 | return parsedate(date, extendeddateformats, d)[0] |
|
1070 | return parsedate(date, extendeddateformats, d)[0] | |
1071 |
|
1071 | |||
1072 | def upper(date): |
|
1072 | def upper(date): | |
1073 | d = dict(mb="12", HI="23", M="59", S="59") |
|
1073 | d = dict(mb="12", HI="23", M="59", S="59") | |
1074 | for days in "31 30 29".split(): |
|
1074 | for days in "31 30 29".split(): | |
1075 | try: |
|
1075 | try: | |
1076 | d["d"] = days |
|
1076 | d["d"] = days | |
1077 | return parsedate(date, extendeddateformats, d)[0] |
|
1077 | return parsedate(date, extendeddateformats, d)[0] | |
1078 | except: |
|
1078 | except: | |
1079 | pass |
|
1079 | pass | |
1080 | d["d"] = "28" |
|
1080 | d["d"] = "28" | |
1081 | return parsedate(date, extendeddateformats, d)[0] |
|
1081 | return parsedate(date, extendeddateformats, d)[0] | |
1082 |
|
1082 | |||
1083 | date = date.strip() |
|
1083 | date = date.strip() | |
1084 | if date[0] == "<": |
|
1084 | if date[0] == "<": | |
1085 | when = upper(date[1:]) |
|
1085 | when = upper(date[1:]) | |
1086 | return lambda x: x <= when |
|
1086 | return lambda x: x <= when | |
1087 | elif date[0] == ">": |
|
1087 | elif date[0] == ">": | |
1088 | when = lower(date[1:]) |
|
1088 | when = lower(date[1:]) | |
1089 | return lambda x: x >= when |
|
1089 | return lambda x: x >= when | |
1090 | elif date[0] == "-": |
|
1090 | elif date[0] == "-": | |
1091 | try: |
|
1091 | try: | |
1092 | days = int(date[1:]) |
|
1092 | days = int(date[1:]) | |
1093 | except ValueError: |
|
1093 | except ValueError: | |
1094 | raise Abort(_("invalid day spec: %s") % date[1:]) |
|
1094 | raise Abort(_("invalid day spec: %s") % date[1:]) | |
1095 | when = makedate()[0] - days * 3600 * 24 |
|
1095 | when = makedate()[0] - days * 3600 * 24 | |
1096 | return lambda x: x >= when |
|
1096 | return lambda x: x >= when | |
1097 | elif " to " in date: |
|
1097 | elif " to " in date: | |
1098 | a, b = date.split(" to ") |
|
1098 | a, b = date.split(" to ") | |
1099 | start, stop = lower(a), upper(b) |
|
1099 | start, stop = lower(a), upper(b) | |
1100 | return lambda x: x >= start and x <= stop |
|
1100 | return lambda x: x >= start and x <= stop | |
1101 | else: |
|
1101 | else: | |
1102 | start, stop = lower(date), upper(date) |
|
1102 | start, stop = lower(date), upper(date) | |
1103 | return lambda x: x >= start and x <= stop |
|
1103 | return lambda x: x >= start and x <= stop | |
1104 |
|
1104 | |||
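matchdate compiles a date specifier into a timestamp predicate. The sketch below rebuilds the dispatch on '<', '>', '-N' and 'A to B' with day-granularity helpers standing in for the parsedate-based lower()/upper() above (all names illustrative):

    import time

    def day_floor(s):
        return time.mktime(time.strptime(s, '%Y-%m-%d'))

    def day_ceil(s):
        return day_floor(s) + 86399          # last second of that day

    def matchdate_sketch(spec):
        spec = spec.strip()
        if spec.startswith('<'):
            when = day_ceil(spec[1:])
            return lambda x: x <= when
        if spec.startswith('>'):
            when = day_floor(spec[1:])
            return lambda x: x >= when
        if spec.startswith('-'):
            when = time.time() - int(spec[1:]) * 86400
            return lambda x: x >= when
        if ' to ' in spec:
            a, b = spec.split(' to ')
            start, stop = day_floor(a), day_ceil(b)
            return lambda x: start <= x <= stop
        start, stop = day_floor(spec), day_ceil(spec)
        return lambda x: start <= x <= stop

    pred = matchdate_sketch('>2010-01-01')
    print(pred(day_floor('2010-06-01')))     # True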
1105 | def shortuser(user): |
|
1105 | def shortuser(user): | |
1106 | """Return a short representation of a user name or email address.""" |
|
1106 | """Return a short representation of a user name or email address.""" | |
1107 | f = user.find('@') |
|
1107 | f = user.find('@') | |
1108 | if f >= 0: |
|
1108 | if f >= 0: | |
1109 | user = user[:f] |
|
1109 | user = user[:f] | |
1110 | f = user.find('<') |
|
1110 | f = user.find('<') | |
1111 | if f >= 0: |
|
1111 | if f >= 0: | |
1112 | user = user[f + 1:] |
|
1112 | user = user[f + 1:] | |
1113 | f = user.find(' ') |
|
1113 | f = user.find(' ') | |
1114 | if f >= 0: |
|
1114 | if f >= 0: | |
1115 | user = user[:f] |
|
1115 | user = user[:f] | |
1116 | f = user.find('.') |
|
1116 | f = user.find('.') | |
1117 | if f >= 0: |
|
1117 | if f >= 0: | |
1118 | user = user[:f] |
|
1118 | user = user[:f] | |
1119 | return user |
|
1119 | return user | |
1120 |
|
1120 | |||
1121 | def email(author): |
|
1121 | def email(author): | |
1122 | '''get email of author.''' |
|
1122 | '''get email of author.''' | |
1123 | r = author.find('>') |
|
1123 | r = author.find('>') | |
1124 | if r == -1: |
|
1124 | if r == -1: | |
1125 | r = None |
|
1125 | r = None | |
1126 | return author[author.find('<') + 1:r] |
|
1126 | return author[author.find('<') + 1:r] | |
1127 |
|
1127 | |||
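shortuser and email both carve pieces out of the conventional "Full Name <user@host>" author string. A compact sketch of the two together (hypothetical input; not the module's own functions):

    def split_author(author):
        lt, gt = author.find('<'), author.find('>')
        mail = author[lt + 1:gt if gt != -1 else None]            # email(): text between the brackets
        short = mail.split('@')[0].split(' ')[0].split('.')[0]    # shortuser(): local part up to the first dot
        return short, mail

    print(split_author('Jane Doe <jane.doe@example.com>'))  # ('jane', 'jane.doe@example.com')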
1128 | def ellipsis(text, maxlength=400): |
|
1128 | def ellipsis(text, maxlength=400): | |
1129 | """Trim string to at most maxlength (default: 400) characters.""" |
|
1129 | """Trim string to at most maxlength (default: 400) characters.""" | |
1130 | if len(text) <= maxlength: |
|
1130 | if len(text) <= maxlength: | |
1131 | return text |
|
1131 | return text | |
1132 | else: |
|
1132 | else: | |
1133 | return "%s..." % (text[:maxlength - 3]) |
|
1133 | return "%s..." % (text[:maxlength - 3]) | |
1134 |
|
1134 | |||
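The three small string helpers above are pure functions, so sample values follow directly from the code shown::

    from mercurial import util
    print util.shortuser('John Doe <john.doe@example.com>')  # john
    print util.email('John Doe <john.doe@example.com>')      # john.doe@example.com
    print util.ellipsis('abcdefghij', maxlength=8)           # abcde...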
1135 | def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): |
|
1135 | def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): | |
1136 | '''yield every hg repository under path, recursively.''' |
|
1136 | '''yield every hg repository under path, recursively.''' | |
1137 | def errhandler(err): |
|
1137 | def errhandler(err): | |
1138 | if err.filename == path: |
|
1138 | if err.filename == path: | |
1139 | raise err |
|
1139 | raise err | |
1140 | if followsym and hasattr(os.path, 'samestat'): |
|
1140 | if followsym and hasattr(os.path, 'samestat'): | |
1141 | def _add_dir_if_not_there(dirlst, dirname): |
|
1141 | def _add_dir_if_not_there(dirlst, dirname): | |
1142 | match = False |
|
1142 | match = False | |
1143 | samestat = os.path.samestat |
|
1143 | samestat = os.path.samestat | |
1144 | dirstat = os.stat(dirname) |
|
1144 | dirstat = os.stat(dirname) | |
1145 | for lstdirstat in dirlst: |
|
1145 | for lstdirstat in dirlst: | |
1146 | if samestat(dirstat, lstdirstat): |
|
1146 | if samestat(dirstat, lstdirstat): | |
1147 | match = True |
|
1147 | match = True | |
1148 | break |
|
1148 | break | |
1149 | if not match: |
|
1149 | if not match: | |
1150 | dirlst.append(dirstat) |
|
1150 | dirlst.append(dirstat) | |
1151 | return not match |
|
1151 | return not match | |
1152 | else: |
|
1152 | else: | |
1153 | followsym = False |
|
1153 | followsym = False | |
1154 |
|
1154 | |||
1155 | if (seen_dirs is None) and followsym: |
|
1155 | if (seen_dirs is None) and followsym: | |
1156 | seen_dirs = [] |
|
1156 | seen_dirs = [] | |
1157 | _add_dir_if_not_there(seen_dirs, path) |
|
1157 | _add_dir_if_not_there(seen_dirs, path) | |
1158 | for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): |
|
1158 | for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): | |
1159 | dirs.sort() |
|
1159 | dirs.sort() | |
1160 | if '.hg' in dirs: |
|
1160 | if '.hg' in dirs: | |
1161 | yield root # found a repository |
|
1161 | yield root # found a repository | |
1162 | qroot = os.path.join(root, '.hg', 'patches') |
|
1162 | qroot = os.path.join(root, '.hg', 'patches') | |
1163 | if os.path.isdir(os.path.join(qroot, '.hg')): |
|
1163 | if os.path.isdir(os.path.join(qroot, '.hg')): | |
1164 | yield qroot # we have a patch queue repo here |
|
1164 | yield qroot # we have a patch queue repo here | |
1165 | if recurse: |
|
1165 | if recurse: | |
1166 | # avoid recursing inside the .hg directory |
|
1166 | # avoid recursing inside the .hg directory | |
1167 | dirs.remove('.hg') |
|
1167 | dirs.remove('.hg') | |
1168 | else: |
|
1168 | else: | |
1169 | dirs[:] = [] # don't descend further |
|
1169 | dirs[:] = [] # don't descend further | |
1170 | elif followsym: |
|
1170 | elif followsym: | |
1171 | newdirs = [] |
|
1171 | newdirs = [] | |
1172 | for d in dirs: |
|
1172 | for d in dirs: | |
1173 | fname = os.path.join(root, d) |
|
1173 | fname = os.path.join(root, d) | |
1174 | if _add_dir_if_not_there(seen_dirs, fname): |
|
1174 | if _add_dir_if_not_there(seen_dirs, fname): | |
1175 | if os.path.islink(fname): |
|
1175 | if os.path.islink(fname): | |
1176 | for hgname in walkrepos(fname, True, seen_dirs): |
|
1176 | for hgname in walkrepos(fname, True, seen_dirs): | |
1177 | yield hgname |
|
1177 | yield hgname | |
1178 | else: |
|
1178 | else: | |
1179 | newdirs.append(d) |
|
1179 | newdirs.append(d) | |
1180 | dirs[:] = newdirs |
|
1180 | dirs[:] = newdirs | |
1181 |
|
1181 | |||
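walkrepos() yields every repository root below a path; recurse=True descends into working copies as well, and followsym=True adds symlink loop protection via seen_dirs. A usage sketch (the path is hypothetical)::

    from mercurial import util
    for root in util.walkrepos('/srv/hg', followsym=True, recurse=True):
        print root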
1182 | _rcpath = None |
|
1182 | _rcpath = None | |
1183 |
|
1183 | |||
1184 | def os_rcpath(): |
|
1184 | def os_rcpath(): | |
1185 | '''return default os-specific hgrc search path''' |
|
1185 | '''return default os-specific hgrc search path''' | |
1186 | path = system_rcpath() |
|
1186 | path = system_rcpath() | |
1187 | path.extend(user_rcpath()) |
|
1187 | path.extend(user_rcpath()) | |
1188 | path = [os.path.normpath(f) for f in path] |
|
1188 | path = [os.path.normpath(f) for f in path] | |
1189 | return path |
|
1189 | return path | |
1190 |
|
1190 | |||
1191 | def rcpath(): |
|
1191 | def rcpath(): | |
1192 | '''return hgrc search path. if env var HGRCPATH is set, use it. |
|
1192 | '''return hgrc search path. if env var HGRCPATH is set, use it. | |
1193 | for each item in path, if directory, use files ending in .rc, |
|
1193 | for each item in path, if directory, use files ending in .rc, | |
1194 | else use item. |
|
1194 | else use item. | |
1195 | make HGRCPATH empty to only look in .hg/hgrc of current repo. |
|
1195 | make HGRCPATH empty to only look in .hg/hgrc of current repo. | |
1196 | if no HGRCPATH, use default os-specific path.''' |
|
1196 | if no HGRCPATH, use default os-specific path.''' | |
1197 | global _rcpath |
|
1197 | global _rcpath | |
1198 | if _rcpath is None: |
|
1198 | if _rcpath is None: | |
1199 | if 'HGRCPATH' in os.environ: |
|
1199 | if 'HGRCPATH' in os.environ: | |
1200 | _rcpath = [] |
|
1200 | _rcpath = [] | |
1201 | for p in os.environ['HGRCPATH'].split(os.pathsep): |
|
1201 | for p in os.environ['HGRCPATH'].split(os.pathsep): | |
1202 | if not p: |
|
1202 | if not p: | |
1203 | continue |
|
1203 | continue | |
1204 | p = expandpath(p) |
|
1204 | p = expandpath(p) | |
1205 | if os.path.isdir(p): |
|
1205 | if os.path.isdir(p): | |
1206 | for f, kind in osutil.listdir(p): |
|
1206 | for f, kind in osutil.listdir(p): | |
1207 | if f.endswith('.rc'): |
|
1207 | if f.endswith('.rc'): | |
1208 | _rcpath.append(os.path.join(p, f)) |
|
1208 | _rcpath.append(os.path.join(p, f)) | |
1209 | else: |
|
1209 | else: | |
1210 | _rcpath.append(p) |
|
1210 | _rcpath.append(p) | |
1211 | else: |
|
1211 | else: | |
1212 | _rcpath = os_rcpath() |
|
1212 | _rcpath = os_rcpath() | |
1213 | return _rcpath |
|
1213 | return _rcpath | |
1214 |
|
1214 | |||
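rcpath() caches its result in the module-level _rcpath; when HGRCPATH is set, directory entries contribute every file ending in .rc and plain entries are used verbatim. A sketch with hypothetical paths::

    import os
    from mercurial import util
    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/conf.d',
                                              '/home/user/.hgrc'])
    util._rcpath = None          # drop the cached value before re-reading
    print util.rcpath()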
1215 | def bytecount(nbytes): |
|
1215 | def bytecount(nbytes): | |
1216 | '''return byte count formatted as readable string, with units''' |
|
1216 | '''return byte count formatted as readable string, with units''' | |
1217 |
|
1217 | |||
1218 | units = ( |
|
1218 | units = ( | |
1219 | (100, 1 << 30, _('%.0f GB')), |
|
1219 | (100, 1 << 30, _('%.0f GB')), | |
1220 | (10, 1 << 30, _('%.1f GB')), |
|
1220 | (10, 1 << 30, _('%.1f GB')), | |
1221 | (1, 1 << 30, _('%.2f GB')), |
|
1221 | (1, 1 << 30, _('%.2f GB')), | |
1222 | (100, 1 << 20, _('%.0f MB')), |
|
1222 | (100, 1 << 20, _('%.0f MB')), | |
1223 | (10, 1 << 20, _('%.1f MB')), |
|
1223 | (10, 1 << 20, _('%.1f MB')), | |
1224 | (1, 1 << 20, _('%.2f MB')), |
|
1224 | (1, 1 << 20, _('%.2f MB')), | |
1225 | (100, 1 << 10, _('%.0f KB')), |
|
1225 | (100, 1 << 10, _('%.0f KB')), | |
1226 | (10, 1 << 10, _('%.1f KB')), |
|
1226 | (10, 1 << 10, _('%.1f KB')), | |
1227 | (1, 1 << 10, _('%.2f KB')), |
|
1227 | (1, 1 << 10, _('%.2f KB')), | |
1228 | (1, 1, _('%.0f bytes')), |
|
1228 | (1, 1, _('%.0f bytes')), | |
1229 | ) |
|
1229 | ) | |
1230 |
|
1230 | |||
1231 | for multiplier, divisor, format in units: |
|
1231 | for multiplier, divisor, format in units: | |
1232 | if nbytes >= divisor * multiplier: |
|
1232 | if nbytes >= divisor * multiplier: | |
1233 | return format % (nbytes / float(divisor)) |
|
1233 | return format % (nbytes / float(divisor)) | |
1234 | return units[-1][2] % nbytes |
|
1234 | return units[-1][2] % nbytes | |
1235 |
|
1235 | |||
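A few sample values for bytecount(), worked out from the unit table above::

    from mercurial import util
    print util.bytecount(100)             # 100 bytes
    print util.bytecount(100000)          # 97.7 KB
    print util.bytecount(15 * (1 << 20))  # 15.0 MB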
1236 | def drop_scheme(scheme, path): |
|
1236 | def drop_scheme(scheme, path): | |
1237 | sc = scheme + ':' |
|
1237 | sc = scheme + ':' | |
1238 | if path.startswith(sc): |
|
1238 | if path.startswith(sc): | |
1239 | path = path[len(sc):] |
|
1239 | path = path[len(sc):] | |
1240 | if path.startswith('//'): |
|
1240 | if path.startswith('//'): | |
1241 | if scheme == 'file': |
|
1241 | if scheme == 'file': | |
1242 | i = path.find('/', 2) |
|
1242 | i = path.find('/', 2) | |
1243 | if i == -1: |
|
1243 | if i == -1: | |
1244 | return '' |
|
1244 | return '' | |
1245 | # On Windows, absolute paths are rooted at the current drive |
|
1245 | # On Windows, absolute paths are rooted at the current drive | |
1246 | # root. On POSIX they are rooted at the file system root. |
|
1246 | # root. On POSIX they are rooted at the file system root. | |
1247 | if os.name == 'nt': |
|
1247 | if os.name == 'nt': | |
1248 | droot = os.path.splitdrive(os.getcwd())[0] + '/' |
|
1248 | droot = os.path.splitdrive(os.getcwd())[0] + '/' | |
1249 | path = os.path.join(droot, path[i + 1:]) |
|
1249 | path = os.path.join(droot, path[i + 1:]) | |
1250 | else: |
|
1250 | else: | |
1251 | path = path[i:] |
|
1251 | path = path[i:] | |
1252 | else: |
|
1252 | else: | |
1253 | path = path[2:] |
|
1253 | path = path[2:] | |
1254 | return path |
|
1254 | return path | |
1255 |
|
1255 | |||
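drop_scheme() strips a scheme prefix and, for file:// URLs on POSIX, keeps the leading slash of the path, per the branch above::

    from mercurial import util
    print util.drop_scheme('file', 'file:///tmp/repo')  # /tmp/repo (on POSIX)
    print util.drop_scheme('http', '/already/plain')    # unchanged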
1256 | def uirepr(s): |
|
1256 | def uirepr(s): | |
1257 | # Avoid double backslash in Windows path repr() |
|
1257 | # Avoid double backslash in Windows path repr() | |
1258 | return repr(s).replace('\\\\', '\\') |
|
1258 | return repr(s).replace('\\\\', '\\') | |
1259 |
|
1259 | |||
1260 | def wrap(line, hangindent, width=None): |
|
1260 | #### naming convention of below implementation follows 'textwrap' module | |
|
1261 | ||||
|
1262 | class MBTextWrapper(textwrap.TextWrapper): | |||
|
1263 | def __init__(self, **kwargs): | |||
|
1264 | textwrap.TextWrapper.__init__(self, **kwargs) | |||
|
1265 | ||||
|
1266 | def _cutdown(self, str, space_left): | |||
|
1267 | l = 0 | |||
|
1268 | ucstr = unicode(str, encoding.encoding) | |||
|
1269 | w = unicodedata.east_asian_width | |||
|
1270 | for i in xrange(len(ucstr)): | |||
|
1271 | l += w(ucstr[i]) in 'WFA' and 2 or 1 | |||
|
1272 | if space_left < l: | |||
|
1273 | return (ucstr[:i].encode(encoding.encoding), | |||
|
1274 | ucstr[i:].encode(encoding.encoding)) | |||
|
1275 | return str, '' | |||
|
1276 | ||||
|
1277 | # ---------------------------------------- | |||
|
1278 | # overriding of base class | |||
|
1279 | ||||
|
1280 | def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): | |||
|
1281 | space_left = max(width - cur_len, 1) | |||
|
1282 | ||||
|
1283 | if self.break_long_words: | |||
|
1284 | cut, res = self._cutdown(reversed_chunks[-1], space_left) | |||
|
1285 | cur_line.append(cut) | |||
|
1286 | reversed_chunks[-1] = res | |||
|
1287 | elif not cur_line: | |||
|
1288 | cur_line.append(reversed_chunks.pop()) | |||
|
1289 | ||||
|
1290 | #### naming convention of above implementation follows 'textwrap' module | |||
|
1291 | ||||
|
1292 | def wrap(line, width=None, initindent='', hangindent=''): | |||
1261 | if width is None: |
|
1293 | if width is None: | |
1262 | width = termwidth() - 2 |
|
1294 | width = termwidth() - 2 | |
1263 | if width <= hangindent: |
|
1295 | maxindent = max(len(hangindent), len(initindent)) | |
|
1296 | if width <= maxindent: | |||
1264 | # adjust for weird terminal size |
|
1297 | # adjust for weird terminal size | |
1265 | width = max(78, hangindent + 1) |
|
1298 | width = max(78, maxindent + 1) | |
1266 | padding = '\n' + ' ' * hangindent |
|
1299 | wrapper = MBTextWrapper(width=width, | |
1267 | # To avoid corrupting multi-byte characters in line, we must wrap |
|
1300 | initial_indent=initindent, | |
1268 | # a Unicode string instead of a bytestring. |
|
1301 | subsequent_indent=hangindent) | |
1269 | try: |
|
1302 | return wrapper.fill(line) | |
1270 | u = line.decode(encoding.encoding) |
|
|||
1271 | w = padding.join(textwrap.wrap(u, width=width - hangindent)) |
|
|||
1272 | return w.encode(encoding.encoding) |
|
|||
1273 | except UnicodeDecodeError: |
|
|||
1274 | return padding.join(textwrap.wrap(line, width=width - hangindent)) |
|
|||
1275 |
|
1303 | |||
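With the rewrite above, wrap() takes the width plus explicit initial and hanging indent strings and delegates to MBTextWrapper, so multi-byte text is measured by display width instead of being split mid-character. A sketch of the new call::

    from mercurial import util
    text = 'a fairly long help string that should wrap onto several lines'
    print util.wrap(text, width=30, initindent='  ', hangindent='    ')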
1276 | def iterlines(iterator): |
|
1304 | def iterlines(iterator): | |
1277 | for chunk in iterator: |
|
1305 | for chunk in iterator: | |
1278 | for line in chunk.splitlines(): |
|
1306 | for line in chunk.splitlines(): | |
1279 | yield line |
|
1307 | yield line | |
1280 |
|
1308 | |||
1281 | def expandpath(path): |
|
1309 | def expandpath(path): | |
1282 | return os.path.expanduser(os.path.expandvars(path)) |
|
1310 | return os.path.expanduser(os.path.expandvars(path)) | |
1283 |
|
1311 | |||
1284 | def hgcmd(): |
|
1312 | def hgcmd(): | |
1285 | """Return the command used to execute current hg |
|
1313 | """Return the command used to execute current hg | |
1286 |
|
1314 | |||
1287 | This is different from hgexecutable() because on Windows we want |
|
1315 | This is different from hgexecutable() because on Windows we want | |
1288 | to avoid things opening new shell windows like batch files, so we |
|
1316 | to avoid things opening new shell windows like batch files, so we | |
1289 | get either the python call or current executable. |
|
1317 | get either the python call or current executable. | |
1290 | """ |
|
1318 | """ | |
1291 | if main_is_frozen(): |
|
1319 | if main_is_frozen(): | |
1292 | return [sys.executable] |
|
1320 | return [sys.executable] | |
1293 | return gethgcmd() |
|
1321 | return gethgcmd() | |
1294 |
|
1322 | |||
1295 | def rundetached(args, condfn): |
|
1323 | def rundetached(args, condfn): | |
1296 | """Execute the argument list in a detached process. |
|
1324 | """Execute the argument list in a detached process. | |
1297 |
|
1325 | |||
1298 | condfn is a callable which is called repeatedly and should return |
|
1326 | condfn is a callable which is called repeatedly and should return | |
1299 | True once the child process is known to have started successfully. |
|
1327 | True once the child process is known to have started successfully. | |
1300 | At this point, the child process PID is returned. If the child |
|
1328 | At this point, the child process PID is returned. If the child | |
1301 | process fails to start or finishes before condfn() evaluates to |
|
1329 | process fails to start or finishes before condfn() evaluates to | |
1302 | True, return -1. |
|
1330 | True, return -1. | |
1303 | """ |
|
1331 | """ | |
1304 | # Windows case is easier because the child process is either |
|
1332 | # Windows case is easier because the child process is either | |
1305 | # successfully starting and validating the condition or exiting |
|
1333 | # successfully starting and validating the condition or exiting | |
1306 | # on failure. We just poll on its PID. On Unix, if the child |
|
1334 | # on failure. We just poll on its PID. On Unix, if the child | |
1307 | # process fails to start, it will be left in a zombie state until |
|
1335 | # process fails to start, it will be left in a zombie state until | |
1308 | # the parent wait on it, which we cannot do since we expect a long |
|
1336 | # the parent wait on it, which we cannot do since we expect a long | |
1309 | # running process on success. Instead we listen for SIGCHLD telling |
|
1337 | # running process on success. Instead we listen for SIGCHLD telling | |
1310 | # us our child process terminated. |
|
1338 | # us our child process terminated. | |
1311 | terminated = set() |
|
1339 | terminated = set() | |
1312 | def handler(signum, frame): |
|
1340 | def handler(signum, frame): | |
1313 | terminated.add(os.wait()) |
|
1341 | terminated.add(os.wait()) | |
1314 | prevhandler = None |
|
1342 | prevhandler = None | |
1315 | if hasattr(signal, 'SIGCHLD'): |
|
1343 | if hasattr(signal, 'SIGCHLD'): | |
1316 | prevhandler = signal.signal(signal.SIGCHLD, handler) |
|
1344 | prevhandler = signal.signal(signal.SIGCHLD, handler) | |
1317 | try: |
|
1345 | try: | |
1318 | pid = spawndetached(args) |
|
1346 | pid = spawndetached(args) | |
1319 | while not condfn(): |
|
1347 | while not condfn(): | |
1320 | if ((pid in terminated or not testpid(pid)) |
|
1348 | if ((pid in terminated or not testpid(pid)) | |
1321 | and not condfn()): |
|
1349 | and not condfn()): | |
1322 | return -1 |
|
1350 | return -1 | |
1323 | time.sleep(0.1) |
|
1351 | time.sleep(0.1) | |
1324 | return pid |
|
1352 | return pid | |
1325 | finally: |
|
1353 | finally: | |
1326 | if prevhandler is not None: |
|
1354 | if prevhandler is not None: | |
1327 | signal.signal(signal.SIGCHLD, prevhandler) |
|
1355 | signal.signal(signal.SIGCHLD, prevhandler) | |
1328 |
|
1356 | |||
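rundetached() spawns the argument list as a detached child and polls condfn() until the child is known to be up, returning its PID (or -1 on failure). A hedged sketch; the command and the readiness probe below are hypothetical::

    import os
    from mercurial import util
    pid = util.rundetached(['hg', 'serve', '--pid-file', 'hg.pid'],
                           lambda: os.path.exists('hg.pid'))
    if pid < 0:
        raise Exception('server failed to start')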
1329 | try: |
|
1357 | try: | |
1330 | any, all = any, all |
|
1358 | any, all = any, all | |
1331 | except NameError: |
|
1359 | except NameError: | |
1332 | def any(iterable): |
|
1360 | def any(iterable): | |
1333 | for i in iterable: |
|
1361 | for i in iterable: | |
1334 | if i: |
|
1362 | if i: | |
1335 | return True |
|
1363 | return True | |
1336 | return False |
|
1364 | return False | |
1337 |
|
1365 | |||
1338 | def all(iterable): |
|
1366 | def all(iterable): | |
1339 | for i in iterable: |
|
1367 | for i in iterable: | |
1340 | if not i: |
|
1368 | if not i: | |
1341 | return False |
|
1369 | return False | |
1342 | return True |
|
1370 | return True | |
1343 |
|
1371 | |||
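The fallback definitions above simply mirror the any()/all() builtins that appeared in Python 2.5::

    print any([0, '', 3])    # True
    print all([1, 'x', []])  # False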
1344 | def termwidth(): |
|
1372 | def termwidth(): | |
1345 | if 'COLUMNS' in os.environ: |
|
1373 | if 'COLUMNS' in os.environ: | |
1346 | try: |
|
1374 | try: | |
1347 | return int(os.environ['COLUMNS']) |
|
1375 | return int(os.environ['COLUMNS']) | |
1348 | except ValueError: |
|
1376 | except ValueError: | |
1349 | pass |
|
1377 | pass | |
1350 | return termwidth_() |
|
1378 | return termwidth_() |
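termwidth() prefers COLUMNS when it parses as an integer and otherwise falls back to the platform-specific probe::

    import os
    from mercurial import util
    os.environ['COLUMNS'] = '120'
    print util.termwidth()   # 120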
@@ -1,174 +1,174 b'' | |||||
1 | adding changesets |
|
1 | adding changesets | |
2 | adding manifests |
|
2 | adding manifests | |
3 | adding file changes |
|
3 | adding file changes | |
4 | added 2 changesets with 2 changes to 1 files |
|
4 | added 2 changesets with 2 changes to 1 files | |
5 | (run 'hg update' to get a working copy) |
|
5 | (run 'hg update' to get a working copy) | |
6 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
6 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
7 | % should fail with encoding error |
|
7 | % should fail with encoding error | |
8 | M a |
|
8 | M a | |
9 | ? latin-1 |
|
9 | ? latin-1 | |
10 | ? latin-1-tag |
|
10 | ? latin-1-tag | |
11 | ? utf-8 |
|
11 | ? utf-8 | |
12 | transaction abort! |
|
12 | transaction abort! | |
13 | rollback completed |
|
13 | rollback completed | |
14 | abort: decoding near ' encoded: �': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)! |
|
14 | abort: decoding near ' encoded: �': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)! | |
15 | % these should work |
|
15 | % these should work | |
16 | marked working directory as branch � |
|
16 | marked working directory as branch � | |
17 | % hg log (ascii) |
|
17 | % hg log (ascii) | |
18 | changeset: 5:db5520b4645f |
|
18 | changeset: 5:db5520b4645f | |
19 | branch: ? |
|
19 | branch: ? | |
20 | tag: tip |
|
20 | tag: tip | |
21 | user: test |
|
21 | user: test | |
22 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
22 | date: Mon Jan 12 13:46:40 1970 +0000 | |
23 | summary: latin1 branch |
|
23 | summary: latin1 branch | |
24 |
|
24 | |||
25 | changeset: 4:9cff3c980b58 |
|
25 | changeset: 4:9cff3c980b58 | |
26 | user: test |
|
26 | user: test | |
27 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
27 | date: Mon Jan 12 13:46:40 1970 +0000 | |
28 | summary: Added tag ? for changeset 770b9b11621d |
|
28 | summary: Added tag ? for changeset 770b9b11621d | |
29 |
|
29 | |||
30 | changeset: 3:770b9b11621d |
|
30 | changeset: 3:770b9b11621d | |
31 | tag: ? |
|
31 | tag: ? | |
32 | user: test |
|
32 | user: test | |
33 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
33 | date: Mon Jan 12 13:46:40 1970 +0000 | |
34 | summary: utf-8 e' encoded: ? |
|
34 | summary: utf-8 e' encoded: ? | |
35 |
|
35 | |||
36 | changeset: 2:0572af48b948 |
|
36 | changeset: 2:0572af48b948 | |
37 | user: test |
|
37 | user: test | |
38 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
38 | date: Mon Jan 12 13:46:40 1970 +0000 | |
39 | summary: latin-1 e' encoded: ? |
|
39 | summary: latin-1 e' encoded: ? | |
40 |
|
40 | |||
41 | changeset: 1:0e5b7e3f9c4a |
|
41 | changeset: 1:0e5b7e3f9c4a | |
42 | user: test |
|
42 | user: test | |
43 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
43 | date: Mon Jan 12 13:46:40 1970 +0000 | |
44 | summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c' |
|
44 | summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c' | |
45 |
|
45 | |||
46 | changeset: 0:1e78a93102a3 |
|
46 | changeset: 0:1e78a93102a3 | |
47 | user: test |
|
47 | user: test | |
48 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
48 | date: Mon Jan 12 13:46:40 1970 +0000 | |
49 | summary: latin-1 e': ? = u'\xe9' |
|
49 | summary: latin-1 e': ? = u'\xe9' | |
50 |
|
50 | |||
51 | % hg log (latin-1) |
|
51 | % hg log (latin-1) | |
52 | changeset: 5:db5520b4645f |
|
52 | changeset: 5:db5520b4645f | |
53 | branch: � |
|
53 | branch: � | |
54 | tag: tip |
|
54 | tag: tip | |
55 | user: test |
|
55 | user: test | |
56 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
56 | date: Mon Jan 12 13:46:40 1970 +0000 | |
57 | summary: latin1 branch |
|
57 | summary: latin1 branch | |
58 |
|
58 | |||
59 | changeset: 4:9cff3c980b58 |
|
59 | changeset: 4:9cff3c980b58 | |
60 | user: test |
|
60 | user: test | |
61 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
61 | date: Mon Jan 12 13:46:40 1970 +0000 | |
62 | summary: Added tag � for changeset 770b9b11621d |
|
62 | summary: Added tag � for changeset 770b9b11621d | |
63 |
|
63 | |||
64 | changeset: 3:770b9b11621d |
|
64 | changeset: 3:770b9b11621d | |
65 | tag: � |
|
65 | tag: � | |
66 | user: test |
|
66 | user: test | |
67 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
67 | date: Mon Jan 12 13:46:40 1970 +0000 | |
68 | summary: utf-8 e' encoded: � |
|
68 | summary: utf-8 e' encoded: � | |
69 |
|
69 | |||
70 | changeset: 2:0572af48b948 |
|
70 | changeset: 2:0572af48b948 | |
71 | user: test |
|
71 | user: test | |
72 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
72 | date: Mon Jan 12 13:46:40 1970 +0000 | |
73 | summary: latin-1 e' encoded: � |
|
73 | summary: latin-1 e' encoded: � | |
74 |
|
74 | |||
75 | changeset: 1:0e5b7e3f9c4a |
|
75 | changeset: 1:0e5b7e3f9c4a | |
76 | user: test |
|
76 | user: test | |
77 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
77 | date: Mon Jan 12 13:46:40 1970 +0000 | |
78 | summary: koi8-r: ����� = u'\u0440\u0442\u0443\u0442\u044c' |
|
78 | summary: koi8-r: ����� = u'\u0440\u0442\u0443\u0442\u044c' | |
79 |
|
79 | |||
80 | changeset: 0:1e78a93102a3 |
|
80 | changeset: 0:1e78a93102a3 | |
81 | user: test |
|
81 | user: test | |
82 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
82 | date: Mon Jan 12 13:46:40 1970 +0000 | |
83 | summary: latin-1 e': � = u'\xe9' |
|
83 | summary: latin-1 e': � = u'\xe9' | |
84 |
|
84 | |||
85 | % hg log (utf-8) |
|
85 | % hg log (utf-8) | |
86 | changeset: 5:db5520b4645f |
|
86 | changeset: 5:db5520b4645f | |
87 | branch: é |
|
87 | branch: é | |
88 | tag: tip |
|
88 | tag: tip | |
89 | user: test |
|
89 | user: test | |
90 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
90 | date: Mon Jan 12 13:46:40 1970 +0000 | |
91 | summary: latin1 branch |
|
91 | summary: latin1 branch | |
92 |
|
92 | |||
93 | changeset: 4:9cff3c980b58 |
|
93 | changeset: 4:9cff3c980b58 | |
94 | user: test |
|
94 | user: test | |
95 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
95 | date: Mon Jan 12 13:46:40 1970 +0000 | |
96 | summary: Added tag é for changeset 770b9b11621d |
|
96 | summary: Added tag é for changeset 770b9b11621d | |
97 |
|
97 | |||
98 | changeset: 3:770b9b11621d |
|
98 | changeset: 3:770b9b11621d | |
99 | tag: é |
|
99 | tag: é | |
100 | user: test |
|
100 | user: test | |
101 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
101 | date: Mon Jan 12 13:46:40 1970 +0000 | |
102 | summary: utf-8 e' encoded: é |
|
102 | summary: utf-8 e' encoded: é | |
103 |
|
103 | |||
104 | changeset: 2:0572af48b948 |
|
104 | changeset: 2:0572af48b948 | |
105 | user: test |
|
105 | user: test | |
106 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
106 | date: Mon Jan 12 13:46:40 1970 +0000 | |
107 | summary: latin-1 e' encoded: é |
|
107 | summary: latin-1 e' encoded: é | |
108 |
|
108 | |||
109 | changeset: 1:0e5b7e3f9c4a |
|
109 | changeset: 1:0e5b7e3f9c4a | |
110 | user: test |
|
110 | user: test | |
111 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
111 | date: Mon Jan 12 13:46:40 1970 +0000 | |
112 | summary: koi8-r: ÒÔÕÔØ = u'\u0440\u0442\u0443\u0442\u044c' |
|
112 | summary: koi8-r: ÒÔÕÔØ = u'\u0440\u0442\u0443\u0442\u044c' | |
113 |
|
113 | |||
114 | changeset: 0:1e78a93102a3 |
|
114 | changeset: 0:1e78a93102a3 | |
115 | user: test |
|
115 | user: test | |
116 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
116 | date: Mon Jan 12 13:46:40 1970 +0000 | |
117 | summary: latin-1 e': é = u'\xe9' |
|
117 | summary: latin-1 e': é = u'\xe9' | |
118 |
|
118 | |||
119 | % hg tags (ascii) |
|
119 | % hg tags (ascii) | |
120 | tip 5:db5520b4645f |
|
120 | tip 5:db5520b4645f | |
121 | ? 3:770b9b11621d |
|
121 | ? 3:770b9b11621d | |
122 | % hg tags (latin-1) |
|
122 | % hg tags (latin-1) | |
123 | tip 5:db5520b4645f |
|
123 | tip 5:db5520b4645f | |
124 | � 3:770b9b11621d |
|
124 | � 3:770b9b11621d | |
125 | % hg tags (utf-8) |
|
125 | % hg tags (utf-8) | |
126 | tip 5:db5520b4645f |
|
126 | tip 5:db5520b4645f | |
127 | é 3:770b9b11621d |
|
127 | é 3:770b9b11621d | |
128 | % hg branches (ascii) |
|
128 | % hg branches (ascii) | |
129 | ? 5:db5520b4645f |
|
129 | ? 5:db5520b4645f | |
130 | default 4:9cff3c980b58 (inactive) |
|
130 | default 4:9cff3c980b58 (inactive) | |
131 | % hg branches (latin-1) |
|
131 | % hg branches (latin-1) | |
132 | � 5:db5520b4645f |
|
132 | � 5:db5520b4645f | |
133 | default 4:9cff3c980b58 (inactive) |
|
133 | default 4:9cff3c980b58 (inactive) | |
134 | % hg branches (utf-8) |
|
134 | % hg branches (utf-8) | |
135 | é 5:db5520b4645f |
|
135 | é 5:db5520b4645f | |
136 | default 4:9cff3c980b58 (inactive) |
|
136 | default 4:9cff3c980b58 (inactive) | |
137 | % hg log (utf-8) |
|
137 | % hg log (utf-8) | |
138 | changeset: 5:db5520b4645f |
|
138 | changeset: 5:db5520b4645f | |
139 | branch: é |
|
139 | branch: é | |
140 | tag: tip |
|
140 | tag: tip | |
141 | user: test |
|
141 | user: test | |
142 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
142 | date: Mon Jan 12 13:46:40 1970 +0000 | |
143 | summary: latin1 branch |
|
143 | summary: latin1 branch | |
144 |
|
144 | |||
145 | changeset: 4:9cff3c980b58 |
|
145 | changeset: 4:9cff3c980b58 | |
146 | user: test |
|
146 | user: test | |
147 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
147 | date: Mon Jan 12 13:46:40 1970 +0000 | |
148 | summary: Added tag é for changeset 770b9b11621d |
|
148 | summary: Added tag é for changeset 770b9b11621d | |
149 |
|
149 | |||
150 | changeset: 3:770b9b11621d |
|
150 | changeset: 3:770b9b11621d | |
151 | tag: é |
|
151 | tag: é | |
152 | user: test |
|
152 | user: test | |
153 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
153 | date: Mon Jan 12 13:46:40 1970 +0000 | |
154 | summary: utf-8 e' encoded: é |
|
154 | summary: utf-8 e' encoded: é | |
155 |
|
155 | |||
156 | changeset: 2:0572af48b948 |
|
156 | changeset: 2:0572af48b948 | |
157 | user: test |
|
157 | user: test | |
158 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
158 | date: Mon Jan 12 13:46:40 1970 +0000 | |
159 | summary: latin-1 e' encoded: é |
|
159 | summary: latin-1 e' encoded: é | |
160 |
|
160 | |||
161 | changeset: 1:0e5b7e3f9c4a |
|
161 | changeset: 1:0e5b7e3f9c4a | |
162 | user: test |
|
162 | user: test | |
163 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
163 | date: Mon Jan 12 13:46:40 1970 +0000 | |
164 | summary: koi8-r: ртуть = u'\u0440\u0442\u0443\u0442\u044c' |
|
164 | summary: koi8-r: ртуть = u'\u0440\u0442\u0443\u0442\u044c' | |
165 |
|
165 | |||
166 | changeset: 0:1e78a93102a3 |
|
166 | changeset: 0:1e78a93102a3 | |
167 | user: test |
|
167 | user: test | |
168 | date: Mon Jan 12 13:46:40 1970 +0000 |
|
168 | date: Mon Jan 12 13:46:40 1970 +0000 | |
169 | summary: latin-1 e': И = u'\xe9' |
|
169 | summary: latin-1 e': И = u'\xe9' | |
170 |
|
170 | |||
171 | % hg log (dolphin) |
|
171 | % hg log (dolphin) | |
172 | abort: unknown encoding: dolphin, please check your locale settings |
|
172 | abort: unknown encoding: dolphin, please check your locale settings | |
173 | abort: decoding near '�': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! |
|
173 | abort: decoding near '�': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! | |
174 | abort: branch name not in UTF-8! |
|
174 | abort: branch name not in UTF-8! |