@@ -1,184 +1,184 @@
 # churn.py - create a graph of revisions count grouped by template
 #
 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-'''allow graphing the number of lines (or count of revisions) grouped by template'''
+'''command to show certain statistics about revision history'''
 
 from mercurial.i18n import _
 from mercurial import patch, cmdutil, util, templater
 import os, sys
 import time, datetime
 
 def get_tty_width():
     if 'COLUMNS' in os.environ:
         try:
             return int(os.environ['COLUMNS'])
         except ValueError:
             pass
     try:
         import termios, array, fcntl
         for dev in (sys.stdout, sys.stdin):
             try:
                 fd = dev.fileno()
                 if not os.isatty(fd):
                     continue
                 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                 return array.array('h', arri)[1]
             except ValueError:
                 pass
     except ImportError:
         pass
     return 80
 
 def maketemplater(ui, repo, tmpl):
     tmpl = templater.parsestring(tmpl, quoted=False)
     try:
         t = cmdutil.changeset_templater(ui, repo, False, None, False)
     except SyntaxError, inst:
         raise util.Abort(inst.args[0])
     t.use_template(tmpl)
     return t
 
 def changedlines(ui, repo, ctx1, ctx2):
     lines = 0
     ui.pushbuffer()
     patch.diff(repo, ctx1.node(), ctx2.node())
     diff = ui.popbuffer()
     for l in diff.split('\n'):
         if (l.startswith("+") and not l.startswith("+++ ") or
             l.startswith("-") and not l.startswith("--- ")):
             lines += 1
     return lines
 
 def countrate(ui, repo, amap, *pats, **opts):
     """Calculate stats"""
     if opts.get('dateformat'):
         def getkey(ctx):
             t, tz = ctx.date()
             date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
             return date.strftime(opts['dateformat'])
     else:
         tmpl = opts.get('template', '{author|email}')
         tmpl = maketemplater(ui, repo, tmpl)
         def getkey(ctx):
             ui.pushbuffer()
             tmpl.show(changenode=ctx.node())
             return ui.popbuffer()
 
     count = pct = 0
     rate = {}
     df = False
     if opts.get('date'):
         df = util.matchdate(opts['date'])
 
     get = util.cachefunc(lambda r: repo[r].changeset())
     changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
     for st, rev, fns in changeiter:
         if not st == 'add':
             continue
         if df and not df(get(rev)[2][0]): # doesn't match date format
             continue
 
         ctx = repo[rev]
         key = getkey(ctx)
         key = amap.get(key, key) # alias remap
         if opts.get('changesets'):
             rate[key] = rate.get(key, 0) + 1
         else:
             parents = ctx.parents()
             if len(parents) > 1:
                 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                 continue
 
             ctx1 = parents[0]
             lines = changedlines(ui, repo, ctx1, ctx)
             rate[key] = rate.get(key, 0) + lines
 
         if opts.get('progress'):
             count += 1
             newpct = int(100.0 * count / max(len(repo), 1))
             if pct < newpct:
                 pct = newpct
                 ui.write(_("\rGenerating stats: %d%%") % pct)
                 sys.stdout.flush()
 
     if opts.get('progress'):
         ui.write("\r")
         sys.stdout.flush()
 
     return rate
 
 
 def churn(ui, repo, *pats, **opts):
     '''Graph count of revisions grouped by template
 
     Will graph count of changed lines or revisions grouped by template or
     alternatively by date, if dateformat is used. In this case it will override
     template.
 
     By default statistics are counted for number of changed lines.
 
     Examples:
 
       # display count of changed lines for every committer
       hg churn -t '{author|email}'
 
       # display daily activity graph
       hg churn -f '%H' -s -c
 
       # display activity of developers by month
       hg churn -f '%Y-%m' -s -c
 
       # display count of lines changed in every year
       hg churn -f '%Y' -s
 
     The map file format used to specify aliases is fairly simple:
 
     <alias email> <actual email>'''
     def pad(s, l):
         return (s + " " * l)[:l]
 
     amap = {}
     aliases = opts.get('aliases')
     if aliases:
         for l in open(aliases, "r"):
             l = l.strip()
             alias, actual = l.split()
             amap[alias] = actual
 
     rate = countrate(ui, repo, amap, *pats, **opts).items()
     if not rate:
         return
 
     sortfn = ((not opts.get('sort')) and (lambda a, b: cmp(b[1], a[1])) or None)
     rate.sort(sortfn)
 
     maxcount = float(max([v for k, v in rate]))
     maxname = max([len(k) for k, v in rate])
 
     ttywidth = get_tty_width()
     ui.debug(_("assuming %i character terminal\n") % ttywidth)
     width = ttywidth - maxname - 2 - 6 - 2 - 2
 
     for date, count in rate:
         print "%s %6d %s" % (pad(date, maxname), count,
                              "*" * int(count * width / maxcount))
 
 
 cmdtable = {
     "churn":
         (churn,
          [('r', 'rev', [], _('count rate for the specified revision or range')),
           ('d', 'date', '', _('count rate for revs matching date spec')),
           ('t', 'template', '{author|email}', _('template to group changesets')),
           ('f', 'dateformat', '',
               _('strftime-compatible format for grouping by date')),
           ('c', 'changesets', False, _('count rate by number of changesets')),
           ('s', 'sort', False, _('sort by key (default: sort by count)')),
           ('', 'aliases', '', _('file with email aliases')),
           ('', 'progress', None, _('show progress'))],
          _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
 }
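Aside on the churn() code above: it renders its output by scaling every count against the largest one, padding each key to the widest name, and printing a row of '*' characters. A minimal standalone sketch of that scaling idea, assuming nothing beyond what the hunk shows (the graph() helper and the sample dictionary below are invented for illustration and are not part of the diff):

def graph(rate, width=60):
    # mirror churn(): scale each count against the largest count and
    # pad every key to the widest name before drawing the '*' bar
    maxcount = float(max(rate.values()))
    maxname = max(len(k) for k in rate)
    rows = []
    for key, count in sorted(rate.items(), key=lambda kv: -kv[1]):
        bar = "*" * int(count * width / maxcount)
        rows.append("%s %6d %s" % (key.ljust(maxname), count, bar))
    return "\n".join(rows)

sample = {"alice@example.com": 120, "bob@example.com": 45}   # made-up data
print(graph(sample))

With made-up data like the above, the widest bar always belongs to the largest count and every other bar is scaled proportionally, which is why churn never needs to know the terminal width per row, only once up front.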
@@ -1,61 +1,61 @@
-"""a mercurial extension for syntax highlighting in hgweb
+"""syntax highlighting in hgweb, based on Pygments
 
 It depends on the pygments syntax highlighting library:
 http://pygments.org/
 
 To enable the extension add this to hgrc:
 
 [extensions]
 hgext.highlight =
 
 There is a single configuration option:
 
 [web]
 pygments_style = <style>
 
 The default is 'colorful'.
 
 -- Adam Hupp <adam@hupp.org>
 """
 
 import highlight
 from mercurial.hgweb import webcommands, webutil, common
 
 web_filerevision = webcommands._filerevision
 web_annotate = webcommands.annotate
 
 def filerevision_highlight(web, tmpl, fctx):
     mt = ''.join(tmpl('mimetype', encoding=web.encoding))
     # only pygmentize for mimetype containing 'html' so we both match
     # 'text/html' and possibly 'application/xhtml+xml' in the future
     # so that we don't have to touch the extension when the mimetype
     # for a template changes; also hgweb optimizes the case that a
     # raw file is sent using rawfile() and doesn't call us, so we
     # can't clash with the file's content-type here in case we
     # pygmentize a html file
     if 'html' in mt:
         style = web.config('web', 'pygments_style', 'colorful')
         highlight.pygmentize('fileline', fctx, style, tmpl)
     return web_filerevision(web, tmpl, fctx)
 
 def annotate_highlight(web, req, tmpl):
     mt = ''.join(tmpl('mimetype', encoding=web.encoding))
     if 'html' in mt:
         fctx = webutil.filectx(web.repo, req)
         style = web.config('web', 'pygments_style', 'colorful')
         highlight.pygmentize('annotateline', fctx, style, tmpl)
     return web_annotate(web, req, tmpl)
 
 def generate_css(web, req, tmpl):
     pg_style = web.config('web', 'pygments_style', 'colorful')
     fmter = highlight.HtmlFormatter(style = pg_style)
     req.respond(common.HTTP_OK, 'text/css')
     return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
 
 
 # monkeypatch in the new version
 
 webcommands._filerevision = filerevision_highlight
 webcommands.annotate = annotate_highlight
 webcommands.highlightcss = generate_css
 webcommands.__all__.append('highlightcss')
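Aside on the "# monkeypatch in the new version" block above: it is the whole integration mechanism of the highlight extension, namely keep a reference to the original webcommands function, define a wrapper that does the extra highlighting work, then assign the wrapper back under the original name. A minimal sketch of that wrap-and-delegate pattern on an unrelated, made-up target (the logging wrapper below is illustrative only, not part of the extension):

import math

original_sqrt = math.sqrt            # keep a reference to the original

def logging_sqrt(x):
    # extra behaviour first (here: logging; above: pygmentize), ...
    print("sqrt(%r) called" % (x,))
    # ... then delegate to the saved original
    return original_sqrt(x)

math.sqrt = logging_sqrt             # monkeypatch in the new version
print(math.sqrt(2.0))                # logs, then behaves as before

Because the wrapper ends by calling the saved original, callers see the old behaviour plus the new side effect, which is exactly how filerevision_highlight and annotate_highlight stay transparent to hgweb.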
@@ -1,288 +1,287 @@
 # notify.py - email notifications for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-# hook extension to email notifications to people when changesets are
-# committed to a repo they subscribe to.
-#
-# default mode is to print messages to stdout, for testing and
-# configuring.
-#
-# to use, configure notify extension and enable in hgrc like this:
-#
-# [extensions]
-# hgext.notify =
-#
-# [hooks]
-# # one email for each incoming changeset
-# incoming.notify = python:hgext.notify.hook
-# # batch emails when many changesets incoming at one time
-# changegroup.notify = python:hgext.notify.hook
-#
-# [notify]
-# # config items go in here
-#
-# config items:
-#
-# REQUIRED:
-# config = /path/to/file # file containing subscriptions
-#
-# OPTIONAL:
-# test = True # print messages to stdout for testing
-# strip = 3 # number of slashes to strip for url paths
-# domain = example.com # domain to use if committer missing domain
-# style = ... # style file to use when formatting email
-# template = ... # template to use when formatting email
-# incoming = ... # template to use when run as incoming hook
-# changegroup = ... # template when run as changegroup hook
-# maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
-# maxsubject = 67 # truncate subject line longer than this
-# diffstat = True # add a diffstat before the diff content
-# sources = serve # notify if source of incoming changes in this list
-# # (serve == ssh or http, push, pull, bundle)
-# [email]
-# from = user@host.com # email address to send as if none given
-# [web]
-# baseurl = http://hgserver/... # root of hg web site for browsing commits
-#
-# notify config file has same format as regular hgrc. it has two
-# sections so you can express subscriptions in whatever way is handier
-# for you.
-#
-# [usersubs]
-# # key is subscriber email, value is ","-separated list of glob patterns
-# user@host = pattern
-#
-# [reposubs]
-# # key is glob pattern, value is ","-separated list of subscriber emails
-# pattern = user@host
-#
-# glob patterns are matched against path to repo root.
-#
-# if you like, you can put notify config file in repo that users can
-# push changes to, they can manage their own subscriptions.
+
+'''hook extension to email notifications on commits/pushes
+
+Subscriptions can be managed through hgrc. Default mode is to print
+messages to stdout, for testing and configuring.
+
+To use, configure notify extension and enable in hgrc like this:
+
+[extensions]
+hgext.notify =
+
+[hooks]
+# one email for each incoming changeset
+incoming.notify = python:hgext.notify.hook
+# batch emails when many changesets incoming at one time
+changegroup.notify = python:hgext.notify.hook
+
+[notify]
+# config items go in here
+
+config items:
+
+REQUIRED:
+config = /path/to/file # file containing subscriptions
+
+OPTIONAL:
+test = True # print messages to stdout for testing
+strip = 3 # number of slashes to strip for url paths
+domain = example.com # domain to use if committer missing domain
+style = ... # style file to use when formatting email
+template = ... # template to use when formatting email
+incoming = ... # template to use when run as incoming hook
+changegroup = ... # template when run as changegroup hook
+maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
+maxsubject = 67 # truncate subject line longer than this
+diffstat = True # add a diffstat before the diff content
+sources = serve # notify if source of incoming changes in this list
+# (serve == ssh or http, push, pull, bundle)
+[email]
+from = user@host.com # email address to send as if none given
+[web]
+baseurl = http://hgserver/... # root of hg web site for browsing commits
+
+notify config file has same format as regular hgrc. it has two
+sections so you can express subscriptions in whatever way is handier
+for you.
+
+[usersubs]
+# key is subscriber email, value is ","-separated list of glob patterns
+user@host = pattern
+
+[reposubs]
+# key is glob pattern, value is ","-separated list of subscriber emails
+pattern = user@host
+
+glob patterns are matched against path to repo root.
+
+if you like, you can put notify config file in repo that users can
+push changes to, they can manage their own subscriptions.'''
 
 from mercurial.i18n import _
 from mercurial.node import bin, short
 from mercurial import patch, cmdutil, templater, util, mail
 import email.Parser, fnmatch, socket, time
 
 # template for single changeset can include email headers.
 single_template = '''
 Subject: changeset in {webroot}: {desc|firstline|strip}
 From: {author}
 
 changeset {node|short} in {root}
 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
 description:
 \t{desc|tabindent|strip}
 '''.lstrip()
 
 # template for multiple changesets should not contain email headers,
 # because only first set of headers will be used and result will look
 # strange.
 multiple_template = '''
 changeset {node|short} in {root}
 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
 summary: {desc|firstline}
 '''
 
 deftemplates = {
     'changegroup': multiple_template,
 }
 
 class notifier(object):
     '''email notification class.'''
 
     def __init__(self, ui, repo, hooktype):
         self.ui = ui
         cfg = self.ui.config('notify', 'config')
         if cfg:
             self.ui.readsections(cfg, 'usersubs', 'reposubs')
         self.repo = repo
         self.stripcount = int(self.ui.config('notify', 'strip', 0))
         self.root = self.strip(self.repo.root)
         self.domain = self.ui.config('notify', 'domain')
         self.charsets = mail._charsets(self.ui)
         self.subs = self.subscribers()
 
         mapfile = self.ui.config('notify', 'style')
         template = (self.ui.config('notify', hooktype) or
                     self.ui.config('notify', 'template'))
         self.t = cmdutil.changeset_templater(self.ui, self.repo,
                                              False, mapfile, False)
         if not mapfile and not template:
             template = deftemplates.get(hooktype) or single_template
         if template:
             template = templater.parsestring(template, quoted=False)
             self.t.use_template(template)
 
     def strip(self, path):
         '''strip leading slashes from local path, turn into web-safe path.'''
 
         path = util.pconvert(path)
         count = self.stripcount
         while count > 0:
             c = path.find('/')
             if c == -1:
                 break
             path = path[c+1:]
             count -= 1
         return path
 
     def fixmail(self, addr):
         '''try to clean up email addresses.'''
 
         addr = util.email(addr.strip())
         if self.domain:
             a = addr.find('@localhost')
             if a != -1:
                 addr = addr[:a]
             if '@' not in addr:
                 return addr + '@' + self.domain
         return addr
 
     def subscribers(self):
         '''return list of email addresses of subscribers to this repo.'''
 
         subs = {}
         for user, pats in self.ui.configitems('usersubs'):
             for pat in pats.split(','):
                 if fnmatch.fnmatch(self.repo.root, pat.strip()):
                     subs[self.fixmail(user)] = 1
         for pat, users in self.ui.configitems('reposubs'):
             if fnmatch.fnmatch(self.repo.root, pat):
                 for user in users.split(','):
                     subs[self.fixmail(user)] = 1
         subs = util.sort(subs)
         return [mail.addressencode(self.ui, s, self.charsets) for s in subs]
 
     def url(self, path=None):
         return self.ui.config('web', 'baseurl') + (path or self.root)
 
     def node(self, node):
         '''format one changeset.'''
 
         self.t.show(changenode=node, changes=self.repo.changelog.read(node),
                     baseurl=self.ui.config('web', 'baseurl'),
                     root=self.repo.root,
                     webroot=self.root)
 
     def skipsource(self, source):
         '''true if incoming changes from this source should be skipped.'''
         ok_sources = self.ui.config('notify', 'sources', 'serve').split()
         return source not in ok_sources
 
     def send(self, node, count, data):
         '''send message.'''
 
         p = email.Parser.Parser()
         msg = p.parsestr(data)
 
         # store sender and subject
         sender, subject = msg['From'], msg['Subject']
         # create fresh mime message from msg body
         text = msg.get_payload()
         # for notification prefer readability over data precision
         msg = mail.mimeencode(self.ui, text, self.charsets)
 
         def fix_subject(subject):
             '''try to make subject line exist and be useful.'''
 
             if not subject:
                 if count > 1:
                     subject = _('%s: %d new changesets') % (self.root, count)
                 else:
                     changes = self.repo.changelog.read(node)
                     s = changes[4].lstrip().split('\n', 1)[0].rstrip()
                     subject = '%s: %s' % (self.root, s)
             maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
             if maxsubject and len(subject) > maxsubject:
                 subject = subject[:maxsubject-3] + '...'
             msg['Subject'] = mail.headencode(self.ui, subject, self.charsets)
 
         def fix_sender(sender):
             '''try to make message have proper sender.'''
 
             if not sender:
                 sender = self.ui.config('email', 'from') or self.ui.username()
             if '@' not in sender or '@localhost' in sender:
                 sender = self.fixmail(sender)
             msg['From'] = mail.addressencode(self.ui, sender, self.charsets)
 
         msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
         fix_subject(subject)
         fix_sender(sender)
 
         msg['X-Hg-Notification'] = 'changeset ' + short(node)
         if not msg['Message-Id']:
             msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                  (short(node), int(time.time()),
                                   hash(self.repo.root), socket.getfqdn()))
         msg['To'] = ', '.join(self.subs)
 
         msgtext = msg.as_string(0)
         if self.ui.configbool('notify', 'test', True):
             self.ui.write(msgtext)
             if not msgtext.endswith('\n'):
                 self.ui.write('\n')
         else:
             self.ui.status(_('notify: sending %d subscribers %d changes\n') %
                            (len(self.subs), count))
             mail.sendmail(self.ui, util.email(msg['From']),
                           self.subs, msgtext)
 
     def diff(self, node, ref):
         maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
         prev = self.repo.changelog.parents(node)[0]
 
         self.ui.pushbuffer()
         patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
         difflines = self.ui.popbuffer().splitlines()
 
         if self.ui.configbool('notify', 'diffstat', True):
             s = patch.diffstat(difflines)
             # s may be nil, don't include the header if it is
             if s:
                 self.ui.write('\ndiffstat:\n\n%s' % s)
         if maxdiff == 0:
             return
         if maxdiff > 0 and len(difflines) > maxdiff:
             self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
                           (len(difflines), maxdiff))
             difflines = difflines[:maxdiff]
         elif difflines:
             self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
         self.ui.write("\n".join(difflines))
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     '''send email notifications to interested subscribers.
 
     if used as changegroup hook, send one email for all changesets in
     changegroup. else send one email per changeset.'''
     n = notifier(ui, repo, hooktype)
     if not n.subs:
         ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
         return
     if n.skipsource(source):
         ui.debug(_('notify: changes have source "%s" - skipping\n') %
                  source)
         return
     node = bin(node)
     ui.pushbuffer()
     if hooktype == 'changegroup':
         start = repo[node].rev()
         end = len(repo)
         count = end - start
         for rev in xrange(start, end):
             n.node(repo[rev].node())
         n.diff(node, repo.changelog.tip())
     else:
         count = 1
         n.node(node)
         n.diff(node, node)
     data = ui.popbuffer()
     n.send(node, count, data)
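Aside on subscribers() above: it builds the recipient list by fnmatch-ing the repository root against every glob pattern taken from the [usersubs] and [reposubs] sections. A minimal sketch of that matching step, assuming invented paths and addresses (the reposubs dict and the subscribers_for() helper below are illustrative only, not part of the extension):

import fnmatch

# made-up [reposubs]-style data: glob pattern -> comma-separated subscribers
reposubs = {
    "/srv/hg/project-*": "dev-team@example.com,qa@example.com",
    "/srv/hg/docs": "writers@example.com",
}

def subscribers_for(repo_root):
    subs = set()
    for pattern, users in reposubs.items():
        if fnmatch.fnmatch(repo_root, pattern):      # same test notify uses
            subs.update(u.strip() for u in users.split(","))
    return sorted(subs)

print(subscribers_for("/srv/hg/project-alpha"))

Using a set and sorting at the end mirrors what notify achieves with its subs dictionary and util.sort(): duplicates from overlapping patterns collapse into a single, stable recipient list.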
@@ -1,401 +1,403 | |||||
1 | # rebase.py - rebasing feature for mercurial |
|
1 | # rebase.py - rebasing feature for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com> |
|
3 | # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | ''' Rebasing feature |
|
8 | '''move sets of revisions to a different ancestor | |
9 |
|
9 | |||
10 | This extension lets you rebase changesets in an existing Mercurial repository. |
|
10 | This extension lets you rebase changesets in an existing Mercurial repository. | |
11 |
|
11 | |||
12 | For more information: |
|
12 | For more information: | |
13 | http://www.selenic.com/mercurial/wiki/index.cgi/RebaseProject |
|
13 | http://www.selenic.com/mercurial/wiki/index.cgi/RebaseProject | |
14 | ''' |
|
14 | ''' | |
15 |
|
15 | |||
16 | from mercurial import util, repair, merge, cmdutil, dispatch, commands |
|
16 | from mercurial import util, repair, merge, cmdutil, dispatch, commands | |
17 | from mercurial.commands import templateopts |
|
17 | from mercurial.commands import templateopts | |
18 | from mercurial.node import nullrev |
|
18 | from mercurial.node import nullrev | |
19 | from mercurial.i18n import _ |
|
19 | from mercurial.i18n import _ | |
20 | import os, errno |
|
20 | import os, errno | |
21 |
|
21 | |||
22 | def rebase(ui, repo, **opts): |
|
22 | def rebase(ui, repo, **opts): | |
23 | """move changeset (and descendants) to a different branch |
|
23 | """move changeset (and descendants) to a different branch | |
24 |
|
24 | |||
25 | Rebase uses repeated merging to graft changesets from one part of history |
|
25 | Rebase uses repeated merging to graft changesets from one part of history | |
26 | onto another. This can be useful for linearizing local changes relative to |
|
26 | onto another. This can be useful for linearizing local changes relative to | |
27 | a master development tree. |
|
27 | a master development tree. | |
28 |
|
28 | |||
29 | If a rebase is interrupted to manually resolve a merge, it can be continued |
|
29 | If a rebase is interrupted to manually resolve a merge, it can be continued | |
30 | with --continue or aborted with --abort. |
|
30 | with --continue or aborted with --abort. | |
31 | """ |
|
31 | """ | |
32 | originalwd = target = source = None |
|
32 | originalwd = target = source = None | |
33 | external = nullrev |
|
33 | external = nullrev | |
34 | state = skipped = {} |
|
34 | state = skipped = {} | |
35 |
|
35 | |||
36 | lock = wlock = None |
|
36 | lock = wlock = None | |
37 | try: |
|
37 | try: | |
38 | lock = repo.lock() |
|
38 | lock = repo.lock() | |
39 | wlock = repo.wlock() |
|
39 | wlock = repo.wlock() | |
40 |
|
40 | |||
41 | # Validate input and define rebasing points |
|
41 | # Validate input and define rebasing points | |
42 | destf = opts.get('dest', None) |
|
42 | destf = opts.get('dest', None) | |
43 | srcf = opts.get('source', None) |
|
43 | srcf = opts.get('source', None) | |
44 | basef = opts.get('base', None) |
|
44 | basef = opts.get('base', None) | |
45 | contf = opts.get('continue') |
|
45 | contf = opts.get('continue') | |
46 | abortf = opts.get('abort') |
|
46 | abortf = opts.get('abort') | |
47 | collapsef = opts.get('collapse', False) |
|
47 | collapsef = opts.get('collapse', False) | |
48 | if contf or abortf: |
|
48 | if contf or abortf: | |
49 | if contf and abortf: |
|
49 | if contf and abortf: | |
50 | raise dispatch.ParseError('rebase', |
|
50 | raise dispatch.ParseError('rebase', | |
51 | _('cannot use both abort and continue')) |
|
51 | _('cannot use both abort and continue')) | |
52 | if collapsef: |
|
52 | if collapsef: | |
53 | raise dispatch.ParseError('rebase', |
|
53 | raise dispatch.ParseError('rebase', | |
54 | _('cannot use collapse with continue or abort')) |
|
54 | _('cannot use collapse with continue or abort')) | |
55 |
|
55 | |||
56 | if (srcf or basef or destf): |
|
56 | if (srcf or basef or destf): | |
57 | raise dispatch.ParseError('rebase', |
|
57 | raise dispatch.ParseError('rebase', | |
58 | _('abort and continue do not allow specifying revisions')) |
|
58 | _('abort and continue do not allow specifying revisions')) | |
59 |
|
59 | |||
60 | originalwd, target, state, collapsef, external = restorestatus(repo) |
|
60 | originalwd, target, state, collapsef, external = restorestatus(repo) | |
61 | if abortf: |
|
61 | if abortf: | |
62 | abort(repo, originalwd, target, state) |
|
62 | abort(repo, originalwd, target, state) | |
63 | return |
|
63 | return | |
64 | else: |
|
64 | else: | |
65 | if srcf and basef: |
|
65 | if srcf and basef: | |
66 | raise dispatch.ParseError('rebase', _('cannot specify both a ' |
|
66 | raise dispatch.ParseError('rebase', _('cannot specify both a ' | |
67 | 'revision and a base')) |
|
67 | 'revision and a base')) | |
68 | cmdutil.bail_if_changed(repo) |
|
68 | cmdutil.bail_if_changed(repo) | |
69 | result = buildstate(repo, destf, srcf, basef, collapsef) |
|
69 | result = buildstate(repo, destf, srcf, basef, collapsef) | |
70 | if result: |
|
70 | if result: | |
71 | originalwd, target, state, external = result |
|
71 | originalwd, target, state, external = result | |
72 | else: # Empty state built, nothing to rebase |
|
72 | else: # Empty state built, nothing to rebase | |
73 | repo.ui.status(_('nothing to rebase\n')) |
|
73 | repo.ui.status(_('nothing to rebase\n')) | |
74 | return |
|
74 | return | |
75 |
|
75 | |||
76 | # Rebase |
|
76 | # Rebase | |
77 | targetancestors = list(repo.changelog.ancestors(target)) |
|
77 | targetancestors = list(repo.changelog.ancestors(target)) | |
78 | targetancestors.append(target) |
|
78 | targetancestors.append(target) | |
79 |
|
79 | |||
80 | for rev in util.sort(state): |
|
80 | for rev in util.sort(state): | |
81 | if state[rev] == -1: |
|
81 | if state[rev] == -1: | |
82 | storestatus(repo, originalwd, target, state, collapsef, |
|
82 | storestatus(repo, originalwd, target, state, collapsef, | |
83 | external) |
|
83 | external) | |
84 | rebasenode(repo, rev, target, state, skipped, targetancestors, |
|
84 | rebasenode(repo, rev, target, state, skipped, targetancestors, | |
85 | collapsef) |
|
85 | collapsef) | |
86 | ui.note(_('rebase merging completed\n')) |
|
86 | ui.note(_('rebase merging completed\n')) | |
87 |
|
87 | |||
88 | if collapsef: |
|
88 | if collapsef: | |
89 | p1, p2 = defineparents(repo, min(state), target, |
|
89 | p1, p2 = defineparents(repo, min(state), target, | |
90 | state, targetancestors) |
|
90 | state, targetancestors) | |
91 | concludenode(repo, rev, p1, external, state, collapsef, |
|
91 | concludenode(repo, rev, p1, external, state, collapsef, | |
92 | last=True, skipped=skipped) |
|
92 | last=True, skipped=skipped) | |
93 |
|
93 | |||
94 | if 'qtip' in repo.tags(): |
|
94 | if 'qtip' in repo.tags(): | |
95 | updatemq(repo, state, skipped, **opts) |
|
95 | updatemq(repo, state, skipped, **opts) | |
96 |
|
96 | |||
97 | if not opts.get('keep'): |
|
97 | if not opts.get('keep'): | |
98 | # Remove no more useful revisions |
|
98 | # Remove no more useful revisions | |
99 | if (util.set(repo.changelog.descendants(min(state))) |
|
99 | if (util.set(repo.changelog.descendants(min(state))) | |
100 | - util.set(state.keys())): |
|
100 | - util.set(state.keys())): | |
101 | ui.warn(_("warning: new changesets detected on source branch, " |
|
101 | ui.warn(_("warning: new changesets detected on source branch, " | |
102 | "not stripping\n")) |
|
102 | "not stripping\n")) | |
103 | else: |
|
103 | else: | |
104 | repair.strip(repo.ui, repo, repo[min(state)].node(), "strip") |
|
104 | repair.strip(repo.ui, repo, repo[min(state)].node(), "strip") | |
105 |
|
105 | |||
106 | clearstatus(repo) |
|
106 | clearstatus(repo) | |
107 | ui.status(_("rebase completed\n")) |
|
107 | ui.status(_("rebase completed\n")) | |
|
108 | if os.path.exists(repo.sjoin('undo')): | |||
|
109 | util.unlink(repo.sjoin('undo')) | |||
108 | if skipped: |
|
110 | if skipped: | |
109 | ui.note(_("%d revisions have been skipped\n") % len(skipped)) |
|
111 | ui.note(_("%d revisions have been skipped\n") % len(skipped)) | |
110 | finally: |
|
112 | finally: | |
111 | del lock, wlock |
|
113 | del lock, wlock | |
112 |
|
114 | |||
113 | def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped={}): |
|
115 | def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped={}): | |
114 | """Skip commit if collapsing has been required and rev is not the last |
|
116 | """Skip commit if collapsing has been required and rev is not the last | |
115 | revision, commit otherwise |
|
117 | revision, commit otherwise | |
116 | """ |
|
118 | """ | |
117 | repo.dirstate.setparents(repo[p1].node(), repo[p2].node()) |
|
119 | repo.dirstate.setparents(repo[p1].node(), repo[p2].node()) | |
118 |
|
120 | |||
119 | if collapse and not last: |
|
121 | if collapse and not last: | |
120 | return None |
|
122 | return None | |
121 |
|
123 | |||
122 | # Commit, record the old nodeid |
|
124 | # Commit, record the old nodeid | |
123 | m, a, r = repo.status()[:3] |
|
125 | m, a, r = repo.status()[:3] | |
124 | newrev = nullrev |
|
126 | newrev = nullrev | |
125 | try: |
|
127 | try: | |
126 | if last: |
|
128 | if last: | |
127 | commitmsg = 'Collapsed revision' |
|
129 | commitmsg = 'Collapsed revision' | |
128 | for rebased in state: |
|
130 | for rebased in state: | |
129 | if rebased not in skipped: |
|
131 | if rebased not in skipped: | |
130 | commitmsg += '\n* %s' % repo[rebased].description() |
|
132 | commitmsg += '\n* %s' % repo[rebased].description() | |
131 | commitmsg = repo.ui.edit(commitmsg, repo.ui.username()) |
|
133 | commitmsg = repo.ui.edit(commitmsg, repo.ui.username()) | |
132 | else: |
|
134 | else: | |
133 | commitmsg = repo[rev].description() |
|
135 | commitmsg = repo[rev].description() | |
134 | # Commit might fail if unresolved files exist |
|
136 | # Commit might fail if unresolved files exist | |
135 | newrev = repo.commit(m+a+r, |
|
137 | newrev = repo.commit(m+a+r, | |
136 | text=commitmsg, |
|
138 | text=commitmsg, | |
137 | user=repo[rev].user(), |
|
139 | user=repo[rev].user(), | |
138 | date=repo[rev].date(), |
|
140 | date=repo[rev].date(), | |
139 | extra={'rebase_source': repo[rev].hex()}) |
|
141 | extra={'rebase_source': repo[rev].hex()}) | |
140 | return newrev |
|
142 | return newrev | |
141 | except util.Abort: |
|
143 | except util.Abort: | |
142 | # Invalidate the previous setparents |
|
144 | # Invalidate the previous setparents | |
143 | repo.dirstate.invalidate() |
|
145 | repo.dirstate.invalidate() | |
144 | raise |
|
146 | raise | |
145 |
|
147 | |||
146 | def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse): |
|
148 | def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse): | |
147 | 'Rebase a single revision' |
|
149 | 'Rebase a single revision' | |
148 | repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev].node())) |
|
150 | repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev].node())) | |
149 |
|
151 | |||
150 | p1, p2 = defineparents(repo, rev, target, state, targetancestors) |
|
152 | p1, p2 = defineparents(repo, rev, target, state, targetancestors) | |
151 |
|
153 | |||
152 | # Merge phase |
|
154 | # Merge phase | |
153 | if len(repo.parents()) != 2: |
|
155 | if len(repo.parents()) != 2: | |
154 | # Update to target and merge it with local |
|
156 | # Update to target and merge it with local | |
155 | merge.update(repo, p1, False, True, False) |
|
157 | merge.update(repo, p1, False, True, False) | |
156 | repo.dirstate.write() |
|
158 | repo.dirstate.write() | |
157 | stats = merge.update(repo, rev, True, False, False) |
|
159 | stats = merge.update(repo, rev, True, False, False) | |
158 |
|
160 | |||
159 | if stats[3] > 0: |
|
161 | if stats[3] > 0: | |
160 | raise util.Abort(_('fix unresolved conflicts with hg resolve then ' |
|
162 | raise util.Abort(_('fix unresolved conflicts with hg resolve then ' | |
161 | 'run hg rebase --continue')) |
|
163 | 'run hg rebase --continue')) | |
162 | else: # we have an interrupted rebase |
|
164 | else: # we have an interrupted rebase | |
163 | repo.ui.debug(_('resuming interrupted rebase\n')) |
|
165 | repo.ui.debug(_('resuming interrupted rebase\n')) | |
164 |
|
166 | |||
165 |
|
167 | |||
166 | newrev = concludenode(repo, rev, p1, p2, state, collapse) |
|
168 | newrev = concludenode(repo, rev, p1, p2, state, collapse) | |
167 |
|
169 | |||
168 | # Update the state |
|
170 | # Update the state | |
169 | if newrev is not None: |
|
171 | if newrev is not None: | |
170 | state[rev] = repo[newrev].rev() |
|
172 | state[rev] = repo[newrev].rev() | |
171 | else: |
|
173 | else: | |
172 | if not collapse: |
|
174 | if not collapse: | |
173 | repo.ui.note(_('no changes, revision %d skipped\n') % rev) |
|
175 | repo.ui.note(_('no changes, revision %d skipped\n') % rev) | |
174 | repo.ui.debug(_('next revision set to %s\n') % p1) |
|
176 | repo.ui.debug(_('next revision set to %s\n') % p1) | |
175 | skipped[rev] = True |
|
177 | skipped[rev] = True | |
176 | state[rev] = p1 |
|
178 | state[rev] = p1 | |
177 |
|
179 | |||
178 | def defineparents(repo, rev, target, state, targetancestors): |
|
180 | def defineparents(repo, rev, target, state, targetancestors): | |
179 | 'Return the new parent relationship of the revision that will be rebased' |
|
181 | 'Return the new parent relationship of the revision that will be rebased' | |
180 | parents = repo[rev].parents() |
|
182 | parents = repo[rev].parents() | |
181 | p1 = p2 = nullrev |
|
183 | p1 = p2 = nullrev | |
182 |
|
184 | |||
183 | P1n = parents[0].rev() |
|
185 | P1n = parents[0].rev() | |
184 | if P1n in targetancestors: |
|
186 | if P1n in targetancestors: | |
185 | p1 = target |
|
187 | p1 = target | |
186 | elif P1n in state: |
|
188 | elif P1n in state: | |
187 | p1 = state[P1n] |
|
189 | p1 = state[P1n] | |
188 | else: # P1n external |
|
190 | else: # P1n external | |
189 | p1 = target |
|
191 | p1 = target | |
190 | p2 = P1n |
|
192 | p2 = P1n | |
191 |
|
193 | |||
192 | if len(parents) == 2 and parents[1].rev() not in targetancestors: |
|
194 | if len(parents) == 2 and parents[1].rev() not in targetancestors: | |
193 | P2n = parents[1].rev() |
|
195 | P2n = parents[1].rev() | |
194 | # interesting second parent |
|
196 | # interesting second parent | |
195 | if P2n in state: |
|
197 | if P2n in state: | |
196 | if p1 == target: # P1n in targetancestors or external |
|
198 | if p1 == target: # P1n in targetancestors or external | |
197 | p1 = state[P2n] |
|
199 | p1 = state[P2n] | |
198 | else: |
|
200 | else: | |
199 | p2 = state[P2n] |
|
201 | p2 = state[P2n] | |
200 | else: # P2n external |
|
202 | else: # P2n external | |
201 | if p2 != nullrev: # P1n external too => rev is a merged revision |
|
203 | if p2 != nullrev: # P1n external too => rev is a merged revision | |
202 | raise util.Abort(_('cannot use revision %d as base, result ' |
|
204 | raise util.Abort(_('cannot use revision %d as base, result ' | |
203 | 'would have 3 parents') % rev) |
|
205 | 'would have 3 parents') % rev) | |
204 | p2 = P2n |
|
206 | p2 = P2n | |
205 | return p1, p2 |
|
207 | return p1, p2 | |
206 |
|
208 | |||
207 | def updatemq(repo, state, skipped, **opts): |
|
209 | def updatemq(repo, state, skipped, **opts): | |
208 | 'Update rebased mq patches - finalize and then import them' |
|
210 | 'Update rebased mq patches - finalize and then import them' | |
209 | mqrebase = {} |
|
211 | mqrebase = {} | |
210 | for p in repo.mq.applied: |
|
212 | for p in repo.mq.applied: | |
211 | if repo[p.rev].rev() in state: |
|
213 | if repo[p.rev].rev() in state: | |
212 | repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') % |
|
214 | repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') % | |
213 | (repo[p.rev].rev(), p.name)) |
|
215 | (repo[p.rev].rev(), p.name)) | |
214 | mqrebase[repo[p.rev].rev()] = p.name |
|
216 | mqrebase[repo[p.rev].rev()] = p.name | |
215 |
|
217 | |||
216 | if mqrebase: |
|
218 | if mqrebase: | |
217 | repo.mq.finish(repo, mqrebase.keys()) |
|
219 | repo.mq.finish(repo, mqrebase.keys()) | |
218 |
|
220 | |||
219 | # We must start import from the newest revision |
|
221 | # We must start import from the newest revision | |
220 | mq = mqrebase.keys() |
|
222 | mq = mqrebase.keys() | |
221 | mq.sort() |
|
223 | mq.sort() | |
222 | mq.reverse() |
|
224 | mq.reverse() | |
223 | for rev in mq: |
|
225 | for rev in mq: | |
224 | if rev not in skipped: |
|
226 | if rev not in skipped: | |
225 | repo.ui.debug(_('import mq patch %d (%s)\n') |
|
227 | repo.ui.debug(_('import mq patch %d (%s)\n') | |
226 | % (state[rev], mqrebase[rev])) |
|
228 | % (state[rev], mqrebase[rev])) | |
227 | repo.mq.qimport(repo, (), patchname=mqrebase[rev], |
|
229 | repo.mq.qimport(repo, (), patchname=mqrebase[rev], | |
228 | git=opts.get('git', False),rev=[str(state[rev])]) |
|
230 | git=opts.get('git', False),rev=[str(state[rev])]) | |
229 | repo.mq.save_dirty() |
|
231 | repo.mq.save_dirty() | |
230 |
|
232 | |||
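The comment in updatemq about starting from the newest revision is the whole point of the reversed sort: qimport has to see the rebased patches newest first so the rebuilt series comes out in the original order. A tiny illustration with invented patch names:

mqrebase = {12: 'feature.patch', 10: 'fix.patch', 15: 'cleanup.patch'}
mq = sorted(mqrebase)     # [10, 12, 15]
mq.reverse()              # newest first: [15, 12, 10]
for rev in mq:
    print('qimport %s (was revision %d)' % (mqrebase[rev], rev))
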
231 | def storestatus(repo, originalwd, target, state, collapse, external): |
|
233 | def storestatus(repo, originalwd, target, state, collapse, external): | |
232 | 'Store the current status to allow recovery' |
|
234 | 'Store the current status to allow recovery' | |
233 | f = repo.opener("rebasestate", "w") |
|
235 | f = repo.opener("rebasestate", "w") | |
234 | f.write(repo[originalwd].hex() + '\n') |
|
236 | f.write(repo[originalwd].hex() + '\n') | |
235 | f.write(repo[target].hex() + '\n') |
|
237 | f.write(repo[target].hex() + '\n') | |
236 | f.write(repo[external].hex() + '\n') |
|
238 | f.write(repo[external].hex() + '\n') | |
237 | f.write('%d\n' % int(collapse)) |
|
239 | f.write('%d\n' % int(collapse)) | |
238 | for d, v in state.items(): |
|
240 | for d, v in state.items(): | |
239 | oldrev = repo[d].hex() |
|
241 | oldrev = repo[d].hex() | |
240 | newrev = repo[v].hex() |
|
242 | newrev = repo[v].hex() | |
241 | f.write("%s:%s\n" % (oldrev, newrev)) |
|
243 | f.write("%s:%s\n" % (oldrev, newrev)) | |
242 | f.close() |
|
244 | f.close() | |
243 | repo.ui.debug(_('rebase status stored\n')) |
|
245 | repo.ui.debug(_('rebase status stored\n')) | |
244 |
|
246 | |||
245 | def clearstatus(repo): |
|
247 | def clearstatus(repo): | |
246 | 'Remove the status files' |
|
248 | 'Remove the status files' | |
247 | if os.path.exists(repo.join("rebasestate")): |
|
249 | if os.path.exists(repo.join("rebasestate")): | |
248 | util.unlink(repo.join("rebasestate")) |
|
250 | util.unlink(repo.join("rebasestate")) | |
249 |
|
251 | |||
250 | def restorestatus(repo): |
|
252 | def restorestatus(repo): | |
251 | 'Restore a previously stored status' |
|
253 | 'Restore a previously stored status' | |
252 | try: |
|
254 | try: | |
253 | target = None |
|
255 | target = None | |
254 | collapse = False |
|
256 | collapse = False | |
255 | external = nullrev |
|
257 | external = nullrev | |
256 | state = {} |
|
258 | state = {} | |
257 | f = repo.opener("rebasestate") |
|
259 | f = repo.opener("rebasestate") | |
258 | for i, l in enumerate(f.read().splitlines()): |
|
260 | for i, l in enumerate(f.read().splitlines()): | |
259 | if i == 0: |
|
261 | if i == 0: | |
260 | originalwd = repo[l].rev() |
|
262 | originalwd = repo[l].rev() | |
261 | elif i == 1: |
|
263 | elif i == 1: | |
262 | target = repo[l].rev() |
|
264 | target = repo[l].rev() | |
263 | elif i == 2: |
|
265 | elif i == 2: | |
264 | external = repo[l].rev() |
|
266 | external = repo[l].rev() | |
265 | elif i == 3: |
|
267 | elif i == 3: | |
266 | collapse = bool(int(l)) |
|
268 | collapse = bool(int(l)) | |
267 | else: |
|
269 | else: | |
268 | oldrev, newrev = l.split(':') |
|
270 | oldrev, newrev = l.split(':') | |
269 | state[repo[oldrev].rev()] = repo[newrev].rev() |
|
271 | state[repo[oldrev].rev()] = repo[newrev].rev() | |
270 | repo.ui.debug(_('rebase status resumed\n')) |
|
272 | repo.ui.debug(_('rebase status resumed\n')) | |
271 | return originalwd, target, state, collapse, external |
|
273 | return originalwd, target, state, collapse, external | |
272 | except IOError, err: |
|
274 | except IOError, err: | |
273 | if err.errno != errno.ENOENT: |
|
275 | if err.errno != errno.ENOENT: | |
274 | raise |
|
276 | raise | |
275 | raise util.Abort(_('no rebase in progress')) |
|
277 | raise util.Abort(_('no rebase in progress')) | |
276 |
|
278 | |||
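storestatus and restorestatus round-trip a small text file in .hg/rebasestate: the original working directory node, the target, the external parent, the collapse flag, and one oldrev:newrev pair per rebased changeset. The sketch below parses a hypothetical state file the same way restorestatus walks it; the hashes are placeholders, and the real code additionally maps each hex node back to a local revision number through the repo.

sample = (
    "1111111111111111111111111111111111111111\n"   # original working dir
    "2222222222222222222222222222222222222222\n"   # rebase target
    "0000000000000000000000000000000000000000\n"   # external parent (none)
    "0\n"                                          # collapse flag
    "3333333333333333333333333333333333333333:"
    "4444444444444444444444444444444444444444\n"   # oldrev:newrev
)

def parse_rebasestate(text):
    state, originalwd, target, external, collapse = {}, None, None, None, False
    for i, line in enumerate(text.splitlines()):
        if i == 0:
            originalwd = line
        elif i == 1:
            target = line
        elif i == 2:
            external = line
        elif i == 3:
            collapse = bool(int(line))
        else:
            old, new = line.split(':')
            state[old] = new
    return originalwd, target, state, collapse, external

print(parse_rebasestate(sample))
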
277 | def abort(repo, originalwd, target, state): |
|
279 | def abort(repo, originalwd, target, state): | |
278 | 'Restore the repository to its original state' |
|
280 | 'Restore the repository to its original state' | |
279 | if util.set(repo.changelog.descendants(target)) - util.set(state.values()): |
|
281 | if util.set(repo.changelog.descendants(target)) - util.set(state.values()): | |
280 | repo.ui.warn(_("warning: new changesets detected on target branch, " |
|
282 | repo.ui.warn(_("warning: new changesets detected on target branch, " | |
281 | "not stripping\n")) |
|
283 | "not stripping\n")) | |
282 | else: |
|
284 | else: | |
283 | # Strip from the first rebased revision |
|
285 | # Strip from the first rebased revision | |
284 | merge.update(repo, repo[originalwd].rev(), False, True, False) |
|
286 | merge.update(repo, repo[originalwd].rev(), False, True, False) | |
285 | rebased = filter(lambda x: x > -1, state.values()) |
|
287 | rebased = filter(lambda x: x > -1, state.values()) | |
286 | if rebased: |
|
288 | if rebased: | |
287 | strippoint = min(rebased) |
|
289 | strippoint = min(rebased) | |
288 | repair.strip(repo.ui, repo, repo[strippoint].node(), "strip") |
|
290 | repair.strip(repo.ui, repo, repo[strippoint].node(), "strip") | |
289 | clearstatus(repo) |
|
291 | clearstatus(repo) | |
290 | repo.ui.status(_('rebase aborted\n')) |
|
292 | repo.ui.status(_('rebase aborted\n')) | |
291 |
|
293 | |||
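abort only strips changesets the rebase actually created: entries still set to nullrev were never rebased and are filtered out before taking the minimum as the strip point. With made-up revision numbers:

nullrev = -1
state = {3: 21, 4: nullrev, 5: 22}        # old rev -> new rev (or not done)
rebased = [v for v in state.values() if v > -1]
if rebased:
    strippoint = min(rebased)             # oldest changeset created by the rebase
    print('strip from revision %d' % strippoint)   # -> strip from revision 21
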
292 | def buildstate(repo, dest, src, base, collapse): |
|
294 | def buildstate(repo, dest, src, base, collapse): | |
293 | 'Define which revisions are going to be rebased and where' |
|
295 | 'Define which revisions are going to be rebased and where' | |
294 | state = {} |
|
296 | state = {} | |
295 | targetancestors = util.set() |
|
297 | targetancestors = util.set() | |
296 |
|
298 | |||
297 | if not dest: |
|
299 | if not dest: | |
298 | # Destination defaults to the latest revision in the current branch |
|
300 | # Destination defaults to the latest revision in the current branch | |
299 | branch = repo[None].branch() |
|
301 | branch = repo[None].branch() | |
300 | dest = repo[branch].rev() |
|
302 | dest = repo[branch].rev() | |
301 | else: |
|
303 | else: | |
302 | if 'qtip' in repo.tags() and (repo[dest].hex() in |
|
304 | if 'qtip' in repo.tags() and (repo[dest].hex() in | |
303 | [s.rev for s in repo.mq.applied]): |
|
305 | [s.rev for s in repo.mq.applied]): | |
304 | raise util.Abort(_('cannot rebase onto an applied mq patch')) |
|
306 | raise util.Abort(_('cannot rebase onto an applied mq patch')) | |
305 | dest = repo[dest].rev() |
|
307 | dest = repo[dest].rev() | |
306 |
|
308 | |||
307 | if src: |
|
309 | if src: | |
308 | commonbase = repo[src].ancestor(repo[dest]) |
|
310 | commonbase = repo[src].ancestor(repo[dest]) | |
309 | if commonbase == repo[src]: |
|
311 | if commonbase == repo[src]: | |
310 | raise util.Abort(_('cannot rebase an ancestor')) |
|
312 | raise util.Abort(_('cannot rebase an ancestor')) | |
311 | if commonbase == repo[dest]: |
|
313 | if commonbase == repo[dest]: | |
312 | raise util.Abort(_('cannot rebase a descendant')) |
|
314 | raise util.Abort(_('cannot rebase a descendant')) | |
313 | source = repo[src].rev() |
|
315 | source = repo[src].rev() | |
314 | else: |
|
316 | else: | |
315 | if base: |
|
317 | if base: | |
316 | cwd = repo[base].rev() |
|
318 | cwd = repo[base].rev() | |
317 | else: |
|
319 | else: | |
318 | cwd = repo['.'].rev() |
|
320 | cwd = repo['.'].rev() | |
319 |
|
321 | |||
320 | if cwd == dest: |
|
322 | if cwd == dest: | |
321 | repo.ui.debug(_('already working on current\n')) |
|
323 | repo.ui.debug(_('already working on current\n')) | |
322 | return None |
|
324 | return None | |
323 |
|
325 | |||
324 | targetancestors = util.set(repo.changelog.ancestors(dest)) |
|
326 | targetancestors = util.set(repo.changelog.ancestors(dest)) | |
325 | if cwd in targetancestors: |
|
327 | if cwd in targetancestors: | |
326 | repo.ui.debug(_('already working on the current branch\n')) |
|
328 | repo.ui.debug(_('already working on the current branch\n')) | |
327 | return None |
|
329 | return None | |
328 |
|
330 | |||
329 | cwdancestors = util.set(repo.changelog.ancestors(cwd)) |
|
331 | cwdancestors = util.set(repo.changelog.ancestors(cwd)) | |
330 | cwdancestors.add(cwd) |
|
332 | cwdancestors.add(cwd) | |
331 | rebasingbranch = cwdancestors - targetancestors |
|
333 | rebasingbranch = cwdancestors - targetancestors | |
332 | source = min(rebasingbranch) |
|
334 | source = min(rebasingbranch) | |
333 |
|
335 | |||
334 | repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source)) |
|
336 | repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source)) | |
335 | state = dict.fromkeys(repo.changelog.descendants(source), nullrev) |
|
337 | state = dict.fromkeys(repo.changelog.descendants(source), nullrev) | |
336 | external = nullrev |
|
338 | external = nullrev | |
337 | if collapse: |
|
339 | if collapse: | |
338 | if not targetancestors: |
|
340 | if not targetancestors: | |
339 | targetancestors = util.set(repo.changelog.ancestors(dest)) |
|
341 | targetancestors = util.set(repo.changelog.ancestors(dest)) | |
340 | for rev in state: |
|
342 | for rev in state: | |
341 | # Check externals and fail if there are more than one |
|
343 | # Check externals and fail if there are more than one | |
342 | for p in repo[rev].parents(): |
|
344 | for p in repo[rev].parents(): | |
343 | if (p.rev() not in state and p.rev() != source |
|
345 | if (p.rev() not in state and p.rev() != source | |
344 | and p.rev() not in targetancestors): |
|
346 | and p.rev() not in targetancestors): | |
345 | if external != nullrev: |
|
347 | if external != nullrev: | |
346 | raise util.Abort(_('unable to collapse, there is more ' |
|
348 | raise util.Abort(_('unable to collapse, there is more ' | |
347 | 'than one external parent')) |
|
349 | 'than one external parent')) | |
348 | external = p.rev() |
|
350 | external = p.rev() | |
349 |
|
351 | |||
350 | state[source] = nullrev |
|
352 | state[source] = nullrev | |
351 | return repo['.'].rev(), repo[dest].rev(), state, external |
|
353 | return repo['.'].rev(), repo[dest].rev(), state, external | |
352 |
|
354 | |||
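When no source is given, the core of buildstate is the set difference between the working directory's ancestors and the target's ancestors: whatever is left is the branch being rebased, and its minimum is the fork point used as the source. A toy example with invented revision numbers:

cwdancestors = {0, 1, 2, 5, 6, 7}      # ancestors of the working directory
targetancestors = {0, 1, 2, 3, 4}      # ancestors of the rebase destination

rebasingbranch = cwdancestors - targetancestors
source = min(rebasingbranch)           # first revision unique to our branch
print('%s, fork at %d' % (sorted(rebasingbranch), source))   # -> [5, 6, 7], fork at 5
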
353 | def pulldelegate(pullfunction, repo, *args, **opts): |
|
355 | def pulldelegate(pullfunction, repo, *args, **opts): | |
354 | 'Call rebase after pull if the latter has been invoked with --rebase' |
|
356 | 'Call rebase after pull if the latter has been invoked with --rebase' | |
355 | if opts.get('rebase'): |
|
357 | if opts.get('rebase'): | |
356 | if opts.get('update'): |
|
358 | if opts.get('update'): | |
357 | raise util.Abort(_('--update and --rebase are not compatible')) |
|
359 | raise util.Abort(_('--update and --rebase are not compatible')) | |
358 |
|
360 | |||
359 | cmdutil.bail_if_changed(repo) |
|
361 | cmdutil.bail_if_changed(repo) | |
360 | revsprepull = len(repo) |
|
362 | revsprepull = len(repo) | |
361 | pullfunction(repo.ui, repo, *args, **opts) |
|
363 | pullfunction(repo.ui, repo, *args, **opts) | |
362 | revspostpull = len(repo) |
|
364 | revspostpull = len(repo) | |
363 | if revspostpull > revsprepull: |
|
365 | if revspostpull > revsprepull: | |
364 | rebase(repo.ui, repo, **opts) |
|
366 | rebase(repo.ui, repo, **opts) | |
365 | else: |
|
367 | else: | |
366 | pullfunction(repo.ui, repo, *args, **opts) |
|
368 | pullfunction(repo.ui, repo, *args, **opts) | |
367 |
|
369 | |||
368 | def uisetup(ui): |
|
370 | def uisetup(ui): | |
369 | 'Replace pull with a decorator to provide --rebase option' |
|
371 | 'Replace pull with a decorator to provide --rebase option' | |
370 | # cribbed from color.py |
|
372 | # cribbed from color.py | |
371 | aliases, entry = cmdutil.findcmd(ui, 'pull', commands.table) |
|
373 | aliases, entry = cmdutil.findcmd(ui, 'pull', commands.table) | |
372 | for candidatekey, candidateentry in commands.table.iteritems(): |
|
374 | for candidatekey, candidateentry in commands.table.iteritems(): | |
373 | if candidateentry is entry: |
|
375 | if candidateentry is entry: | |
374 | cmdkey, cmdentry = candidatekey, entry |
|
376 | cmdkey, cmdentry = candidatekey, entry | |
375 | break |
|
377 | break | |
376 |
|
378 | |||
377 | decorator = lambda ui, repo, *args, **opts: \ |
|
379 | decorator = lambda ui, repo, *args, **opts: \ | |
378 | pulldelegate(cmdentry[0], repo, *args, **opts) |
|
380 | pulldelegate(cmdentry[0], repo, *args, **opts) | |
379 | # make sure 'hg help cmd' still works |
|
381 | # make sure 'hg help cmd' still works | |
380 | decorator.__doc__ = cmdentry[0].__doc__ |
|
382 | decorator.__doc__ = cmdentry[0].__doc__ | |
381 | decoratorentry = (decorator,) + cmdentry[1:] |
|
383 | decoratorentry = (decorator,) + cmdentry[1:] | |
382 | rebaseopt = ('', 'rebase', None, |
|
384 | rebaseopt = ('', 'rebase', None, | |
383 | _("rebase working directory to branch head")) |
|
385 | _("rebase working directory to branch head")) | |
384 | decoratorentry[1].append(rebaseopt) |
|
386 | decoratorentry[1].append(rebaseopt) | |
385 | commands.table[cmdkey] = decoratorentry |
|
387 | commands.table[cmdkey] = decoratorentry | |
386 |
|
388 | |||
387 | cmdtable = { |
|
389 | cmdtable = { | |
388 | "rebase": |
|
390 | "rebase": | |
389 | (rebase, |
|
391 | (rebase, | |
390 | [ |
|
392 | [ | |
391 | ('', 'keep', False, _('keep original revisions')), |
|
393 | ('', 'keep', False, _('keep original revisions')), | |
392 | ('s', 'source', '', _('rebase from a given revision')), |
|
394 | ('s', 'source', '', _('rebase from a given revision')), | |
393 | ('b', 'base', '', _('rebase from the base of a given revision')), |
|
395 | ('b', 'base', '', _('rebase from the base of a given revision')), | |
394 | ('d', 'dest', '', _('rebase onto a given revision')), |
|
396 | ('d', 'dest', '', _('rebase onto a given revision')), | |
395 | ('', 'collapse', False, _('collapse the rebased revisions')), |
|
397 | ('', 'collapse', False, _('collapse the rebased revisions')), | |
396 | ('c', 'continue', False, _('continue an interrupted rebase')), |
|
398 | ('c', 'continue', False, _('continue an interrupted rebase')), | |
397 | ('a', 'abort', False, _('abort an interrupted rebase')),] + |
|
399 | ('a', 'abort', False, _('abort an interrupted rebase')),] + | |
398 | templateopts, |
|
400 | templateopts, | |
399 | _('hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | ' |
|
401 | _('hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | ' | |
400 | '[--keep]')), |
|
402 | '[--keep]')), | |
401 | } |
|
403 | } |
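uisetup above patches the stock pull entry in commands.table so that the wrapped command grows a --rebase flag, the same trick color.py uses. Reduced to its essentials, the pattern is wrapping a (function, options, usage) tuple and appending an option; the sketch below uses a simplified table and invented names purely for illustration.

def pull(ui, repo, **opts):
    print('pulling...')

table = {'pull': (pull,
                  [('u', 'update', None, 'update after pull')],
                  'hg pull [-u]')}

def wrap_with_rebase(entry):
    func, options, usage = entry

    def wrapper(ui, repo, **opts):
        func(ui, repo, **opts)
        if opts.get('rebase'):
            print('rebasing onto the pulled changes...')

    wrapper.__doc__ = func.__doc__     # keep 'help pull' output intact
    options = options + [('', 'rebase', None,
                          'rebase working directory to branch head')]
    return (wrapper, options, usage)

table['pull'] = wrap_with_rebase(table['pull'])
table['pull'][0](None, None, rebase=True)
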
@@ -1,3327 +1,3342 | |||||
1 | # commands.py - command processing for mercurial |
|
1 | # commands.py - command processing for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from node import hex, nullid, nullrev, short |
|
8 | from node import hex, nullid, nullrev, short | |
9 | from repo import RepoError, NoCapability |
|
9 | from repo import RepoError, NoCapability | |
10 | from i18n import _, gettext |
|
10 | from i18n import _, gettext | |
11 | import os, re, sys, urllib |
|
11 | import os, re, sys, urllib | |
12 | import hg, util, revlog, bundlerepo, extensions, copies |
|
12 | import hg, util, revlog, bundlerepo, extensions, copies | |
13 | import difflib, patch, time, help, mdiff, tempfile |
|
13 | import difflib, patch, time, help, mdiff, tempfile | |
14 | import version, socket |
|
14 | import version, socket | |
15 | import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect |
|
15 | import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect | |
16 | import merge as merge_ |
|
16 | import merge as merge_ | |
17 |
|
17 | |||
18 | # Commands start here, listed alphabetically |
|
18 | # Commands start here, listed alphabetically | |
19 |
|
19 | |||
20 | def add(ui, repo, *pats, **opts): |
|
20 | def add(ui, repo, *pats, **opts): | |
21 | """add the specified files on the next commit |
|
21 | """add the specified files on the next commit | |
22 |
|
22 | |||
23 | Schedule files to be version controlled and added to the repository. |
|
23 | Schedule files to be version controlled and added to the repository. | |
24 |
|
24 | |||
25 | The files will be added to the repository at the next commit. To |
|
25 | The files will be added to the repository at the next commit. To | |
26 | undo an add before that, see hg revert. |
|
26 | undo an add before that, see hg revert. | |
27 |
|
27 | |||
28 | If no names are given, add all files in the repository. |
|
28 | If no names are given, add all files in the repository. | |
29 | """ |
|
29 | """ | |
30 |
|
30 | |||
31 | rejected = None |
|
31 | rejected = None | |
32 | exacts = {} |
|
32 | exacts = {} | |
33 | names = [] |
|
33 | names = [] | |
34 | m = cmdutil.match(repo, pats, opts) |
|
34 | m = cmdutil.match(repo, pats, opts) | |
35 | m.bad = lambda x,y: True |
|
35 | m.bad = lambda x,y: True | |
36 | for abs in repo.walk(m): |
|
36 | for abs in repo.walk(m): | |
37 | if m.exact(abs): |
|
37 | if m.exact(abs): | |
38 | if ui.verbose: |
|
38 | if ui.verbose: | |
39 | ui.status(_('adding %s\n') % m.rel(abs)) |
|
39 | ui.status(_('adding %s\n') % m.rel(abs)) | |
40 | names.append(abs) |
|
40 | names.append(abs) | |
41 | exacts[abs] = 1 |
|
41 | exacts[abs] = 1 | |
42 | elif abs not in repo.dirstate: |
|
42 | elif abs not in repo.dirstate: | |
43 | ui.status(_('adding %s\n') % m.rel(abs)) |
|
43 | ui.status(_('adding %s\n') % m.rel(abs)) | |
44 | names.append(abs) |
|
44 | names.append(abs) | |
45 | if not opts.get('dry_run'): |
|
45 | if not opts.get('dry_run'): | |
46 | rejected = repo.add(names) |
|
46 | rejected = repo.add(names) | |
47 | rejected = [p for p in rejected if p in exacts] |
|
47 | rejected = [p for p in rejected if p in exacts] | |
48 | return rejected and 1 or 0 |
|
48 | return rejected and 1 or 0 | |
49 |
|
49 | |||
50 | def addremove(ui, repo, *pats, **opts): |
|
50 | def addremove(ui, repo, *pats, **opts): | |
51 | """add all new files, delete all missing files |
|
51 | """add all new files, delete all missing files | |
52 |
|
52 | |||
53 | Add all new files and remove all missing files from the repository. |
|
53 | Add all new files and remove all missing files from the repository. | |
54 |
|
54 | |||
55 | New files are ignored if they match any of the patterns in .hgignore. As |
|
55 | New files are ignored if they match any of the patterns in .hgignore. As | |
56 | with add, these changes take effect at the next commit. |
|
56 | with add, these changes take effect at the next commit. | |
57 |
|
57 | |||
58 | Use the -s option to detect renamed files. With a parameter > 0, |
|
58 | Use the -s option to detect renamed files. With a parameter > 0, | |
59 | this compares every removed file with every added file and records |
|
59 | this compares every removed file with every added file and records | |
60 | those similar enough as renames. This option takes a percentage |
|
60 | those similar enough as renames. This option takes a percentage | |
61 | between 0 (disabled) and 100 (files must be identical) as its |
|
61 | between 0 (disabled) and 100 (files must be identical) as its | |
62 | parameter. Detecting renamed files this way can be expensive. |
|
62 | parameter. Detecting renamed files this way can be expensive. | |
63 | """ |
|
63 | """ | |
64 | try: |
|
64 | try: | |
65 | sim = float(opts.get('similarity') or 0) |
|
65 | sim = float(opts.get('similarity') or 0) | |
66 | except ValueError: |
|
66 | except ValueError: | |
67 | raise util.Abort(_('similarity must be a number')) |
|
67 | raise util.Abort(_('similarity must be a number')) | |
68 | if sim < 0 or sim > 100: |
|
68 | if sim < 0 or sim > 100: | |
69 | raise util.Abort(_('similarity must be between 0 and 100')) |
|
69 | raise util.Abort(_('similarity must be between 0 and 100')) | |
70 | return cmdutil.addremove(repo, pats, opts, similarity=sim/100.) |
|
70 | return cmdutil.addremove(repo, pats, opts, similarity=sim/100.) | |
71 |
|
71 | |||
72 | def annotate(ui, repo, *pats, **opts): |
|
72 | def annotate(ui, repo, *pats, **opts): | |
73 | """show changeset information per file line |
|
73 | """show changeset information per file line | |
74 |
|
74 | |||
75 | List changes in files, showing the revision id responsible for each line |
|
75 | List changes in files, showing the revision id responsible for each line | |
76 |
|
76 | |||
77 | This command is useful to discover who did a change or when a change took |
|
77 | This command is useful to discover who did a change or when a change took | |
78 | place. |
|
78 | place. | |
79 |
|
79 | |||
80 | Without the -a option, annotate will avoid processing files it |
|
80 | Without the -a option, annotate will avoid processing files it | |
81 | detects as binary. With -a, annotate will generate an annotation |
|
81 | detects as binary. With -a, annotate will generate an annotation | |
82 | anyway, probably with undesirable results. |
|
82 | anyway, probably with undesirable results. | |
83 | """ |
|
83 | """ | |
84 | datefunc = ui.quiet and util.shortdate or util.datestr |
|
84 | datefunc = ui.quiet and util.shortdate or util.datestr | |
85 | getdate = util.cachefunc(lambda x: datefunc(x[0].date())) |
|
85 | getdate = util.cachefunc(lambda x: datefunc(x[0].date())) | |
86 |
|
86 | |||
87 | if not pats: |
|
87 | if not pats: | |
88 | raise util.Abort(_('at least one file name or pattern required')) |
|
88 | raise util.Abort(_('at least one file name or pattern required')) | |
89 |
|
89 | |||
90 | opmap = [('user', lambda x: ui.shortuser(x[0].user())), |
|
90 | opmap = [('user', lambda x: ui.shortuser(x[0].user())), | |
91 | ('number', lambda x: str(x[0].rev())), |
|
91 | ('number', lambda x: str(x[0].rev())), | |
92 | ('changeset', lambda x: short(x[0].node())), |
|
92 | ('changeset', lambda x: short(x[0].node())), | |
93 | ('date', getdate), |
|
93 | ('date', getdate), | |
94 | ('follow', lambda x: x[0].path()), |
|
94 | ('follow', lambda x: x[0].path()), | |
95 | ] |
|
95 | ] | |
96 |
|
96 | |||
97 | if (not opts['user'] and not opts['changeset'] and not opts['date'] |
|
97 | if (not opts.get('user') and not opts.get('changeset') and not opts.get('date') | |
98 | and not opts['follow']): |
|
98 | and not opts.get('follow')): | |
99 | opts['number'] = 1 |
|
99 | opts['number'] = 1 | |
100 |
|
100 | |||
101 | linenumber = opts.get('line_number') is not None |
|
101 | linenumber = opts.get('line_number') is not None | |
102 | if (linenumber and (not opts['changeset']) and (not opts['number'])): |
|
102 | if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))): | |
103 | raise util.Abort(_('at least one of -n/-c is required for -l')) |
|
103 | raise util.Abort(_('at least one of -n/-c is required for -l')) | |
104 |
|
104 | |||
105 | funcmap = [func for op, func in opmap if opts.get(op)] |
|
105 | funcmap = [func for op, func in opmap if opts.get(op)] | |
106 | if linenumber: |
|
106 | if linenumber: | |
107 | lastfunc = funcmap[-1] |
|
107 | lastfunc = funcmap[-1] | |
108 | funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) |
|
108 | funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) | |
109 |
|
109 | |||
110 | ctx = repo[opts['rev']] |
|
110 | ctx = repo[opts.get('rev')] | |
111 |
|
111 | |||
112 | m = cmdutil.match(repo, pats, opts) |
|
112 | m = cmdutil.match(repo, pats, opts) | |
113 | for abs in ctx.walk(m): |
|
113 | for abs in ctx.walk(m): | |
114 | fctx = ctx[abs] |
|
114 | fctx = ctx[abs] | |
115 | if not opts['text'] and util.binary(fctx.data()): |
|
115 | if not opts.get('text') and util.binary(fctx.data()): | |
116 | ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs)) |
|
116 | ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs)) | |
117 | continue |
|
117 | continue | |
118 |
|
118 | |||
119 | lines = fctx.annotate(follow=opts.get('follow'), |
|
119 | lines = fctx.annotate(follow=opts.get('follow'), | |
120 | linenumber=linenumber) |
|
120 | linenumber=linenumber) | |
121 | pieces = [] |
|
121 | pieces = [] | |
122 |
|
122 | |||
123 | for f in funcmap: |
|
123 | for f in funcmap: | |
124 | l = [f(n) for n, dummy in lines] |
|
124 | l = [f(n) for n, dummy in lines] | |
125 | if l: |
|
125 | if l: | |
126 | ml = max(map(len, l)) |
|
126 | ml = max(map(len, l)) | |
127 | pieces.append(["%*s" % (ml, x) for x in l]) |
|
127 | pieces.append(["%*s" % (ml, x) for x in l]) | |
128 |
|
128 | |||
129 | if pieces: |
|
129 | if pieces: | |
130 | for p, l in zip(zip(*pieces), lines): |
|
130 | for p, l in zip(zip(*pieces), lines): | |
131 | ui.write("%s: %s" % (" ".join(p), l[1])) |
|
131 | ui.write("%s: %s" % (" ".join(p), l[1])) | |
132 |
|
132 | |||
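The funcmap/pieces machinery in annotate right-pads each requested field to the widest value in its column, then zips the columns back into rows. A toy run with made-up annotation data shows the aligned output it produces:

import sys

lines = [(('alice', 14), 'first line\n'),     # (metadata, file line)
         (('bob', 7), 'second line\n'),
         (('carol', 103), 'third line\n')]

funcmap = [lambda x: x[0],                    # user column
           lambda x: str(x[1])]               # revision-number column

pieces = []
for f in funcmap:
    col = [f(meta) for meta, _ in lines]
    width = max(len(v) for v in col)
    pieces.append(['%*s' % (width, v) for v in col])   # right-align column

for p, (_, text) in zip(zip(*pieces), lines):
    sys.stdout.write('%s: %s' % (' '.join(p), text))
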
133 | def archive(ui, repo, dest, **opts): |
|
133 | def archive(ui, repo, dest, **opts): | |
134 | '''create unversioned archive of a repository revision |
|
134 | '''create unversioned archive of a repository revision | |
135 |
|
135 | |||
136 | By default, the revision used is the parent of the working |
|
136 | By default, the revision used is the parent of the working | |
137 | directory; use "-r" to specify a different revision. |
|
137 | directory; use "-r" to specify a different revision. | |
138 |
|
138 | |||
139 | To specify the type of archive to create, use "-t". Valid |
|
139 | To specify the type of archive to create, use "-t". Valid | |
140 | types are: |
|
140 | types are: | |
141 |
|
141 | |||
142 | "files" (default): a directory full of files |
|
142 | "files" (default): a directory full of files | |
143 | "tar": tar archive, uncompressed |
|
143 | "tar": tar archive, uncompressed | |
144 | "tbz2": tar archive, compressed using bzip2 |
|
144 | "tbz2": tar archive, compressed using bzip2 | |
145 | "tgz": tar archive, compressed using gzip |
|
145 | "tgz": tar archive, compressed using gzip | |
146 | "uzip": zip archive, uncompressed |
|
146 | "uzip": zip archive, uncompressed | |
147 | "zip": zip archive, compressed using deflate |
|
147 | "zip": zip archive, compressed using deflate | |
148 |
|
148 | |||
149 | The exact name of the destination archive or directory is given |
|
149 | The exact name of the destination archive or directory is given | |
150 | using a format string; see "hg help export" for details. |
|
150 | using a format string; see "hg help export" for details. | |
151 |
|
151 | |||
152 | Each member added to an archive file has a directory prefix |
|
152 | Each member added to an archive file has a directory prefix | |
153 | prepended. Use "-p" to specify a format string for the prefix. |
|
153 | prepended. Use "-p" to specify a format string for the prefix. | |
154 | The default is the basename of the archive, with suffixes removed. |
|
154 | The default is the basename of the archive, with suffixes removed. | |
155 | ''' |
|
155 | ''' | |
156 |
|
156 | |||
157 | ctx = repo[opts['rev']] |
|
157 | ctx = repo[opts.get('rev')] | |
158 | if not ctx: |
|
158 | if not ctx: | |
159 | raise util.Abort(_('repository has no revisions')) |
|
159 | raise util.Abort(_('repository has no revisions')) | |
160 | node = ctx.node() |
|
160 | node = ctx.node() | |
161 | dest = cmdutil.make_filename(repo, dest, node) |
|
161 | dest = cmdutil.make_filename(repo, dest, node) | |
162 | if os.path.realpath(dest) == repo.root: |
|
162 | if os.path.realpath(dest) == repo.root: | |
163 | raise util.Abort(_('repository root cannot be destination')) |
|
163 | raise util.Abort(_('repository root cannot be destination')) | |
164 | matchfn = cmdutil.match(repo, [], opts) |
|
164 | matchfn = cmdutil.match(repo, [], opts) | |
165 | kind = opts.get('type') or 'files' |
|
165 | kind = opts.get('type') or 'files' | |
166 | prefix = opts['prefix'] |
|
166 | prefix = opts.get('prefix') | |
167 | if dest == '-': |
|
167 | if dest == '-': | |
168 | if kind == 'files': |
|
168 | if kind == 'files': | |
169 | raise util.Abort(_('cannot archive plain files to stdout')) |
|
169 | raise util.Abort(_('cannot archive plain files to stdout')) | |
170 | dest = sys.stdout |
|
170 | dest = sys.stdout | |
171 | if not prefix: prefix = os.path.basename(repo.root) + '-%h' |
|
171 | if not prefix: prefix = os.path.basename(repo.root) + '-%h' | |
172 | prefix = cmdutil.make_filename(repo, prefix, node) |
|
172 | prefix = cmdutil.make_filename(repo, prefix, node) | |
173 | archival.archive(repo, dest, node, kind, not opts['no_decode'], |
|
173 | archival.archive(repo, dest, node, kind, not opts.get('no_decode'), | |
174 | matchfn, prefix) |
|
174 | matchfn, prefix) | |
175 |
|
175 | |||
176 | def backout(ui, repo, node=None, rev=None, **opts): |
|
176 | def backout(ui, repo, node=None, rev=None, **opts): | |
177 | '''reverse effect of earlier changeset |
|
177 | '''reverse effect of earlier changeset | |
178 |
|
178 | |||
179 | Commit the backed out changes as a new changeset. The new |
|
179 | Commit the backed out changes as a new changeset. The new | |
180 | changeset is a child of the backed out changeset. |
|
180 | changeset is a child of the backed out changeset. | |
181 |
|
181 | |||
182 | If you back out a changeset other than the tip, a new head is |
|
182 | If you back out a changeset other than the tip, a new head is | |
183 | created. This head will be the new tip and you should merge this |
|
183 | created. This head will be the new tip and you should merge this | |
184 | backout changeset with another head (current one by default). |
|
184 | backout changeset with another head (current one by default). | |
185 |
|
185 | |||
186 | The --merge option remembers the parent of the working directory |
|
186 | The --merge option remembers the parent of the working directory | |
187 | before starting the backout, then merges the new head with that |
|
187 | before starting the backout, then merges the new head with that | |
188 | changeset afterwards. This saves you from doing the merge by |
|
188 | changeset afterwards. This saves you from doing the merge by | |
189 | hand. The result of this merge is not committed, as for a normal |
|
189 | hand. The result of this merge is not committed, as for a normal | |
190 | merge. |
|
190 | merge. | |
191 |
|
191 | |||
192 | See \'hg help dates\' for a list of formats valid for -d/--date. |
|
192 | See \'hg help dates\' for a list of formats valid for -d/--date. | |
193 | ''' |
|
193 | ''' | |
194 | if rev and node: |
|
194 | if rev and node: | |
195 | raise util.Abort(_("please specify just one revision")) |
|
195 | raise util.Abort(_("please specify just one revision")) | |
196 |
|
196 | |||
197 | if not rev: |
|
197 | if not rev: | |
198 | rev = node |
|
198 | rev = node | |
199 |
|
199 | |||
200 | if not rev: |
|
200 | if not rev: | |
201 | raise util.Abort(_("please specify a revision to backout")) |
|
201 | raise util.Abort(_("please specify a revision to backout")) | |
202 |
|
202 | |||
203 | date = opts.get('date') |
|
203 | date = opts.get('date') | |
204 | if date: |
|
204 | if date: | |
205 | opts['date'] = util.parsedate(date) |
|
205 | opts['date'] = util.parsedate(date) | |
206 |
|
206 | |||
207 | cmdutil.bail_if_changed(repo) |
|
207 | cmdutil.bail_if_changed(repo) | |
208 | node = repo.lookup(rev) |
|
208 | node = repo.lookup(rev) | |
209 |
|
209 | |||
210 | op1, op2 = repo.dirstate.parents() |
|
210 | op1, op2 = repo.dirstate.parents() | |
211 | a = repo.changelog.ancestor(op1, node) |
|
211 | a = repo.changelog.ancestor(op1, node) | |
212 | if a != node: |
|
212 | if a != node: | |
213 | raise util.Abort(_('cannot back out change on a different branch')) |
|
213 | raise util.Abort(_('cannot back out change on a different branch')) | |
214 |
|
214 | |||
215 | p1, p2 = repo.changelog.parents(node) |
|
215 | p1, p2 = repo.changelog.parents(node) | |
216 | if p1 == nullid: |
|
216 | if p1 == nullid: | |
217 | raise util.Abort(_('cannot back out a change with no parents')) |
|
217 | raise util.Abort(_('cannot back out a change with no parents')) | |
218 | if p2 != nullid: |
|
218 | if p2 != nullid: | |
219 | if not opts['parent']: |
|
219 | if not opts.get('parent'): | |
220 | raise util.Abort(_('cannot back out a merge changeset without ' |
|
220 | raise util.Abort(_('cannot back out a merge changeset without ' | |
221 | '--parent')) |
|
221 | '--parent')) | |
222 | p = repo.lookup(opts['parent']) |
|
222 | p = repo.lookup(opts['parent']) | |
223 | if p not in (p1, p2): |
|
223 | if p not in (p1, p2): | |
224 | raise util.Abort(_('%s is not a parent of %s') % |
|
224 | raise util.Abort(_('%s is not a parent of %s') % | |
225 | (short(p), short(node))) |
|
225 | (short(p), short(node))) | |
226 | parent = p |
|
226 | parent = p | |
227 | else: |
|
227 | else: | |
228 | if opts['parent']: |
|
228 | if opts.get('parent'): | |
229 | raise util.Abort(_('cannot use --parent on non-merge changeset')) |
|
229 | raise util.Abort(_('cannot use --parent on non-merge changeset')) | |
230 | parent = p1 |
|
230 | parent = p1 | |
231 |
|
231 | |||
232 | # the backout should appear on the same branch |
|
232 | # the backout should appear on the same branch | |
233 | branch = repo.dirstate.branch() |
|
233 | branch = repo.dirstate.branch() | |
234 | hg.clean(repo, node, show_stats=False) |
|
234 | hg.clean(repo, node, show_stats=False) | |
235 | repo.dirstate.setbranch(branch) |
|
235 | repo.dirstate.setbranch(branch) | |
236 | revert_opts = opts.copy() |
|
236 | revert_opts = opts.copy() | |
237 | revert_opts['date'] = None |
|
237 | revert_opts['date'] = None | |
238 | revert_opts['all'] = True |
|
238 | revert_opts['all'] = True | |
239 | revert_opts['rev'] = hex(parent) |
|
239 | revert_opts['rev'] = hex(parent) | |
240 | revert_opts['no_backup'] = None |
|
240 | revert_opts['no_backup'] = None | |
241 | revert(ui, repo, **revert_opts) |
|
241 | revert(ui, repo, **revert_opts) | |
242 | commit_opts = opts.copy() |
|
242 | commit_opts = opts.copy() | |
243 | commit_opts['addremove'] = False |
|
243 | commit_opts['addremove'] = False | |
244 | if not commit_opts['message'] and not commit_opts['logfile']: |
|
244 | if not commit_opts['message'] and not commit_opts['logfile']: | |
245 | commit_opts['message'] = _("Backed out changeset %s") % (short(node)) |
|
245 | commit_opts['message'] = _("Backed out changeset %s") % (short(node)) | |
246 | commit_opts['force_editor'] = True |
|
246 | commit_opts['force_editor'] = True | |
247 | commit(ui, repo, **commit_opts) |
|
247 | commit(ui, repo, **commit_opts) | |
248 | def nice(node): |
|
248 | def nice(node): | |
249 | return '%d:%s' % (repo.changelog.rev(node), short(node)) |
|
249 | return '%d:%s' % (repo.changelog.rev(node), short(node)) | |
250 | ui.status(_('changeset %s backs out changeset %s\n') % |
|
250 | ui.status(_('changeset %s backs out changeset %s\n') % | |
251 | (nice(repo.changelog.tip()), nice(node))) |
|
251 | (nice(repo.changelog.tip()), nice(node))) | |
252 | if op1 != node: |
|
252 | if op1 != node: | |
253 | hg.clean(repo, op1, show_stats=False) |
|
253 | hg.clean(repo, op1, show_stats=False) | |
254 | if opts['merge']: |
|
254 | if opts.get('merge'): | |
255 | ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip())) |
|
255 | ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip())) | |
256 | hg.merge(repo, hex(repo.changelog.tip())) |
|
256 | hg.merge(repo, hex(repo.changelog.tip())) | |
257 | else: |
|
257 | else: | |
258 | ui.status(_('the backout changeset is a new head - ' |
|
258 | ui.status(_('the backout changeset is a new head - ' | |
259 | 'do not forget to merge\n')) |
|
259 | 'do not forget to merge\n')) | |
260 | ui.status(_('(use "backout --merge" ' |
|
260 | ui.status(_('(use "backout --merge" ' | |
261 | 'if you want to auto-merge)\n')) |
|
261 | 'if you want to auto-merge)\n')) | |
262 |
|
262 | |||
263 | def bisect(ui, repo, rev=None, extra=None, |
|
263 | def bisect(ui, repo, rev=None, extra=None, | |
264 | reset=None, good=None, bad=None, skip=None, noupdate=None): |
|
264 | reset=None, good=None, bad=None, skip=None, noupdate=None): | |
265 | """subdivision search of changesets |
|
265 | """subdivision search of changesets | |
266 |
|
266 | |||
267 | This command helps to find changesets which introduce problems. |
|
267 | This command helps to find changesets which introduce problems. | |
268 | To use, mark the earliest changeset you know exhibits the problem |
|
268 | To use, mark the earliest changeset you know exhibits the problem | |
269 | as bad, then mark the latest changeset which is free from the |
|
269 | as bad, then mark the latest changeset which is free from the | |
270 | problem as good. Bisect will update your working directory to a |
|
270 | problem as good. Bisect will update your working directory to a | |
271 | revision for testing (unless the --noupdate option is specified). |
|
271 | revision for testing (unless the --noupdate option is specified). | |
272 | Once you have performed tests, mark the working directory as bad |
|
272 | Once you have performed tests, mark the working directory as bad | |
273 | or good and bisect will either update to another candidate changeset |
|
273 | or good and bisect will either update to another candidate changeset | |
274 | or announce that it has found the bad revision. |
|
274 | or announce that it has found the bad revision. | |
275 |
|
275 | |||
276 | As a shortcut, you can also use the revision argument to mark a |
|
276 | As a shortcut, you can also use the revision argument to mark a | |
277 | revision as good or bad without checking it out first. |
|
277 | revision as good or bad without checking it out first. | |
278 | """ |
|
278 | """ | |
279 | # backward compatibility |
|
279 | # backward compatibility | |
280 | if rev in "good bad reset init".split(): |
|
280 | if rev in "good bad reset init".split(): | |
281 | ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n")) |
|
281 | ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n")) | |
282 | cmd, rev, extra = rev, extra, None |
|
282 | cmd, rev, extra = rev, extra, None | |
283 | if cmd == "good": |
|
283 | if cmd == "good": | |
284 | good = True |
|
284 | good = True | |
285 | elif cmd == "bad": |
|
285 | elif cmd == "bad": | |
286 | bad = True |
|
286 | bad = True | |
287 | else: |
|
287 | else: | |
288 | reset = True |
|
288 | reset = True | |
289 | elif extra or good + bad + skip + reset > 1: |
|
289 | elif extra or good + bad + skip + reset > 1: | |
290 | raise util.Abort(_('incompatible arguments')) |
|
290 | raise util.Abort(_('incompatible arguments')) | |
291 |
|
291 | |||
292 | if reset: |
|
292 | if reset: | |
293 | p = repo.join("bisect.state") |
|
293 | p = repo.join("bisect.state") | |
294 | if os.path.exists(p): |
|
294 | if os.path.exists(p): | |
295 | os.unlink(p) |
|
295 | os.unlink(p) | |
296 | return |
|
296 | return | |
297 |
|
297 | |||
298 | # load state |
|
298 | # load state | |
299 | state = {'good': [], 'bad': [], 'skip': []} |
|
299 | state = {'good': [], 'bad': [], 'skip': []} | |
300 | if os.path.exists(repo.join("bisect.state")): |
|
300 | if os.path.exists(repo.join("bisect.state")): | |
301 | for l in repo.opener("bisect.state"): |
|
301 | for l in repo.opener("bisect.state"): | |
302 | kind, node = l[:-1].split() |
|
302 | kind, node = l[:-1].split() | |
303 | node = repo.lookup(node) |
|
303 | node = repo.lookup(node) | |
304 | if kind not in state: |
|
304 | if kind not in state: | |
305 | raise util.Abort(_("unknown bisect kind %s") % kind) |
|
305 | raise util.Abort(_("unknown bisect kind %s") % kind) | |
306 | state[kind].append(node) |
|
306 | state[kind].append(node) | |
307 |
|
307 | |||
308 | # update state |
|
308 | # update state | |
309 | node = repo.lookup(rev or '.') |
|
309 | node = repo.lookup(rev or '.') | |
310 | if good: |
|
310 | if good: | |
311 | state['good'].append(node) |
|
311 | state['good'].append(node) | |
312 | elif bad: |
|
312 | elif bad: | |
313 | state['bad'].append(node) |
|
313 | state['bad'].append(node) | |
314 | elif skip: |
|
314 | elif skip: | |
315 | state['skip'].append(node) |
|
315 | state['skip'].append(node) | |
316 |
|
316 | |||
317 | # save state |
|
317 | # save state | |
318 | f = repo.opener("bisect.state", "w", atomictemp=True) |
|
318 | f = repo.opener("bisect.state", "w", atomictemp=True) | |
319 | wlock = repo.wlock() |
|
319 | wlock = repo.wlock() | |
320 | try: |
|
320 | try: | |
321 | for kind in state: |
|
321 | for kind in state: | |
322 | for node in state[kind]: |
|
322 | for node in state[kind]: | |
323 | f.write("%s %s\n" % (kind, hex(node))) |
|
323 | f.write("%s %s\n" % (kind, hex(node))) | |
324 | f.rename() |
|
324 | f.rename() | |
325 | finally: |
|
325 | finally: | |
326 | del wlock |
|
326 | del wlock | |
327 |
|
327 | |||
328 | if not state['good'] or not state['bad']: |
|
328 | if not state['good'] or not state['bad']: | |
329 | if (good or bad or skip or reset): |
|
329 | if (good or bad or skip or reset): | |
330 | return |
|
330 | return | |
331 | if not state['good']: |
|
331 | if not state['good']: | |
332 | raise util.Abort(_('cannot bisect (no known good revisions)')) |
|
332 | raise util.Abort(_('cannot bisect (no known good revisions)')) | |
333 | else: |
|
333 | else: | |
334 | raise util.Abort(_('cannot bisect (no known bad revisions)')) |
|
334 | raise util.Abort(_('cannot bisect (no known bad revisions)')) | |
335 |
|
335 | |||
336 | # actually bisect |
|
336 | # actually bisect | |
337 | nodes, changesets, good = hbisect.bisect(repo.changelog, state) |
|
337 | nodes, changesets, good = hbisect.bisect(repo.changelog, state) | |
338 | if changesets == 0: |
|
338 | if changesets == 0: | |
339 | displayer = cmdutil.show_changeset(ui, repo, {}) |
|
339 | displayer = cmdutil.show_changeset(ui, repo, {}) | |
340 | transition = (good and "good" or "bad") |
|
340 | transition = (good and "good" or "bad") | |
341 | if len(nodes) == 1: |
|
341 | if len(nodes) == 1: | |
342 | # narrowed it down to a single revision |
|
342 | # narrowed it down to a single revision | |
343 | ui.write(_("The first %s revision is:\n") % transition) |
|
343 | ui.write(_("The first %s revision is:\n") % transition) | |
344 | displayer.show(changenode=nodes[0]) |
|
344 | displayer.show(changenode=nodes[0]) | |
345 | else: |
|
345 | else: | |
346 | # multiple possible revisions |
|
346 | # multiple possible revisions | |
347 | ui.write(_("Due to skipped revisions, the first " |
|
347 | ui.write(_("Due to skipped revisions, the first " | |
348 | "%s revision could be any of:\n") % transition) |
|
348 | "%s revision could be any of:\n") % transition) | |
349 | for n in nodes: |
|
349 | for n in nodes: | |
350 | displayer.show(changenode=n) |
|
350 | displayer.show(changenode=n) | |
351 | else: |
|
351 | else: | |
352 | assert len(nodes) == 1 # only a single node can be tested next |
|
352 | assert len(nodes) == 1 # only a single node can be tested next | |
353 | node = nodes[0] |
|
353 | node = nodes[0] | |
354 | # compute the approximate number of remaining tests |
|
354 | # compute the approximate number of remaining tests | |
355 | tests, size = 0, 2 |
|
355 | tests, size = 0, 2 | |
356 | while size <= changesets: |
|
356 | while size <= changesets: | |
357 | tests, size = tests + 1, size * 2 |
|
357 | tests, size = tests + 1, size * 2 | |
358 | rev = repo.changelog.rev(node) |
|
358 | rev = repo.changelog.rev(node) | |
359 | ui.write(_("Testing changeset %s:%s " |
|
359 | ui.write(_("Testing changeset %s:%s " | |
360 | "(%s changesets remaining, ~%s tests)\n") |
|
360 | "(%s changesets remaining, ~%s tests)\n") | |
361 | % (rev, short(node), changesets, tests)) |
|
361 | % (rev, short(node), changesets, tests)) | |
362 | if not noupdate: |
|
362 | if not noupdate: | |
363 | cmdutil.bail_if_changed(repo) |
|
363 | cmdutil.bail_if_changed(repo) | |
364 | return hg.clean(repo, node) |
|
364 | return hg.clean(repo, node) | |
365 |
|
365 | |||
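The tests/size loop near the end of bisect is a hand-rolled base-2 logarithm: doubling from 2 until the size exceeds the number of remaining candidates counts how many more halvings the search needs. Plugging in a few values makes the "~%s tests" estimate concrete:

def remaining_tests(changesets):
    # Same doubling loop as in bisect().
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

for n in (1, 10, 100, 1000):
    print('%4d changesets remaining -> ~%d tests' % (n, remaining_tests(n)))
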
366 | def branch(ui, repo, label=None, **opts): |
|
366 | def branch(ui, repo, label=None, **opts): | |
367 | """set or show the current branch name |
|
367 | """set or show the current branch name | |
368 |
|
368 | |||
369 | With no argument, show the current branch name. With one argument, |
|
369 | With no argument, show the current branch name. With one argument, | |
370 | set the working directory branch name (the branch does not exist in |
|
370 | set the working directory branch name (the branch does not exist in | |
371 | the repository until the next commit). |
|
371 | the repository until the next commit). | |
372 |
|
372 | |||
373 | Unless --force is specified, branch will not let you set a |
|
373 | Unless --force is specified, branch will not let you set a | |
374 | branch name that shadows an existing branch. |
|
374 | branch name that shadows an existing branch. | |
375 |
|
375 | |||
376 | Use --clean to reset the working directory branch to that of the |
|
376 | Use --clean to reset the working directory branch to that of the | |
377 | parent of the working directory, negating a previous branch change. |
|
377 | parent of the working directory, negating a previous branch change. | |
378 |
|
378 | |||
379 | Use the command 'hg update' to switch to an existing branch. |
|
379 | Use the command 'hg update' to switch to an existing branch. | |
380 | """ |
|
380 | """ | |
381 |
|
381 | |||
382 | if opts.get('clean'): |
|
382 | if opts.get('clean'): | |
383 | label = repo[None].parents()[0].branch() |
|
383 | label = repo[None].parents()[0].branch() | |
384 | repo.dirstate.setbranch(label) |
|
384 | repo.dirstate.setbranch(label) | |
385 | ui.status(_('reset working directory to branch %s\n') % label) |
|
385 | ui.status(_('reset working directory to branch %s\n') % label) | |
386 | elif label: |
|
386 | elif label: | |
387 | if not opts.get('force') and label in repo.branchtags(): |
|
387 | if not opts.get('force') and label in repo.branchtags(): | |
388 | if label not in [p.branch() for p in repo.parents()]: |
|
388 | if label not in [p.branch() for p in repo.parents()]: | |
389 | raise util.Abort(_('a branch of the same name already exists' |
|
389 | raise util.Abort(_('a branch of the same name already exists' | |
390 | ' (use --force to override)')) |
|
390 | ' (use --force to override)')) | |
391 | repo.dirstate.setbranch(util.fromlocal(label)) |
|
391 | repo.dirstate.setbranch(util.fromlocal(label)) | |
392 | ui.status(_('marked working directory as branch %s\n') % label) |
|
392 | ui.status(_('marked working directory as branch %s\n') % label) | |
393 | else: |
|
393 | else: | |
394 | ui.write("%s\n" % util.tolocal(repo.dirstate.branch())) |
|
394 | ui.write("%s\n" % util.tolocal(repo.dirstate.branch())) | |
395 |
|
395 | |||
396 | def branches(ui, repo, active=False): |
|
396 | def branches(ui, repo, active=False): | |
397 | """list repository named branches |
|
397 | """list repository named branches | |
398 |
|
398 | |||
399 | List the repository's named branches, indicating which ones are |
|
399 | List the repository's named branches, indicating which ones are | |
400 | inactive. If active is specified, only show active branches. |
|
400 | inactive. If active is specified, only show active branches. | |
401 |
|
401 | |||
402 | A branch is considered active if it contains repository heads. |
|
402 | A branch is considered active if it contains repository heads. | |
403 |
|
403 | |||
404 | Use the command 'hg update' to switch to an existing branch. |
|
404 | Use the command 'hg update' to switch to an existing branch. | |
405 | """ |
|
405 | """ | |
406 | hexfunc = ui.debugflag and hex or short |
|
406 | hexfunc = ui.debugflag and hex or short | |
407 | activebranches = [util.tolocal(repo[n].branch()) |
|
407 | activebranches = [util.tolocal(repo[n].branch()) | |
408 | for n in repo.heads()] |
|
408 | for n in repo.heads()] | |
409 | branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag) |
|
409 | branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag) | |
410 | for tag, node in repo.branchtags().items()]) |
|
410 | for tag, node in repo.branchtags().items()]) | |
411 | branches.reverse() |
|
411 | branches.reverse() | |
412 |
|
412 | |||
413 | for isactive, node, tag in branches: |
|
413 | for isactive, node, tag in branches: | |
414 | if (not active) or isactive: |
|
414 | if (not active) or isactive: | |
415 | if ui.quiet: |
|
415 | if ui.quiet: | |
416 | ui.write("%s\n" % tag) |
|
416 | ui.write("%s\n" % tag) | |
417 | else: |
|
417 | else: | |
418 | rev = str(node).rjust(31 - util.locallen(tag)) |
|
418 | rev = str(node).rjust(31 - util.locallen(tag)) | |
419 | isinactive = ((not isactive) and " (inactive)") or '' |
|
419 | isinactive = ((not isactive) and " (inactive)") or '' | |
420 | data = tag, rev, hexfunc(repo.lookup(node)), isinactive |
|
420 | data = tag, rev, hexfunc(repo.lookup(node)), isinactive | |
421 | ui.write("%s %s:%s%s\n" % data) |
|
421 | ui.write("%s %s:%s%s\n" % data) | |
422 |
|
422 | |||
423 | def bundle(ui, repo, fname, dest=None, **opts): |
|
423 | def bundle(ui, repo, fname, dest=None, **opts): | |
424 | """create a changegroup file |
|
424 | """create a changegroup file | |
425 |
|
425 | |||
426 | Generate a compressed changegroup file collecting changesets not |
|
426 | Generate a compressed changegroup file collecting changesets not | |
427 | found in the other repository. |
|
427 | found in the other repository. | |
428 |
|
428 | |||
429 | If no destination repository is specified the destination is |
|
429 | If no destination repository is specified the destination is | |
430 | assumed to have all the nodes specified by one or more --base |
|
430 | assumed to have all the nodes specified by one or more --base | |
431 | parameters. To create a bundle containing all changesets, use |
|
431 | parameters. To create a bundle containing all changesets, use | |
432 | --all (or --base null). To change the compression method applied, |
|
432 | --all (or --base null). To change the compression method applied, | |
433 | use the -t option (by default, bundles are compressed using bz2). |
|
433 | use the -t option (by default, bundles are compressed using bz2). | |
434 |
|
434 | |||
435 | The bundle file can then be transferred using conventional means and |
|
435 | The bundle file can then be transferred using conventional means and | |
436 | applied to another repository with the unbundle or pull command. |
|
436 | applied to another repository with the unbundle or pull command. | |
437 | This is useful when direct push and pull are not available or when |
|
437 | This is useful when direct push and pull are not available or when | |
438 | exporting an entire repository is undesirable. |
|
438 | exporting an entire repository is undesirable. | |
439 |
|
439 | |||
440 | Applying bundles preserves all changeset contents including |
|
440 | Applying bundles preserves all changeset contents including | |
441 | permissions, copy/rename information, and revision history. |
|
441 | permissions, copy/rename information, and revision history. | |
442 | """ |
|
442 | """ | |
443 | revs = opts.get('rev') or None |
|
443 | revs = opts.get('rev') or None | |
444 | if revs: |
|
444 | if revs: | |
445 | revs = [repo.lookup(rev) for rev in revs] |
|
445 | revs = [repo.lookup(rev) for rev in revs] | |
446 | if opts.get('all'): |
|
446 | if opts.get('all'): | |
447 | base = ['null'] |
|
447 | base = ['null'] | |
448 | else: |
|
448 | else: | |
449 | base = opts.get('base') |
|
449 | base = opts.get('base') | |
450 | if base: |
|
450 | if base: | |
451 | if dest: |
|
451 | if dest: | |
452 | raise util.Abort(_("--base is incompatible with specifying " |
|
452 | raise util.Abort(_("--base is incompatible with specifying " | |
453 | "a destination")) |
|
453 | "a destination")) | |
454 | base = [repo.lookup(rev) for rev in base] |
|
454 | base = [repo.lookup(rev) for rev in base] | |
455 | # create the right base |
|
455 | # create the right base | |
456 | # XXX: nodesbetween / changegroup* should be "fixed" instead |
|
456 | # XXX: nodesbetween / changegroup* should be "fixed" instead | |
457 | o = [] |
|
457 | o = [] | |
458 | has = {nullid: None} |
|
458 | has = {nullid: None} | |
459 | for n in base: |
|
459 | for n in base: | |
460 | has.update(repo.changelog.reachable(n)) |
|
460 | has.update(repo.changelog.reachable(n)) | |
461 | if revs: |
|
461 | if revs: | |
462 | visit = list(revs) |
|
462 | visit = list(revs) | |
463 | else: |
|
463 | else: | |
464 | visit = repo.changelog.heads() |
|
464 | visit = repo.changelog.heads() | |
465 | seen = {} |
|
465 | seen = {} | |
466 | while visit: |
|
466 | while visit: | |
467 | n = visit.pop(0) |
|
467 | n = visit.pop(0) | |
468 | parents = [p for p in repo.changelog.parents(n) if p not in has] |
|
468 | parents = [p for p in repo.changelog.parents(n) if p not in has] | |
469 | if len(parents) == 0: |
|
469 | if len(parents) == 0: | |
470 | o.insert(0, n) |
|
470 | o.insert(0, n) | |
471 | else: |
|
471 | else: | |
472 | for p in parents: |
|
472 | for p in parents: | |
473 | if p not in seen: |
|
473 | if p not in seen: | |
474 | seen[p] = 1 |
|
474 | seen[p] = 1 | |
475 | visit.append(p) |
|
475 | visit.append(p) | |
476 | else: |
|
476 | else: | |
477 | cmdutil.setremoteconfig(ui, opts) |
|
477 | cmdutil.setremoteconfig(ui, opts) | |
478 | dest, revs, checkout = hg.parseurl( |
|
478 | dest, revs, checkout = hg.parseurl( | |
479 | ui.expandpath(dest or 'default-push', dest or 'default'), revs) |
|
479 | ui.expandpath(dest or 'default-push', dest or 'default'), revs) | |
480 | other = hg.repository(ui, dest) |
|
480 | other = hg.repository(ui, dest) | |
481 | o = repo.findoutgoing(other, force=opts['force']) |
|
481 | o = repo.findoutgoing(other, force=opts.get('force')) | |
482 |
|
482 | |||
483 | if revs: |
|
483 | if revs: | |
484 | cg = repo.changegroupsubset(o, revs, 'bundle') |
|
484 | cg = repo.changegroupsubset(o, revs, 'bundle') | |
485 | else: |
|
485 | else: | |
486 | cg = repo.changegroup(o, 'bundle') |
|
486 | cg = repo.changegroup(o, 'bundle') | |
487 |
|
487 | |||
488 | bundletype = opts.get('type', 'bzip2').lower() |
|
488 | bundletype = opts.get('type', 'bzip2').lower() | |
489 | btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'} |
|
489 | btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'} | |
490 | bundletype = btypes.get(bundletype) |
|
490 | bundletype = btypes.get(bundletype) | |
491 | if bundletype not in changegroup.bundletypes: |
|
491 | if bundletype not in changegroup.bundletypes: | |
492 | raise util.Abort(_('unknown bundle type specified with --type')) |
|
492 | raise util.Abort(_('unknown bundle type specified with --type')) | |
493 |
|
493 | |||
494 | changegroup.writebundle(cg, fname, bundletype) |
|
494 | changegroup.writebundle(cg, fname, bundletype) | |
495 |
|
495 | |||
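When bundle is given --base, the loop above walks back from the requested heads and keeps the first nodes whose parents are all reachable from the bases; those become the roots handed to changegroupsubset. The sketch below models the same walk on a tiny integer DAG with an explicit parent map; the graph is invented for illustration.

parents = {1: [None], 2: [1], 3: [2], 4: [2], 5: [3, 4]}   # node -> parents

def outgoing_roots(heads, base_reachable):
    # Breadth-first walk from the heads, stopping at the base frontier.
    o, seen, visit = [], set(), list(heads)
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents[n]
                   if p is not None and p not in base_reachable]
        if not missing:
            o.insert(0, n)            # everything below n is already known
        else:
            for p in missing:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return o

# With --base 2, nodes 1 and 2 are already on the other side; the bundle
# is rooted at the first changesets above that frontier.
print(outgoing_roots(heads=[5], base_reachable={1, 2}))    # -> [4, 3]
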
496 | def cat(ui, repo, file1, *pats, **opts): |
|
496 | def cat(ui, repo, file1, *pats, **opts): | |
497 | """output the current or given revision of files |
|
497 | """output the current or given revision of files | |
498 |
|
498 | |||
499 | Print the specified files as they were at the given revision. |
|
499 | Print the specified files as they were at the given revision. | |
500 | If no revision is given, the parent of the working directory is used, |
|
500 | If no revision is given, the parent of the working directory is used, | |
501 | or tip if no revision is checked out. |
|
501 | or tip if no revision is checked out. | |
502 |
|
502 | |||
503 | Output may be to a file, in which case the name of the file is |
|
503 | Output may be to a file, in which case the name of the file is | |
504 | given using a format string. The formatting rules are the same as |
|
504 | given using a format string. The formatting rules are the same as | |
505 | for the export command, with the following additions: |
|
505 | for the export command, with the following additions: | |
506 |
|
506 | |||
507 | %s basename of file being printed |
|
507 | %s basename of file being printed | |
508 | %d dirname of file being printed, or '.' if in repo root |
|
508 | %d dirname of file being printed, or '.' if in repo root | |
509 | %p root-relative path name of file being printed |
|
509 | %p root-relative path name of file being printed | |
510 | """ |
|
510 | """ | |
511 | ctx = repo[opts['rev']] |
|
511 | ctx = repo[opts.get('rev')] | |
512 | err = 1 |
|
512 | err = 1 | |
513 | m = cmdutil.match(repo, (file1,) + pats, opts) |
|
513 | m = cmdutil.match(repo, (file1,) + pats, opts) | |
514 | for abs in ctx.walk(m): |
|
514 | for abs in ctx.walk(m): | |
515 | fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs) |
|
515 | fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs) | |
516 | data = ctx[abs].data() |
|
516 | data = ctx[abs].data() | |
517 | if opts.get('decode'): |
|
517 | if opts.get('decode'): | |
518 | data = repo.wwritedata(abs, data) |
|
518 | data = repo.wwritedata(abs, data) | |
519 | fp.write(data) |
|
519 | fp.write(data) | |
520 | err = 0 |
|
520 | err = 0 | |
521 | return err |
|
521 | return err | |
522 |
|
522 | |||
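The %s/%d/%p keys documented above are expanded by cmdutil.make_file when --output is given. A rough, hypothetical rendering of just those three rules (the helper name and behaviour below are ours for illustration; the real code also handles %% and the export keys):

import os

# Hypothetical helper, illustration only: expand a cat/export-style output
# pattern for one repo-relative path.
def expand_output_pattern(pattern, repopath):
    dirname = os.path.dirname(repopath) or '.'
    return (pattern.replace('%s', os.path.basename(repopath))
                   .replace('%d', dirname)
                   .replace('%p', repopath))

print(expand_output_pattern('dump/%d/%s.txt', 'mercurial/commands.py'))
# -> dump/mercurial/commands.py.txt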
523 | def clone(ui, source, dest=None, **opts): |
|
523 | def clone(ui, source, dest=None, **opts): | |
524 | """make a copy of an existing repository |
|
524 | """make a copy of an existing repository | |
525 |
|
525 | |||
526 | Create a copy of an existing repository in a new directory. |
|
526 | Create a copy of an existing repository in a new directory. | |
527 |
|
527 | |||
528 | If no destination directory name is specified, it defaults to the |
|
528 | If no destination directory name is specified, it defaults to the | |
529 | basename of the source. |
|
529 | basename of the source. | |
530 |
|
530 | |||
531 | The location of the source is added to the new repository's |
|
531 | The location of the source is added to the new repository's | |
532 | .hg/hgrc file, as the default to be used for future pulls. |
|
532 | .hg/hgrc file, as the default to be used for future pulls. | |
533 |
|
533 | |||
534 | For efficiency, hardlinks are used for cloning whenever the source |
|
534 | For efficiency, hardlinks are used for cloning whenever the source | |
535 | and destination are on the same filesystem (note this applies only |
|
535 | and destination are on the same filesystem (note this applies only | |
536 | to the repository data, not to the checked out files). Some |
|
536 | to the repository data, not to the checked out files). Some | |
537 | filesystems, such as AFS, implement hardlinking incorrectly, but |
|
537 | filesystems, such as AFS, implement hardlinking incorrectly, but | |
538 | do not report errors. In these cases, use the --pull option to |
|
538 | do not report errors. In these cases, use the --pull option to | |
539 | avoid hardlinking. |
|
539 | avoid hardlinking. | |
540 |
|
540 | |||
541 | In some cases, you can clone repositories and checked out files |
|
541 | In some cases, you can clone repositories and checked out files | |
542 | using full hardlinks with |
|
542 | using full hardlinks with | |
543 |
|
543 | |||
544 | $ cp -al REPO REPOCLONE |
|
544 | $ cp -al REPO REPOCLONE | |
545 |
|
545 | |||
546 | This is the fastest way to clone, but it is not always safe. The |
|
546 | This is the fastest way to clone, but it is not always safe. The | |
547 | operation is not atomic (making sure REPO is not modified during |
|
547 | operation is not atomic (making sure REPO is not modified during | |
548 | the operation is up to you) and you have to make sure your editor |
|
548 | the operation is up to you) and you have to make sure your editor | |
549 | breaks hardlinks (Emacs and most Linux Kernel tools do so). Also, |
|
549 | breaks hardlinks (Emacs and most Linux Kernel tools do so). Also, | |
550 | this is not compatible with certain extensions that place their |
|
550 | this is not compatible with certain extensions that place their | |
551 | metadata under the .hg directory, such as mq. |
|
551 | metadata under the .hg directory, such as mq. | |
552 |
|
552 | |||
553 | If you use the -r option to clone up to a specific revision, no |
|
553 | If you use the -r option to clone up to a specific revision, no | |
554 | subsequent revisions will be present in the cloned repository. |
|
554 | subsequent revisions will be present in the cloned repository. | |
555 | This option implies --pull, even on local repositories. |
|
555 | This option implies --pull, even on local repositories. | |
556 |
|
556 | |||
557 | If the -U option is used, the new clone will contain only a repository |
|
557 | If the -U option is used, the new clone will contain only a repository | |
558 | (.hg) and no working copy (the working copy parent is the null revision). |
|
558 | (.hg) and no working copy (the working copy parent is the null revision). | |
559 |
|
559 | |||
560 | See pull for valid source format details. |
|
560 | See pull for valid source format details. | |
561 |
|
561 | |||
562 | It is possible to specify an ssh:// URL as the destination, but no |
|
562 | It is possible to specify an ssh:// URL as the destination, but no | |
563 | .hg/hgrc and working directory will be created on the remote side. |
|
563 | .hg/hgrc and working directory will be created on the remote side. | |
564 | Look at the help text for the pull command for important details |
|
564 | Look at the help text for the pull command for important details | |
565 | about ssh:// URLs. |
|
565 | about ssh:// URLs. | |
566 | """ |
|
566 | """ | |
567 | cmdutil.setremoteconfig(ui, opts) |
|
567 | cmdutil.setremoteconfig(ui, opts) | |
568 | hg.clone(ui, source, dest, |
|
568 | hg.clone(ui, source, dest, | |
569 | pull=opts['pull'], |
|
569 | pull=opts.get('pull'), | |
570 | stream=opts['uncompressed'], |
|
570 | stream=opts.get('uncompressed'), | |
571 | rev=opts['rev'], |
|
571 | rev=opts.get('rev'), | |
572 | update=not opts['noupdate']) |
|
572 | update=not opts.get('noupdate')) | |
573 |
|
573 | |||
574 | def commit(ui, repo, *pats, **opts): |
|
574 | def commit(ui, repo, *pats, **opts): | |
575 | """commit the specified files or all outstanding changes |
|
575 | """commit the specified files or all outstanding changes | |
576 |
|
576 | |||
577 | Commit changes to the given files into the repository. |
|
577 | Commit changes to the given files into the repository. | |
578 |
|
578 | |||
579 | If a list of files is omitted, all changes reported by "hg status" |
|
579 | If a list of files is omitted, all changes reported by "hg status" | |
580 | will be committed. |
|
580 | will be committed. | |
581 |
|
581 | |||
582 | If you are committing the result of a merge, do not provide any |
|
582 | If you are committing the result of a merge, do not provide any | |
583 | file names or -I/-X filters. |
|
583 | file names or -I/-X filters. | |
584 |
|
584 | |||
585 | If no commit message is specified, the configured editor is started to |
|
585 | If no commit message is specified, the configured editor is started to | |
586 | enter a message. |
|
586 | enter a message. | |
587 |
|
587 | |||
588 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
588 | See 'hg help dates' for a list of formats valid for -d/--date. | |
589 | """ |
|
589 | """ | |
590 | def commitfunc(ui, repo, message, match, opts): |
|
590 | def commitfunc(ui, repo, message, match, opts): | |
591 | return repo.commit(match.files(), message, opts['user'], opts['date'], |
|
591 | return repo.commit(match.files(), message, opts.get('user'), opts.get('date'), | |
592 | match, force_editor=opts.get('force_editor')) |
|
592 | match, force_editor=opts.get('force_editor')) | |
593 |
|
593 | |||
594 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) |
|
594 | node = cmdutil.commit(ui, repo, commitfunc, pats, opts) | |
595 | if not node: |
|
595 | if not node: | |
596 | return |
|
596 | return | |
597 | cl = repo.changelog |
|
597 | cl = repo.changelog | |
598 | rev = cl.rev(node) |
|
598 | rev = cl.rev(node) | |
599 | parents = cl.parentrevs(rev) |
|
599 | parents = cl.parentrevs(rev) | |
600 | if rev - 1 in parents: |
|
600 | if rev - 1 in parents: | |
601 | # one of the parents was the old tip |
|
601 | # one of the parents was the old tip | |
602 | pass |
|
602 | pass | |
603 | elif (parents == (nullrev, nullrev) or |
|
603 | elif (parents == (nullrev, nullrev) or | |
604 | len(cl.heads(cl.node(parents[0]))) > 1 and |
|
604 | len(cl.heads(cl.node(parents[0]))) > 1 and | |
605 | (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)): |
|
605 | (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)): | |
606 | ui.status(_('created new head\n')) |
|
606 | ui.status(_('created new head\n')) | |
607 |
|
607 | |||
608 | if ui.debugflag: |
|
608 | if ui.debugflag: | |
609 | ui.write(_('committed changeset %d:%s\n') % (rev,hex(node))) |
|
609 | ui.write(_('committed changeset %d:%s\n') % (rev,hex(node))) | |
610 | elif ui.verbose: |
|
610 | elif ui.verbose: | |
611 | ui.write(_('committed changeset %d:%s\n') % (rev,short(node))) |
|
611 | ui.write(_('committed changeset %d:%s\n') % (rev,short(node))) | |
612 |
|
612 | |||
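The parent test above decides when commit prints "created new head". Restated as a standalone predicate over plain values — a sketch, not the Mercurial API, where heads_from(r) stands in for len(cl.heads(cl.node(r))), i.e. the number of heads descending from r:

nullrev = -1

def reports_new_head(rev, parents, heads_from):
    # Sketch of the condition used in commit() above.
    if rev - 1 in parents:
        return False                      # one parent was the old tip
    if parents == (nullrev, nullrev):
        return True
    p1, p2 = parents
    return heads_from(p1) > 1 and (p2 == nullrev or heads_from(p2) > 1)

# Committing on top of a revision that already has another descendant head:
print(reports_new_head(7, (5, nullrev), lambda r: 2))   # True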
613 | def copy(ui, repo, *pats, **opts): |
|
613 | def copy(ui, repo, *pats, **opts): | |
614 | """mark files as copied for the next commit |
|
614 | """mark files as copied for the next commit | |
615 |
|
615 | |||
616 | Mark dest as having copies of source files. If dest is a |
|
616 | Mark dest as having copies of source files. If dest is a | |
617 | directory, copies are put in that directory. If dest is a file, |
|
617 | directory, copies are put in that directory. If dest is a file, | |
618 | there can only be one source. |
|
618 | there can only be one source. | |
619 |
|
619 | |||
620 | By default, this command copies the contents of files as they |
|
620 | By default, this command copies the contents of files as they | |
621 | stand in the working directory. If invoked with --after, the |
|
621 | stand in the working directory. If invoked with --after, the | |
622 | operation is recorded, but no copying is performed. |
|
622 | operation is recorded, but no copying is performed. | |
623 |
|
623 | |||
624 | This command takes effect in the next commit. To undo a copy |
|
624 | This command takes effect in the next commit. To undo a copy | |
625 | before that, see hg revert. |
|
625 | before that, see hg revert. | |
626 | """ |
|
626 | """ | |
627 | wlock = repo.wlock(False) |
|
627 | wlock = repo.wlock(False) | |
628 | try: |
|
628 | try: | |
629 | return cmdutil.copy(ui, repo, pats, opts) |
|
629 | return cmdutil.copy(ui, repo, pats, opts) | |
630 | finally: |
|
630 | finally: | |
631 | del wlock |
|
631 | del wlock | |
632 |
|
632 | |||
633 | def debugancestor(ui, repo, *args): |
|
633 | def debugancestor(ui, repo, *args): | |
634 | """find the ancestor revision of two revisions in a given index""" |
|
634 | """find the ancestor revision of two revisions in a given index""" | |
635 | if len(args) == 3: |
|
635 | if len(args) == 3: | |
636 | index, rev1, rev2 = args |
|
636 | index, rev1, rev2 = args | |
637 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), index) |
|
637 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), index) | |
638 | lookup = r.lookup |
|
638 | lookup = r.lookup | |
639 | elif len(args) == 2: |
|
639 | elif len(args) == 2: | |
640 | if not repo: |
|
640 | if not repo: | |
641 | raise util.Abort(_("There is no Mercurial repository here " |
|
641 | raise util.Abort(_("There is no Mercurial repository here " | |
642 | "(.hg not found)")) |
|
642 | "(.hg not found)")) | |
643 | rev1, rev2 = args |
|
643 | rev1, rev2 = args | |
644 | r = repo.changelog |
|
644 | r = repo.changelog | |
645 | lookup = repo.lookup |
|
645 | lookup = repo.lookup | |
646 | else: |
|
646 | else: | |
647 | raise util.Abort(_('either two or three arguments required')) |
|
647 | raise util.Abort(_('either two or three arguments required')) | |
648 | a = r.ancestor(lookup(rev1), lookup(rev2)) |
|
648 | a = r.ancestor(lookup(rev1), lookup(rev2)) | |
649 | ui.write("%d:%s\n" % (r.rev(a), hex(a))) |
|
649 | ui.write("%d:%s\n" % (r.rev(a), hex(a))) | |
650 |
|
650 | |||
651 | def debugcomplete(ui, cmd='', **opts): |
|
651 | def debugcomplete(ui, cmd='', **opts): | |
652 | """returns the completion list associated with the given command""" |
|
652 | """returns the completion list associated with the given command""" | |
653 |
|
653 | |||
654 | if opts['options']: |
|
654 | if opts.get('options'): | |
655 | options = [] |
|
655 | options = [] | |
656 | otables = [globalopts] |
|
656 | otables = [globalopts] | |
657 | if cmd: |
|
657 | if cmd: | |
658 | aliases, entry = cmdutil.findcmd(ui, cmd, table) |
|
658 | aliases, entry = cmdutil.findcmd(ui, cmd, table) | |
659 | otables.append(entry[1]) |
|
659 | otables.append(entry[1]) | |
660 | for t in otables: |
|
660 | for t in otables: | |
661 | for o in t: |
|
661 | for o in t: | |
662 | if o[0]: |
|
662 | if o[0]: | |
663 | options.append('-%s' % o[0]) |
|
663 | options.append('-%s' % o[0]) | |
664 | options.append('--%s' % o[1]) |
|
664 | options.append('--%s' % o[1]) | |
665 | ui.write("%s\n" % "\n".join(options)) |
|
665 | ui.write("%s\n" % "\n".join(options)) | |
666 | return |
|
666 | return | |
667 |
|
667 | |||
668 | ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table)))) |
|
668 | ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table)))) | |
669 |
|
669 | |||
670 | def debugfsinfo(ui, path = "."): |
|
670 | def debugfsinfo(ui, path = "."): | |
671 | file('.debugfsinfo', 'w').write('') |
|
671 | file('.debugfsinfo', 'w').write('') | |
672 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) |
|
672 | ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) | |
673 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) |
|
673 | ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) | |
674 | ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') |
|
674 | ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') | |
675 | and 'yes' or 'no')) |
|
675 | and 'yes' or 'no')) | |
676 | os.unlink('.debugfsinfo') |
|
676 | os.unlink('.debugfsinfo') | |
677 |
|
677 | |||
678 | def debugrebuildstate(ui, repo, rev="tip"): |
|
678 | def debugrebuildstate(ui, repo, rev="tip"): | |
679 | """rebuild the dirstate as it would look like for the given revision""" |
|
679 | """rebuild the dirstate as it would look like for the given revision""" | |
680 | ctx = repo[rev] |
|
680 | ctx = repo[rev] | |
681 | wlock = repo.wlock() |
|
681 | wlock = repo.wlock() | |
682 | try: |
|
682 | try: | |
683 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) |
|
683 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) | |
684 | finally: |
|
684 | finally: | |
685 | del wlock |
|
685 | del wlock | |
686 |
|
686 | |||
687 | def debugcheckstate(ui, repo): |
|
687 | def debugcheckstate(ui, repo): | |
688 | """validate the correctness of the current dirstate""" |
|
688 | """validate the correctness of the current dirstate""" | |
689 | parent1, parent2 = repo.dirstate.parents() |
|
689 | parent1, parent2 = repo.dirstate.parents() | |
690 | m1 = repo[parent1].manifest() |
|
690 | m1 = repo[parent1].manifest() | |
691 | m2 = repo[parent2].manifest() |
|
691 | m2 = repo[parent2].manifest() | |
692 | errors = 0 |
|
692 | errors = 0 | |
693 | for f in repo.dirstate: |
|
693 | for f in repo.dirstate: | |
694 | state = repo.dirstate[f] |
|
694 | state = repo.dirstate[f] | |
695 | if state in "nr" and f not in m1: |
|
695 | if state in "nr" and f not in m1: | |
696 | ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) |
|
696 | ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) | |
697 | errors += 1 |
|
697 | errors += 1 | |
698 | if state in "a" and f in m1: |
|
698 | if state in "a" and f in m1: | |
699 | ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) |
|
699 | ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) | |
700 | errors += 1 |
|
700 | errors += 1 | |
701 | if state in "m" and f not in m1 and f not in m2: |
|
701 | if state in "m" and f not in m1 and f not in m2: | |
702 | ui.warn(_("%s in state %s, but not in either manifest\n") % |
|
702 | ui.warn(_("%s in state %s, but not in either manifest\n") % | |
703 | (f, state)) |
|
703 | (f, state)) | |
704 | errors += 1 |
|
704 | errors += 1 | |
705 | for f in m1: |
|
705 | for f in m1: | |
706 | state = repo.dirstate[f] |
|
706 | state = repo.dirstate[f] | |
707 | if state not in "nrm": |
|
707 | if state not in "nrm": | |
708 | ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) |
|
708 | ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) | |
709 | errors += 1 |
|
709 | errors += 1 | |
710 | if errors: |
|
710 | if errors: | |
711 | error = _(".hg/dirstate inconsistent with current parent's manifest") |
|
711 | error = _(".hg/dirstate inconsistent with current parent's manifest") | |
712 | raise util.Abort(error) |
|
712 | raise util.Abort(error) | |
713 |
|
713 | |||
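For readers following these checks, the single-letter dirstate states being compared against the manifests are, by Mercurial convention (reference only, not an excerpt from this changeset):

DIRSTATE_CODES = {
    'n': 'normal  - tracked, possibly modified in the working copy',
    'a': 'added   - scheduled for addition',
    'r': 'removed - scheduled for removal',
    'm': 'merged  - recorded during a merge',
}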
714 | def showconfig(ui, repo, *values, **opts): |
|
714 | def showconfig(ui, repo, *values, **opts): | |
715 | """show combined config settings from all hgrc files |
|
715 | """show combined config settings from all hgrc files | |
716 |
|
716 | |||
717 | With no args, print names and values of all config items. |
|
717 | With no args, print names and values of all config items. | |
718 |
|
718 | |||
719 | With one arg of the form section.name, print just the value of |
|
719 | With one arg of the form section.name, print just the value of | |
720 | that config item. |
|
720 | that config item. | |
721 |
|
721 | |||
722 | With multiple args, print names and values of all config items |
|
722 | With multiple args, print names and values of all config items | |
723 | with matching section names.""" |
|
723 | with matching section names.""" | |
724 |
|
724 | |||
725 | untrusted = bool(opts.get('untrusted')) |
|
725 | untrusted = bool(opts.get('untrusted')) | |
726 | if values: |
|
726 | if values: | |
727 | if len([v for v in values if '.' in v]) > 1: |
|
727 | if len([v for v in values if '.' in v]) > 1: | |
728 | raise util.Abort(_('only one config item permitted')) |
|
728 | raise util.Abort(_('only one config item permitted')) | |
729 | for section, name, value in ui.walkconfig(untrusted=untrusted): |
|
729 | for section, name, value in ui.walkconfig(untrusted=untrusted): | |
730 | sectname = section + '.' + name |
|
730 | sectname = section + '.' + name | |
731 | if values: |
|
731 | if values: | |
732 | for v in values: |
|
732 | for v in values: | |
733 | if v == section: |
|
733 | if v == section: | |
734 | ui.write('%s=%s\n' % (sectname, value)) |
|
734 | ui.write('%s=%s\n' % (sectname, value)) | |
735 | elif v == sectname: |
|
735 | elif v == sectname: | |
736 | ui.write(value, '\n') |
|
736 | ui.write(value, '\n') | |
737 | else: |
|
737 | else: | |
738 | ui.write('%s=%s\n' % (sectname, value)) |
|
738 | ui.write('%s=%s\n' % (sectname, value)) | |
739 |
|
739 | |||
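The loop above gives a bare section name and a full section.name different behaviour; the same matching rule, reduced to a self-contained sketch (not Mercurial code):

# A bare section matches every item in it and prints name=value pairs;
# an exact section.name match prints only the value.
items = [('ui', 'username', 'alice'), ('ui', 'verbose', 'true'), ('web', 'port', '8000')]

def show(values):
    out = []
    for section, name, value in items:
        sectname = section + '.' + name
        for v in values:
            if v == section:
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:
                out.append(value)
    return out

print(show(['ui']))        # ['ui.username=alice', 'ui.verbose=true']
print(show(['web.port']))  # ['8000']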
740 | def debugsetparents(ui, repo, rev1, rev2=None): |
|
740 | def debugsetparents(ui, repo, rev1, rev2=None): | |
741 | """manually set the parents of the current working directory |
|
741 | """manually set the parents of the current working directory | |
742 |
|
742 | |||
743 | This is useful for writing repository conversion tools, but should |
|
743 | This is useful for writing repository conversion tools, but should | |
744 | be used with care. |
|
744 | be used with care. | |
745 | """ |
|
745 | """ | |
746 |
|
746 | |||
747 | if not rev2: |
|
747 | if not rev2: | |
748 | rev2 = hex(nullid) |
|
748 | rev2 = hex(nullid) | |
749 |
|
749 | |||
750 | wlock = repo.wlock() |
|
750 | wlock = repo.wlock() | |
751 | try: |
|
751 | try: | |
752 | repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) |
|
752 | repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) | |
753 | finally: |
|
753 | finally: | |
754 | del wlock |
|
754 | del wlock | |
755 |
|
755 | |||
756 | def debugstate(ui, repo, nodates=None): |
|
756 | def debugstate(ui, repo, nodates=None): | |
757 | """show the contents of the current dirstate""" |
|
757 | """show the contents of the current dirstate""" | |
758 | timestr = "" |
|
758 | timestr = "" | |
759 | showdate = not nodates |
|
759 | showdate = not nodates | |
760 | for file_, ent in util.sort(repo.dirstate._map.items()): |
|
760 | for file_, ent in util.sort(repo.dirstate._map.items()): | |
761 | if showdate: |
|
761 | if showdate: | |
762 | if ent[3] == -1: |
|
762 | if ent[3] == -1: | |
763 | # Pad or slice to locale representation |
|
763 | # Pad or slice to locale representation | |
764 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0))) |
|
764 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0))) | |
765 | timestr = 'unset' |
|
765 | timestr = 'unset' | |
766 | timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr)) |
|
766 | timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr)) | |
767 | else: |
|
767 | else: | |
768 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])) |
|
768 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])) | |
769 | if ent[1] & 020000: |
|
769 | if ent[1] & 020000: | |
770 | mode = 'lnk' |
|
770 | mode = 'lnk' | |
771 | else: |
|
771 | else: | |
772 | mode = '%3o' % (ent[1] & 0777) |
|
772 | mode = '%3o' % (ent[1] & 0777) | |
773 | ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) |
|
773 | ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) | |
774 | for f in repo.dirstate.copies(): |
|
774 | for f in repo.dirstate.copies(): | |
775 | ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) |
|
775 | ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) | |
776 |
|
776 | |||
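The two octal masks above do all the mode work: 0o20000 separates symlinks from regular files (the only two kinds of entry the dirstate records), and 0o777 keeps the permission bits. In isolation, using standard stat constants:

import stat

link_mode = stat.S_IFLNK | 0o777       # 0o120777
file_mode = stat.S_IFREG | 0o644       # 0o100644

print('%3o' % (file_mode & 0o777))     # '644' - permission bits only
print(bool(link_mode & 0o20000))       # True  - rendered as 'lnk' above
print(bool(file_mode & 0o20000))       # False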
777 | def debugdata(ui, file_, rev): |
|
777 | def debugdata(ui, file_, rev): | |
778 | """dump the contents of a data file revision""" |
|
778 | """dump the contents of a data file revision""" | |
779 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i") |
|
779 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i") | |
780 | try: |
|
780 | try: | |
781 | ui.write(r.revision(r.lookup(rev))) |
|
781 | ui.write(r.revision(r.lookup(rev))) | |
782 | except KeyError: |
|
782 | except KeyError: | |
783 | raise util.Abort(_('invalid revision identifier %s') % rev) |
|
783 | raise util.Abort(_('invalid revision identifier %s') % rev) | |
784 |
|
784 | |||
785 | def debugdate(ui, date, range=None, **opts): |
|
785 | def debugdate(ui, date, range=None, **opts): | |
786 | """parse and display a date""" |
|
786 | """parse and display a date""" | |
787 | if opts["extended"]: |
|
787 | if opts["extended"]: | |
788 | d = util.parsedate(date, util.extendeddateformats) |
|
788 | d = util.parsedate(date, util.extendeddateformats) | |
789 | else: |
|
789 | else: | |
790 | d = util.parsedate(date) |
|
790 | d = util.parsedate(date) | |
791 | ui.write("internal: %s %s\n" % d) |
|
791 | ui.write("internal: %s %s\n" % d) | |
792 | ui.write("standard: %s\n" % util.datestr(d)) |
|
792 | ui.write("standard: %s\n" % util.datestr(d)) | |
793 | if range: |
|
793 | if range: | |
794 | m = util.matchdate(range) |
|
794 | m = util.matchdate(range) | |
795 | ui.write("match: %s\n" % m(d[0])) |
|
795 | ui.write("match: %s\n" % m(d[0])) | |
796 |
|
796 | |||
797 | def debugindex(ui, file_): |
|
797 | def debugindex(ui, file_): | |
798 | """dump the contents of an index file""" |
|
798 | """dump the contents of an index file""" | |
799 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
799 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
800 | ui.write(" rev offset length base linkrev" + |
|
800 | ui.write(" rev offset length base linkrev" + | |
801 | " nodeid p1 p2\n") |
|
801 | " nodeid p1 p2\n") | |
802 | for i in r: |
|
802 | for i in r: | |
803 | node = r.node(i) |
|
803 | node = r.node(i) | |
804 | try: |
|
804 | try: | |
805 | pp = r.parents(node) |
|
805 | pp = r.parents(node) | |
806 | except: |
|
806 | except: | |
807 | pp = [nullid, nullid] |
|
807 | pp = [nullid, nullid] | |
808 | ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( |
|
808 | ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( | |
809 | i, r.start(i), r.length(i), r.base(i), r.linkrev(node), |
|
809 | i, r.start(i), r.length(i), r.base(i), r.linkrev(node), | |
810 | short(node), short(pp[0]), short(pp[1]))) |
|
810 | short(node), short(pp[0]), short(pp[1]))) | |
811 |
|
811 | |||
812 | def debugindexdot(ui, file_): |
|
812 | def debugindexdot(ui, file_): | |
813 | """dump an index DAG as a .dot file""" |
|
813 | """dump an index DAG as a .dot file""" | |
814 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) |
|
814 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) | |
815 | ui.write("digraph G {\n") |
|
815 | ui.write("digraph G {\n") | |
816 | for i in r: |
|
816 | for i in r: | |
817 | node = r.node(i) |
|
817 | node = r.node(i) | |
818 | pp = r.parents(node) |
|
818 | pp = r.parents(node) | |
819 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) |
|
819 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) | |
820 | if pp[1] != nullid: |
|
820 | if pp[1] != nullid: | |
821 | ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) |
|
821 | ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) | |
822 | ui.write("}\n") |
|
822 | ui.write("}\n") | |
823 |
|
823 | |||
824 | def debuginstall(ui): |
|
824 | def debuginstall(ui): | |
825 | '''test Mercurial installation''' |
|
825 | '''test Mercurial installation''' | |
826 |
|
826 | |||
827 | def writetemp(contents): |
|
827 | def writetemp(contents): | |
828 | (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") |
|
828 | (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") | |
829 | f = os.fdopen(fd, "wb") |
|
829 | f = os.fdopen(fd, "wb") | |
830 | f.write(contents) |
|
830 | f.write(contents) | |
831 | f.close() |
|
831 | f.close() | |
832 | return name |
|
832 | return name | |
833 |
|
833 | |||
834 | problems = 0 |
|
834 | problems = 0 | |
835 |
|
835 | |||
836 | # encoding |
|
836 | # encoding | |
837 | ui.status(_("Checking encoding (%s)...\n") % util._encoding) |
|
837 | ui.status(_("Checking encoding (%s)...\n") % util._encoding) | |
838 | try: |
|
838 | try: | |
839 | util.fromlocal("test") |
|
839 | util.fromlocal("test") | |
840 | except util.Abort, inst: |
|
840 | except util.Abort, inst: | |
841 | ui.write(" %s\n" % inst) |
|
841 | ui.write(" %s\n" % inst) | |
842 | ui.write(_(" (check that your locale is properly set)\n")) |
|
842 | ui.write(_(" (check that your locale is properly set)\n")) | |
843 | problems += 1 |
|
843 | problems += 1 | |
844 |
|
844 | |||
845 | # compiled modules |
|
845 | # compiled modules | |
846 | ui.status(_("Checking extensions...\n")) |
|
846 | ui.status(_("Checking extensions...\n")) | |
847 | try: |
|
847 | try: | |
848 | import bdiff, mpatch, base85 |
|
848 | import bdiff, mpatch, base85 | |
849 | except Exception, inst: |
|
849 | except Exception, inst: | |
850 | ui.write(" %s\n" % inst) |
|
850 | ui.write(" %s\n" % inst) | |
851 | ui.write(_(" One or more extensions could not be found")) |
|
851 | ui.write(_(" One or more extensions could not be found")) | |
852 | ui.write(_(" (check that you compiled the extensions)\n")) |
|
852 | ui.write(_(" (check that you compiled the extensions)\n")) | |
853 | problems += 1 |
|
853 | problems += 1 | |
854 |
|
854 | |||
855 | # templates |
|
855 | # templates | |
856 | ui.status(_("Checking templates...\n")) |
|
856 | ui.status(_("Checking templates...\n")) | |
857 | try: |
|
857 | try: | |
858 | import templater |
|
858 | import templater | |
859 | t = templater.templater(templater.templatepath("map-cmdline.default")) |
|
859 | t = templater.templater(templater.templatepath("map-cmdline.default")) | |
860 | except Exception, inst: |
|
860 | except Exception, inst: | |
861 | ui.write(" %s\n" % inst) |
|
861 | ui.write(" %s\n" % inst) | |
862 | ui.write(_(" (templates seem to have been installed incorrectly)\n")) |
|
862 | ui.write(_(" (templates seem to have been installed incorrectly)\n")) | |
863 | problems += 1 |
|
863 | problems += 1 | |
864 |
|
864 | |||
865 | # patch |
|
865 | # patch | |
866 | ui.status(_("Checking patch...\n")) |
|
866 | ui.status(_("Checking patch...\n")) | |
867 | patchproblems = 0 |
|
867 | patchproblems = 0 | |
868 | a = "1\n2\n3\n4\n" |
|
868 | a = "1\n2\n3\n4\n" | |
869 | b = "1\n2\n3\ninsert\n4\n" |
|
869 | b = "1\n2\n3\ninsert\n4\n" | |
870 | fa = writetemp(a) |
|
870 | fa = writetemp(a) | |
871 | d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), |
|
871 | d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), | |
872 | os.path.basename(fa)) |
|
872 | os.path.basename(fa)) | |
873 | fd = writetemp(d) |
|
873 | fd = writetemp(d) | |
874 |
|
874 | |||
875 | files = {} |
|
875 | files = {} | |
876 | try: |
|
876 | try: | |
877 | patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) |
|
877 | patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) | |
878 | except util.Abort, e: |
|
878 | except util.Abort, e: | |
879 | ui.write(_(" patch call failed:\n")) |
|
879 | ui.write(_(" patch call failed:\n")) | |
880 | ui.write(" " + str(e) + "\n") |
|
880 | ui.write(" " + str(e) + "\n") | |
881 | patchproblems += 1 |
|
881 | patchproblems += 1 | |
882 | else: |
|
882 | else: | |
883 | if list(files) != [os.path.basename(fa)]: |
|
883 | if list(files) != [os.path.basename(fa)]: | |
884 | ui.write(_(" unexpected patch output!\n")) |
|
884 | ui.write(_(" unexpected patch output!\n")) | |
885 | patchproblems += 1 |
|
885 | patchproblems += 1 | |
886 | a = file(fa).read() |
|
886 | a = file(fa).read() | |
887 | if a != b: |
|
887 | if a != b: | |
888 | ui.write(_(" patch test failed!\n")) |
|
888 | ui.write(_(" patch test failed!\n")) | |
889 | patchproblems += 1 |
|
889 | patchproblems += 1 | |
890 |
|
890 | |||
891 | if patchproblems: |
|
891 | if patchproblems: | |
892 | if ui.config('ui', 'patch'): |
|
892 | if ui.config('ui', 'patch'): | |
893 | ui.write(_(" (Current patch tool may be incompatible with patch," |
|
893 | ui.write(_(" (Current patch tool may be incompatible with patch," | |
894 | " or misconfigured. Please check your .hgrc file)\n")) |
|
894 | " or misconfigured. Please check your .hgrc file)\n")) | |
895 | else: |
|
895 | else: | |
896 | ui.write(_(" Internal patcher failure, please report this error" |
|
896 | ui.write(_(" Internal patcher failure, please report this error" | |
897 | " to http://www.selenic.com/mercurial/bts\n")) |
|
897 | " to http://www.selenic.com/mercurial/bts\n")) | |
898 | problems += patchproblems |
|
898 | problems += patchproblems | |
899 |
|
899 | |||
900 | os.unlink(fa) |
|
900 | os.unlink(fa) | |
901 | os.unlink(fd) |
|
901 | os.unlink(fd) | |
902 |
|
902 | |||
903 | # editor |
|
903 | # editor | |
904 | ui.status(_("Checking commit editor...\n")) |
|
904 | ui.status(_("Checking commit editor...\n")) | |
905 | editor = ui.geteditor() |
|
905 | editor = ui.geteditor() | |
906 | cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) |
|
906 | cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) | |
907 | if not cmdpath: |
|
907 | if not cmdpath: | |
908 | if editor == 'vi': |
|
908 | if editor == 'vi': | |
909 | ui.write(_(" No commit editor set and can't find vi in PATH\n")) |
|
909 | ui.write(_(" No commit editor set and can't find vi in PATH\n")) | |
910 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) |
|
910 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) | |
911 | else: |
|
911 | else: | |
912 | ui.write(_(" Can't find editor '%s' in PATH\n") % editor) |
|
912 | ui.write(_(" Can't find editor '%s' in PATH\n") % editor) | |
913 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) |
|
913 | ui.write(_(" (specify a commit editor in your .hgrc file)\n")) | |
914 | problems += 1 |
|
914 | problems += 1 | |
915 |
|
915 | |||
916 | # check username |
|
916 | # check username | |
917 | ui.status(_("Checking username...\n")) |
|
917 | ui.status(_("Checking username...\n")) | |
918 | user = os.environ.get("HGUSER") |
|
918 | user = os.environ.get("HGUSER") | |
919 | if user is None: |
|
919 | if user is None: | |
920 | user = ui.config("ui", "username") |
|
920 | user = ui.config("ui", "username") | |
921 | if user is None: |
|
921 | if user is None: | |
922 | user = os.environ.get("EMAIL") |
|
922 | user = os.environ.get("EMAIL") | |
923 | if not user: |
|
923 | if not user: | |
924 | ui.warn(" ") |
|
924 | ui.warn(" ") | |
925 | ui.username() |
|
925 | ui.username() | |
926 | ui.write(_(" (specify a username in your .hgrc file)\n")) |
|
926 | ui.write(_(" (specify a username in your .hgrc file)\n")) | |
927 |
|
927 | |||
928 | if not problems: |
|
928 | if not problems: | |
929 | ui.status(_("No problems detected\n")) |
|
929 | ui.status(_("No problems detected\n")) | |
930 | else: |
|
930 | else: | |
931 | ui.write(_("%s problems detected," |
|
931 | ui.write(_("%s problems detected," | |
932 | " please check your install!\n") % problems) |
|
932 | " please check your install!\n") % problems) | |
933 |
|
933 | |||
934 | return problems |
|
934 | return problems | |
935 |
|
935 | |||
936 | def debugrename(ui, repo, file1, *pats, **opts): |
|
936 | def debugrename(ui, repo, file1, *pats, **opts): | |
937 | """dump rename information""" |
|
937 | """dump rename information""" | |
938 |
|
938 | |||
939 | ctx = repo[opts.get('rev')] |
|
939 | ctx = repo[opts.get('rev')] | |
940 | m = cmdutil.match(repo, (file1,) + pats, opts) |
|
940 | m = cmdutil.match(repo, (file1,) + pats, opts) | |
941 | for abs in ctx.walk(m): |
|
941 | for abs in ctx.walk(m): | |
942 | fctx = ctx[abs] |
|
942 | fctx = ctx[abs] | |
943 | o = fctx.filelog().renamed(fctx.filenode()) |
|
943 | o = fctx.filelog().renamed(fctx.filenode()) | |
944 | rel = m.rel(abs) |
|
944 | rel = m.rel(abs) | |
945 | if o: |
|
945 | if o: | |
946 | ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) |
|
946 | ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) | |
947 | else: |
|
947 | else: | |
948 | ui.write(_("%s not renamed\n") % rel) |
|
948 | ui.write(_("%s not renamed\n") % rel) | |
949 |
|
949 | |||
950 | def debugwalk(ui, repo, *pats, **opts): |
|
950 | def debugwalk(ui, repo, *pats, **opts): | |
951 | """show how files match on given patterns""" |
|
951 | """show how files match on given patterns""" | |
952 | m = cmdutil.match(repo, pats, opts) |
|
952 | m = cmdutil.match(repo, pats, opts) | |
953 | items = list(repo.walk(m)) |
|
953 | items = list(repo.walk(m)) | |
954 | if not items: |
|
954 | if not items: | |
955 | return |
|
955 | return | |
956 | fmt = 'f %%-%ds %%-%ds %%s' % ( |
|
956 | fmt = 'f %%-%ds %%-%ds %%s' % ( | |
957 | max([len(abs) for abs in items]), |
|
957 | max([len(abs) for abs in items]), | |
958 | max([len(m.rel(abs)) for abs in items])) |
|
958 | max([len(m.rel(abs)) for abs in items])) | |
959 | for abs in items: |
|
959 | for abs in items: | |
960 | line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') |
|
960 | line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') | |
961 | ui.write("%s\n" % line.rstrip()) |
|
961 | ui.write("%s\n" % line.rstrip()) | |
962 |
|
962 | |||
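The fmt string in debugwalk uses doubled percent signs so that the first interpolation bakes the computed column widths into a second format string. A quick standalone check of that two-stage formatting:

# First % fills in the widths; the doubled %% survive as ordinary
# format specifiers for the second pass.
fmt = 'f %%-%ds %%-%ds %%s' % (12, 8)
print(fmt)                                    # 'f %-12s %-8s %s'
print(fmt % ('sub/file.txt', 'file.txt', 'exact'))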
963 | def diff(ui, repo, *pats, **opts): |
|
963 | def diff(ui, repo, *pats, **opts): | |
964 | """diff repository (or selected files) |
|
964 | """diff repository (or selected files) | |
965 |
|
965 | |||
966 | Show differences between revisions for the specified files. |
|
966 | Show differences between revisions for the specified files. | |
967 |
|
967 | |||
968 | Differences between files are shown using the unified diff format. |
|
968 | Differences between files are shown using the unified diff format. | |
969 |
|
969 | |||
970 | NOTE: diff may generate unexpected results for merges, as it will |
|
970 | NOTE: diff may generate unexpected results for merges, as it will | |
971 | default to comparing against the working directory's first parent |
|
971 | default to comparing against the working directory's first parent | |
972 | changeset if no revisions are specified. |
|
972 | changeset if no revisions are specified. | |
973 |
|
973 | |||
974 | When two revision arguments are given, then changes are shown |
|
974 | When two revision arguments are given, then changes are shown | |
975 | between those revisions. If only one revision is specified then |
|
975 | between those revisions. If only one revision is specified then | |
976 | that revision is compared to the working directory, and, when no |
|
976 | that revision is compared to the working directory, and, when no | |
977 | revisions are specified, the working directory files are compared |
|
977 | revisions are specified, the working directory files are compared | |
978 | to its parent. |
|
978 | to its parent. | |
979 |
|
979 | |||
980 | Without the -a option, diff will avoid generating diffs of files |
|
980 | Without the -a option, diff will avoid generating diffs of files | |
981 | it detects as binary. With -a, diff will generate a diff anyway, |
|
981 | it detects as binary. With -a, diff will generate a diff anyway, | |
982 | probably with undesirable results. |
|
982 | probably with undesirable results. | |
983 | """ |
|
983 | """ | |
984 | node1, node2 = cmdutil.revpair(repo, opts['rev']) |
|
984 | node1, node2 = cmdutil.revpair(repo, opts.get('rev')) | |
985 |
|
985 | |||
986 | m = cmdutil.match(repo, pats, opts) |
|
986 | m = cmdutil.match(repo, pats, opts) | |
987 | patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts)) |
|
987 | patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts)) | |
988 |
|
988 | |||
989 | def export(ui, repo, *changesets, **opts): |
|
989 | def export(ui, repo, *changesets, **opts): | |
990 | """dump the header and diffs for one or more changesets |
|
990 | """dump the header and diffs for one or more changesets | |
991 |
|
991 | |||
992 | Print the changeset header and diffs for one or more revisions. |
|
992 | Print the changeset header and diffs for one or more revisions. | |
993 |
|
993 | |||
994 | The information shown in the changeset header is: author, |
|
994 | The information shown in the changeset header is: author, | |
995 | changeset hash, parent(s) and commit comment. |
|
995 | changeset hash, parent(s) and commit comment. | |
996 |
|
996 | |||
997 | NOTE: export may generate unexpected diff output for merge changesets, |
|
997 | NOTE: export may generate unexpected diff output for merge changesets, | |
998 | as it will compare the merge changeset against its first parent only. |
|
998 | as it will compare the merge changeset against its first parent only. | |
999 |
|
999 | |||
1000 | Output may be to a file, in which case the name of the file is |
|
1000 | Output may be to a file, in which case the name of the file is | |
1001 | given using a format string. The formatting rules are as follows: |
|
1001 | given using a format string. The formatting rules are as follows: | |
1002 |
|
1002 | |||
1003 | %% literal "%" character |
|
1003 | %% literal "%" character | |
1004 | %H changeset hash (40 bytes of hexadecimal) |
|
1004 | %H changeset hash (40 bytes of hexadecimal) | |
1005 | %N number of patches being generated |
|
1005 | %N number of patches being generated | |
1006 | %R changeset revision number |
|
1006 | %R changeset revision number | |
1007 | %b basename of the exporting repository |
|
1007 | %b basename of the exporting repository | |
1008 | %h short-form changeset hash (12 bytes of hexadecimal) |
|
1008 | %h short-form changeset hash (12 bytes of hexadecimal) | |
1009 | %n zero-padded sequence number, starting at 1 |
|
1009 | %n zero-padded sequence number, starting at 1 | |
1010 | %r zero-padded changeset revision number |
|
1010 | %r zero-padded changeset revision number | |
1011 |
|
1011 | |||
1012 | Without the -a option, export will avoid generating diffs of files |
|
1012 | Without the -a option, export will avoid generating diffs of files | |
1013 | it detects as binary. With -a, export will generate a diff anyway, |
|
1013 | it detects as binary. With -a, export will generate a diff anyway, | |
1014 | probably with undesirable results. |
|
1014 | probably with undesirable results. | |
1015 |
|
1015 | |||
1016 | With the --switch-parent option, the diff will be against the second |
|
1016 | With the --switch-parent option, the diff will be against the second | |
1017 | parent. It can be useful to review a merge. |
|
1017 | parent. It can be useful to review a merge. | |
1018 | """ |
|
1018 | """ | |
1019 | if not changesets: |
|
1019 | if not changesets: | |
1020 | raise util.Abort(_("export requires at least one changeset")) |
|
1020 | raise util.Abort(_("export requires at least one changeset")) | |
1021 | revs = cmdutil.revrange(repo, changesets) |
|
1021 | revs = cmdutil.revrange(repo, changesets) | |
1022 | if len(revs) > 1: |
|
1022 | if len(revs) > 1: | |
1023 | ui.note(_('exporting patches:\n')) |
|
1023 | ui.note(_('exporting patches:\n')) | |
1024 | else: |
|
1024 | else: | |
1025 | ui.note(_('exporting patch:\n')) |
|
1025 | ui.note(_('exporting patch:\n')) | |
1026 | patch.export(repo, revs, template=opts['output'], |
|
1026 | patch.export(repo, revs, template=opts.get('output'), | |
1027 | switch_parent=opts['switch_parent'], |
|
1027 | switch_parent=opts.get('switch_parent'), | |
1028 | opts=patch.diffopts(ui, opts)) |
|
1028 | opts=patch.diffopts(ui, opts)) | |
1029 |
|
1029 | |||
1030 | def grep(ui, repo, pattern, *pats, **opts): |
|
1030 | def grep(ui, repo, pattern, *pats, **opts): | |
1031 | """search for a pattern in specified files and revisions |
|
1031 | """search for a pattern in specified files and revisions | |
1032 |
|
1032 | |||
1033 | Search revisions of files for a regular expression. |
|
1033 | Search revisions of files for a regular expression. | |
1034 |
|
1034 | |||
1035 | This command behaves differently than Unix grep. It only accepts |
|
1035 | This command behaves differently than Unix grep. It only accepts | |
1036 | Python/Perl regexps. It searches repository history, not the |
|
1036 | Python/Perl regexps. It searches repository history, not the | |
1037 | working directory. It always prints the revision number in which |
|
1037 | working directory. It always prints the revision number in which | |
1038 | a match appears. |
|
1038 | a match appears. | |
1039 |
|
1039 | |||
1040 | By default, grep only prints output for the first revision of a |
|
1040 | By default, grep only prints output for the first revision of a | |
1041 | file in which it finds a match. To get it to print every revision |
|
1041 | file in which it finds a match. To get it to print every revision | |
1042 | that contains a change in match status ("-" for a match that |
|
1042 | that contains a change in match status ("-" for a match that | |
1043 | becomes a non-match, or "+" for a non-match that becomes a match), |
|
1043 | becomes a non-match, or "+" for a non-match that becomes a match), | |
1044 | use the --all flag. |
|
1044 | use the --all flag. | |
1045 | """ |
|
1045 | """ | |
1046 | reflags = 0 |
|
1046 | reflags = 0 | |
1047 | if opts['ignore_case']: |
|
1047 | if opts.get('ignore_case'): | |
1048 | reflags |= re.I |
|
1048 | reflags |= re.I | |
1049 | try: |
|
1049 | try: | |
1050 | regexp = re.compile(pattern, reflags) |
|
1050 | regexp = re.compile(pattern, reflags) | |
1051 | except Exception, inst: |
|
1051 | except Exception, inst: | |
1052 | ui.warn(_("grep: invalid match pattern: %s\n") % inst) |
|
1052 | ui.warn(_("grep: invalid match pattern: %s\n") % inst) | |
1053 | return None |
|
1053 | return None | |
1054 | sep, eol = ':', '\n' |
|
1054 | sep, eol = ':', '\n' | |
1055 | if opts['print0']: |
|
1055 | if opts.get('print0'): | |
1056 | sep = eol = '\0' |
|
1056 | sep = eol = '\0' | |
1057 |
|
1057 | |||
1058 | fcache = {} |
|
1058 | fcache = {} | |
1059 | def getfile(fn): |
|
1059 | def getfile(fn): | |
1060 | if fn not in fcache: |
|
1060 | if fn not in fcache: | |
1061 | fcache[fn] = repo.file(fn) |
|
1061 | fcache[fn] = repo.file(fn) | |
1062 | return fcache[fn] |
|
1062 | return fcache[fn] | |
1063 |
|
1063 | |||
1064 | def matchlines(body): |
|
1064 | def matchlines(body): | |
1065 | begin = 0 |
|
1065 | begin = 0 | |
1066 | linenum = 0 |
|
1066 | linenum = 0 | |
1067 | while True: |
|
1067 | while True: | |
1068 | match = regexp.search(body, begin) |
|
1068 | match = regexp.search(body, begin) | |
1069 | if not match: |
|
1069 | if not match: | |
1070 | break |
|
1070 | break | |
1071 | mstart, mend = match.span() |
|
1071 | mstart, mend = match.span() | |
1072 | linenum += body.count('\n', begin, mstart) + 1 |
|
1072 | linenum += body.count('\n', begin, mstart) + 1 | |
1073 | lstart = body.rfind('\n', begin, mstart) + 1 or begin |
|
1073 | lstart = body.rfind('\n', begin, mstart) + 1 or begin | |
1074 | lend = body.find('\n', mend) |
|
1074 | lend = body.find('\n', mend) | |
1075 | yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] |
|
1075 | yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] | |
1076 | begin = lend + 1 |
|
1076 | begin = lend + 1 | |
1077 |
|
1077 | |||
1078 | class linestate(object): |
|
1078 | class linestate(object): | |
1079 | def __init__(self, line, linenum, colstart, colend): |
|
1079 | def __init__(self, line, linenum, colstart, colend): | |
1080 | self.line = line |
|
1080 | self.line = line | |
1081 | self.linenum = linenum |
|
1081 | self.linenum = linenum | |
1082 | self.colstart = colstart |
|
1082 | self.colstart = colstart | |
1083 | self.colend = colend |
|
1083 | self.colend = colend | |
1084 |
|
1084 | |||
1085 | def __hash__(self): |
|
1085 | def __hash__(self): | |
1086 | return hash((self.linenum, self.line)) |
|
1086 | return hash((self.linenum, self.line)) | |
1087 |
|
1087 | |||
1088 | def __eq__(self, other): |
|
1088 | def __eq__(self, other): | |
1089 | return self.line == other.line |
|
1089 | return self.line == other.line | |
1090 |
|
1090 | |||
1091 | matches = {} |
|
1091 | matches = {} | |
1092 | copies = {} |
|
1092 | copies = {} | |
1093 | def grepbody(fn, rev, body): |
|
1093 | def grepbody(fn, rev, body): | |
1094 | matches[rev].setdefault(fn, []) |
|
1094 | matches[rev].setdefault(fn, []) | |
1095 | m = matches[rev][fn] |
|
1095 | m = matches[rev][fn] | |
1096 | for lnum, cstart, cend, line in matchlines(body): |
|
1096 | for lnum, cstart, cend, line in matchlines(body): | |
1097 | s = linestate(line, lnum, cstart, cend) |
|
1097 | s = linestate(line, lnum, cstart, cend) | |
1098 | m.append(s) |
|
1098 | m.append(s) | |
1099 |
|
1099 | |||
1100 | def difflinestates(a, b): |
|
1100 | def difflinestates(a, b): | |
1101 | sm = difflib.SequenceMatcher(None, a, b) |
|
1101 | sm = difflib.SequenceMatcher(None, a, b) | |
1102 | for tag, alo, ahi, blo, bhi in sm.get_opcodes(): |
|
1102 | for tag, alo, ahi, blo, bhi in sm.get_opcodes(): | |
1103 | if tag == 'insert': |
|
1103 | if tag == 'insert': | |
1104 | for i in xrange(blo, bhi): |
|
1104 | for i in xrange(blo, bhi): | |
1105 | yield ('+', b[i]) |
|
1105 | yield ('+', b[i]) | |
1106 | elif tag == 'delete': |
|
1106 | elif tag == 'delete': | |
1107 | for i in xrange(alo, ahi): |
|
1107 | for i in xrange(alo, ahi): | |
1108 | yield ('-', a[i]) |
|
1108 | yield ('-', a[i]) | |
1109 | elif tag == 'replace': |
|
1109 | elif tag == 'replace': | |
1110 | for i in xrange(alo, ahi): |
|
1110 | for i in xrange(alo, ahi): | |
1111 | yield ('-', a[i]) |
|
1111 | yield ('-', a[i]) | |
1112 | for i in xrange(blo, bhi): |
|
1112 | for i in xrange(blo, bhi): | |
1113 | yield ('+', b[i]) |
|
1113 | yield ('+', b[i]) | |
1114 |
|
1114 | |||
1115 | prev = {} |
|
1115 | prev = {} | |
1116 | def display(fn, rev, states, prevstates): |
|
1116 | def display(fn, rev, states, prevstates): | |
1117 | datefunc = ui.quiet and util.shortdate or util.datestr |
|
1117 | datefunc = ui.quiet and util.shortdate or util.datestr | |
1118 | found = False |
|
1118 | found = False | |
1119 | filerevmatches = {} |
|
1119 | filerevmatches = {} | |
1120 | r = prev.get(fn, -1) |
|
1120 | r = prev.get(fn, -1) | |
1121 | if opts['all']: |
|
1121 | if opts.get('all'): | |
1122 | iter = difflinestates(states, prevstates) |
|
1122 | iter = difflinestates(states, prevstates) | |
1123 | else: |
|
1123 | else: | |
1124 | iter = [('', l) for l in prevstates] |
|
1124 | iter = [('', l) for l in prevstates] | |
1125 | for change, l in iter: |
|
1125 | for change, l in iter: | |
1126 | cols = [fn, str(r)] |
|
1126 | cols = [fn, str(r)] | |
1127 | if opts['line_number']: |
|
1127 | if opts.get('line_number'): | |
1128 | cols.append(str(l.linenum)) |
|
1128 | cols.append(str(l.linenum)) | |
1129 | if opts['all']: |
|
1129 | if opts.get('all'): | |
1130 | cols.append(change) |
|
1130 | cols.append(change) | |
1131 | if opts['user']: |
|
1131 | if opts.get('user'): | |
1132 | cols.append(ui.shortuser(get(r)[1])) |
|
1132 | cols.append(ui.shortuser(get(r)[1])) | |
1133 | if opts.get('date'): |
|
1133 | if opts.get('date'): | |
1134 | cols.append(datefunc(get(r)[2])) |
|
1134 | cols.append(datefunc(get(r)[2])) | |
1135 | if opts['files_with_matches']: |
|
1135 | if opts.get('files_with_matches'): | |
1136 | c = (fn, r) |
|
1136 | c = (fn, r) | |
1137 | if c in filerevmatches: |
|
1137 | if c in filerevmatches: | |
1138 | continue |
|
1138 | continue | |
1139 | filerevmatches[c] = 1 |
|
1139 | filerevmatches[c] = 1 | |
1140 | else: |
|
1140 | else: | |
1141 | cols.append(l.line) |
|
1141 | cols.append(l.line) | |
1142 | ui.write(sep.join(cols), eol) |
|
1142 | ui.write(sep.join(cols), eol) | |
1143 | found = True |
|
1143 | found = True | |
1144 | return found |
|
1144 | return found | |
1145 |
|
1145 | |||
1146 | fstate = {} |
|
1146 | fstate = {} | |
1147 | skip = {} |
|
1147 | skip = {} | |
1148 | get = util.cachefunc(lambda r: repo[r].changeset()) |
|
1148 | get = util.cachefunc(lambda r: repo[r].changeset()) | |
1149 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) |
|
1149 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) | |
1150 | found = False |
|
1150 | found = False | |
1151 | follow = opts.get('follow') |
|
1151 | follow = opts.get('follow') | |
1152 | for st, rev, fns in changeiter: |
|
1152 | for st, rev, fns in changeiter: | |
1153 | if st == 'window': |
|
1153 | if st == 'window': | |
1154 | matches.clear() |
|
1154 | matches.clear() | |
1155 | elif st == 'add': |
|
1155 | elif st == 'add': | |
1156 | ctx = repo[rev] |
|
1156 | ctx = repo[rev] | |
1157 | matches[rev] = {} |
|
1157 | matches[rev] = {} | |
1158 | for fn in fns: |
|
1158 | for fn in fns: | |
1159 | if fn in skip: |
|
1159 | if fn in skip: | |
1160 | continue |
|
1160 | continue | |
1161 | try: |
|
1161 | try: | |
1162 | grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn))) |
|
1162 | grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn))) | |
1163 | fstate.setdefault(fn, []) |
|
1163 | fstate.setdefault(fn, []) | |
1164 | if follow: |
|
1164 | if follow: | |
1165 | copied = getfile(fn).renamed(ctx.filenode(fn)) |
|
1165 | copied = getfile(fn).renamed(ctx.filenode(fn)) | |
1166 | if copied: |
|
1166 | if copied: | |
1167 | copies.setdefault(rev, {})[fn] = copied[0] |
|
1167 | copies.setdefault(rev, {})[fn] = copied[0] | |
1168 | except revlog.LookupError: |
|
1168 | except revlog.LookupError: | |
1169 | pass |
|
1169 | pass | |
1170 | elif st == 'iter': |
|
1170 | elif st == 'iter': | |
1171 | for fn, m in util.sort(matches[rev].items()): |
|
1171 | for fn, m in util.sort(matches[rev].items()): | |
1172 | copy = copies.get(rev, {}).get(fn) |
|
1172 | copy = copies.get(rev, {}).get(fn) | |
1173 | if fn in skip: |
|
1173 | if fn in skip: | |
1174 | if copy: |
|
1174 | if copy: | |
1175 | skip[copy] = True |
|
1175 | skip[copy] = True | |
1176 | continue |
|
1176 | continue | |
1177 | if fn in prev or fstate[fn]: |
|
1177 | if fn in prev or fstate[fn]: | |
1178 | r = display(fn, rev, m, fstate[fn]) |
|
1178 | r = display(fn, rev, m, fstate[fn]) | |
1179 | found = found or r |
|
1179 | found = found or r | |
1180 | if r and not opts['all']: |
|
1180 | if r and not opts.get('all'): | |
1181 | skip[fn] = True |
|
1181 | skip[fn] = True | |
1182 | if copy: |
|
1182 | if copy: | |
1183 | skip[copy] = True |
|
1183 | skip[copy] = True | |
1184 | fstate[fn] = m |
|
1184 | fstate[fn] = m | |
1185 | if copy: |
|
1185 | if copy: | |
1186 | fstate[copy] = m |
|
1186 | fstate[copy] = m | |
1187 | prev[fn] = rev |
|
1187 | prev[fn] = rev | |
1188 |
|
1188 | |||
1189 | for fn, state in util.sort(fstate.items()): |
|
1189 | for fn, state in util.sort(fstate.items()): | |
1190 | if fn in skip: |
|
1190 | if fn in skip: | |
1191 | continue |
|
1191 | continue | |
1192 | if fn not in copies.get(prev[fn], {}): |
|
1192 | if fn not in copies.get(prev[fn], {}): | |
1193 | found = display(fn, rev, {}, state) or found |
|
1193 | found = display(fn, rev, {}, state) or found | |
1194 | return (not found and 1) or 0 |
|
1194 | return (not found and 1) or 0 | |
1195 |
|
1195 | |||
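The --all output is driven by difflinestates, which flattens SequenceMatcher opcodes into the "+"/"-" match-status markers described in the docstring. The same generator, lifted out nearly verbatim (xrange swapped for range) so it runs on plain lists with only stdlib difflib:

import difflib

def difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for i in range(blo, bhi):
                yield ('+', b[i])
        elif tag == 'delete':
            for i in range(alo, ahi):
                yield ('-', a[i])
        elif tag == 'replace':
            for i in range(alo, ahi):
                yield ('-', a[i])
            for i in range(blo, bhi):
                yield ('+', b[i])

old = ['def f():', '    return 1']
new = ['def f():', '    return 2']
print(list(difflinestates(old, new)))
# [('-', '    return 1'), ('+', '    return 2')]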
1196 | def heads(ui, repo, *branchrevs, **opts): |
|
1196 | def heads(ui, repo, *branchrevs, **opts): | |
1197 | """show current repository heads or show branch heads |
|
1197 | """show current repository heads or show branch heads | |
1198 |
|
1198 | |||
1199 | With no arguments, show all repository head changesets. |
|
1199 | With no arguments, show all repository head changesets. | |
1200 |
|
1200 | |||
1201 | If branch or revisions names are given this will show the heads of |
|
1201 | If branch or revisions names are given this will show the heads of | |
1202 | the specified branches or the branches those revisions are tagged |
|
1202 | the specified branches or the branches those revisions are tagged | |
1203 | with. |
|
1203 | with. | |
1204 |
|
1204 | |||
1205 | Repository "heads" are changesets that don't have child |
|
1205 | Repository "heads" are changesets that don't have child | |
1206 | changesets. They are where development generally takes place and |
|
1206 | changesets. They are where development generally takes place and | |
1207 | are the usual targets for update and merge operations. |
|
1207 | are the usual targets for update and merge operations. | |
1208 |
|
1208 | |||
1209 | Branch heads are changesets that have a given branch tag, but have |
|
1209 | Branch heads are changesets that have a given branch tag, but have | |
1210 | no child changesets with that tag. They are usually where |
|
1210 | no child changesets with that tag. They are usually where | |
1211 | development on the given branch takes place. |
|
1211 | development on the given branch takes place. | |
1212 | """ |
|
1212 | """ | |
1213 | if opts['rev']: |
|
1213 | if opts.get('rev'): | |
1214 | start = repo.lookup(opts['rev']) |
|
1214 | start = repo.lookup(opts['rev']) | |
1215 | else: |
|
1215 | else: | |
1216 | start = None |
|
1216 | start = None | |
1217 | if not branchrevs: |
|
1217 | if not branchrevs: | |
1218 | # Assume we're looking repo-wide heads if no revs were specified. |
|
1218 | # Assume we're looking repo-wide heads if no revs were specified. | |
1219 | heads = repo.heads(start) |
|
1219 | heads = repo.heads(start) | |
1220 | else: |
|
1220 | else: | |
1221 | heads = [] |
|
1221 | heads = [] | |
1222 | visitedset = util.set() |
|
1222 | visitedset = util.set() | |
1223 | for branchrev in branchrevs: |
|
1223 | for branchrev in branchrevs: | |
1224 | branch = repo[branchrev].branch() |
|
1224 | branch = repo[branchrev].branch() | |
1225 | if branch in visitedset: |
|
1225 | if branch in visitedset: | |
1226 | continue |
|
1226 | continue | |
1227 | visitedset.add(branch) |
|
1227 | visitedset.add(branch) | |
1228 | bheads = repo.branchheads(branch, start) |
|
1228 | bheads = repo.branchheads(branch, start) | |
1229 | if not bheads: |
|
1229 | if not bheads: | |
1230 | if branch != branchrev: |
|
1230 | if branch != branchrev: | |
1231 | ui.warn(_("no changes on branch %s containing %s are " |
|
1231 | ui.warn(_("no changes on branch %s containing %s are " | |
1232 | "reachable from %s\n") |
|
1232 | "reachable from %s\n") | |
1233 | % (branch, branchrev, opts['rev'])) |
|
1233 | % (branch, branchrev, opts.get('rev'))) | |
1234 | else: |
|
1234 | else: | |
1235 | ui.warn(_("no changes on branch %s are reachable from %s\n") |
|
1235 | ui.warn(_("no changes on branch %s are reachable from %s\n") | |
1236 | % (branch, opts['rev'])) |
|
1236 | % (branch, opts.get('rev'))) | |
1237 | heads.extend(bheads) |
|
1237 | heads.extend(bheads) | |
1238 | if not heads: |
|
1238 | if not heads: | |
1239 | return 1 |
|
1239 | return 1 | |
1240 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
1240 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
1241 | for n in heads: |
|
1241 | for n in heads: | |
1242 | displayer.show(changenode=n) |
|
1242 | displayer.show(changenode=n) | |
1243 |
|
1243 | |||
1244 | def help_(ui, name=None, with_version=False): |
|
1244 | def help_(ui, name=None, with_version=False): | |
1245 | """show help for a command, extension, or list of commands |
|
1245 | """show help for a command, extension, or list of commands | |
1246 |
|
1246 | |||
1247 | With no arguments, print a list of commands and short help. |
|
1247 | With no arguments, print a list of commands and short help. | |
1248 |
|
1248 | |||
1249 | Given a command name, print help for that command. |
|
1249 | Given a command name, print help for that command. | |
1250 |
|
1250 | |||
1251 | Given an extension name, print help for that extension, and the |
|
1251 | Given an extension name, print help for that extension, and the | |
1252 | commands it provides.""" |
|
1252 | commands it provides.""" | |
1253 | option_lists = [] |
|
1253 | option_lists = [] | |
1254 |
|
1254 | |||
1255 | def addglobalopts(aliases): |
|
1255 | def addglobalopts(aliases): | |
1256 | if ui.verbose: |
|
1256 | if ui.verbose: | |
1257 | option_lists.append((_("global options:"), globalopts)) |
|
1257 | option_lists.append((_("global options:"), globalopts)) | |
1258 | if name == 'shortlist': |
|
1258 | if name == 'shortlist': | |
1259 | option_lists.append((_('use "hg help" for the full list ' |
|
1259 | option_lists.append((_('use "hg help" for the full list ' | |
1260 | 'of commands'), ())) |
|
1260 | 'of commands'), ())) | |
1261 | else: |
|
1261 | else: | |
1262 | if name == 'shortlist': |
|
1262 | if name == 'shortlist': | |
1263 | msg = _('use "hg help" for the full list of commands ' |
|
1263 | msg = _('use "hg help" for the full list of commands ' | |
1264 | 'or "hg -v" for details') |
|
1264 | 'or "hg -v" for details') | |
1265 | elif aliases: |
|
1265 | elif aliases: | |
1266 | msg = _('use "hg -v help%s" to show aliases and ' |
|
1266 | msg = _('use "hg -v help%s" to show aliases and ' | |
1267 | 'global options') % (name and " " + name or "") |
|
1267 | 'global options') % (name and " " + name or "") | |
1268 | else: |
|
1268 | else: | |
1269 | msg = _('use "hg -v help %s" to show global options') % name |
|
1269 | msg = _('use "hg -v help %s" to show global options') % name | |
1270 | option_lists.append((msg, ())) |
|
1270 | option_lists.append((msg, ())) | |
1271 |
|
1271 | |||
1272 | def helpcmd(name): |
|
1272 | def helpcmd(name): | |
1273 | if with_version: |
|
1273 | if with_version: | |
1274 | version_(ui) |
|
1274 | version_(ui) | |
1275 | ui.write('\n') |
|
1275 | ui.write('\n') | |
1276 |
|
1276 | |||
1277 | try: |
|
1277 | try: | |
1278 | aliases, i = cmdutil.findcmd(ui, name, table) |
|
1278 | aliases, i = cmdutil.findcmd(ui, name, table) | |
1279 | except cmdutil.AmbiguousCommand, inst: |
|
1279 | except cmdutil.AmbiguousCommand, inst: | |
1280 | select = lambda c: c.lstrip('^').startswith(inst.args[0]) |
|
1280 | select = lambda c: c.lstrip('^').startswith(inst.args[0]) | |
1281 | helplist(_('list of commands:\n\n'), select) |
|
1281 | helplist(_('list of commands:\n\n'), select) | |
1282 | return |
|
1282 | return | |
1283 |
|
1283 | |||
1284 | # synopsis |
|
1284 | # synopsis | |
1285 | ui.write("%s\n" % i[2]) |
|
1285 | ui.write("%s\n" % i[2]) | |
1286 |
|
1286 | |||
1287 | # aliases |
|
1287 | # aliases | |
1288 | if not ui.quiet and len(aliases) > 1: |
|
1288 | if not ui.quiet and len(aliases) > 1: | |
1289 | ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) |
|
1289 | ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) | |
1290 |
|
1290 | |||
1291 | # description |
|
1291 | # description | |
1292 | doc = gettext(i[0].__doc__) |
|
1292 | doc = gettext(i[0].__doc__) | |
1293 | if not doc: |
|
1293 | if not doc: | |
1294 | doc = _("(No help text available)") |
|
1294 | doc = _("(No help text available)") | |
1295 | if ui.quiet: |
|
1295 | if ui.quiet: | |
1296 | doc = doc.splitlines(0)[0] |
|
1296 | doc = doc.splitlines(0)[0] | |
1297 | ui.write("\n%s\n" % doc.rstrip()) |
|
1297 | ui.write("\n%s\n" % doc.rstrip()) | |
1298 |
|
1298 | |||
1299 | if not ui.quiet: |
|
1299 | if not ui.quiet: | |
1300 | # options |
|
1300 | # options | |
1301 | if i[1]: |
|
1301 | if i[1]: | |
1302 | option_lists.append((_("options:\n"), i[1])) |
|
1302 | option_lists.append((_("options:\n"), i[1])) | |
1303 |
|
1303 | |||
1304 | addglobalopts(False) |
|
1304 | addglobalopts(False) | |
1305 |
|
1305 | |||
1306 | def helplist(header, select=None): |
|
1306 | def helplist(header, select=None): | |
1307 | h = {} |
|
1307 | h = {} | |
1308 | cmds = {} |
|
1308 | cmds = {} | |
1309 | for c, e in table.items(): |
|
1309 | for c, e in table.items(): | |
1310 | f = c.split("|", 1)[0] |
|
1310 | f = c.split("|", 1)[0] | |
1311 | if select and not select(f): |
|
1311 | if select and not select(f): | |
1312 | continue |
|
1312 | continue | |
|
1313 | if select is None and e[0].__module__ != __name__: | |||
|
1314 | continue | |||
1313 | if name == "shortlist" and not f.startswith("^"): |
|
1315 | if name == "shortlist" and not f.startswith("^"): | |
1314 | continue |
|
1316 | continue | |
1315 | f = f.lstrip("^") |
|
1317 | f = f.lstrip("^") | |
1316 | if not ui.debugflag and f.startswith("debug"): |
|
1318 | if not ui.debugflag and f.startswith("debug"): | |
1317 | continue |
|
1319 | continue | |
1318 | doc = gettext(e[0].__doc__) |
|
1320 | doc = gettext(e[0].__doc__) | |
1319 | if not doc: |
|
1321 | if not doc: | |
1320 | doc = _("(No help text available)") |
|
1322 | doc = _("(No help text available)") | |
1321 | h[f] = doc.splitlines(0)[0].rstrip() |
|
1323 | h[f] = doc.splitlines(0)[0].rstrip() | |
1322 | cmds[f] = c.lstrip("^") |
|
1324 | cmds[f] = c.lstrip("^") | |
1323 |
|
1325 | |||
1324 | if not h: |
|
1326 | if not h: | |
1325 | ui.status(_('no commands defined\n')) |
|
1327 | ui.status(_('no commands defined\n')) | |
1326 | return |
|
1328 | return | |
1327 |
|
1329 | |||
1328 | ui.status(header) |
|
1330 | ui.status(header) | |
1329 | fns = util.sort(h) |
|
1331 | fns = util.sort(h) | |
1330 | m = max(map(len, fns)) |
|
1332 | m = max(map(len, fns)) | |
1331 | for f in fns: |
|
1333 | for f in fns: | |
1332 | if ui.verbose: |
|
1334 | if ui.verbose: | |
1333 | commands = cmds[f].replace("|",", ") |
|
1335 | commands = cmds[f].replace("|",", ") | |
1334 | ui.write(" %s:\n %s\n"%(commands, h[f])) |
|
1336 | ui.write(" %s:\n %s\n"%(commands, h[f])) | |
1335 | else: |
|
1337 | else: | |
1336 | ui.write(' %-*s %s\n' % (m, f, h[f])) |
|
1338 | ui.write(' %-*s %s\n' % (m, f, h[f])) | |
1337 |
|
1339 | |||
|
1340 | exts = list(extensions.extensions()) | |||
|
1341 | if exts: | |||
|
1342 | ui.write(_('\nenabled extensions:\n\n')) | |||
|
1343 | maxlength = 0 | |||
|
1344 | exthelps = [] | |||
|
1345 | for ename, ext in exts: | |||
|
1346 | doc = (ext.__doc__ or _('(no help text available)')) | |||
|
1347 | ename = ename.split('.')[-1] | |||
|
1348 | maxlength = max(len(ename), maxlength) | |||
|
1349 | exthelps.append((ename, doc.splitlines(0)[0].strip())) | |||
|
1350 | for ename, text in exthelps: | |||
|
1351 | ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text)) | |||
|
1352 | ||||
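
The block added at new lines 1340-1351 lists enabled extensions under the command list, left-justifying each name to the longest one before printing the first line of its help text. A small self-contained sketch of the same alignment idiom, with invented extension names standing in for extensions.extensions():

# Invented (name, docstring) pairs standing in for the (name, module)
# tuples returned by extensions.extensions().
exts = [('hgext.mq', 'manage a stack of patches\n\nlonger help follows'),
        ('churn', 'command to show certain statistics about revision history')]

exthelps = []
maxlength = 0
for ename, doc in exts:
    ename = ename.split('.')[-1]              # drop any package prefix
    maxlength = max(len(ename), maxlength)
    exthelps.append((ename, doc.splitlines()[0].strip()))

for ename, text in exthelps:
    print(' %s %s' % (ename.ljust(maxlength), text))
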
1338 | if not ui.quiet: |
|
1353 | if not ui.quiet: | |
1339 | addglobalopts(True) |
|
1354 | addglobalopts(True) | |
1340 |
|
1355 | |||
1341 | def helptopic(name): |
|
1356 | def helptopic(name): | |
1342 | for names, header, doc in help.helptable: |
|
1357 | for names, header, doc in help.helptable: | |
1343 | if name in names: |
|
1358 | if name in names: | |
1344 | break |
|
1359 | break | |
1345 | else: |
|
1360 | else: | |
1346 | raise cmdutil.UnknownCommand(name) |
|
1361 | raise cmdutil.UnknownCommand(name) | |
1347 |
|
1362 | |||
1348 | # description |
|
1363 | # description | |
1349 | if not doc: |
|
1364 | if not doc: | |
1350 | doc = _("(No help text available)") |
|
1365 | doc = _("(No help text available)") | |
1351 | if callable(doc): |
|
1366 | if callable(doc): | |
1352 | doc = doc() |
|
1367 | doc = doc() | |
1353 |
|
1368 | |||
1354 | ui.write("%s\n" % header) |
|
1369 | ui.write("%s\n" % header) | |
1355 | ui.write("%s\n" % doc.rstrip()) |
|
1370 | ui.write("%s\n" % doc.rstrip()) | |
1356 |
|
1371 | |||
1357 | def helpext(name): |
|
1372 | def helpext(name): | |
1358 | try: |
|
1373 | try: | |
1359 | mod = extensions.find(name) |
|
1374 | mod = extensions.find(name) | |
1360 | except KeyError: |
|
1375 | except KeyError: | |
1361 | raise cmdutil.UnknownCommand(name) |
|
1376 | raise cmdutil.UnknownCommand(name) | |
1362 |
|
1377 | |||
1363 | doc = gettext(mod.__doc__) or _('No help text available') |
|
1378 | doc = gettext(mod.__doc__) or _('No help text available') | |
1364 | doc = doc.splitlines(0) |
|
1379 | doc = doc.splitlines(0) | |
1365 | ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0])) |
|
1380 | ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0])) | |
1366 | for d in doc[1:]: |
|
1381 | for d in doc[1:]: | |
1367 | ui.write(d, '\n') |
|
1382 | ui.write(d, '\n') | |
1368 |
|
1383 | |||
1369 | ui.status('\n') |
|
1384 | ui.status('\n') | |
1370 |
|
1385 | |||
1371 | try: |
|
1386 | try: | |
1372 | ct = mod.cmdtable |
|
1387 | ct = mod.cmdtable | |
1373 | except AttributeError: |
|
1388 | except AttributeError: | |
1374 | ct = {} |
|
1389 | ct = {} | |
1375 |
|
1390 | |||
1376 | modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct]) |
|
1391 | modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct]) | |
1377 | helplist(_('list of commands:\n\n'), modcmds.has_key) |
|
1392 | helplist(_('list of commands:\n\n'), modcmds.has_key) | |
1378 |
|
1393 | |||
1379 | if name and name != 'shortlist': |
|
1394 | if name and name != 'shortlist': | |
1380 | i = None |
|
1395 | i = None | |
1381 | for f in (helpcmd, helptopic, helpext): |
|
1396 | for f in (helpcmd, helptopic, helpext): | |
1382 | try: |
|
1397 | try: | |
1383 | f(name) |
|
1398 | f(name) | |
1384 | i = None |
|
1399 | i = None | |
1385 | break |
|
1400 | break | |
1386 | except cmdutil.UnknownCommand, inst: |
|
1401 | except cmdutil.UnknownCommand, inst: | |
1387 | i = inst |
|
1402 | i = inst | |
1388 | if i: |
|
1403 | if i: | |
1389 | raise i |
|
1404 | raise i | |
1390 |
|
1405 | |||
1391 | else: |
|
1406 | else: | |
1392 | # program name |
|
1407 | # program name | |
1393 | if ui.verbose or with_version: |
|
1408 | if ui.verbose or with_version: | |
1394 | version_(ui) |
|
1409 | version_(ui) | |
1395 | else: |
|
1410 | else: | |
1396 | ui.status(_("Mercurial Distributed SCM\n")) |
|
1411 | ui.status(_("Mercurial Distributed SCM\n")) | |
1397 | ui.status('\n') |
|
1412 | ui.status('\n') | |
1398 |
|
1413 | |||
1399 | # list of commands |
|
1414 | # list of commands | |
1400 | if name == "shortlist": |
|
1415 | if name == "shortlist": | |
1401 | header = _('basic commands:\n\n') |
|
1416 | header = _('basic commands:\n\n') | |
1402 | else: |
|
1417 | else: | |
1403 | header = _('list of commands:\n\n') |
|
1418 | header = _('list of commands:\n\n') | |
1404 |
|
1419 | |||
1405 | helplist(header) |
|
1420 | helplist(header) | |
1406 |
|
1421 | |||
1407 | # list all option lists |
|
1422 | # list all option lists | |
1408 | opt_output = [] |
|
1423 | opt_output = [] | |
1409 | for title, options in option_lists: |
|
1424 | for title, options in option_lists: | |
1410 | opt_output.append(("\n%s" % title, None)) |
|
1425 | opt_output.append(("\n%s" % title, None)) | |
1411 | for shortopt, longopt, default, desc in options: |
|
1426 | for shortopt, longopt, default, desc in options: | |
1412 | if "DEPRECATED" in desc and not ui.verbose: continue |
|
1427 | if "DEPRECATED" in desc and not ui.verbose: continue | |
1413 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, |
|
1428 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, | |
1414 | longopt and " --%s" % longopt), |
|
1429 | longopt and " --%s" % longopt), | |
1415 | "%s%s" % (desc, |
|
1430 | "%s%s" % (desc, | |
1416 | default |
|
1431 | default | |
1417 | and _(" (default: %s)") % default |
|
1432 | and _(" (default: %s)") % default | |
1418 | or ""))) |
|
1433 | or ""))) | |
1419 |
|
1434 | |||
1420 | if ui.verbose: |
|
1435 | if ui.verbose: | |
1421 | ui.write(_("\nspecial help topics:\n")) |
|
1436 | ui.write(_("\nspecial help topics:\n")) | |
1422 | topics = [] |
|
1437 | topics = [] | |
1423 | for names, header, doc in help.helptable: |
|
1438 | for names, header, doc in help.helptable: | |
1424 | topics.append((", ".join(names), header)) |
|
1439 | topics.append((", ".join(names), header)) | |
1425 | topics_len = max([len(s[0]) for s in topics]) |
|
1440 | topics_len = max([len(s[0]) for s in topics]) | |
1426 | for t, desc in topics: |
|
1441 | for t, desc in topics: | |
1427 | ui.write(" %-*s %s\n" % (topics_len, t, desc)) |
|
1442 | ui.write(" %-*s %s\n" % (topics_len, t, desc)) | |
1428 |
|
1443 | |||
1429 | if opt_output: |
|
1444 | if opt_output: | |
1430 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) |
|
1445 | opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0]) | |
1431 | for first, second in opt_output: |
|
1446 | for first, second in opt_output: | |
1432 | if second: |
|
1447 | if second: | |
1433 | ui.write(" %-*s %s\n" % (opts_len, first, second)) |
|
1448 | ui.write(" %-*s %s\n" % (opts_len, first, second)) | |
1434 | else: |
|
1449 | else: | |
1435 | ui.write("%s\n" % first) |
|
1450 | ui.write("%s\n" % first) | |
1436 |
|
1451 | |||
1437 | def identify(ui, repo, source=None, |
|
1452 | def identify(ui, repo, source=None, | |
1438 | rev=None, num=None, id=None, branch=None, tags=None): |
|
1453 | rev=None, num=None, id=None, branch=None, tags=None): | |
1439 | """identify the working copy or specified revision |
|
1454 | """identify the working copy or specified revision | |
1440 |
|
1455 | |||
1441 | With no revision, print a summary of the current state of the repo. |
|
1456 | With no revision, print a summary of the current state of the repo. | |
1442 |
|
1457 | |||
1443 | With a path, do a lookup in another repository. |
|
1458 | With a path, do a lookup in another repository. | |
1444 |
|
1459 | |||
1445 | This summary identifies the repository state using one or two parent |
|
1460 | This summary identifies the repository state using one or two parent | |
1446 | hash identifiers, followed by a "+" if there are uncommitted changes |
|
1461 | hash identifiers, followed by a "+" if there are uncommitted changes | |
1447 | in the working directory, a list of tags for this revision and a branch |
|
1462 | in the working directory, a list of tags for this revision and a branch | |
1448 | name for non-default branches. |
|
1463 | name for non-default branches. | |
1449 | """ |
|
1464 | """ | |
1450 |
|
1465 | |||
1451 | if not repo and not source: |
|
1466 | if not repo and not source: | |
1452 | raise util.Abort(_("There is no Mercurial repository here " |
|
1467 | raise util.Abort(_("There is no Mercurial repository here " | |
1453 | "(.hg not found)")) |
|
1468 | "(.hg not found)")) | |
1454 |
|
1469 | |||
1455 | hexfunc = ui.debugflag and hex or short |
|
1470 | hexfunc = ui.debugflag and hex or short | |
1456 | default = not (num or id or branch or tags) |
|
1471 | default = not (num or id or branch or tags) | |
1457 | output = [] |
|
1472 | output = [] | |
1458 |
|
1473 | |||
1459 | if source: |
|
1474 | if source: | |
1460 | source, revs, checkout = hg.parseurl(ui.expandpath(source), []) |
|
1475 | source, revs, checkout = hg.parseurl(ui.expandpath(source), []) | |
1461 | srepo = hg.repository(ui, source) |
|
1476 | srepo = hg.repository(ui, source) | |
1462 | if not rev and revs: |
|
1477 | if not rev and revs: | |
1463 | rev = revs[0] |
|
1478 | rev = revs[0] | |
1464 | if not rev: |
|
1479 | if not rev: | |
1465 | rev = "tip" |
|
1480 | rev = "tip" | |
1466 | if num or branch or tags: |
|
1481 | if num or branch or tags: | |
1467 | raise util.Abort( |
|
1482 | raise util.Abort( | |
1468 | "can't query remote revision number, branch, or tags") |
|
1483 | "can't query remote revision number, branch, or tags") | |
1469 | output = [hexfunc(srepo.lookup(rev))] |
|
1484 | output = [hexfunc(srepo.lookup(rev))] | |
1470 | elif not rev: |
|
1485 | elif not rev: | |
1471 | ctx = repo[None] |
|
1486 | ctx = repo[None] | |
1472 | parents = ctx.parents() |
|
1487 | parents = ctx.parents() | |
1473 | changed = False |
|
1488 | changed = False | |
1474 | if default or id or num: |
|
1489 | if default or id or num: | |
1475 | changed = ctx.files() + ctx.deleted() |
|
1490 | changed = ctx.files() + ctx.deleted() | |
1476 | if default or id: |
|
1491 | if default or id: | |
1477 | output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), |
|
1492 | output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), | |
1478 | (changed) and "+" or "")] |
|
1493 | (changed) and "+" or "")] | |
1479 | if num: |
|
1494 | if num: | |
1480 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), |
|
1495 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), | |
1481 | (changed) and "+" or "")) |
|
1496 | (changed) and "+" or "")) | |
1482 | else: |
|
1497 | else: | |
1483 | ctx = repo[rev] |
|
1498 | ctx = repo[rev] | |
1484 | if default or id: |
|
1499 | if default or id: | |
1485 | output = [hexfunc(ctx.node())] |
|
1500 | output = [hexfunc(ctx.node())] | |
1486 | if num: |
|
1501 | if num: | |
1487 | output.append(str(ctx.rev())) |
|
1502 | output.append(str(ctx.rev())) | |
1488 |
|
1503 | |||
1489 | if not source and default and not ui.quiet: |
|
1504 | if not source and default and not ui.quiet: | |
1490 | b = util.tolocal(ctx.branch()) |
|
1505 | b = util.tolocal(ctx.branch()) | |
1491 | if b != 'default': |
|
1506 | if b != 'default': | |
1492 | output.append("(%s)" % b) |
|
1507 | output.append("(%s)" % b) | |
1493 |
|
1508 | |||
1494 | # multiple tags for a single parent separated by '/' |
|
1509 | # multiple tags for a single parent separated by '/' | |
1495 | t = "/".join(ctx.tags()) |
|
1510 | t = "/".join(ctx.tags()) | |
1496 | if t: |
|
1511 | if t: | |
1497 | output.append(t) |
|
1512 | output.append(t) | |
1498 |
|
1513 | |||
1499 | if branch: |
|
1514 | if branch: | |
1500 | output.append(util.tolocal(ctx.branch())) |
|
1515 | output.append(util.tolocal(ctx.branch())) | |
1501 |
|
1516 | |||
1502 | if tags: |
|
1517 | if tags: | |
1503 | output.extend(ctx.tags()) |
|
1518 | output.extend(ctx.tags()) | |
1504 |
|
1519 | |||
1505 | ui.write("%s\n" % ' '.join(output)) |
|
1520 | ui.write("%s\n" % ' '.join(output)) | |
1506 |
|
1521 | |||
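
identify() picks its node formatter with the old 'cond and a or b' conditional idiom (hexfunc = ui.debugflag and hex or short). A tiny sketch of that idiom with stand-in formatters; it is safe here only because both alternatives are truthy function objects:

debugflag = False

def full(value):
    return value                 # stands in for hex(): the whole id

def short(value):
    return value[:12]            # stands in for short(): first 12 digits

# The pre-"x if c else y" conditional idiom used by identify(): the
# 'and'/'or' chain picks full() when the debug flag is set, short()
# otherwise, and cannot fall through because functions are truthy.
hexfunc = debugflag and full or short
print(hexfunc('4f52a87e3a164d6d8c0e74f9a2b1c3d4e5f60718'))
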
1507 | def import_(ui, repo, patch1, *patches, **opts): |
|
1522 | def import_(ui, repo, patch1, *patches, **opts): | |
1508 | """import an ordered set of patches |
|
1523 | """import an ordered set of patches | |
1509 |
|
1524 | |||
1510 | Import a list of patches and commit them individually. |
|
1525 | Import a list of patches and commit them individually. | |
1511 |
|
1526 | |||
1512 | If there are outstanding changes in the working directory, import |
|
1527 | If there are outstanding changes in the working directory, import | |
1513 | will abort unless given the -f flag. |
|
1528 | will abort unless given the -f flag. | |
1514 |
|
1529 | |||
1515 | You can import a patch straight from a mail message. Even patches |
|
1530 | You can import a patch straight from a mail message. Even patches | |
1516 | as attachments work (body part must be type text/plain or |
|
1531 | as attachments work (body part must be type text/plain or | |
1517 | text/x-patch to be used). From and Subject headers of email |
|
1532 | text/x-patch to be used). From and Subject headers of email | |
1518 | message are used as default committer and commit message. All |
|
1533 | message are used as default committer and commit message. All | |
1519 | text/plain body parts before first diff are added to commit |
|
1534 | text/plain body parts before first diff are added to commit | |
1520 | message. |
|
1535 | message. | |
1521 |
|
1536 | |||
1522 | If the imported patch was generated by hg export, user and description |
|
1537 | If the imported patch was generated by hg export, user and description | |
1523 | from patch override values from message headers and body. Values |
|
1538 | from patch override values from message headers and body. Values | |
1524 | given on command line with -m and -u override these. |
|
1539 | given on command line with -m and -u override these. | |
1525 |
|
1540 | |||
1526 | If --exact is specified, import will set the working directory |
|
1541 | If --exact is specified, import will set the working directory | |
1527 | to the parent of each patch before applying it, and will abort |
|
1542 | to the parent of each patch before applying it, and will abort | |
1528 | if the resulting changeset has a different ID than the one |
|
1543 | if the resulting changeset has a different ID than the one | |
1529 | recorded in the patch. This may happen due to character set |
|
1544 | recorded in the patch. This may happen due to character set | |
1530 | problems or other deficiencies in the text patch format. |
|
1545 | problems or other deficiencies in the text patch format. | |
1531 |
|
1546 | |||
1532 | To read a patch from standard input, use patch name "-". |
|
1547 | To read a patch from standard input, use patch name "-". | |
1533 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
1548 | See 'hg help dates' for a list of formats valid for -d/--date. | |
1534 | """ |
|
1549 | """ | |
1535 | patches = (patch1,) + patches |
|
1550 | patches = (patch1,) + patches | |
1536 |
|
1551 | |||
1537 | date = opts.get('date') |
|
1552 | date = opts.get('date') | |
1538 | if date: |
|
1553 | if date: | |
1539 | opts['date'] = util.parsedate(date) |
|
1554 | opts['date'] = util.parsedate(date) | |
1540 |
|
1555 | |||
1541 | if opts.get('exact') or not opts['force']: |
|
1556 | if opts.get('exact') or not opts.get('force'): | |
1542 | cmdutil.bail_if_changed(repo) |
|
1557 | cmdutil.bail_if_changed(repo) | |
1543 |
|
1558 | |||
1544 | d = opts["base"] |
|
1559 | d = opts["base"] | |
1545 | strip = opts["strip"] |
|
1560 | strip = opts["strip"] | |
1546 | wlock = lock = None |
|
1561 | wlock = lock = None | |
1547 | try: |
|
1562 | try: | |
1548 | wlock = repo.wlock() |
|
1563 | wlock = repo.wlock() | |
1549 | lock = repo.lock() |
|
1564 | lock = repo.lock() | |
1550 | for p in patches: |
|
1565 | for p in patches: | |
1551 | pf = os.path.join(d, p) |
|
1566 | pf = os.path.join(d, p) | |
1552 |
|
1567 | |||
1553 | if pf == '-': |
|
1568 | if pf == '-': | |
1554 | ui.status(_("applying patch from stdin\n")) |
|
1569 | ui.status(_("applying patch from stdin\n")) | |
1555 | data = patch.extract(ui, sys.stdin) |
|
1570 | data = patch.extract(ui, sys.stdin) | |
1556 | else: |
|
1571 | else: | |
1557 | ui.status(_("applying %s\n") % p) |
|
1572 | ui.status(_("applying %s\n") % p) | |
1558 | if os.path.exists(pf): |
|
1573 | if os.path.exists(pf): | |
1559 | data = patch.extract(ui, file(pf, 'rb')) |
|
1574 | data = patch.extract(ui, file(pf, 'rb')) | |
1560 | else: |
|
1575 | else: | |
1561 | data = patch.extract(ui, urllib.urlopen(pf)) |
|
1576 | data = patch.extract(ui, urllib.urlopen(pf)) | |
1562 | tmpname, message, user, date, branch, nodeid, p1, p2 = data |
|
1577 | tmpname, message, user, date, branch, nodeid, p1, p2 = data | |
1563 |
|
1578 | |||
1564 | if tmpname is None: |
|
1579 | if tmpname is None: | |
1565 | raise util.Abort(_('no diffs found')) |
|
1580 | raise util.Abort(_('no diffs found')) | |
1566 |
|
1581 | |||
1567 | try: |
|
1582 | try: | |
1568 | cmdline_message = cmdutil.logmessage(opts) |
|
1583 | cmdline_message = cmdutil.logmessage(opts) | |
1569 | if cmdline_message: |
|
1584 | if cmdline_message: | |
1570 | # pickup the cmdline msg |
|
1585 | # pickup the cmdline msg | |
1571 | message = cmdline_message |
|
1586 | message = cmdline_message | |
1572 | elif message: |
|
1587 | elif message: | |
1573 | # pickup the patch msg |
|
1588 | # pickup the patch msg | |
1574 | message = message.strip() |
|
1589 | message = message.strip() | |
1575 | else: |
|
1590 | else: | |
1576 | # launch the editor |
|
1591 | # launch the editor | |
1577 | message = None |
|
1592 | message = None | |
1578 | ui.debug(_('message:\n%s\n') % message) |
|
1593 | ui.debug(_('message:\n%s\n') % message) | |
1579 |
|
1594 | |||
1580 | wp = repo.parents() |
|
1595 | wp = repo.parents() | |
1581 | if opts.get('exact'): |
|
1596 | if opts.get('exact'): | |
1582 | if not nodeid or not p1: |
|
1597 | if not nodeid or not p1: | |
1583 | raise util.Abort(_('not a mercurial patch')) |
|
1598 | raise util.Abort(_('not a mercurial patch')) | |
1584 | p1 = repo.lookup(p1) |
|
1599 | p1 = repo.lookup(p1) | |
1585 | p2 = repo.lookup(p2 or hex(nullid)) |
|
1600 | p2 = repo.lookup(p2 or hex(nullid)) | |
1586 |
|
1601 | |||
1587 | if p1 != wp[0].node(): |
|
1602 | if p1 != wp[0].node(): | |
1588 | hg.clean(repo, p1) |
|
1603 | hg.clean(repo, p1) | |
1589 | repo.dirstate.setparents(p1, p2) |
|
1604 | repo.dirstate.setparents(p1, p2) | |
1590 | elif p2: |
|
1605 | elif p2: | |
1591 | try: |
|
1606 | try: | |
1592 | p1 = repo.lookup(p1) |
|
1607 | p1 = repo.lookup(p1) | |
1593 | p2 = repo.lookup(p2) |
|
1608 | p2 = repo.lookup(p2) | |
1594 | if p1 == wp[0].node(): |
|
1609 | if p1 == wp[0].node(): | |
1595 | repo.dirstate.setparents(p1, p2) |
|
1610 | repo.dirstate.setparents(p1, p2) | |
1596 | except RepoError: |
|
1611 | except RepoError: | |
1597 | pass |
|
1612 | pass | |
1598 | if opts.get('exact') or opts.get('import_branch'): |
|
1613 | if opts.get('exact') or opts.get('import_branch'): | |
1599 | repo.dirstate.setbranch(branch or 'default') |
|
1614 | repo.dirstate.setbranch(branch or 'default') | |
1600 |
|
1615 | |||
1601 | files = {} |
|
1616 | files = {} | |
1602 | try: |
|
1617 | try: | |
1603 | fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root, |
|
1618 | fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root, | |
1604 | files=files) |
|
1619 | files=files) | |
1605 | finally: |
|
1620 | finally: | |
1606 | files = patch.updatedir(ui, repo, files) |
|
1621 | files = patch.updatedir(ui, repo, files) | |
1607 | if not opts.get('no_commit'): |
|
1622 | if not opts.get('no_commit'): | |
1608 | n = repo.commit(files, message, opts.get('user') or user, |
|
1623 | n = repo.commit(files, message, opts.get('user') or user, | |
1609 | opts.get('date') or date) |
|
1624 | opts.get('date') or date) | |
1610 | if opts.get('exact'): |
|
1625 | if opts.get('exact'): | |
1611 | if hex(n) != nodeid: |
|
1626 | if hex(n) != nodeid: | |
1612 | repo.rollback() |
|
1627 | repo.rollback() | |
1613 | raise util.Abort(_('patch is damaged' |
|
1628 | raise util.Abort(_('patch is damaged' | |
1614 | ' or loses information')) |
|
1629 | ' or loses information')) | |
1615 | # Force a dirstate write so that the next transaction |
|
1630 | # Force a dirstate write so that the next transaction | |
1616 | # backs up an up-to-date file. |
|
1631 | # backs up an up-to-date file. | |
1617 | repo.dirstate.write() |
|
1632 | repo.dirstate.write() | |
1618 | finally: |
|
1633 | finally: | |
1619 | os.unlink(tmpname) |
|
1634 | os.unlink(tmpname) | |
1620 | finally: |
|
1635 | finally: | |
1621 | del lock, wlock |
|
1636 | del lock, wlock | |
1622 |
|
1637 | |||
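
import_() wraps the whole patch loop in nested try/finally blocks and releases the repository locks by deleting the references in the outer finally. A rough sketch of that shape, assuming a hypothetical FakeLock that, like the real locks of this era, releases itself when its last reference goes away:

class FakeLock(object):
    """Invented stand-in for repo.wlock()/repo.lock(); the real locks
    release themselves when their last reference goes away."""
    def __init__(self, name):
        self.name = name
        print('acquired %s' % name)

    def __del__(self):
        print('released %s' % self.name)

def apply_patches():
    wlock = lock = None
    try:
        wlock = FakeLock('wlock')
        lock = FakeLock('lock')
        # ... extract and apply each patch here ...
    finally:
        # Mirrors the outer 'finally: del lock, wlock' above: dropping the
        # references is what releases the locks, even if an error occurred
        # before both were acquired.
        del lock, wlock

apply_patches()
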
1623 | def incoming(ui, repo, source="default", **opts): |
|
1638 | def incoming(ui, repo, source="default", **opts): | |
1624 | """show new changesets found in source |
|
1639 | """show new changesets found in source | |
1625 |
|
1640 | |||
1626 | Show new changesets found in the specified path/URL or the default |
|
1641 | Show new changesets found in the specified path/URL or the default | |
1627 | pull location. These are the changesets that would be pulled if a pull |
|
1642 | pull location. These are the changesets that would be pulled if a pull | |
1628 | was requested. |
|
1643 | was requested. | |
1629 |
|
1644 | |||
1630 | For remote repository, using --bundle avoids downloading the changesets |
|
1645 | For remote repository, using --bundle avoids downloading the changesets | |
1631 | twice if the incoming is followed by a pull. |
|
1646 | twice if the incoming is followed by a pull. | |
1632 |
|
1647 | |||
1633 | See pull for valid source format details. |
|
1648 | See pull for valid source format details. | |
1634 | """ |
|
1649 | """ | |
1635 | limit = cmdutil.loglimit(opts) |
|
1650 | limit = cmdutil.loglimit(opts) | |
1636 | source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev']) |
|
1651 | source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev')) | |
1637 | cmdutil.setremoteconfig(ui, opts) |
|
1652 | cmdutil.setremoteconfig(ui, opts) | |
1638 |
|
1653 | |||
1639 | other = hg.repository(ui, source) |
|
1654 | other = hg.repository(ui, source) | |
1640 | ui.status(_('comparing with %s\n') % util.hidepassword(source)) |
|
1655 | ui.status(_('comparing with %s\n') % util.hidepassword(source)) | |
1641 | if revs: |
|
1656 | if revs: | |
1642 | revs = [other.lookup(rev) for rev in revs] |
|
1657 | revs = [other.lookup(rev) for rev in revs] | |
1643 | incoming = repo.findincoming(other, heads=revs, force=opts["force"]) |
|
1658 | incoming = repo.findincoming(other, heads=revs, force=opts["force"]) | |
1644 | if not incoming: |
|
1659 | if not incoming: | |
1645 | try: |
|
1660 | try: | |
1646 | os.unlink(opts["bundle"]) |
|
1661 | os.unlink(opts["bundle"]) | |
1647 | except: |
|
1662 | except: | |
1648 | pass |
|
1663 | pass | |
1649 | ui.status(_("no changes found\n")) |
|
1664 | ui.status(_("no changes found\n")) | |
1650 | return 1 |
|
1665 | return 1 | |
1651 |
|
1666 | |||
1652 | cleanup = None |
|
1667 | cleanup = None | |
1653 | try: |
|
1668 | try: | |
1654 | fname = opts["bundle"] |
|
1669 | fname = opts["bundle"] | |
1655 | if fname or not other.local(): |
|
1670 | if fname or not other.local(): | |
1656 | # create a bundle (uncompressed if other repo is not local) |
|
1671 | # create a bundle (uncompressed if other repo is not local) | |
1657 | if revs is None: |
|
1672 | if revs is None: | |
1658 | cg = other.changegroup(incoming, "incoming") |
|
1673 | cg = other.changegroup(incoming, "incoming") | |
1659 | else: |
|
1674 | else: | |
1660 | cg = other.changegroupsubset(incoming, revs, 'incoming') |
|
1675 | cg = other.changegroupsubset(incoming, revs, 'incoming') | |
1661 | bundletype = other.local() and "HG10BZ" or "HG10UN" |
|
1676 | bundletype = other.local() and "HG10BZ" or "HG10UN" | |
1662 | fname = cleanup = changegroup.writebundle(cg, fname, bundletype) |
|
1677 | fname = cleanup = changegroup.writebundle(cg, fname, bundletype) | |
1663 | # keep written bundle? |
|
1678 | # keep written bundle? | |
1664 | if opts["bundle"]: |
|
1679 | if opts["bundle"]: | |
1665 | cleanup = None |
|
1680 | cleanup = None | |
1666 | if not other.local(): |
|
1681 | if not other.local(): | |
1667 | # use the created uncompressed bundlerepo |
|
1682 | # use the created uncompressed bundlerepo | |
1668 | other = bundlerepo.bundlerepository(ui, repo.root, fname) |
|
1683 | other = bundlerepo.bundlerepository(ui, repo.root, fname) | |
1669 |
|
1684 | |||
1670 | o = other.changelog.nodesbetween(incoming, revs)[0] |
|
1685 | o = other.changelog.nodesbetween(incoming, revs)[0] | |
1671 | if opts['newest_first']: |
|
1686 | if opts.get('newest_first'): | |
1672 | o.reverse() |
|
1687 | o.reverse() | |
1673 | displayer = cmdutil.show_changeset(ui, other, opts) |
|
1688 | displayer = cmdutil.show_changeset(ui, other, opts) | |
1674 | count = 0 |
|
1689 | count = 0 | |
1675 | for n in o: |
|
1690 | for n in o: | |
1676 | if count >= limit: |
|
1691 | if count >= limit: | |
1677 | break |
|
1692 | break | |
1678 | parents = [p for p in other.changelog.parents(n) if p != nullid] |
|
1693 | parents = [p for p in other.changelog.parents(n) if p != nullid] | |
1679 | if opts['no_merges'] and len(parents) == 2: |
|
1694 | if opts.get('no_merges') and len(parents) == 2: | |
1680 | continue |
|
1695 | continue | |
1681 | count += 1 |
|
1696 | count += 1 | |
1682 | displayer.show(changenode=n) |
|
1697 | displayer.show(changenode=n) | |
1683 | finally: |
|
1698 | finally: | |
1684 | if hasattr(other, 'close'): |
|
1699 | if hasattr(other, 'close'): | |
1685 | other.close() |
|
1700 | other.close() | |
1686 | if cleanup: |
|
1701 | if cleanup: | |
1687 | os.unlink(cleanup) |
|
1702 | os.unlink(cleanup) | |
1688 |
|
1703 | |||
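
incoming() writes a temporary bundle for remote repositories and removes it afterwards unless --bundle asked to keep it, tracking that obligation in a 'cleanup' variable that is reset to None when the file should survive. A simplified sketch of the pattern, using tempfile in place of changegroup.writebundle():

import os, tempfile

def preview(keep_bundle=False):
    # 'keep_bundle' stands in for opts["bundle"]; the temporary file
    # stands in for the bundle written by changegroup.writebundle().
    cleanup = None
    try:
        fd, fname = tempfile.mkstemp(suffix='.hg')
        os.close(fd)
        cleanup = fname            # assume the file is ours to delete...
        if keep_bundle:
            cleanup = None         # ...unless the caller asked to keep it
        # ... read the incoming changesets out of 'fname' here ...
        return fname
    finally:
        if cleanup:
            os.unlink(cleanup)

print(os.path.exists(preview(keep_bundle=False)))   # False: cleaned up
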
1689 | def init(ui, dest=".", **opts): |
|
1704 | def init(ui, dest=".", **opts): | |
1690 | """create a new repository in the given directory |
|
1705 | """create a new repository in the given directory | |
1691 |
|
1706 | |||
1692 | Initialize a new repository in the given directory. If the given |
|
1707 | Initialize a new repository in the given directory. If the given | |
1693 | directory does not exist, it is created. |
|
1708 | directory does not exist, it is created. | |
1694 |
|
1709 | |||
1695 | If no directory is given, the current directory is used. |
|
1710 | If no directory is given, the current directory is used. | |
1696 |
|
1711 | |||
1697 | It is possible to specify an ssh:// URL as the destination. |
|
1712 | It is possible to specify an ssh:// URL as the destination. | |
1698 | Look at the help text for the pull command for important details |
|
1713 | Look at the help text for the pull command for important details | |
1699 | about ssh:// URLs. |
|
1714 | about ssh:// URLs. | |
1700 | """ |
|
1715 | """ | |
1701 | cmdutil.setremoteconfig(ui, opts) |
|
1716 | cmdutil.setremoteconfig(ui, opts) | |
1702 | hg.repository(ui, dest, create=1) |
|
1717 | hg.repository(ui, dest, create=1) | |
1703 |
|
1718 | |||
1704 | def locate(ui, repo, *pats, **opts): |
|
1719 | def locate(ui, repo, *pats, **opts): | |
1705 | """locate files matching specific patterns |
|
1720 | """locate files matching specific patterns | |
1706 |
|
1721 | |||
1707 | Print all files under Mercurial control whose names match the |
|
1722 | Print all files under Mercurial control whose names match the | |
1708 | given patterns. |
|
1723 | given patterns. | |
1709 |
|
1724 | |||
1710 | This command searches the entire repository by default. To search |
|
1725 | This command searches the entire repository by default. To search | |
1711 | just the current directory and its subdirectories, use |
|
1726 | just the current directory and its subdirectories, use | |
1712 | "--include .". |
|
1727 | "--include .". | |
1713 |
|
1728 | |||
1714 | If no patterns are given to match, this command prints all file |
|
1729 | If no patterns are given to match, this command prints all file | |
1715 | names. |
|
1730 | names. | |
1716 |
|
1731 | |||
1717 | If you want to feed the output of this command into the "xargs" |
|
1732 | If you want to feed the output of this command into the "xargs" | |
1718 | command, use the "-0" option to both this command and "xargs". |
|
1733 | command, use the "-0" option to both this command and "xargs". | |
1719 | This will avoid the problem of "xargs" treating single filenames |
|
1734 | This will avoid the problem of "xargs" treating single filenames | |
1720 | that contain white space as multiple filenames. |
|
1735 | that contain white space as multiple filenames. | |
1721 | """ |
|
1736 | """ | |
1722 | end = opts['print0'] and '\0' or '\n' |
|
1737 | end = opts.get('print0') and '\0' or '\n' | |
1723 | rev = opts.get('rev') or None |
|
1738 | rev = opts.get('rev') or None | |
1724 |
|
1739 | |||
1725 | ret = 1 |
|
1740 | ret = 1 | |
1726 | m = cmdutil.match(repo, pats, opts, default='relglob') |
|
1741 | m = cmdutil.match(repo, pats, opts, default='relglob') | |
1727 | m.bad = lambda x,y: False |
|
1742 | m.bad = lambda x,y: False | |
1728 | for abs in repo[rev].walk(m): |
|
1743 | for abs in repo[rev].walk(m): | |
1729 | if not rev and abs not in repo.dirstate: |
|
1744 | if not rev and abs not in repo.dirstate: | |
1730 | continue |
|
1745 | continue | |
1731 | if opts['fullpath']: |
|
1746 | if opts.get('fullpath'): | |
1732 | ui.write(os.path.join(repo.root, abs), end) |
|
1747 | ui.write(os.path.join(repo.root, abs), end) | |
1733 | else: |
|
1748 | else: | |
1734 | ui.write(((pats and m.rel(abs)) or abs), end) |
|
1749 | ui.write(((pats and m.rel(abs)) or abs), end) | |
1735 | ret = 0 |
|
1750 | ret = 0 | |
1736 |
|
1751 | |||
1737 | return ret |
|
1752 | return ret | |
1738 |
|
1753 | |||
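
locate() chooses its record terminator with 'end = opts.get("print0") and "\0" or "\n"', so -0 output can be piped to 'xargs -0' even when file names contain whitespace. A minimal sketch of the same idea, with a made-up emit() helper:

import sys

def emit(names, print0=False):
    # Same trick as in locate(): NUL-terminated output lets 'xargs -0'
    # handle names containing whitespace safely.
    end = print0 and '\0' or '\n'
    for name in names:
        sys.stdout.write(name + end)

emit(['README', 'name with spaces.txt'])
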
1739 | def log(ui, repo, *pats, **opts): |
|
1754 | def log(ui, repo, *pats, **opts): | |
1740 | """show revision history of entire repository or files |
|
1755 | """show revision history of entire repository or files | |
1741 |
|
1756 | |||
1742 | Print the revision history of the specified files or the entire |
|
1757 | Print the revision history of the specified files or the entire | |
1743 | project. |
|
1758 | project. | |
1744 |
|
1759 | |||
1745 | File history is shown without following rename or copy history of |
|
1760 | File history is shown without following rename or copy history of | |
1746 | files. Use -f/--follow with a file name to follow history across |
|
1761 | files. Use -f/--follow with a file name to follow history across | |
1747 | renames and copies. --follow without a file name will only show |
|
1762 | renames and copies. --follow without a file name will only show | |
1748 | ancestors or descendants of the starting revision. --follow-first |
|
1763 | ancestors or descendants of the starting revision. --follow-first | |
1749 | only follows the first parent of merge revisions. |
|
1764 | only follows the first parent of merge revisions. | |
1750 |
|
1765 | |||
1751 | If no revision range is specified, the default is tip:0 unless |
|
1766 | If no revision range is specified, the default is tip:0 unless | |
1752 | --follow is set, in which case the working directory parent is |
|
1767 | --follow is set, in which case the working directory parent is | |
1753 | used as the starting revision. |
|
1768 | used as the starting revision. | |
1754 |
|
1769 | |||
1755 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
1770 | See 'hg help dates' for a list of formats valid for -d/--date. | |
1756 |
|
1771 | |||
1757 | By default this command outputs: changeset id and hash, tags, |
|
1772 | By default this command outputs: changeset id and hash, tags, | |
1758 | non-trivial parents, user, date and time, and a summary for each |
|
1773 | non-trivial parents, user, date and time, and a summary for each | |
1759 | commit. When the -v/--verbose switch is used, the list of changed |
|
1774 | commit. When the -v/--verbose switch is used, the list of changed | |
1760 | files and full commit message is shown. |
|
1775 | files and full commit message is shown. | |
1761 |
|
1776 | |||
1762 | NOTE: log -p may generate unexpected diff output for merge |
|
1777 | NOTE: log -p may generate unexpected diff output for merge | |
1763 | changesets, as it will compare the merge changeset against its |
|
1778 | changesets, as it will compare the merge changeset against its | |
1764 | first parent only. Also, the files: list will only reflect files |
|
1779 | first parent only. Also, the files: list will only reflect files | |
1765 | that are different from BOTH parents. |
|
1780 | that are different from BOTH parents. | |
1766 |
|
1781 | |||
1767 | """ |
|
1782 | """ | |
1768 |
|
1783 | |||
1769 | get = util.cachefunc(lambda r: repo[r].changeset()) |
|
1784 | get = util.cachefunc(lambda r: repo[r].changeset()) | |
1770 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) |
|
1785 | changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) | |
1771 |
|
1786 | |||
1772 | limit = cmdutil.loglimit(opts) |
|
1787 | limit = cmdutil.loglimit(opts) | |
1773 | count = 0 |
|
1788 | count = 0 | |
1774 |
|
1789 | |||
1775 | if opts['copies'] and opts['rev']: |
|
1790 | if opts.get('copies') and opts.get('rev'): | |
1776 | endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1 |
|
1791 | endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1 | |
1777 | else: |
|
1792 | else: | |
1778 | endrev = len(repo) |
|
1793 | endrev = len(repo) | |
1779 | rcache = {} |
|
1794 | rcache = {} | |
1780 | ncache = {} |
|
1795 | ncache = {} | |
1781 | def getrenamed(fn, rev): |
|
1796 | def getrenamed(fn, rev): | |
1782 | '''looks up all renames for a file (up to endrev) the first |
|
1797 | '''looks up all renames for a file (up to endrev) the first | |
1783 | time the file is given. It indexes on the changerev and only |
|
1798 | time the file is given. It indexes on the changerev and only | |
1784 | parses the manifest if linkrev != changerev. |
|
1799 | parses the manifest if linkrev != changerev. | |
1785 | Returns rename info for fn at changerev rev.''' |
|
1800 | Returns rename info for fn at changerev rev.''' | |
1786 | if fn not in rcache: |
|
1801 | if fn not in rcache: | |
1787 | rcache[fn] = {} |
|
1802 | rcache[fn] = {} | |
1788 | ncache[fn] = {} |
|
1803 | ncache[fn] = {} | |
1789 | fl = repo.file(fn) |
|
1804 | fl = repo.file(fn) | |
1790 | for i in fl: |
|
1805 | for i in fl: | |
1791 | node = fl.node(i) |
|
1806 | node = fl.node(i) | |
1792 | lr = fl.linkrev(node) |
|
1807 | lr = fl.linkrev(node) | |
1793 | renamed = fl.renamed(node) |
|
1808 | renamed = fl.renamed(node) | |
1794 | rcache[fn][lr] = renamed |
|
1809 | rcache[fn][lr] = renamed | |
1795 | if renamed: |
|
1810 | if renamed: | |
1796 | ncache[fn][node] = renamed |
|
1811 | ncache[fn][node] = renamed | |
1797 | if lr >= endrev: |
|
1812 | if lr >= endrev: | |
1798 | break |
|
1813 | break | |
1799 | if rev in rcache[fn]: |
|
1814 | if rev in rcache[fn]: | |
1800 | return rcache[fn][rev] |
|
1815 | return rcache[fn][rev] | |
1801 |
|
1816 | |||
1802 | # If linkrev != rev (i.e. rev not found in rcache) fallback to |
|
1817 | # If linkrev != rev (i.e. rev not found in rcache) fallback to | |
1803 | # filectx logic. |
|
1818 | # filectx logic. | |
1804 |
|
1819 | |||
1805 | try: |
|
1820 | try: | |
1806 | return repo[rev][fn].renamed() |
|
1821 | return repo[rev][fn].renamed() | |
1807 | except revlog.LookupError: |
|
1822 | except revlog.LookupError: | |
1808 | pass |
|
1823 | pass | |
1809 | return None |
|
1824 | return None | |
1810 |
|
1825 | |||
1811 | df = False |
|
1826 | df = False | |
1812 | if opts["date"]: |
|
1827 | if opts["date"]: | |
1813 | df = util.matchdate(opts["date"]) |
|
1828 | df = util.matchdate(opts["date"]) | |
1814 |
|
1829 | |||
1815 | only_branches = opts['only_branch'] |
|
1830 | only_branches = opts.get('only_branch') | |
1816 |
|
1831 | |||
1817 | displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn) |
|
1832 | displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn) | |
1818 | for st, rev, fns in changeiter: |
|
1833 | for st, rev, fns in changeiter: | |
1819 | if st == 'add': |
|
1834 | if st == 'add': | |
1820 | changenode = repo.changelog.node(rev) |
|
1835 | changenode = repo.changelog.node(rev) | |
1821 | parents = [p for p in repo.changelog.parentrevs(rev) |
|
1836 | parents = [p for p in repo.changelog.parentrevs(rev) | |
1822 | if p != nullrev] |
|
1837 | if p != nullrev] | |
1823 | if opts['no_merges'] and len(parents) == 2: |
|
1838 | if opts.get('no_merges') and len(parents) == 2: | |
1824 | continue |
|
1839 | continue | |
1825 | if opts['only_merges'] and len(parents) != 2: |
|
1840 | if opts.get('only_merges') and len(parents) != 2: | |
1826 | continue |
|
1841 | continue | |
1827 |
|
1842 | |||
1828 | if only_branches: |
|
1843 | if only_branches: | |
1829 | revbranch = get(rev)[5]['branch'] |
|
1844 | revbranch = get(rev)[5]['branch'] | |
1830 | if revbranch not in only_branches: |
|
1845 | if revbranch not in only_branches: | |
1831 | continue |
|
1846 | continue | |
1832 |
|
1847 | |||
1833 | if df: |
|
1848 | if df: | |
1834 | changes = get(rev) |
|
1849 | changes = get(rev) | |
1835 | if not df(changes[2][0]): |
|
1850 | if not df(changes[2][0]): | |
1836 | continue |
|
1851 | continue | |
1837 |
|
1852 | |||
1838 | if opts['keyword']: |
|
1853 | if opts.get('keyword'): | |
1839 | changes = get(rev) |
|
1854 | changes = get(rev) | |
1840 | miss = 0 |
|
1855 | miss = 0 | |
1841 | for k in [kw.lower() for kw in opts['keyword']]: |
|
1856 | for k in [kw.lower() for kw in opts['keyword']]: | |
1842 | if not (k in changes[1].lower() or |
|
1857 | if not (k in changes[1].lower() or | |
1843 | k in changes[4].lower() or |
|
1858 | k in changes[4].lower() or | |
1844 | k in " ".join(changes[3]).lower()): |
|
1859 | k in " ".join(changes[3]).lower()): | |
1845 | miss = 1 |
|
1860 | miss = 1 | |
1846 | break |
|
1861 | break | |
1847 | if miss: |
|
1862 | if miss: | |
1848 | continue |
|
1863 | continue | |
1849 |
|
1864 | |||
1850 | copies = [] |
|
1865 | copies = [] | |
1851 | if opts.get('copies') and rev: |
|
1866 | if opts.get('copies') and rev: | |
1852 | for fn in get(rev)[3]: |
|
1867 | for fn in get(rev)[3]: | |
1853 | rename = getrenamed(fn, rev) |
|
1868 | rename = getrenamed(fn, rev) | |
1854 | if rename: |
|
1869 | if rename: | |
1855 | copies.append((fn, rename[0])) |
|
1870 | copies.append((fn, rename[0])) | |
1856 | displayer.show(rev, changenode, copies=copies) |
|
1871 | displayer.show(rev, changenode, copies=copies) | |
1857 | elif st == 'iter': |
|
1872 | elif st == 'iter': | |
1858 | if count == limit: break |
|
1873 | if count == limit: break | |
1859 | if displayer.flush(rev): |
|
1874 | if displayer.flush(rev): | |
1860 | count += 1 |
|
1875 | count += 1 | |
1861 |
|
1876 | |||
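
getrenamed() in the log hunk above builds a per-file rename cache the first time a file is asked about, then answers later queries from that cache. A stripped-down sketch of the memoization, with an invented fake_filelog standing in for repo.file(fn):

# 'fake_filelog' is an invented stand-in for the per-file revlog that
# repo.file(fn) returns; it maps changelog revision -> rename info.
fake_filelog = {'a.txt': {0: None, 2: ('old.txt', 'deadbeef')}}

rcache = {}

def getrenamed(fn, rev):
    if fn not in rcache:
        # First lookup for this file: walk its history once and cache it.
        rcache[fn] = dict(fake_filelog.get(fn, {}))
    return rcache[fn].get(rev)

print(getrenamed('a.txt', 2))   # ('old.txt', 'deadbeef')
print(getrenamed('a.txt', 1))   # None: no rename recorded at that rev
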
1862 | def manifest(ui, repo, node=None, rev=None): |
|
1877 | def manifest(ui, repo, node=None, rev=None): | |
1863 | """output the current or given revision of the project manifest |
|
1878 | """output the current or given revision of the project manifest | |
1864 |
|
1879 | |||
1865 | Print a list of version controlled files for the given revision. |
|
1880 | Print a list of version controlled files for the given revision. | |
1866 | If no revision is given, the parent of the working directory is used, |
|
1881 | If no revision is given, the parent of the working directory is used, | |
1867 | or tip if no revision is checked out. |
|
1882 | or tip if no revision is checked out. | |
1868 |
|
1883 | |||
1869 | The manifest is the list of files being version controlled. If no revision |
|
1884 | The manifest is the list of files being version controlled. If no revision | |
1870 | is given then the first parent of the working directory is used. |
|
1885 | is given then the first parent of the working directory is used. | |
1871 |
|
1886 | |||
1872 | With -v flag, print file permissions, symlink and executable bits. With |
|
1887 | With -v flag, print file permissions, symlink and executable bits. With | |
1873 | --debug flag, print file revision hashes. |
|
1888 | --debug flag, print file revision hashes. | |
1874 | """ |
|
1889 | """ | |
1875 |
|
1890 | |||
1876 | if rev and node: |
|
1891 | if rev and node: | |
1877 | raise util.Abort(_("please specify just one revision")) |
|
1892 | raise util.Abort(_("please specify just one revision")) | |
1878 |
|
1893 | |||
1879 | if not node: |
|
1894 | if not node: | |
1880 | node = rev |
|
1895 | node = rev | |
1881 |
|
1896 | |||
1882 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} |
|
1897 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} | |
1883 | ctx = repo[node] |
|
1898 | ctx = repo[node] | |
1884 | for f in ctx: |
|
1899 | for f in ctx: | |
1885 | if ui.debugflag: |
|
1900 | if ui.debugflag: | |
1886 | ui.write("%40s " % hex(ctx.manifest()[f])) |
|
1901 | ui.write("%40s " % hex(ctx.manifest()[f])) | |
1887 | if ui.verbose: |
|
1902 | if ui.verbose: | |
1888 | ui.write(decor[ctx.flags(f)]) |
|
1903 | ui.write(decor[ctx.flags(f)]) | |
1889 | ui.write("%s\n" % f) |
|
1904 | ui.write("%s\n" % f) | |
1890 |
|
1905 | |||
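
manifest() prints permissions in verbose mode by looking each file's flag character up in a small 'decor' table ('l' for symlinks, 'x' for executables, '' for plain files). The same table applied to a made-up file list:

# Flag-to-prefix table as in manifest() above; the file list is invented.
decor = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
files = [('bin/hg', 'x'), ('README', ''), ('current', 'l')]

for name, flag in files:
    print('%s%s' % (decor[flag], name))
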
1891 | def merge(ui, repo, node=None, force=None, rev=None): |
|
1906 | def merge(ui, repo, node=None, force=None, rev=None): | |
1892 | """merge working directory with another revision |
|
1907 | """merge working directory with another revision | |
1893 |
|
1908 | |||
1894 | Merge the contents of the current working directory and the |
|
1909 | Merge the contents of the current working directory and the | |
1895 | requested revision. Files that changed between either parent are |
|
1910 | requested revision. Files that changed between either parent are | |
1896 | marked as changed for the next commit and a commit must be |
|
1911 | marked as changed for the next commit and a commit must be | |
1897 | performed before any further updates are allowed. |
|
1912 | performed before any further updates are allowed. | |
1898 |
|
1913 | |||
1899 | If no revision is specified, the working directory's parent is a |
|
1914 | If no revision is specified, the working directory's parent is a | |
1900 | head revision, and the current branch contains exactly one other head, |
|
1915 | head revision, and the current branch contains exactly one other head, | |
1901 | the other head is merged with by default. Otherwise, an explicit |
|
1916 | the other head is merged with by default. Otherwise, an explicit | |
1902 | revision to merge with must be provided. |
|
1917 | revision to merge with must be provided. | |
1903 | """ |
|
1918 | """ | |
1904 |
|
1919 | |||
1905 | if rev and node: |
|
1920 | if rev and node: | |
1906 | raise util.Abort(_("please specify just one revision")) |
|
1921 | raise util.Abort(_("please specify just one revision")) | |
1907 | if not node: |
|
1922 | if not node: | |
1908 | node = rev |
|
1923 | node = rev | |
1909 |
|
1924 | |||
1910 | if not node: |
|
1925 | if not node: | |
1911 | branch = repo.changectx(None).branch() |
|
1926 | branch = repo.changectx(None).branch() | |
1912 | bheads = repo.branchheads(branch) |
|
1927 | bheads = repo.branchheads(branch) | |
1913 | if len(bheads) > 2: |
|
1928 | if len(bheads) > 2: | |
1914 | raise util.Abort(_("branch '%s' has %d heads - " |
|
1929 | raise util.Abort(_("branch '%s' has %d heads - " | |
1915 | "please merge with an explicit rev") % |
|
1930 | "please merge with an explicit rev") % | |
1916 | (branch, len(bheads))) |
|
1931 | (branch, len(bheads))) | |
1917 |
|
1932 | |||
1918 | parent = repo.dirstate.parents()[0] |
|
1933 | parent = repo.dirstate.parents()[0] | |
1919 | if len(bheads) == 1: |
|
1934 | if len(bheads) == 1: | |
1920 | if len(repo.heads()) > 1: |
|
1935 | if len(repo.heads()) > 1: | |
1921 | raise util.Abort(_("branch '%s' has one head - " |
|
1936 | raise util.Abort(_("branch '%s' has one head - " | |
1922 | "please merge with an explicit rev") % |
|
1937 | "please merge with an explicit rev") % | |
1923 | branch) |
|
1938 | branch) | |
1924 | msg = _('there is nothing to merge') |
|
1939 | msg = _('there is nothing to merge') | |
1925 | if parent != repo.lookup(repo[None].branch()): |
|
1940 | if parent != repo.lookup(repo[None].branch()): | |
1926 | msg = _('%s - use "hg update" instead') % msg |
|
1941 | msg = _('%s - use "hg update" instead') % msg | |
1927 | raise util.Abort(msg) |
|
1942 | raise util.Abort(msg) | |
1928 |
|
1943 | |||
1929 | if parent not in bheads: |
|
1944 | if parent not in bheads: | |
1930 | raise util.Abort(_('working dir not at a head rev - ' |
|
1945 | raise util.Abort(_('working dir not at a head rev - ' | |
1931 | 'use "hg update" or merge with an explicit rev')) |
|
1946 | 'use "hg update" or merge with an explicit rev')) | |
1932 | node = parent == bheads[0] and bheads[-1] or bheads[0] |
|
1947 | node = parent == bheads[0] and bheads[-1] or bheads[0] | |
1933 | return hg.merge(repo, node, force=force) |
|
1948 | return hg.merge(repo, node, force=force) | |
1934 |
|
1949 | |||
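
When no revision is given, merge() selects "the other head" of the current branch with the same and/or idiom: if the working directory parent is the first branch head it takes the last, otherwise the first. A sketch with a hypothetical two-head branch:

# Hypothetical two-head branch; the strings stand in for changeset nodes.
bheads = ['a1b2c3', 'd4e5f6']

for parent in bheads:
    # Same selection as the merge() hunk above.
    node = parent == bheads[0] and bheads[-1] or bheads[0]
    print('parent %s merges with %s' % (parent, node))
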
1935 | def outgoing(ui, repo, dest=None, **opts): |
|
1950 | def outgoing(ui, repo, dest=None, **opts): | |
1936 | """show changesets not found in destination |
|
1951 | """show changesets not found in destination | |
1937 |
|
1952 | |||
1938 | Show changesets not found in the specified destination repository or |
|
1953 | Show changesets not found in the specified destination repository or | |
1939 | the default push location. These are the changesets that would be pushed |
|
1954 | the default push location. These are the changesets that would be pushed | |
1940 | if a push was requested. |
|
1955 | if a push was requested. | |
1941 |
|
1956 | |||
1942 | See pull for valid destination format details. |
|
1957 | See pull for valid destination format details. | |
1943 | """ |
|
1958 | """ | |
1944 | limit = cmdutil.loglimit(opts) |
|
1959 | limit = cmdutil.loglimit(opts) | |
1945 | dest, revs, checkout = hg.parseurl( |
|
1960 | dest, revs, checkout = hg.parseurl( | |
1946 |
ui.expandpath(dest or 'default-push', dest or 'default'), opts |
|
1961 | ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev')) | |
1947 | cmdutil.setremoteconfig(ui, opts) |
|
1962 | cmdutil.setremoteconfig(ui, opts) | |
1948 | if revs: |
|
1963 | if revs: | |
1949 | revs = [repo.lookup(rev) for rev in revs] |
|
1964 | revs = [repo.lookup(rev) for rev in revs] | |
1950 |
|
1965 | |||
1951 | other = hg.repository(ui, dest) |
|
1966 | other = hg.repository(ui, dest) | |
1952 | ui.status(_('comparing with %s\n') % util.hidepassword(dest)) |
|
1967 | ui.status(_('comparing with %s\n') % util.hidepassword(dest)) | |
1953 |
o = repo.findoutgoing(other, force=opts |
|
1968 | o = repo.findoutgoing(other, force=opts.get('force')) | |
1954 | if not o: |
|
1969 | if not o: | |
1955 | ui.status(_("no changes found\n")) |
|
1970 | ui.status(_("no changes found\n")) | |
1956 | return 1 |
|
1971 | return 1 | |
1957 | o = repo.changelog.nodesbetween(o, revs)[0] |
|
1972 | o = repo.changelog.nodesbetween(o, revs)[0] | |
1958 | if opts['newest_first']: |
|
1973 | if opts.get('newest_first'): | |
1959 | o.reverse() |
|
1974 | o.reverse() | |
1960 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
1975 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
1961 | count = 0 |
|
1976 | count = 0 | |
1962 | for n in o: |
|
1977 | for n in o: | |
1963 | if count >= limit: |
|
1978 | if count >= limit: | |
1964 | break |
|
1979 | break | |
1965 | parents = [p for p in repo.changelog.parents(n) if p != nullid] |
|
1980 | parents = [p for p in repo.changelog.parents(n) if p != nullid] | |
1966 | if opts['no_merges'] and len(parents) == 2: |
|
1981 | if opts.get('no_merges') and len(parents) == 2: | |
1967 | continue |
|
1982 | continue | |
1968 | count += 1 |
|
1983 | count += 1 | |
1969 | displayer.show(changenode=n) |
|
1984 | displayer.show(changenode=n) | |
1970 |
|
1985 | |||
1971 | def parents(ui, repo, file_=None, **opts): |
|
1986 | def parents(ui, repo, file_=None, **opts): | |
1972 | """show the parents of the working dir or revision |
|
1987 | """show the parents of the working dir or revision | |
1973 |
|
1988 | |||
1974 | Print the working directory's parent revisions. If a |
|
1989 | Print the working directory's parent revisions. If a | |
1975 | revision is given via --rev, the parent of that revision |
|
1990 | revision is given via --rev, the parent of that revision | |
1976 | will be printed. If a file argument is given, revision in |
|
1991 | will be printed. If a file argument is given, revision in | |
1977 | which the file was last changed (before the working directory |
|
1992 | which the file was last changed (before the working directory | |
1978 | revision or the argument to --rev if given) is printed. |
|
1993 | revision or the argument to --rev if given) is printed. | |
1979 | """ |
|
1994 | """ | |
1980 | rev = opts.get('rev') |
|
1995 | rev = opts.get('rev') | |
1981 | if rev: |
|
1996 | if rev: | |
1982 | ctx = repo[rev] |
|
1997 | ctx = repo[rev] | |
1983 | else: |
|
1998 | else: | |
1984 | ctx = repo[None] |
|
1999 | ctx = repo[None] | |
1985 |
|
2000 | |||
1986 | if file_: |
|
2001 | if file_: | |
1987 | m = cmdutil.match(repo, (file_,), opts) |
|
2002 | m = cmdutil.match(repo, (file_,), opts) | |
1988 | if m.anypats() or len(m.files()) != 1: |
|
2003 | if m.anypats() or len(m.files()) != 1: | |
1989 | raise util.Abort(_('can only specify an explicit file name')) |
|
2004 | raise util.Abort(_('can only specify an explicit file name')) | |
1990 | file_ = m.files()[0] |
|
2005 | file_ = m.files()[0] | |
1991 | filenodes = [] |
|
2006 | filenodes = [] | |
1992 | for cp in ctx.parents(): |
|
2007 | for cp in ctx.parents(): | |
1993 | if not cp: |
|
2008 | if not cp: | |
1994 | continue |
|
2009 | continue | |
1995 | try: |
|
2010 | try: | |
1996 | filenodes.append(cp.filenode(file_)) |
|
2011 | filenodes.append(cp.filenode(file_)) | |
1997 | except revlog.LookupError: |
|
2012 | except revlog.LookupError: | |
1998 | pass |
|
2013 | pass | |
1999 | if not filenodes: |
|
2014 | if not filenodes: | |
2000 | raise util.Abort(_("'%s' not found in manifest!") % file_) |
|
2015 | raise util.Abort(_("'%s' not found in manifest!") % file_) | |
2001 | fl = repo.file(file_) |
|
2016 | fl = repo.file(file_) | |
2002 | p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes] |
|
2017 | p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes] | |
2003 | else: |
|
2018 | else: | |
2004 | p = [cp.node() for cp in ctx.parents()] |
|
2019 | p = [cp.node() for cp in ctx.parents()] | |
2005 |
|
2020 | |||
2006 | displayer = cmdutil.show_changeset(ui, repo, opts) |
|
2021 | displayer = cmdutil.show_changeset(ui, repo, opts) | |
2007 | for n in p: |
|
2022 | for n in p: | |
2008 | if n != nullid: |
|
2023 | if n != nullid: | |
2009 | displayer.show(changenode=n) |
|
2024 | displayer.show(changenode=n) | |
2010 |
|
2025 | |||
2011 | def paths(ui, repo, search=None): |
|
2026 | def paths(ui, repo, search=None): | |
2012 | """show definition of symbolic path names |
|
2027 | """show definition of symbolic path names | |
2013 |
|
2028 | |||
2014 | Show definition of symbolic path name NAME. If no name is given, show |
|
2029 | Show definition of symbolic path name NAME. If no name is given, show | |
2015 | definition of available names. |
|
2030 | definition of available names. | |
2016 |
|
2031 | |||
2017 | Path names are defined in the [paths] section of /etc/mercurial/hgrc |
|
2032 | Path names are defined in the [paths] section of /etc/mercurial/hgrc | |
2018 | and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too. |
|
2033 | and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too. | |
2019 | """ |
|
2034 | """ | |
2020 | if search: |
|
2035 | if search: | |
2021 | for name, path in ui.configitems("paths"): |
|
2036 | for name, path in ui.configitems("paths"): | |
2022 | if name == search: |
|
2037 | if name == search: | |
2023 | ui.write("%s\n" % util.hidepassword(path)) |
|
2038 | ui.write("%s\n" % util.hidepassword(path)) | |
2024 | return |
|
2039 | return | |
2025 | ui.warn(_("not found!\n")) |
|
2040 | ui.warn(_("not found!\n")) | |
2026 | return 1 |
|
2041 | return 1 | |
2027 | else: |
|
2042 | else: | |
2028 | for name, path in ui.configitems("paths"): |
|
2043 | for name, path in ui.configitems("paths"): | |
2029 | ui.write("%s = %s\n" % (name, util.hidepassword(path))) |
|
2044 | ui.write("%s = %s\n" % (name, util.hidepassword(path))) | |
2030 |
|
2045 | |||
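
The [paths] section described above is ordinary ini-style configuration. As a rough illustration of the format only (Mercurial has its own config reader, and the repository URLs below are invented), the stdlib parser can read an equivalent snippet:

    from configparser import ConfigParser

    # Invented example of a [paths] section from an hgrc file.
    SAMPLE_HGRC = ("[paths]\n"
                   "default = https://example.com/repo\n"
                   "default-push = ssh://hg@example.com/repo\n")

    cfg = ConfigParser()
    cfg.read_string(SAMPLE_HGRC)
    for name, path in cfg.items('paths'):
        print('%s = %s' % (name, path))
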
2031 | def postincoming(ui, repo, modheads, optupdate, checkout): |
|
2046 | def postincoming(ui, repo, modheads, optupdate, checkout): | |
2032 | if modheads == 0: |
|
2047 | if modheads == 0: | |
2033 | return |
|
2048 | return | |
2034 | if optupdate: |
|
2049 | if optupdate: | |
2035 | if modheads <= 1 or checkout: |
|
2050 | if modheads <= 1 or checkout: | |
2036 | return hg.update(repo, checkout) |
|
2051 | return hg.update(repo, checkout) | |
2037 | else: |
|
2052 | else: | |
2038 | ui.status(_("not updating, since new heads added\n")) |
|
2053 | ui.status(_("not updating, since new heads added\n")) | |
2039 | if modheads > 1: |
|
2054 | if modheads > 1: | |
2040 | ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) |
|
2055 | ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) | |
2041 | else: |
|
2056 | else: | |
2042 | ui.status(_("(run 'hg update' to get a working copy)\n")) |
|
2057 | ui.status(_("(run 'hg update' to get a working copy)\n")) | |
2043 |
|
2058 | |||
2044 | def pull(ui, repo, source="default", **opts): |
|
2059 | def pull(ui, repo, source="default", **opts): | |
2045 | """pull changes from the specified source |
|
2060 | """pull changes from the specified source | |
2046 |
|
2061 | |||
2047 | Pull changes from a remote repository to a local one. |
|
2062 | Pull changes from a remote repository to a local one. | |
2048 |
|
2063 | |||
2049 | This finds all changes from the repository at the specified path |
|
2064 | This finds all changes from the repository at the specified path | |
2050 | or URL and adds them to the local repository. By default, this |
|
2065 | or URL and adds them to the local repository. By default, this | |
2051 | does not update the copy of the project in the working directory. |
|
2066 | does not update the copy of the project in the working directory. | |
2052 |
|
2067 | |||
2053 | Valid URLs are of the form: |
|
2068 | Valid URLs are of the form: | |
2054 |
|
2069 | |||
2055 | local/filesystem/path (or file://local/filesystem/path) |
|
2070 | local/filesystem/path (or file://local/filesystem/path) | |
2056 | http://[user[:pass]@]host[:port]/[path] |
|
2071 | http://[user[:pass]@]host[:port]/[path] | |
2057 | https://[user[:pass]@]host[:port]/[path] |
|
2072 | https://[user[:pass]@]host[:port]/[path] | |
2058 | ssh://[user[:pass]@]host[:port]/[path] |
|
2073 | ssh://[user[:pass]@]host[:port]/[path] | |
2059 | static-http://host[:port]/[path] |
|
2074 | static-http://host[:port]/[path] | |
2060 |
|
2075 | |||
2061 | Paths in the local filesystem can either point to Mercurial |
|
2076 | Paths in the local filesystem can either point to Mercurial | |
2062 | repositories or to bundle files (as created by 'hg bundle' or |
|
2077 | repositories or to bundle files (as created by 'hg bundle' or | |
2063 | 'hg incoming --bundle'). The static-http:// protocol, albeit slow, |
|
2078 | 'hg incoming --bundle'). The static-http:// protocol, albeit slow, | |
2064 | allows access to a Mercurial repository where you simply use a web |
|
2079 | allows access to a Mercurial repository where you simply use a web | |
2065 | server to publish the .hg directory as static content. |
|
2080 | server to publish the .hg directory as static content. | |
2066 |
|
2081 | |||
2067 | An optional identifier after # indicates a particular branch, tag, |
|
2082 | An optional identifier after # indicates a particular branch, tag, | |
2068 | or changeset to pull. |
|
2083 | or changeset to pull. | |
2069 |
|
2084 | |||
2070 | Some notes about using SSH with Mercurial: |
|
2085 | Some notes about using SSH with Mercurial: | |
2071 | - SSH requires an accessible shell account on the destination machine |
|
2086 | - SSH requires an accessible shell account on the destination machine | |
2072 | and a copy of hg in the remote path or specified with remotecmd. |
|
2087 | and a copy of hg in the remote path or specified with remotecmd. | |
2073 | - path is relative to the remote user's home directory by default. |
|
2088 | - path is relative to the remote user's home directory by default. | |
2074 | Use an extra slash at the start of a path to specify an absolute path: |
|
2089 | Use an extra slash at the start of a path to specify an absolute path: | |
2075 | ssh://example.com//tmp/repository |
|
2090 | ssh://example.com//tmp/repository | |
2076 | - Mercurial doesn't use its own compression via SSH; the right thing |
|
2091 | - Mercurial doesn't use its own compression via SSH; the right thing | |
2077 | to do is to configure it in your ~/.ssh/config, e.g.: |
|
2092 | to do is to configure it in your ~/.ssh/config, e.g.: | |
2078 | Host *.mylocalnetwork.example.com |
|
2093 | Host *.mylocalnetwork.example.com | |
2079 | Compression no |
|
2094 | Compression no | |
2080 | Host * |
|
2095 | Host * | |
2081 | Compression yes |
|
2096 | Compression yes | |
2082 | Alternatively specify "ssh -C" as your ssh command in your hgrc or |
|
2097 | Alternatively specify "ssh -C" as your ssh command in your hgrc or | |
2083 | with the --ssh command line option. |
|
2098 | with the --ssh command line option. | |
2084 | """ |
|
2099 | """ | |
2085 | source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev']) |
|
2100 | source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev')) | |
2086 | cmdutil.setremoteconfig(ui, opts) |
|
2101 | cmdutil.setremoteconfig(ui, opts) | |
2087 |
|
2102 | |||
2088 | other = hg.repository(ui, source) |
|
2103 | other = hg.repository(ui, source) | |
2089 | ui.status(_('pulling from %s\n') % util.hidepassword(source)) |
|
2104 | ui.status(_('pulling from %s\n') % util.hidepassword(source)) | |
2090 | if revs: |
|
2105 | if revs: | |
2091 | try: |
|
2106 | try: | |
2092 | revs = [other.lookup(rev) for rev in revs] |
|
2107 | revs = [other.lookup(rev) for rev in revs] | |
2093 | except NoCapability: |
|
2108 | except NoCapability: | |
2094 | error = _("Other repository doesn't support revision lookup, " |
|
2109 | error = _("Other repository doesn't support revision lookup, " | |
2095 | "so a rev cannot be specified.") |
|
2110 | "so a rev cannot be specified.") | |
2096 | raise util.Abort(error) |
|
2111 | raise util.Abort(error) | |
2097 |
|
2112 | |||
2098 | modheads = repo.pull(other, heads=revs, force=opts['force']) |
|
2113 | modheads = repo.pull(other, heads=revs, force=opts.get('force')) | |
2099 | return postincoming(ui, repo, modheads, opts['update'], checkout) |
|
2114 | return postincoming(ui, repo, modheads, opts.get('update'), checkout) | |
2100 |
|
2115 | |||
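
The pull docstring notes that an optional identifier after '#' names a branch, tag, or changeset. Purely as a sketch of that convention (hg.parseurl is the real implementation and does more, e.g. combining the fragment with --rev; the URLs here are made up):

    def split_fragment(url):
        """Split off a trailing '#identifier', if any."""
        base, sep, frag = url.partition('#')
        return base, (frag if sep else None)

    print(split_fragment('http://example.com/repo#stable'))
    # -> ('http://example.com/repo', 'stable')
    print(split_fragment('ssh://example.com//tmp/repository'))
    # -> ('ssh://example.com//tmp/repository', None)
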
2101 | def push(ui, repo, dest=None, **opts): |
|
2116 | def push(ui, repo, dest=None, **opts): | |
2102 | """push changes to the specified destination |
|
2117 | """push changes to the specified destination | |
2103 |
|
2118 | |||
2104 | Push changes from the local repository to the given destination. |
|
2119 | Push changes from the local repository to the given destination. | |
2105 |
|
2120 | |||
2106 | This is the symmetrical operation for pull. It helps to move |
|
2121 | This is the symmetrical operation for pull. It helps to move | |
2107 | changes from the current repository to a different one. If the |
|
2122 | changes from the current repository to a different one. If the | |
2108 | destination is local this is identical to a pull in that directory |
|
2123 | destination is local this is identical to a pull in that directory | |
2109 | from the current one. |
|
2124 | from the current one. | |
2110 |
|
2125 | |||
2111 | By default, push will refuse to run if it detects the result would |
|
2126 | By default, push will refuse to run if it detects the result would | |
2112 | increase the number of remote heads. This generally indicates that |
|
2127 | increase the number of remote heads. This generally indicates that | |
2113 | the client has forgotten to pull and merge before pushing. |
|
2128 | the client has forgotten to pull and merge before pushing. | |
2114 |
|
2129 | |||
2115 | Valid URLs are of the form: |
|
2130 | Valid URLs are of the form: | |
2116 |
|
2131 | |||
2117 | local/filesystem/path (or file://local/filesystem/path) |
|
2132 | local/filesystem/path (or file://local/filesystem/path) | |
2118 | ssh://[user[:pass]@]host[:port]/[path] |
|
2133 | ssh://[user[:pass]@]host[:port]/[path] | |
2119 | http://[user[:pass]@]host[:port]/[path] |
|
2134 | http://[user[:pass]@]host[:port]/[path] | |
2120 | https://[user[:pass]@]host[:port]/[path] |
|
2135 | https://[user[:pass]@]host[:port]/[path] | |
2121 |
|
2136 | |||
2122 | An optional identifier after # indicates a particular branch, tag, |
|
2137 | An optional identifier after # indicates a particular branch, tag, | |
2123 | or changeset to push. If -r is used, the named changeset and all its |
|
2138 | or changeset to push. If -r is used, the named changeset and all its | |
2124 | ancestors will be pushed to the remote repository. |
|
2139 | ancestors will be pushed to the remote repository. | |
2125 |
|
2140 | |||
2126 | Look at the help text for the pull command for important details |
|
2141 | Look at the help text for the pull command for important details | |
2127 | about ssh:// URLs. |
|
2142 | about ssh:// URLs. | |
2128 |
|
2143 | |||
2129 | Pushing to http:// and https:// URLs is only possible if this |
|
2144 | Pushing to http:// and https:// URLs is only possible if this | |
2130 | feature is explicitly enabled on the remote Mercurial server. |
|
2145 | feature is explicitly enabled on the remote Mercurial server. | |
2131 | """ |
|
2146 | """ | |
2132 | dest, revs, checkout = hg.parseurl( |
|
2147 | dest, revs, checkout = hg.parseurl( | |
2133 | ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev']) |
|
2148 | ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev')) | |
2134 | cmdutil.setremoteconfig(ui, opts) |
|
2149 | cmdutil.setremoteconfig(ui, opts) | |
2135 |
|
2150 | |||
2136 | other = hg.repository(ui, dest) |
|
2151 | other = hg.repository(ui, dest) | |
2137 | ui.status(_('pushing to %s\n') % util.hidepassword(dest)) |
|
2152 | ui.status(_('pushing to %s\n') % util.hidepassword(dest)) | |
2138 | if revs: |
|
2153 | if revs: | |
2139 | revs = [repo.lookup(rev) for rev in revs] |
|
2154 | revs = [repo.lookup(rev) for rev in revs] | |
2140 | r = repo.push(other, opts['force'], revs=revs) |
|
2155 | r = repo.push(other, opts.get('force'), revs=revs) | |
2141 | return r == 0 |
|
2156 | return r == 0 | |
2142 |
|
2157 | |||
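
With -r, push sends the named changeset together with all of its ancestors. A self-contained sketch of that closure over a toy parent map (dictionaries instead of Mercurial's changelog API, node names invented):

    def ancestors(parentmap, start):
        """Return 'start' plus every node reachable by following parents."""
        seen = set()
        stack = [start]
        while stack:
            node = stack.pop()
            if node in seen:
                continue
            seen.add(node)
            stack.extend(parentmap.get(node, []))
        return seen

    # c is a merge of a2 and b1, both descending from root.
    parents = {'root': [], 'a1': ['root'], 'a2': ['a1'],
               'b1': ['root'], 'c': ['a2', 'b1']}
    print(sorted(ancestors(parents, 'c')))   # -> ['a1', 'a2', 'b1', 'c', 'root']
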
2143 | def rawcommit(ui, repo, *pats, **opts): |
|
2158 | def rawcommit(ui, repo, *pats, **opts): | |
2144 | """raw commit interface (DEPRECATED) |
|
2159 | """raw commit interface (DEPRECATED) | |
2145 |
|
2160 | |||
2146 | (DEPRECATED) |
|
2161 | (DEPRECATED) | |
2147 | Lowlevel commit, for use in helper scripts. |
|
2162 | Lowlevel commit, for use in helper scripts. | |
2148 |
|
2163 | |||
2149 | This command is not intended to be used by normal users, as it is |
|
2164 | This command is not intended to be used by normal users, as it is | |
2150 | primarily useful for importing from other SCMs. |
|
2165 | primarily useful for importing from other SCMs. | |
2151 |
|
2166 | |||
2152 | This command is now deprecated and will be removed in a future |
|
2167 | This command is now deprecated and will be removed in a future | |
2153 | release, please use debugsetparents and commit instead. |
|
2168 | release, please use debugsetparents and commit instead. | |
2154 | """ |
|
2169 | """ | |
2155 |
|
2170 | |||
2156 | ui.warn(_("(the rawcommit command is deprecated)\n")) |
|
2171 | ui.warn(_("(the rawcommit command is deprecated)\n")) | |
2157 |
|
2172 | |||
2158 | message = cmdutil.logmessage(opts) |
|
2173 | message = cmdutil.logmessage(opts) | |
2159 |
|
2174 | |||
2160 | files = cmdutil.match(repo, pats, opts).files() |
|
2175 | files = cmdutil.match(repo, pats, opts).files() | |
2161 | if opts['files']: |
|
2176 | if opts.get('files'): | |
2162 | files += open(opts['files']).read().splitlines() |
|
2177 | files += open(opts['files']).read().splitlines() | |
2163 |
|
2178 | |||
2164 | parents = [repo.lookup(p) for p in opts['parent']] |
|
2179 | parents = [repo.lookup(p) for p in opts['parent']] | |
2165 |
|
2180 | |||
2166 | try: |
|
2181 | try: | |
2167 | repo.rawcommit(files, message, opts['user'], opts['date'], *parents) |
|
2182 | repo.rawcommit(files, message, opts['user'], opts['date'], *parents) | |
2168 | except ValueError, inst: |
|
2183 | except ValueError, inst: | |
2169 | raise util.Abort(str(inst)) |
|
2184 | raise util.Abort(str(inst)) | |
2170 |
|
2185 | |||
2171 | def recover(ui, repo): |
|
2186 | def recover(ui, repo): | |
2172 | """roll back an interrupted transaction |
|
2187 | """roll back an interrupted transaction | |
2173 |
|
2188 | |||
2174 | Recover from an interrupted commit or pull. |
|
2189 | Recover from an interrupted commit or pull. | |
2175 |
|
2190 | |||
2176 | This command tries to fix the repository status after an interrupted |
|
2191 | This command tries to fix the repository status after an interrupted | |
2177 | operation. It should only be necessary when Mercurial suggests it. |
|
2192 | operation. It should only be necessary when Mercurial suggests it. | |
2178 | """ |
|
2193 | """ | |
2179 | if repo.recover(): |
|
2194 | if repo.recover(): | |
2180 | return hg.verify(repo) |
|
2195 | return hg.verify(repo) | |
2181 | return 1 |
|
2196 | return 1 | |
2182 |
|
2197 | |||
2183 | def remove(ui, repo, *pats, **opts): |
|
2198 | def remove(ui, repo, *pats, **opts): | |
2184 | """remove the specified files on the next commit |
|
2199 | """remove the specified files on the next commit | |
2185 |
|
2200 | |||
2186 | Schedule the indicated files for removal from the repository. |
|
2201 | Schedule the indicated files for removal from the repository. | |
2187 |
|
2202 | |||
2188 | This only removes files from the current branch, not from the entire |
|
2203 | This only removes files from the current branch, not from the entire | |
2189 | project history. -A can be used to remove only files that have already |
|
2204 | project history. -A can be used to remove only files that have already | |
2190 | been deleted, -f can be used to force deletion, and -Af can be used |
|
2205 | been deleted, -f can be used to force deletion, and -Af can be used | |
2191 | to remove files from the next revision without deleting them. |
|
2206 | to remove files from the next revision without deleting them. | |
2192 |
|
2207 | |||
2193 | The following table details the behavior of remove for different file |
|
2208 | The following table details the behavior of remove for different file | |
2194 | states (columns) and option combinations (rows). The file states are |
|
2209 | states (columns) and option combinations (rows). The file states are | |
2195 | Added, Clean, Modified and Missing (as reported by hg status). The |
|
2210 | Added, Clean, Modified and Missing (as reported by hg status). The | |
2196 | actions are Warn, Remove (from branch) and Delete (from disk). |
|
2211 | actions are Warn, Remove (from branch) and Delete (from disk). | |
2197 |
|
2212 | |||
2198 | A C M ! |
|
2213 | A C M ! | |
2199 | none W RD W R |
|
2214 | none W RD W R | |
2200 | -f R RD RD R |
|
2215 | -f R RD RD R | |
2201 | -A W W W R |
|
2216 | -A W W W R | |
2202 | -Af R R R R |
|
2217 | -Af R R R R | |
2203 |
|
2218 | |||
2204 | This command schedules the files to be removed at the next commit. |
|
2219 | This command schedules the files to be removed at the next commit. | |
2205 | To undo a remove before that, see hg revert. |
|
2220 | To undo a remove before that, see hg revert. | |
2206 | """ |
|
2221 | """ | |
2207 |
|
2222 | |||
2208 | after, force = opts.get('after'), opts.get('force') |
|
2223 | after, force = opts.get('after'), opts.get('force') | |
2209 | if not pats and not after: |
|
2224 | if not pats and not after: | |
2210 | raise util.Abort(_('no files specified')) |
|
2225 | raise util.Abort(_('no files specified')) | |
2211 |
|
2226 | |||
2212 | m = cmdutil.match(repo, pats, opts) |
|
2227 | m = cmdutil.match(repo, pats, opts) | |
2213 | s = repo.status(match=m, clean=True) |
|
2228 | s = repo.status(match=m, clean=True) | |
2214 | modified, added, deleted, clean = s[0], s[1], s[3], s[6] |
|
2229 | modified, added, deleted, clean = s[0], s[1], s[3], s[6] | |
2215 |
|
2230 | |||
2216 | def warn(files, reason): |
|
2231 | def warn(files, reason): | |
2217 | for f in files: |
|
2232 | for f in files: | |
2218 | ui.warn(_('not removing %s: file %s (use -f to force removal)\n') |
|
2233 | ui.warn(_('not removing %s: file %s (use -f to force removal)\n') | |
2219 | % (m.rel(f), reason)) |
|
2234 | % (m.rel(f), reason)) | |
2220 |
|
2235 | |||
2221 | if force: |
|
2236 | if force: | |
2222 | remove, forget = modified + deleted + clean, added |
|
2237 | remove, forget = modified + deleted + clean, added | |
2223 | elif after: |
|
2238 | elif after: | |
2224 | remove, forget = deleted, [] |
|
2239 | remove, forget = deleted, [] | |
2225 | warn(modified + added + clean, _('still exists')) |
|
2240 | warn(modified + added + clean, _('still exists')) | |
2226 | else: |
|
2241 | else: | |
2227 | remove, forget = deleted + clean, [] |
|
2242 | remove, forget = deleted + clean, [] | |
2228 | warn(modified, _('is modified')) |
|
2243 | warn(modified, _('is modified')) | |
2229 | warn(added, _('has been marked for add')) |
|
2244 | warn(added, _('has been marked for add')) | |
2230 |
|
2245 | |||
2231 | for f in util.sort(remove + forget): |
|
2246 | for f in util.sort(remove + forget): | |
2232 | if ui.verbose or not m.exact(f): |
|
2247 | if ui.verbose or not m.exact(f): | |
2233 | ui.status(_('removing %s\n') % m.rel(f)) |
|
2248 | ui.status(_('removing %s\n') % m.rel(f)) | |
2234 |
|
2249 | |||
2235 | repo.forget(forget) |
|
2250 | repo.forget(forget) | |
2236 | repo.remove(remove, unlink=not after) |
|
2251 | repo.remove(remove, unlink=not after) | |
2237 |
|
2252 | |||
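
The table in the remove docstring above compresses the command's rules into one grid. Restated as a plain lookup (the dict below only mirrors the documented table; it is not code taken from the command):

    # Rows: option combination; columns: file state as shown by 'hg status'.
    # W = warn, R = remove from branch, D = delete from disk.
    REMOVE_BEHAVIOUR = {
        'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
        '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
        '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
        '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
    }

    print(REMOVE_BEHAVIOUR['-f']['M'])   # -> 'RD': forcing also deletes the file
    print(REMOVE_BEHAVIOUR['-A']['!'])   # -> 'R': -A only drops missing files
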
2238 | def rename(ui, repo, *pats, **opts): |
|
2253 | def rename(ui, repo, *pats, **opts): | |
2239 | """rename files; equivalent of copy + remove |
|
2254 | """rename files; equivalent of copy + remove | |
2240 |
|
2255 | |||
2241 | Mark dest as copies of sources; mark sources for deletion. If |
|
2256 | Mark dest as copies of sources; mark sources for deletion. If | |
2242 | dest is a directory, copies are put in that directory. If dest is |
|
2257 | dest is a directory, copies are put in that directory. If dest is | |
2243 | a file, there can only be one source. |
|
2258 | a file, there can only be one source. | |
2244 |
|
2259 | |||
2245 | By default, this command copies the contents of files as they |
|
2260 | By default, this command copies the contents of files as they | |
2246 | stand in the working directory. If invoked with --after, the |
|
2261 | stand in the working directory. If invoked with --after, the | |
2247 | operation is recorded, but no copying is performed. |
|
2262 | operation is recorded, but no copying is performed. | |
2248 |
|
2263 | |||
2249 | This command takes effect in the next commit. To undo a rename |
|
2264 | This command takes effect in the next commit. To undo a rename | |
2250 | before that, see hg revert. |
|
2265 | before that, see hg revert. | |
2251 | """ |
|
2266 | """ | |
2252 | wlock = repo.wlock(False) |
|
2267 | wlock = repo.wlock(False) | |
2253 | try: |
|
2268 | try: | |
2254 | return cmdutil.copy(ui, repo, pats, opts, rename=True) |
|
2269 | return cmdutil.copy(ui, repo, pats, opts, rename=True) | |
2255 | finally: |
|
2270 | finally: | |
2256 | del wlock |
|
2271 | del wlock | |
2257 |
|
2272 | |||
2258 | def resolve(ui, repo, *pats, **opts): |
|
2273 | def resolve(ui, repo, *pats, **opts): | |
2259 | """resolve file merges from a branch merge or update |
|
2274 | """resolve file merges from a branch merge or update | |
2260 |
|
2275 | |||
2261 | This command will attempt to resolve unresolved merges from the |
|
2276 | This command will attempt to resolve unresolved merges from the | |
2262 | last update or merge command. This will use the local file |
|
2277 | last update or merge command. This will use the local file | |
2263 | revision preserved at the last update or merge to cleanly retry |
|
2278 | revision preserved at the last update or merge to cleanly retry | |
2264 | the file merge attempt. With no file or options specified, this |
|
2279 | the file merge attempt. With no file or options specified, this | |
2265 | command will attempt to resolve all unresolved files. |
|
2280 | command will attempt to resolve all unresolved files. | |
2266 |
|
2281 | |||
2267 | The codes used to show the status of files are: |
|
2282 | The codes used to show the status of files are: | |
2268 | U = unresolved |
|
2283 | U = unresolved | |
2269 | R = resolved |
|
2284 | R = resolved | |
2270 | """ |
|
2285 | """ | |
2271 |
|
2286 | |||
2272 | if len([x for x in opts if opts[x]]) > 1: |
|
2287 | if len([x for x in opts if opts[x]]) > 1: | |
2273 | raise util.Abort(_("too many options specified")) |
|
2288 | raise util.Abort(_("too many options specified")) | |
2274 |
|
2289 | |||
2275 | ms = merge_.mergestate(repo) |
|
2290 | ms = merge_.mergestate(repo) | |
2276 | m = cmdutil.match(repo, pats, opts) |
|
2291 | m = cmdutil.match(repo, pats, opts) | |
2277 |
|
2292 | |||
2278 | for f in ms: |
|
2293 | for f in ms: | |
2279 | if m(f): |
|
2294 | if m(f): | |
2280 | if opts.get("list"): |
|
2295 | if opts.get("list"): | |
2281 | ui.write("%s %s\n" % (ms[f].upper(), f)) |
|
2296 | ui.write("%s %s\n" % (ms[f].upper(), f)) | |
2282 | elif opts.get("mark"): |
|
2297 | elif opts.get("mark"): | |
2283 | ms.mark(f, "r") |
|
2298 | ms.mark(f, "r") | |
2284 | elif opts.get("unmark"): |
|
2299 | elif opts.get("unmark"): | |
2285 | ms.mark(f, "u") |
|
2300 | ms.mark(f, "u") | |
2286 | else: |
|
2301 | else: | |
2287 | wctx = repo[None] |
|
2302 | wctx = repo[None] | |
2288 | mctx = wctx.parents()[-1] |
|
2303 | mctx = wctx.parents()[-1] | |
2289 | ms.resolve(f, wctx, mctx) |
|
2304 | ms.resolve(f, wctx, mctx) | |
2290 |
|
2305 | |||
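
resolve starts by rejecting combinations of --list, --mark and --unmark, simply counting how many option values are truthy. The same idiom in isolation, with a dummy options dict rather than the command's real opts:

    def check_exclusive(opts):
        """Return the single chosen option, or raise if several are set."""
        chosen = sorted(name for name, value in opts.items() if value)
        if len(chosen) > 1:
            raise ValueError('too many options specified: %s' % ', '.join(chosen))
        return chosen[0] if chosen else None

    print(check_exclusive({'list': True, 'mark': False, 'unmark': False}))  # -> list
    try:
        check_exclusive({'list': True, 'mark': True, 'unmark': False})
    except ValueError as err:
        print(err)   # -> too many options specified: list, mark
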
2291 | def revert(ui, repo, *pats, **opts): |
|
2306 | def revert(ui, repo, *pats, **opts): | |
2292 | """restore individual files or dirs to an earlier state |
|
2307 | """restore individual files or dirs to an earlier state | |
2293 |
|
2308 | |||
2294 | (use update -r to check out earlier revisions, revert does not |
|
2309 | (use update -r to check out earlier revisions, revert does not | |
2295 | change the working dir parents) |
|
2310 | change the working dir parents) | |
2296 |
|
2311 | |||
2297 | With no revision specified, revert the named files or directories |
|
2312 | With no revision specified, revert the named files or directories | |
2298 | to the contents they had in the parent of the working directory. |
|
2313 | to the contents they had in the parent of the working directory. | |
2299 | This restores the contents of the affected files to an unmodified |
|
2314 | This restores the contents of the affected files to an unmodified | |
2300 | state and unschedules adds, removes, copies, and renames. If the |
|
2315 | state and unschedules adds, removes, copies, and renames. If the | |
2301 | working directory has two parents, you must explicitly specify the |
|
2316 | working directory has two parents, you must explicitly specify the | |
2302 | revision to revert to. |
|
2317 | revision to revert to. | |
2303 |
|
2318 | |||
2304 | Using the -r option, revert the given files or directories to their |
|
2319 | Using the -r option, revert the given files or directories to their | |
2305 | contents as of a specific revision. This can be helpful to "roll |
|
2320 | contents as of a specific revision. This can be helpful to "roll | |
2306 | back" some or all of an earlier change. |
|
2321 | back" some or all of an earlier change. | |
2307 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
2322 | See 'hg help dates' for a list of formats valid for -d/--date. | |
2308 |
|
2323 | |||
2309 | Revert modifies the working directory. It does not commit any |
|
2324 | Revert modifies the working directory. It does not commit any | |
2310 | changes, or change the parent of the working directory. If you |
|
2325 | changes, or change the parent of the working directory. If you | |
2311 | revert to a revision other than the parent of the working |
|
2326 | revert to a revision other than the parent of the working | |
2312 | directory, the reverted files will thus appear modified |
|
2327 | directory, the reverted files will thus appear modified | |
2313 | afterwards. |
|
2328 | afterwards. | |
2314 |
|
2329 | |||
2315 | If a file has been deleted, it is restored. If the executable |
|
2330 | If a file has been deleted, it is restored. If the executable | |
2316 | mode of a file was changed, it is reset. |
|
2331 | mode of a file was changed, it is reset. | |
2317 |
|
2332 | |||
2318 | If names are given, all files matching the names are reverted. |
|
2333 | If names are given, all files matching the names are reverted. | |
2319 | If no arguments are given, no files are reverted. |
|
2334 | If no arguments are given, no files are reverted. | |
2320 |
|
2335 | |||
2321 | Modified files are saved with a .orig suffix before reverting. |
|
2336 | Modified files are saved with a .orig suffix before reverting. | |
2322 | To disable these backups, use --no-backup. |
|
2337 | To disable these backups, use --no-backup. | |
2323 | """ |
|
2338 | """ | |
2324 |
|
2339 | |||
2325 | if opts["date"]: |
|
2340 | if opts["date"]: | |
2326 | if opts["rev"]: |
|
2341 | if opts["rev"]: | |
2327 | raise util.Abort(_("you can't specify a revision and a date")) |
|
2342 | raise util.Abort(_("you can't specify a revision and a date")) | |
2328 | opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) |
|
2343 | opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) | |
2329 |
|
2344 | |||
2330 | if not pats and not opts['all']: |
|
2345 | if not pats and not opts.get('all'): | |
2331 | raise util.Abort(_('no files or directories specified; ' |
|
2346 | raise util.Abort(_('no files or directories specified; ' | |
2332 | 'use --all to revert the whole repo')) |
|
2347 | 'use --all to revert the whole repo')) | |
2333 |
|
2348 | |||
2334 | parent, p2 = repo.dirstate.parents() |
|
2349 | parent, p2 = repo.dirstate.parents() | |
2335 | if not opts['rev'] and p2 != nullid: |
|
2350 | if not opts.get('rev') and p2 != nullid: | |
2336 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
2351 | raise util.Abort(_('uncommitted merge - please provide a ' | |
2337 | 'specific revision')) |
|
2352 | 'specific revision')) | |
2338 | ctx = repo[opts['rev']] |
|
2353 | ctx = repo[opts.get('rev')] | |
2339 | node = ctx.node() |
|
2354 | node = ctx.node() | |
2340 | mf = ctx.manifest() |
|
2355 | mf = ctx.manifest() | |
2341 | if node == parent: |
|
2356 | if node == parent: | |
2342 | pmf = mf |
|
2357 | pmf = mf | |
2343 | else: |
|
2358 | else: | |
2344 | pmf = None |
|
2359 | pmf = None | |
2345 |
|
2360 | |||
2346 | # need all matching names in dirstate and manifest of target rev, |
|
2361 | # need all matching names in dirstate and manifest of target rev, | |
2347 | # so have to walk both. do not print errors if files exist in one |
|
2362 | # so have to walk both. do not print errors if files exist in one | |
2348 | # but not other. |
|
2363 | # but not other. | |
2349 |
|
2364 | |||
2350 | names = {} |
|
2365 | names = {} | |
2351 |
|
2366 | |||
2352 | wlock = repo.wlock() |
|
2367 | wlock = repo.wlock() | |
2353 | try: |
|
2368 | try: | |
2354 | # walk dirstate. |
|
2369 | # walk dirstate. | |
2355 | files = [] |
|
2370 | files = [] | |
2356 |
|
2371 | |||
2357 | m = cmdutil.match(repo, pats, opts) |
|
2372 | m = cmdutil.match(repo, pats, opts) | |
2358 | m.bad = lambda x,y: False |
|
2373 | m.bad = lambda x,y: False | |
2359 | for abs in repo.walk(m): |
|
2374 | for abs in repo.walk(m): | |
2360 | names[abs] = m.rel(abs), m.exact(abs) |
|
2375 | names[abs] = m.rel(abs), m.exact(abs) | |
2361 |
|
2376 | |||
2362 | # walk target manifest. |
|
2377 | # walk target manifest. | |
2363 |
|
2378 | |||
2364 | def badfn(path, msg): |
|
2379 | def badfn(path, msg): | |
2365 | if path in names: |
|
2380 | if path in names: | |
2366 | return False |
|
2381 | return False | |
2367 | path_ = path + '/' |
|
2382 | path_ = path + '/' | |
2368 | for f in names: |
|
2383 | for f in names: | |
2369 | if f.startswith(path_): |
|
2384 | if f.startswith(path_): | |
2370 | return False |
|
2385 | return False | |
2371 | repo.ui.warn("%s: %s\n" % (m.rel(path), msg)) |
|
2386 | repo.ui.warn("%s: %s\n" % (m.rel(path), msg)) | |
2372 | return False |
|
2387 | return False | |
2373 |
|
2388 | |||
2374 | m = cmdutil.match(repo, pats, opts) |
|
2389 | m = cmdutil.match(repo, pats, opts) | |
2375 | m.bad = badfn |
|
2390 | m.bad = badfn | |
2376 | for abs in repo[node].walk(m): |
|
2391 | for abs in repo[node].walk(m): | |
2377 | if abs not in names: |
|
2392 | if abs not in names: | |
2378 | names[abs] = m.rel(abs), m.exact(abs) |
|
2393 | names[abs] = m.rel(abs), m.exact(abs) | |
2379 |
|
2394 | |||
2380 | m = cmdutil.matchfiles(repo, names) |
|
2395 | m = cmdutil.matchfiles(repo, names) | |
2381 | changes = repo.status(match=m)[:4] |
|
2396 | changes = repo.status(match=m)[:4] | |
2382 | modified, added, removed, deleted = map(dict.fromkeys, changes) |
|
2397 | modified, added, removed, deleted = map(dict.fromkeys, changes) | |
2383 |
|
2398 | |||
2384 | # if f is a rename, also revert the source |
|
2399 | # if f is a rename, also revert the source | |
2385 | cwd = repo.getcwd() |
|
2400 | cwd = repo.getcwd() | |
2386 | for f in added: |
|
2401 | for f in added: | |
2387 | src = repo.dirstate.copied(f) |
|
2402 | src = repo.dirstate.copied(f) | |
2388 | if src and src not in names and repo.dirstate[src] == 'r': |
|
2403 | if src and src not in names and repo.dirstate[src] == 'r': | |
2389 | removed[src] = None |
|
2404 | removed[src] = None | |
2390 | names[src] = (repo.pathto(src, cwd), True) |
|
2405 | names[src] = (repo.pathto(src, cwd), True) | |
2391 |
|
2406 | |||
2392 | def removeforget(abs): |
|
2407 | def removeforget(abs): | |
2393 | if repo.dirstate[abs] == 'a': |
|
2408 | if repo.dirstate[abs] == 'a': | |
2394 | return _('forgetting %s\n') |
|
2409 | return _('forgetting %s\n') | |
2395 | return _('removing %s\n') |
|
2410 | return _('removing %s\n') | |
2396 |
|
2411 | |||
2397 | revert = ([], _('reverting %s\n')) |
|
2412 | revert = ([], _('reverting %s\n')) | |
2398 | add = ([], _('adding %s\n')) |
|
2413 | add = ([], _('adding %s\n')) | |
2399 | remove = ([], removeforget) |
|
2414 | remove = ([], removeforget) | |
2400 | undelete = ([], _('undeleting %s\n')) |
|
2415 | undelete = ([], _('undeleting %s\n')) | |
2401 |
|
2416 | |||
2402 | disptable = ( |
|
2417 | disptable = ( | |
2403 | # dispatch table: |
|
2418 | # dispatch table: | |
2404 | # file state |
|
2419 | # file state | |
2405 | # action if in target manifest |
|
2420 | # action if in target manifest | |
2406 | # action if not in target manifest |
|
2421 | # action if not in target manifest | |
2407 | # make backup if in target manifest |
|
2422 | # make backup if in target manifest | |
2408 | # make backup if not in target manifest |
|
2423 | # make backup if not in target manifest | |
2409 | (modified, revert, remove, True, True), |
|
2424 | (modified, revert, remove, True, True), | |
2410 | (added, revert, remove, True, False), |
|
2425 | (added, revert, remove, True, False), | |
2411 | (removed, undelete, None, False, False), |
|
2426 | (removed, undelete, None, False, False), | |
2412 | (deleted, revert, remove, False, False), |
|
2427 | (deleted, revert, remove, False, False), | |
2413 | ) |
|
2428 | ) | |
2414 |
|
2429 | |||
2415 | for abs, (rel, exact) in util.sort(names.items()): |
|
2430 | for abs, (rel, exact) in util.sort(names.items()): | |
2416 | mfentry = mf.get(abs) |
|
2431 | mfentry = mf.get(abs) | |
2417 | target = repo.wjoin(abs) |
|
2432 | target = repo.wjoin(abs) | |
2418 | def handle(xlist, dobackup): |
|
2433 | def handle(xlist, dobackup): | |
2419 | xlist[0].append(abs) |
|
2434 | xlist[0].append(abs) | |
2420 | if dobackup and not opts['no_backup'] and util.lexists(target): |
|
2435 | if dobackup and not opts.get('no_backup') and util.lexists(target): | |
2421 | bakname = "%s.orig" % rel |
|
2436 | bakname = "%s.orig" % rel | |
2422 | ui.note(_('saving current version of %s as %s\n') % |
|
2437 | ui.note(_('saving current version of %s as %s\n') % | |
2423 | (rel, bakname)) |
|
2438 | (rel, bakname)) | |
2424 | if not opts.get('dry_run'): |
|
2439 | if not opts.get('dry_run'): | |
2425 | util.copyfile(target, bakname) |
|
2440 | util.copyfile(target, bakname) | |
2426 | if ui.verbose or not exact: |
|
2441 | if ui.verbose or not exact: | |
2427 | msg = xlist[1] |
|
2442 | msg = xlist[1] | |
2428 | if not isinstance(msg, basestring): |
|
2443 | if not isinstance(msg, basestring): | |
2429 | msg = msg(abs) |
|
2444 | msg = msg(abs) | |
2430 | ui.status(msg % rel) |
|
2445 | ui.status(msg % rel) | |
2431 | for table, hitlist, misslist, backuphit, backupmiss in disptable: |
|
2446 | for table, hitlist, misslist, backuphit, backupmiss in disptable: | |
2432 | if abs not in table: continue |
|
2447 | if abs not in table: continue | |
2433 | # file has changed in dirstate |
|
2448 | # file has changed in dirstate | |
2434 | if mfentry: |
|
2449 | if mfentry: | |
2435 | handle(hitlist, backuphit) |
|
2450 | handle(hitlist, backuphit) | |
2436 | elif misslist is not None: |
|
2451 | elif misslist is not None: | |
2437 | handle(misslist, backupmiss) |
|
2452 | handle(misslist, backupmiss) | |
2438 | break |
|
2453 | break | |
2439 | else: |
|
2454 | else: | |
2440 | if abs not in repo.dirstate: |
|
2455 | if abs not in repo.dirstate: | |
2441 | if mfentry: |
|
2456 | if mfentry: | |
2442 | handle(add, True) |
|
2457 | handle(add, True) | |
2443 | elif exact: |
|
2458 | elif exact: | |
2444 | ui.warn(_('file not managed: %s\n') % rel) |
|
2459 | ui.warn(_('file not managed: %s\n') % rel) | |
2445 | continue |
|
2460 | continue | |
2446 | # file has not changed in dirstate |
|
2461 | # file has not changed in dirstate | |
2447 | if node == parent: |
|
2462 | if node == parent: | |
2448 | if exact: ui.warn(_('no changes needed to %s\n') % rel) |
|
2463 | if exact: ui.warn(_('no changes needed to %s\n') % rel) | |
2449 | continue |
|
2464 | continue | |
2450 | if pmf is None: |
|
2465 | if pmf is None: | |
2451 | # only need parent manifest in this unlikely case, |
|
2466 | # only need parent manifest in this unlikely case, | |
2452 | # so do not read by default |
|
2467 | # so do not read by default | |
2453 | pmf = repo[parent].manifest() |
|
2468 | pmf = repo[parent].manifest() | |
2454 | if abs in pmf: |
|
2469 | if abs in pmf: | |
2455 | if mfentry: |
|
2470 | if mfentry: | |
2456 | # if version of file is same in parent and target |
|
2471 | # if version of file is same in parent and target | |
2457 | # manifests, do nothing |
|
2472 | # manifests, do nothing | |
2458 | if (pmf[abs] != mfentry or |
|
2473 | if (pmf[abs] != mfentry or | |
2459 | pmf.flags(abs) != mf.flags(abs)): |
|
2474 | pmf.flags(abs) != mf.flags(abs)): | |
2460 | handle(revert, False) |
|
2475 | handle(revert, False) | |
2461 | else: |
|
2476 | else: | |
2462 | handle(remove, False) |
|
2477 | handle(remove, False) | |
2463 |
|
2478 | |||
2464 | if not opts.get('dry_run'): |
|
2479 | if not opts.get('dry_run'): | |
2465 | def checkout(f): |
|
2480 | def checkout(f): | |
2466 | fc = ctx[f] |
|
2481 | fc = ctx[f] | |
2467 | repo.wwrite(f, fc.data(), fc.flags()) |
|
2482 | repo.wwrite(f, fc.data(), fc.flags()) | |
2468 |
|
2483 | |||
2469 | audit_path = util.path_auditor(repo.root) |
|
2484 | audit_path = util.path_auditor(repo.root) | |
2470 | for f in remove[0]: |
|
2485 | for f in remove[0]: | |
2471 | if repo.dirstate[f] == 'a': |
|
2486 | if repo.dirstate[f] == 'a': | |
2472 | repo.dirstate.forget(f) |
|
2487 | repo.dirstate.forget(f) | |
2473 | continue |
|
2488 | continue | |
2474 | audit_path(f) |
|
2489 | audit_path(f) | |
2475 | try: |
|
2490 | try: | |
2476 | util.unlink(repo.wjoin(f)) |
|
2491 | util.unlink(repo.wjoin(f)) | |
2477 | except OSError: |
|
2492 | except OSError: | |
2478 | pass |
|
2493 | pass | |
2479 | repo.dirstate.remove(f) |
|
2494 | repo.dirstate.remove(f) | |
2480 |
|
2495 | |||
2481 | normal = None |
|
2496 | normal = None | |
2482 | if node == parent: |
|
2497 | if node == parent: | |
2483 | # We're reverting to our parent. If possible, we'd like status |
|
2498 | # We're reverting to our parent. If possible, we'd like status | |
2484 | # to report the file as clean. We have to use normallookup for |
|
2499 | # to report the file as clean. We have to use normallookup for | |
2485 | # merges to avoid losing information about merged/dirty files. |
|
2500 | # merges to avoid losing information about merged/dirty files. | |
2486 | if p2 != nullid: |
|
2501 | if p2 != nullid: | |
2487 | normal = repo.dirstate.normallookup |
|
2502 | normal = repo.dirstate.normallookup | |
2488 | else: |
|
2503 | else: | |
2489 | normal = repo.dirstate.normal |
|
2504 | normal = repo.dirstate.normal | |
2490 | for f in revert[0]: |
|
2505 | for f in revert[0]: | |
2491 | checkout(f) |
|
2506 | checkout(f) | |
2492 | if normal: |
|
2507 | if normal: | |
2493 | normal(f) |
|
2508 | normal(f) | |
2494 |
|
2509 | |||
2495 | for f in add[0]: |
|
2510 | for f in add[0]: | |
2496 | checkout(f) |
|
2511 | checkout(f) | |
2497 | repo.dirstate.add(f) |
|
2512 | repo.dirstate.add(f) | |
2498 |
|
2513 | |||
2499 | normal = repo.dirstate.normallookup |
|
2514 | normal = repo.dirstate.normallookup | |
2500 | if node == parent and p2 == nullid: |
|
2515 | if node == parent and p2 == nullid: | |
2501 | normal = repo.dirstate.normal |
|
2516 | normal = repo.dirstate.normal | |
2502 | for f in undelete[0]: |
|
2517 | for f in undelete[0]: | |
2503 | checkout(f) |
|
2518 | checkout(f) | |
2504 | normal(f) |
|
2519 | normal(f) | |
2505 |
|
2520 | |||
2506 | finally: |
|
2521 | finally: | |
2507 | del wlock |
|
2522 | del wlock | |
2508 |
|
2523 | |||
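
The heart of revert is the commented dispatch table: for each file, the first row whose state set contains the file decides the action and whether to make a backup, depending on whether the file exists in the target manifest. A stripped-down sketch of that table-driven pattern, with invented file names and print statements in place of the real working-copy operations:

    # Toy stand-ins for repo.status() buckets and the target manifest.
    modified = {'a.txt'}
    added = {'new.txt'}
    removed = {'gone.txt'}
    deleted = {'missing.txt'}
    target_manifest = {'a.txt', 'gone.txt', 'missing.txt'}

    # (state set, action if in target, action if not, backup hit, backup miss)
    disptable = (
        (modified, 'revert', 'remove', True, True),
        (added, 'revert', 'remove', True, False),
        (removed, 'undelete', None, False, False),
        (deleted, 'revert', 'remove', False, False),
    )

    for f in sorted(modified | added | removed | deleted):
        in_target = f in target_manifest
        for table, hit, miss, backuphit, backupmiss in disptable:
            if f not in table:
                continue
            action = hit if in_target else miss
            backup = backuphit if in_target else backupmiss
            if action is not None:
                print('%s %s%s' % (action, f, ' (backup)' if backup else ''))
            break
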
2509 | def rollback(ui, repo): |
|
2524 | def rollback(ui, repo): | |
2510 | """roll back the last transaction |
|
2525 | """roll back the last transaction | |
2511 |
|
2526 | |||
2512 | This command should be used with care. There is only one level of |
|
2527 | This command should be used with care. There is only one level of | |
2513 | rollback, and there is no way to undo a rollback. It will also |
|
2528 | rollback, and there is no way to undo a rollback. It will also | |
2514 | restore the dirstate at the time of the last transaction, losing |
|
2529 | restore the dirstate at the time of the last transaction, losing | |
2515 | any dirstate changes since that time. |
|
2530 | any dirstate changes since that time. | |
2516 |
|
2531 | |||
2517 | Transactions are used to encapsulate the effects of all commands |
|
2532 | Transactions are used to encapsulate the effects of all commands | |
2518 | that create new changesets or propagate existing changesets into a |
|
2533 | that create new changesets or propagate existing changesets into a | |
2519 | repository. For example, the following commands are transactional, |
|
2534 | repository. For example, the following commands are transactional, | |
2520 | and their effects can be rolled back: |
|
2535 | and their effects can be rolled back: | |
2521 |
|
2536 | |||
2522 | commit |
|
2537 | commit | |
2523 | import |
|
2538 | import | |
2524 | pull |
|
2539 | pull | |
2525 | push (with this repository as destination) |
|
2540 | push (with this repository as destination) | |
2526 | unbundle |
|
2541 | unbundle | |
2527 |
|
2542 | |||
2528 | This command is not intended for use on public repositories. Once |
|
2543 | This command is not intended for use on public repositories. Once | |
2529 | changes are visible for pull by other users, rolling a transaction |
|
2544 | changes are visible for pull by other users, rolling a transaction | |
2530 | back locally is ineffective (someone else may already have pulled |
|
2545 | back locally is ineffective (someone else may already have pulled | |
2531 | the changes). Furthermore, a race is possible with readers of the |
|
2546 | the changes). Furthermore, a race is possible with readers of the | |
2532 | repository; for example an in-progress pull from the repository |
|
2547 | repository; for example an in-progress pull from the repository | |
2533 | may fail if a rollback is performed. |
|
2548 | may fail if a rollback is performed. | |
2534 | """ |
|
2549 | """ | |
2535 | repo.rollback() |
|
2550 | repo.rollback() | |
2536 |
|
2551 | |||
2537 | def root(ui, repo): |
|
2552 | def root(ui, repo): | |
2538 | """print the root (top) of the current working dir |
|
2553 | """print the root (top) of the current working dir | |
2539 |
|
2554 | |||
2540 | Print the root directory of the current repository. |
|
2555 | Print the root directory of the current repository. | |
2541 | """ |
|
2556 | """ | |
2542 | ui.write(repo.root + "\n") |
|
2557 | ui.write(repo.root + "\n") | |
2543 |
|
2558 | |||
2544 | def serve(ui, repo, **opts): |
|
2559 | def serve(ui, repo, **opts): | |
2545 | """export the repository via HTTP |
|
2560 | """export the repository via HTTP | |
2546 |
|
2561 | |||
2547 | Start a local HTTP repository browser and pull server. |
|
2562 | Start a local HTTP repository browser and pull server. | |
2548 |
|
2563 | |||
2549 | By default, the server logs accesses to stdout and errors to |
|
2564 | By default, the server logs accesses to stdout and errors to | |
2550 | stderr. Use the "-A" and "-E" options to log to files. |
|
2565 | stderr. Use the "-A" and "-E" options to log to files. | |
2551 | """ |
|
2566 | """ | |
2552 |
|
2567 | |||
2553 | if opts["stdio"]: |
|
2568 | if opts["stdio"]: | |
2554 | if repo is None: |
|
2569 | if repo is None: | |
2555 | raise RepoError(_("There is no Mercurial repository here" |
|
2570 | raise RepoError(_("There is no Mercurial repository here" | |
2556 | " (.hg not found)")) |
|
2571 | " (.hg not found)")) | |
2557 | s = sshserver.sshserver(ui, repo) |
|
2572 | s = sshserver.sshserver(ui, repo) | |
2558 | s.serve_forever() |
|
2573 | s.serve_forever() | |
2559 |
|
2574 | |||
2560 | parentui = ui.parentui or ui |
|
2575 | parentui = ui.parentui or ui | |
2561 | optlist = ("name templates style address port prefix ipv6" |
|
2576 | optlist = ("name templates style address port prefix ipv6" | |
2562 | " accesslog errorlog webdir_conf certificate") |
|
2577 | " accesslog errorlog webdir_conf certificate") | |
2563 | for o in optlist.split(): |
|
2578 | for o in optlist.split(): | |
2564 | if opts[o]: |
|
2579 | if opts[o]: | |
2565 | parentui.setconfig("web", o, str(opts[o])) |
|
2580 | parentui.setconfig("web", o, str(opts[o])) | |
2566 | if (repo is not None) and (repo.ui != parentui): |
|
2581 | if (repo is not None) and (repo.ui != parentui): | |
2567 | repo.ui.setconfig("web", o, str(opts[o])) |
|
2582 | repo.ui.setconfig("web", o, str(opts[o])) | |
2568 |
|
2583 | |||
2569 | if repo is None and not ui.config("web", "webdir_conf"): |
|
2584 | if repo is None and not ui.config("web", "webdir_conf"): | |
2570 | raise RepoError(_("There is no Mercurial repository here" |
|
2585 | raise RepoError(_("There is no Mercurial repository here" | |
2571 | " (.hg not found)")) |
|
2586 | " (.hg not found)")) | |
2572 |
|
2587 | |||
2573 | class service: |
|
2588 | class service: | |
2574 | def init(self): |
|
2589 | def init(self): | |
2575 | util.set_signal_handler() |
|
2590 | util.set_signal_handler() | |
2576 | self.httpd = hgweb.server.create_server(parentui, repo) |
|
2591 | self.httpd = hgweb.server.create_server(parentui, repo) | |
2577 |
|
2592 | |||
2578 | if not ui.verbose: return |
|
2593 | if not ui.verbose: return | |
2579 |
|
2594 | |||
2580 | if self.httpd.prefix: |
|
2595 | if self.httpd.prefix: | |
2581 | prefix = self.httpd.prefix.strip('/') + '/' |
|
2596 | prefix = self.httpd.prefix.strip('/') + '/' | |
2582 | else: |
|
2597 | else: | |
2583 | prefix = '' |
|
2598 | prefix = '' | |
2584 |
|
2599 | |||
2585 | port = ':%d' % self.httpd.port |
|
2600 | port = ':%d' % self.httpd.port | |
2586 | if port == ':80': |
|
2601 | if port == ':80': | |
2587 | port = '' |
|
2602 | port = '' | |
2588 |
|
2603 | |||
2589 | bindaddr = self.httpd.addr |
|
2604 | bindaddr = self.httpd.addr | |
2590 | if bindaddr == '0.0.0.0': |
|
2605 | if bindaddr == '0.0.0.0': | |
2591 | bindaddr = '*' |
|
2606 | bindaddr = '*' | |
2592 | elif ':' in bindaddr: # IPv6 |
|
2607 | elif ':' in bindaddr: # IPv6 | |
2593 | bindaddr = '[%s]' % bindaddr |
|
2608 | bindaddr = '[%s]' % bindaddr | |
2594 |
|
2609 | |||
2595 | fqaddr = self.httpd.fqaddr |
|
2610 | fqaddr = self.httpd.fqaddr | |
2596 | if ':' in fqaddr: |
|
2611 | if ':' in fqaddr: | |
2597 | fqaddr = '[%s]' % fqaddr |
|
2612 | fqaddr = '[%s]' % fqaddr | |
2598 | ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') % |
|
2613 | ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') % | |
2599 | (fqaddr, port, prefix, bindaddr, self.httpd.port)) |
|
2614 | (fqaddr, port, prefix, bindaddr, self.httpd.port)) | |
2600 |
|
2615 | |||
2601 | def run(self): |
|
2616 | def run(self): | |
2602 | self.httpd.serve_forever() |
|
2617 | self.httpd.serve_forever() | |
2603 |
|
2618 | |||
2604 | service = service() |
|
2619 | service = service() | |
2605 |
|
2620 | |||
2606 | cmdutil.service(opts, initfn=service.init, runfn=service.run) |
|
2621 | cmdutil.service(opts, initfn=service.init, runfn=service.run) | |
2607 |
|
2622 | |||
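
The verbose "listening at ..." message in serve has to bracket IPv6 literals and drop an explicit port 80. The same formatting rules in isolation (addresses and prefix are made up; the real code reads them from the running HTTP server object):

    def listen_url(fqaddr, port, prefix=''):
        """Format a display URL: bracket IPv6, omit the default HTTP port."""
        if ':' in fqaddr:                       # IPv6 literal
            fqaddr = '[%s]' % fqaddr
        portpart = '' if port == 80 else ':%d' % port
        prefix = prefix.strip('/') + '/' if prefix else ''
        return 'http://%s%s/%s' % (fqaddr, portpart, prefix)

    print(listen_url('localhost', 8000))           # -> http://localhost:8000/
    print(listen_url('::1', 80))                   # -> http://[::1]/
    print(listen_url('example.com', 8080, 'hg'))   # -> http://example.com:8080/hg/
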
2608 | def status(ui, repo, *pats, **opts): |
|
2623 | def status(ui, repo, *pats, **opts): | |
2609 | """show changed files in the working directory |
|
2624 | """show changed files in the working directory | |
2610 |
|
2625 | |||
2611 | Show status of files in the repository. If names are given, only |
|
2626 | Show status of files in the repository. If names are given, only | |
2612 | files that match are shown. Files that are clean or ignored or |
|
2627 | files that match are shown. Files that are clean or ignored or | |
2613 | source of a copy/move operation, are not listed unless -c (clean), |
|
2628 | source of a copy/move operation, are not listed unless -c (clean), | |
2614 | source of a copy/move operation are not listed unless -c (clean), |
|
2629 | source of a copy/move operation are not listed unless -c (clean), | |
2615 | with "show only ..." are given, the options -mardu are used. |
|
2630 | with "show only ..." are given, the options -mardu are used. | |
2616 |
|
2631 | |||
2617 | Option -q/--quiet hides untracked (unknown and ignored) files |
|
2632 | Option -q/--quiet hides untracked (unknown and ignored) files | |
2618 | unless explicitly requested with -u/--unknown or -i/--ignored. |
|
2633 | unless explicitly requested with -u/--unknown or -i/--ignored. | |
2619 |
|
2634 | |||
2620 | NOTE: status may appear to disagree with diff if permissions have |
|
2635 | NOTE: status may appear to disagree with diff if permissions have | |
2621 | changed or a merge has occurred. The standard diff format does not |
|
2636 | changed or a merge has occurred. The standard diff format does not | |
2622 | report permission changes and diff only reports changes relative |
|
2637 | report permission changes and diff only reports changes relative | |
2623 | to one merge parent. |
|
2638 | to one merge parent. | |
2624 |
|
2639 | |||
2625 | If one revision is given, it is used as the base revision. |
|
2640 | If one revision is given, it is used as the base revision. | |
2626 | If two revisions are given, the difference between them is shown. |
|
2641 | If two revisions are given, the difference between them is shown. | |
2627 |
|
2642 | |||
2628 | The codes used to show the status of files are: |
|
2643 | The codes used to show the status of files are: | |
2629 | M = modified |
|
2644 | M = modified | |
2630 | A = added |
|
2645 | A = added | |
2631 | R = removed |
|
2646 | R = removed | |
2632 | C = clean |
|
2647 | C = clean | |
2633 | ! = deleted, but still tracked |
|
2648 | ! = deleted, but still tracked | |
2634 | ? = not tracked |
|
2649 | ? = not tracked | |
2635 | I = ignored |
|
2650 | I = ignored | |
2636 | = the previous added file was copied from here |
|
2651 | = the previous added file was copied from here | |
2637 | """ |
|
2652 | """ | |
2638 |
|
2653 | |||
2639 | node1, node2 = cmdutil.revpair(repo, opts.get('rev')) |
|
2654 | node1, node2 = cmdutil.revpair(repo, opts.get('rev')) | |
2640 | cwd = (pats and repo.getcwd()) or '' |
|
2655 | cwd = (pats and repo.getcwd()) or '' | |
2641 | end = opts['print0'] and '\0' or '\n' |
|
2656 | end = opts.get('print0') and '\0' or '\n' | |
2642 | copy = {} |
|
2657 | copy = {} | |
2643 | states = 'modified added removed deleted unknown ignored clean'.split() |
|
2658 | states = 'modified added removed deleted unknown ignored clean'.split() | |
2644 | show = [k for k in states if opts[k]] |
|
2659 | show = [k for k in states if opts[k]] | |
2645 | if opts['all']: |
|
2660 | if opts.get('all'): | |
2646 | show += ui.quiet and (states[:4] + ['clean']) or states |
|
2661 | show += ui.quiet and (states[:4] + ['clean']) or states | |
2647 | if not show: |
|
2662 | if not show: | |
2648 | show = ui.quiet and states[:4] or states[:5] |
|
2663 | show = ui.quiet and states[:4] or states[:5] | |
2649 |
|
2664 | |||
2650 | stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), |
|
2665 | stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), | |
2651 | 'ignored' in show, 'clean' in show, 'unknown' in show) |
|
2666 | 'ignored' in show, 'clean' in show, 'unknown' in show) | |
2652 | changestates = zip(states, 'MAR!?IC', stat) |
|
2667 | changestates = zip(states, 'MAR!?IC', stat) | |
2653 |
|
2668 | |||
2654 | if (opts['all'] or opts['copies']) and not opts['no_status']: |
|
2669 | if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'): | |
2655 | ctxn = repo[nullid] |
|
2670 | ctxn = repo[nullid] | |
2656 | ctx1 = repo[node1] |
|
2671 | ctx1 = repo[node1] | |
2657 | ctx2 = repo[node2] |
|
2672 | ctx2 = repo[node2] | |
2658 | added = stat[1] |
|
2673 | added = stat[1] | |
2659 | if node2 is None: |
|
2674 | if node2 is None: | |
2660 | added = stat[0] + stat[1] # merged? |
|
2675 | added = stat[0] + stat[1] # merged? | |
2661 |
|
2676 | |||
2662 | for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items(): |
|
2677 | for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items(): | |
2663 | if k in added: |
|
2678 | if k in added: | |
2664 | copy[k] = v |
|
2679 | copy[k] = v | |
2665 | elif v in added: |
|
2680 | elif v in added: | |
2666 | copy[v] = k |
|
2681 | copy[v] = k | |
2667 |
|
2682 | |||
2668 | for state, char, files in changestates: |
|
2683 | for state, char, files in changestates: | |
2669 | if state in show: |
|
2684 | if state in show: | |
2670 | format = "%s %%s%s" % (char, end) |
|
2685 | format = "%s %%s%s" % (char, end) | |
2671 | if opts['no_status']: |
|
2686 | if opts.get('no_status'): | |
2672 | format = "%%s%s" % end |
|
2687 | format = "%%s%s" % end | |
2673 |
|
2688 | |||
2674 | for f in files: |
|
2689 | for f in files: | |
2675 | ui.write(format % repo.pathto(f, cwd)) |
|
2690 | ui.write(format % repo.pathto(f, cwd)) | |
2676 | if f in copy: |
|
2691 | if f in copy: | |
2677 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end)) |
|
2692 | ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end)) | |
2678 |
|
2693 | |||
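
status pairs the seven state names with their one-letter codes via zip and chooses '\0' or '\n' as line terminator for --print0. A freestanding sketch of that pairing and formatting, with fabricated file lists in the order repo.status() would return them:

    states = 'modified added removed deleted unknown ignored clean'.split()
    codes = 'MAR!?IC'

    # Fabricated status buckets, same order as the state names above.
    stat = (['hacked.py'], ['new.py'], [], ['old.py'], ['scratch.txt'], [], [])

    print0 = False
    end = '\0' if print0 else '\n'

    out = []
    for state, char, files in zip(states, codes, stat):
        for f in files:
            out.append('%s %s%s' % (char, f, end))
    print(''.join(out), end='')
    # M hacked.py / A new.py / ! old.py / ? scratch.txt
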
2679 | def tag(ui, repo, name1, *names, **opts): |
|
2694 | def tag(ui, repo, name1, *names, **opts): | |
2680 | """add one or more tags for the current or given revision |
|
2695 | """add one or more tags for the current or given revision | |
2681 |
|
2696 | |||
2682 | Name a particular revision using <name>. |
|
2697 | Name a particular revision using <name>. | |
2683 |
|
2698 | |||
2684 | Tags are used to name particular revisions of the repository and are |
|
2699 | Tags are used to name particular revisions of the repository and are | |
2685 | very useful to compare different revisions, to go back to significant |
|
2700 | very useful to compare different revisions, to go back to significant | |
2686 | earlier versions or to mark branch points as releases, etc. |
|
2701 | earlier versions or to mark branch points as releases, etc. | |
2687 |
|
2702 | |||
2688 | If no revision is given, the parent of the working directory is used, |
|
2703 | If no revision is given, the parent of the working directory is used, | |
2689 | or tip if no revision is checked out. |
|
2704 | or tip if no revision is checked out. | |
2690 |
|
2705 | |||
2691 | To facilitate version control, distribution, and merging of tags, |
|
2706 | To facilitate version control, distribution, and merging of tags, | |
2692 | they are stored as a file named ".hgtags" which is managed |
|
2707 | they are stored as a file named ".hgtags" which is managed | |
2693 | similarly to other project files and can be hand-edited if |
|
2708 | similarly to other project files and can be hand-edited if | |
2694 | necessary. The file '.hg/localtags' is used for local tags (not |
|
2709 | necessary. The file '.hg/localtags' is used for local tags (not | |
2695 | shared among repositories). |
|
2710 | shared among repositories). | |
2696 |
|
2711 | |||
2697 | See 'hg help dates' for a list of formats valid for -d/--date. |
|
2712 | See 'hg help dates' for a list of formats valid for -d/--date. | |
2698 | """ |
|
2713 | """ | |
2699 |
|
2714 | |||
2700 | rev_ = "." |
|
2715 | rev_ = "." | |
2701 | names = (name1,) + names |
|
2716 | names = (name1,) + names | |
2702 | if len(names) != len(dict.fromkeys(names)): |
|
2717 | if len(names) != len(dict.fromkeys(names)): | |
2703 | raise util.Abort(_('tag names must be unique')) |
|
2718 | raise util.Abort(_('tag names must be unique')) | |
2704 | for n in names: |
|
2719 | for n in names: | |
2705 | if n in ['tip', '.', 'null']: |
|
2720 | if n in ['tip', '.', 'null']: | |
2706 | raise util.Abort(_('the name \'%s\' is reserved') % n) |
|
2721 | raise util.Abort(_('the name \'%s\' is reserved') % n) | |
2707 | if opts['rev'] and opts['remove']: |
|
2722 | if opts.get('rev') and opts.get('remove'): | |
2708 | raise util.Abort(_("--rev and --remove are incompatible")) |
|
2723 | raise util.Abort(_("--rev and --remove are incompatible")) | |
2709 | if opts['rev']: |
|
2724 | if opts.get('rev'): | |
2710 | rev_ = opts['rev'] |
|
2725 | rev_ = opts['rev'] | |
2711 | message = opts['message'] |
|
2726 | message = opts.get('message') | |
2712 | if opts['remove']: |
|
2727 | if opts.get('remove'): | |
2713 | expectedtype = opts['local'] and 'local' or 'global' |
|
2728 | expectedtype = opts.get('local') and 'local' or 'global' | |
2714 | for n in names: |
|
2729 | for n in names: | |
2715 | if not repo.tagtype(n): |
|
2730 | if not repo.tagtype(n): | |
2716 | raise util.Abort(_('tag \'%s\' does not exist') % n) |
|
2731 | raise util.Abort(_('tag \'%s\' does not exist') % n) | |
2717 | if repo.tagtype(n) != expectedtype: |
|
2732 | if repo.tagtype(n) != expectedtype: | |
2718 | raise util.Abort(_('tag \'%s\' is not a %s tag') % |
|
2733 | raise util.Abort(_('tag \'%s\' is not a %s tag') % | |
2719 | (n, expectedtype)) |
|
2734 | (n, expectedtype)) | |
2720 | rev_ = nullid |
|
2735 | rev_ = nullid | |
2721 | if not message: |
|
2736 | if not message: | |
2722 | message = _('Removed tag %s') % ', '.join(names) |
|
2737 | message = _('Removed tag %s') % ', '.join(names) | |
2723 | elif not opts['force']:
|
2738 | elif not opts.get('force'): | |
2724 | for n in names: |
|
2739 | for n in names: | |
2725 | if n in repo.tags(): |
|
2740 | if n in repo.tags(): | |
2726 | raise util.Abort(_('tag \'%s\' already exists ' |
|
2741 | raise util.Abort(_('tag \'%s\' already exists ' | |
2727 | '(use -f to force)') % n) |
|
2742 | '(use -f to force)') % n) | |
2728 | if not rev_ and repo.dirstate.parents()[1] != nullid: |
|
2743 | if not rev_ and repo.dirstate.parents()[1] != nullid: | |
2729 | raise util.Abort(_('uncommitted merge - please provide a ' |
|
2744 | raise util.Abort(_('uncommitted merge - please provide a ' | |
2730 | 'specific revision')) |
|
2745 | 'specific revision')) | |
2731 | r = repo[rev_].node() |
|
2746 | r = repo[rev_].node() | |
2732 |
|
2747 | |||
2733 | if not message: |
|
2748 | if not message: | |
2734 | message = (_('Added tag %s for changeset %s') % |
|
2749 | message = (_('Added tag %s for changeset %s') % | |
2735 | (', '.join(names), short(r))) |
|
2750 | (', '.join(names), short(r))) | |
2736 |
|
2751 | |||
2737 | date = opts.get('date') |
|
2752 | date = opts.get('date') | |
2738 | if date: |
|
2753 | if date: | |
2739 | date = util.parsedate(date) |
|
2754 | date = util.parsedate(date) | |
2740 |
|
2755 | |||
2741 | repo.tag(names, r, message, opts['local'], opts['user'], date)
|
2756 | repo.tag(names, r, message, opts.get('local'), opts.get('user'), date) | |
2742 |
|
2757 | |||
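
The change running through this hunk is uniform: dictionary indexing such as opts['rev'] becomes opts.get('rev'), so a missing option key yields None instead of raising KeyError when a caller supplies only a partial options dict. A minimal standalone sketch of the difference, using a toy opts dict rather than Mercurial's real parsed options:

    # toy example: 'opts' stands in for a parsed-options dict
    opts = {'rev': '1.0'}

    assert opts['rev'] == '1.0'          # present key: both spellings agree
    assert opts.get('remove') is None    # absent key: .get() falls back to None
    try:
        opts['remove']                   # absent key: indexing raises
    except KeyError:
        pass
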
2743 | def tags(ui, repo): |
|
2758 | def tags(ui, repo): | |
2744 | """list repository tags |
|
2759 | """list repository tags | |
2745 |
|
2760 | |||
2746 | List the repository tags. |
|
2761 | List the repository tags. | |
2747 |
|
2762 | |||
2748 | This lists both regular and local tags. When the -v/--verbose switch |
|
2763 | This lists both regular and local tags. When the -v/--verbose switch | |
2749 | is used, a third column "local" is printed for local tags. |
|
2764 | is used, a third column "local" is printed for local tags. | |
2750 | """ |
|
2765 | """ | |
2751 |
|
2766 | |||
2752 | l = repo.tagslist() |
|
2767 | l = repo.tagslist() | |
2753 | l.reverse() |
|
2768 | l.reverse() | |
2754 | hexfunc = ui.debugflag and hex or short |
|
2769 | hexfunc = ui.debugflag and hex or short | |
2755 | tagtype = "" |
|
2770 | tagtype = "" | |
2756 |
|
2771 | |||
2757 | for t, n in l: |
|
2772 | for t, n in l: | |
2758 | if ui.quiet: |
|
2773 | if ui.quiet: | |
2759 | ui.write("%s\n" % t) |
|
2774 | ui.write("%s\n" % t) | |
2760 | continue |
|
2775 | continue | |
2761 |
|
2776 | |||
2762 | try: |
|
2777 | try: | |
2763 | hn = hexfunc(n) |
|
2778 | hn = hexfunc(n) | |
2764 | r = "%5d:%s" % (repo.changelog.rev(n), hn) |
|
2779 | r = "%5d:%s" % (repo.changelog.rev(n), hn) | |
2765 | except revlog.LookupError: |
|
2780 | except revlog.LookupError: | |
2766 | r = " ?:%s" % hn |
|
2781 | r = " ?:%s" % hn | |
2767 | else: |
|
2782 | else: | |
2768 | spaces = " " * (30 - util.locallen(t)) |
|
2783 | spaces = " " * (30 - util.locallen(t)) | |
2769 | if ui.verbose: |
|
2784 | if ui.verbose: | |
2770 | if repo.tagtype(t) == 'local': |
|
2785 | if repo.tagtype(t) == 'local': | |
2771 | tagtype = " local" |
|
2786 | tagtype = " local" | |
2772 | else: |
|
2787 | else: | |
2773 | tagtype = "" |
|
2788 | tagtype = "" | |
2774 | ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) |
|
2789 | ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) | |
2775 |
|
2790 | |||
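
The assignment hexfunc = ui.debugflag and hex or short above uses the pre-"x if c else y" conditional idiom: it picks hex when debugflag is truthy and short otherwise, which only works because hex itself is never falsy. A toy illustration of the idiom and its one pitfall:

    # 'cond and a or b' predates 'a if cond else b'
    debugflag = False
    picked = debugflag and 'hex' or 'short'
    assert picked == 'short'

    # pitfall: if the 'true' branch value is itself falsy, 'or' wins anyway
    assert (True and '' or 'fallback') == 'fallback'
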
2776 | def tip(ui, repo, **opts): |
|
2791 | def tip(ui, repo, **opts): | |
2777 | """show the tip revision |
|
2792 | """show the tip revision | |
2778 |
|
2793 | |||
2779 | The tip revision (usually just called the tip) is the most |
|
2794 | The tip revision (usually just called the tip) is the most | |
2780 | recently added changeset in the repository, the most recently |
|
2795 | recently added changeset in the repository, the most recently | |
2781 | changed head. |
|
2796 | changed head. | |
2782 |
|
2797 | |||
2783 | If you have just made a commit, that commit will be the tip. If |
|
2798 | If you have just made a commit, that commit will be the tip. If | |
2784 | you have just pulled changes from another repository, the tip of |
|
2799 | you have just pulled changes from another repository, the tip of | |
2785 | that repository becomes the current tip. The "tip" tag is special |
|
2800 | that repository becomes the current tip. The "tip" tag is special | |
2786 | and cannot be renamed or assigned to a different changeset. |
|
2801 | and cannot be renamed or assigned to a different changeset. | |
2787 | """ |
|
2802 | """ | |
2788 | cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1) |
|
2803 | cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1) | |
2789 |
|
2804 | |||
2790 | def unbundle(ui, repo, fname1, *fnames, **opts): |
|
2805 | def unbundle(ui, repo, fname1, *fnames, **opts): | |
2791 | """apply one or more changegroup files |
|
2806 | """apply one or more changegroup files | |
2792 |
|
2807 | |||
2793 | Apply one or more compressed changegroup files generated by the |
|
2808 | Apply one or more compressed changegroup files generated by the | |
2794 | bundle command. |
|
2809 | bundle command. | |
2795 | """ |
|
2810 | """ | |
2796 | fnames = (fname1,) + fnames |
|
2811 | fnames = (fname1,) + fnames | |
2797 |
|
2812 | |||
2798 | lock = None |
|
2813 | lock = None | |
2799 | try: |
|
2814 | try: | |
2800 | lock = repo.lock() |
|
2815 | lock = repo.lock() | |
2801 | for fname in fnames: |
|
2816 | for fname in fnames: | |
2802 | if os.path.exists(fname): |
|
2817 | if os.path.exists(fname): | |
2803 | f = open(fname, "rb") |
|
2818 | f = open(fname, "rb") | |
2804 | else: |
|
2819 | else: | |
2805 | f = urllib.urlopen(fname) |
|
2820 | f = urllib.urlopen(fname) | |
2806 | gen = changegroup.readbundle(f, fname) |
|
2821 | gen = changegroup.readbundle(f, fname) | |
2807 | modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname) |
|
2822 | modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname) | |
2808 | finally: |
|
2823 | finally: | |
2809 | del lock |
|
2824 | del lock | |
2810 |
|
2825 | |||
2811 | return postincoming(ui, repo, modheads, opts['update'], None)
|
2826 | return postincoming(ui, repo, modheads, opts.get('update'), None) | |
2812 |
|
2827 | |||
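
unbundle() releases the repository lock in its finally block with del lock, dropping the last reference so the lock object's destructor runs; in CPython the reference count hits zero immediately, so the release is prompt. A sketch of that reference-drop idiom with an invented Lock class (not Mercurial's real lock implementation):

    # invented Lock class illustrating release-on-del
    class Lock(object):
        def __init__(self):
            self.held = True
        def __del__(self):
            self.held = False    # 'release' when the last reference disappears

    lock = None
    try:
        lock = Lock()
        # ... work done while the lock is held ...
    finally:
        del lock                 # dropping the reference triggers __del__
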
2813 | def update(ui, repo, node=None, rev=None, clean=False, date=None): |
|
2828 | def update(ui, repo, node=None, rev=None, clean=False, date=None): | |
2814 | """update working directory |
|
2829 | """update working directory | |
2815 |
|
2830 | |||
2816 | Update the repository's working directory to the specified revision, |
|
2831 | Update the repository's working directory to the specified revision, | |
2817 | or the tip of the current branch if none is specified. Use null as |
|
2832 | or the tip of the current branch if none is specified. Use null as | |
2818 | the revision to remove the working copy (like 'hg clone -U'). |
|
2833 | the revision to remove the working copy (like 'hg clone -U'). | |
2819 |
|
2834 | |||
2820 | If the requested revision is a descendant of the working |
|
2835 | If the requested revision is a descendant of the working | |
2821 | directory, any outstanding changes in the working directory will |
|
2836 | directory, any outstanding changes in the working directory will | |
2822 | be merged into the result. If it is not directly descended but is |
|
2837 | be merged into the result. If it is not directly descended but is | |
2823 | on the same named branch, update aborts with a suggestion to use |
|
2838 | on the same named branch, update aborts with a suggestion to use | |
2824 | merge or update -C instead. |
|
2839 | merge or update -C instead. | |
2825 |
|
2840 | |||
2826 | If the requested revision is on a different named branch and the |
|
2841 | If the requested revision is on a different named branch and the | |
2827 | working directory is clean, update quietly switches branches. |
|
2842 | working directory is clean, update quietly switches branches. | |
2828 |
|
2843 | |||
2829 | If you want to update just one file to an older revision, use revert. |
|
2844 | If you want to update just one file to an older revision, use revert. | |
2830 |
|
2845 | |||
2831 | See 'hg help dates' for a list of formats valid for --date. |
|
2846 | See 'hg help dates' for a list of formats valid for --date. | |
2832 | """ |
|
2847 | """ | |
2833 | if rev and node: |
|
2848 | if rev and node: | |
2834 | raise util.Abort(_("please specify just one revision")) |
|
2849 | raise util.Abort(_("please specify just one revision")) | |
2835 |
|
2850 | |||
2836 | if not rev: |
|
2851 | if not rev: | |
2837 | rev = node |
|
2852 | rev = node | |
2838 |
|
2853 | |||
2839 | if date: |
|
2854 | if date: | |
2840 | if rev: |
|
2855 | if rev: | |
2841 | raise util.Abort(_("you can't specify a revision and a date")) |
|
2856 | raise util.Abort(_("you can't specify a revision and a date")) | |
2842 | rev = cmdutil.finddate(ui, repo, date) |
|
2857 | rev = cmdutil.finddate(ui, repo, date) | |
2843 |
|
2858 | |||
2844 | if clean: |
|
2859 | if clean: | |
2845 | return hg.clean(repo, rev) |
|
2860 | return hg.clean(repo, rev) | |
2846 | else: |
|
2861 | else: | |
2847 | return hg.update(repo, rev) |
|
2862 | return hg.update(repo, rev) | |
2848 |
|
2863 | |||
2849 | def verify(ui, repo): |
|
2864 | def verify(ui, repo): | |
2850 | """verify the integrity of the repository |
|
2865 | """verify the integrity of the repository | |
2851 |
|
2866 | |||
2852 | Verify the integrity of the current repository. |
|
2867 | Verify the integrity of the current repository. | |
2853 |
|
2868 | |||
2854 | This will perform an extensive check of the repository's |
|
2869 | This will perform an extensive check of the repository's | |
2855 | integrity, validating the hashes and checksums of each entry in |
|
2870 | integrity, validating the hashes and checksums of each entry in | |
2856 | the changelog, manifest, and tracked files, as well as the |
|
2871 | the changelog, manifest, and tracked files, as well as the | |
2857 | integrity of their crosslinks and indices. |
|
2872 | integrity of their crosslinks and indices. | |
2858 | """ |
|
2873 | """ | |
2859 | return hg.verify(repo) |
|
2874 | return hg.verify(repo) | |
2860 |
|
2875 | |||
2861 | def version_(ui): |
|
2876 | def version_(ui): | |
2862 | """output version and copyright information""" |
|
2877 | """output version and copyright information""" | |
2863 | ui.write(_("Mercurial Distributed SCM (version %s)\n") |
|
2878 | ui.write(_("Mercurial Distributed SCM (version %s)\n") | |
2864 | % version.get_version()) |
|
2879 | % version.get_version()) | |
2865 | ui.status(_( |
|
2880 | ui.status(_( | |
2866 | "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n" |
|
2881 | "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n" | |
2867 | "This is free software; see the source for copying conditions. " |
|
2882 | "This is free software; see the source for copying conditions. " | |
2868 | "There is NO\nwarranty; " |
|
2883 | "There is NO\nwarranty; " | |
2869 | "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" |
|
2884 | "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" | |
2870 | )) |
|
2885 | )) | |
2871 |
|
2886 | |||
2872 | # Command options and aliases are listed here, alphabetically |
|
2887 | # Command options and aliases are listed here, alphabetically | |
2873 |
|
2888 | |||
2874 | globalopts = [ |
|
2889 | globalopts = [ | |
2875 | ('R', 'repository', '', |
|
2890 | ('R', 'repository', '', | |
2876 | _('repository root directory or symbolic path name')), |
|
2891 | _('repository root directory or symbolic path name')), | |
2877 | ('', 'cwd', '', _('change working directory')), |
|
2892 | ('', 'cwd', '', _('change working directory')), | |
2878 | ('y', 'noninteractive', None, |
|
2893 | ('y', 'noninteractive', None, | |
2879 | _('do not prompt, assume \'yes\' for any required answers')), |
|
2894 | _('do not prompt, assume \'yes\' for any required answers')), | |
2880 | ('q', 'quiet', None, _('suppress output')), |
|
2895 | ('q', 'quiet', None, _('suppress output')), | |
2881 | ('v', 'verbose', None, _('enable additional output')), |
|
2896 | ('v', 'verbose', None, _('enable additional output')), | |
2882 | ('', 'config', [], _('set/override config option')), |
|
2897 | ('', 'config', [], _('set/override config option')), | |
2883 | ('', 'debug', None, _('enable debugging output')), |
|
2898 | ('', 'debug', None, _('enable debugging output')), | |
2884 | ('', 'debugger', None, _('start debugger')), |
|
2899 | ('', 'debugger', None, _('start debugger')), | |
2885 | ('', 'encoding', util._encoding, _('set the charset encoding')), |
|
2900 | ('', 'encoding', util._encoding, _('set the charset encoding')), | |
2886 | ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')), |
|
2901 | ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')), | |
2887 | ('', 'lsprof', None, _('print improved command execution profile')), |
|
2902 | ('', 'lsprof', None, _('print improved command execution profile')), | |
2888 | ('', 'traceback', None, _('print traceback on exception')), |
|
2903 | ('', 'traceback', None, _('print traceback on exception')), | |
2889 | ('', 'time', None, _('time how long the command takes')), |
|
2904 | ('', 'time', None, _('time how long the command takes')), | |
2890 | ('', 'profile', None, _('print command execution profile')), |
|
2905 | ('', 'profile', None, _('print command execution profile')), | |
2891 | ('', 'version', None, _('output version information and exit')), |
|
2906 | ('', 'version', None, _('output version information and exit')), | |
2892 | ('h', 'help', None, _('display help and exit')), |
|
2907 | ('h', 'help', None, _('display help and exit')), | |
2893 | ] |
|
2908 | ] | |
2894 |
|
2909 | |||
2895 | dryrunopts = [('n', 'dry-run', None, |
|
2910 | dryrunopts = [('n', 'dry-run', None, | |
2896 | _('do not perform actions, just print output'))] |
|
2911 | _('do not perform actions, just print output'))] | |
2897 |
|
2912 | |||
2898 | remoteopts = [ |
|
2913 | remoteopts = [ | |
2899 | ('e', 'ssh', '', _('specify ssh command to use')), |
|
2914 | ('e', 'ssh', '', _('specify ssh command to use')), | |
2900 | ('', 'remotecmd', '', _('specify hg command to run on the remote side')), |
|
2915 | ('', 'remotecmd', '', _('specify hg command to run on the remote side')), | |
2901 | ] |
|
2916 | ] | |
2902 |
|
2917 | |||
2903 | walkopts = [ |
|
2918 | walkopts = [ | |
2904 | ('I', 'include', [], _('include names matching the given patterns')), |
|
2919 | ('I', 'include', [], _('include names matching the given patterns')), | |
2905 | ('X', 'exclude', [], _('exclude names matching the given patterns')), |
|
2920 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
2906 | ] |
|
2921 | ] | |
2907 |
|
2922 | |||
2908 | commitopts = [ |
|
2923 | commitopts = [ | |
2909 | ('m', 'message', '', _('use <text> as commit message')), |
|
2924 | ('m', 'message', '', _('use <text> as commit message')), | |
2910 | ('l', 'logfile', '', _('read commit message from <file>')), |
|
2925 | ('l', 'logfile', '', _('read commit message from <file>')), | |
2911 | ] |
|
2926 | ] | |
2912 |
|
2927 | |||
2913 | commitopts2 = [ |
|
2928 | commitopts2 = [ | |
2914 | ('d', 'date', '', _('record datecode as commit date')), |
|
2929 | ('d', 'date', '', _('record datecode as commit date')), | |
2915 | ('u', 'user', '', _('record user as committer')), |
|
2930 | ('u', 'user', '', _('record user as committer')), | |
2916 | ] |
|
2931 | ] | |
2917 |
|
2932 | |||
2918 | templateopts = [ |
|
2933 | templateopts = [ | |
2919 | ('', 'style', '', _('display using template map file')), |
|
2934 | ('', 'style', '', _('display using template map file')), | |
2920 | ('', 'template', '', _('display with template')), |
|
2935 | ('', 'template', '', _('display with template')), | |
2921 | ] |
|
2936 | ] | |
2922 |
|
2937 | |||
2923 | logopts = [ |
|
2938 | logopts = [ | |
2924 | ('p', 'patch', None, _('show patch')), |
|
2939 | ('p', 'patch', None, _('show patch')), | |
2925 | ('l', 'limit', '', _('limit number of changes displayed')), |
|
2940 | ('l', 'limit', '', _('limit number of changes displayed')), | |
2926 | ('M', 'no-merges', None, _('do not show merges')), |
|
2941 | ('M', 'no-merges', None, _('do not show merges')), | |
2927 | ] + templateopts |
|
2942 | ] + templateopts | |
2928 |
|
2943 | |||
2929 | diffopts = [ |
|
2944 | diffopts = [ | |
2930 | ('a', 'text', None, _('treat all files as text')), |
|
2945 | ('a', 'text', None, _('treat all files as text')), | |
2931 | ('g', 'git', None, _('use git extended diff format')), |
|
2946 | ('g', 'git', None, _('use git extended diff format')), | |
2932 | ('', 'nodates', None, _("don't include dates in diff headers")) |
|
2947 | ('', 'nodates', None, _("don't include dates in diff headers")) | |
2933 | ] |
|
2948 | ] | |
2934 |
|
2949 | |||
2935 | diffopts2 = [ |
|
2950 | diffopts2 = [ | |
2936 | ('p', 'show-function', None, _('show which function each change is in')), |
|
2951 | ('p', 'show-function', None, _('show which function each change is in')), | |
2937 | ('w', 'ignore-all-space', None, |
|
2952 | ('w', 'ignore-all-space', None, | |
2938 | _('ignore white space when comparing lines')), |
|
2953 | _('ignore white space when comparing lines')), | |
2939 | ('b', 'ignore-space-change', None, |
|
2954 | ('b', 'ignore-space-change', None, | |
2940 | _('ignore changes in the amount of white space')), |
|
2955 | _('ignore changes in the amount of white space')), | |
2941 | ('B', 'ignore-blank-lines', None, |
|
2956 | ('B', 'ignore-blank-lines', None, | |
2942 | _('ignore changes whose lines are all blank')), |
|
2957 | _('ignore changes whose lines are all blank')), | |
2943 | ('U', 'unified', '', _('number of lines of context to show')) |
|
2958 | ('U', 'unified', '', _('number of lines of context to show')) | |
2944 | ] |
|
2959 | ] | |
2945 |
|
2960 | |||
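
Each entry in the option lists above is a 4-tuple of (short flag, long flag, default value, help text), and the default value doubles as the option's type: None marks a boolean flag, '' a string, [] a repeatable option, and an int a numeric option. A rough sketch of reading such tuples into a defaults dict; this is illustrative only, not how Mercurial's own option parser consumes them:

    # illustrative only: derive per-command defaults from 4-tuples like those above
    sampleopts = [
        ('q', 'quiet', None, 'suppress output'),
        ('r', 'rev', [], 'revisions'),
        ('U', 'unified', '', 'number of lines of context to show'),
    ]

    def defaults(optlist):
        # map long option name -> default; the default also implies the type
        return dict((name, default) for shortflag, name, default, helptext in optlist)

    assert defaults(sampleopts) == {'quiet': None, 'rev': [], 'unified': ''}
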
2946 | table = { |
|
2961 | table = { | |
2947 | "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')), |
|
2962 | "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')), | |
2948 | "addremove": |
|
2963 | "addremove": | |
2949 | (addremove, |
|
2964 | (addremove, | |
2950 | [('s', 'similarity', '', |
|
2965 | [('s', 'similarity', '', | |
2951 | _('guess renamed files by similarity (0<=s<=100)')), |
|
2966 | _('guess renamed files by similarity (0<=s<=100)')), | |
2952 | ] + walkopts + dryrunopts, |
|
2967 | ] + walkopts + dryrunopts, | |
2953 | _('hg addremove [OPTION]... [FILE]...')), |
|
2968 | _('hg addremove [OPTION]... [FILE]...')), | |
2954 | "^annotate|blame": |
|
2969 | "^annotate|blame": | |
2955 | (annotate, |
|
2970 | (annotate, | |
2956 | [('r', 'rev', '', _('annotate the specified revision')), |
|
2971 | [('r', 'rev', '', _('annotate the specified revision')), | |
2957 | ('f', 'follow', None, _('follow file copies and renames')), |
|
2972 | ('f', 'follow', None, _('follow file copies and renames')), | |
2958 | ('a', 'text', None, _('treat all files as text')), |
|
2973 | ('a', 'text', None, _('treat all files as text')), | |
2959 | ('u', 'user', None, _('list the author (long with -v)')), |
|
2974 | ('u', 'user', None, _('list the author (long with -v)')), | |
2960 | ('d', 'date', None, _('list the date (short with -q)')), |
|
2975 | ('d', 'date', None, _('list the date (short with -q)')), | |
2961 | ('n', 'number', None, _('list the revision number (default)')), |
|
2976 | ('n', 'number', None, _('list the revision number (default)')), | |
2962 | ('c', 'changeset', None, _('list the changeset')), |
|
2977 | ('c', 'changeset', None, _('list the changeset')), | |
2963 | ('l', 'line-number', None, |
|
2978 | ('l', 'line-number', None, | |
2964 | _('show line number at the first appearance')) |
|
2979 | _('show line number at the first appearance')) | |
2965 | ] + walkopts, |
|
2980 | ] + walkopts, | |
2966 | _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')), |
|
2981 | _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')), | |
2967 | "archive": |
|
2982 | "archive": | |
2968 | (archive, |
|
2983 | (archive, | |
2969 | [('', 'no-decode', None, _('do not pass files through decoders')), |
|
2984 | [('', 'no-decode', None, _('do not pass files through decoders')), | |
2970 | ('p', 'prefix', '', _('directory prefix for files in archive')), |
|
2985 | ('p', 'prefix', '', _('directory prefix for files in archive')), | |
2971 | ('r', 'rev', '', _('revision to distribute')), |
|
2986 | ('r', 'rev', '', _('revision to distribute')), | |
2972 | ('t', 'type', '', _('type of distribution to create')), |
|
2987 | ('t', 'type', '', _('type of distribution to create')), | |
2973 | ] + walkopts, |
|
2988 | ] + walkopts, | |
2974 | _('hg archive [OPTION]... DEST')), |
|
2989 | _('hg archive [OPTION]... DEST')), | |
2975 | "backout": |
|
2990 | "backout": | |
2976 | (backout, |
|
2991 | (backout, | |
2977 | [('', 'merge', None, |
|
2992 | [('', 'merge', None, | |
2978 | _('merge with old dirstate parent after backout')), |
|
2993 | _('merge with old dirstate parent after backout')), | |
2979 | ('', 'parent', '', _('parent to choose when backing out merge')), |
|
2994 | ('', 'parent', '', _('parent to choose when backing out merge')), | |
2980 | ('r', 'rev', '', _('revision to backout')), |
|
2995 | ('r', 'rev', '', _('revision to backout')), | |
2981 | ] + walkopts + commitopts + commitopts2, |
|
2996 | ] + walkopts + commitopts + commitopts2, | |
2982 | _('hg backout [OPTION]... [-r] REV')), |
|
2997 | _('hg backout [OPTION]... [-r] REV')), | |
2983 | "bisect": |
|
2998 | "bisect": | |
2984 | (bisect, |
|
2999 | (bisect, | |
2985 | [('r', 'reset', False, _('reset bisect state')), |
|
3000 | [('r', 'reset', False, _('reset bisect state')), | |
2986 | ('g', 'good', False, _('mark changeset good')), |
|
3001 | ('g', 'good', False, _('mark changeset good')), | |
2987 | ('b', 'bad', False, _('mark changeset bad')), |
|
3002 | ('b', 'bad', False, _('mark changeset bad')), | |
2988 | ('s', 'skip', False, _('skip testing changeset')), |
|
3003 | ('s', 'skip', False, _('skip testing changeset')), | |
2989 | ('U', 'noupdate', False, _('do not update to target'))], |
|
3004 | ('U', 'noupdate', False, _('do not update to target'))], | |
2990 | _("hg bisect [-gbsr] [REV]")), |
|
3005 | _("hg bisect [-gbsr] [REV]")), | |
2991 | "branch": |
|
3006 | "branch": | |
2992 | (branch, |
|
3007 | (branch, | |
2993 | [('f', 'force', None, |
|
3008 | [('f', 'force', None, | |
2994 | _('set branch name even if it shadows an existing branch')), |
|
3009 | _('set branch name even if it shadows an existing branch')), | |
2995 | ('C', 'clean', None, _('reset branch name to parent branch name'))], |
|
3010 | ('C', 'clean', None, _('reset branch name to parent branch name'))], | |
2996 | _('hg branch [-fC] [NAME]')), |
|
3011 | _('hg branch [-fC] [NAME]')), | |
2997 | "branches": |
|
3012 | "branches": | |
2998 | (branches, |
|
3013 | (branches, | |
2999 | [('a', 'active', False, |
|
3014 | [('a', 'active', False, | |
3000 | _('show only branches that have unmerged heads'))], |
|
3015 | _('show only branches that have unmerged heads'))], | |
3001 | _('hg branches [-a]')), |
|
3016 | _('hg branches [-a]')), | |
3002 | "bundle": |
|
3017 | "bundle": | |
3003 | (bundle, |
|
3018 | (bundle, | |
3004 | [('f', 'force', None, |
|
3019 | [('f', 'force', None, | |
3005 | _('run even when remote repository is unrelated')), |
|
3020 | _('run even when remote repository is unrelated')), | |
3006 | ('r', 'rev', [], |
|
3021 | ('r', 'rev', [], | |
3007 | _('a changeset up to which you would like to bundle')), |
|
3022 | _('a changeset up to which you would like to bundle')), | |
3008 | ('', 'base', [], |
|
3023 | ('', 'base', [], | |
3009 | _('a base changeset to specify instead of a destination')), |
|
3024 | _('a base changeset to specify instead of a destination')), | |
3010 | ('a', 'all', None, _('bundle all changesets in the repository')), |
|
3025 | ('a', 'all', None, _('bundle all changesets in the repository')), | |
3011 | ('t', 'type', 'bzip2', _('bundle compression type to use')), |
|
3026 | ('t', 'type', 'bzip2', _('bundle compression type to use')), | |
3012 | ] + remoteopts, |
|
3027 | ] + remoteopts, | |
3013 | _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')), |
|
3028 | _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')), | |
3014 | "cat": |
|
3029 | "cat": | |
3015 | (cat, |
|
3030 | (cat, | |
3016 | [('o', 'output', '', _('print output to file with formatted name')), |
|
3031 | [('o', 'output', '', _('print output to file with formatted name')), | |
3017 | ('r', 'rev', '', _('print the given revision')), |
|
3032 | ('r', 'rev', '', _('print the given revision')), | |
3018 | ('', 'decode', None, _('apply any matching decode filter')), |
|
3033 | ('', 'decode', None, _('apply any matching decode filter')), | |
3019 | ] + walkopts, |
|
3034 | ] + walkopts, | |
3020 | _('hg cat [OPTION]... FILE...')), |
|
3035 | _('hg cat [OPTION]... FILE...')), | |
3021 | "^clone": |
|
3036 | "^clone": | |
3022 | (clone, |
|
3037 | (clone, | |
3023 | [('U', 'noupdate', None, |
|
3038 | [('U', 'noupdate', None, | |
3024 | _('the clone will only contain a repository (no working copy)')), |
|
3039 | _('the clone will only contain a repository (no working copy)')), | |
3025 | ('r', 'rev', [], |
|
3040 | ('r', 'rev', [], | |
3026 | _('a changeset you would like to have after cloning')), |
|
3041 | _('a changeset you would like to have after cloning')), | |
3027 | ('', 'pull', None, _('use pull protocol to copy metadata')), |
|
3042 | ('', 'pull', None, _('use pull protocol to copy metadata')), | |
3028 | ('', 'uncompressed', None, |
|
3043 | ('', 'uncompressed', None, | |
3029 | _('use uncompressed transfer (fast over LAN)')), |
|
3044 | _('use uncompressed transfer (fast over LAN)')), | |
3030 | ] + remoteopts, |
|
3045 | ] + remoteopts, | |
3031 | _('hg clone [OPTION]... SOURCE [DEST]')), |
|
3046 | _('hg clone [OPTION]... SOURCE [DEST]')), | |
3032 | "^commit|ci": |
|
3047 | "^commit|ci": | |
3033 | (commit, |
|
3048 | (commit, | |
3034 | [('A', 'addremove', None, |
|
3049 | [('A', 'addremove', None, | |
3035 | _('mark new/missing files as added/removed before committing')), |
|
3050 | _('mark new/missing files as added/removed before committing')), | |
3036 | ] + walkopts + commitopts + commitopts2, |
|
3051 | ] + walkopts + commitopts + commitopts2, | |
3037 | _('hg commit [OPTION]... [FILE]...')), |
|
3052 | _('hg commit [OPTION]... [FILE]...')), | |
3038 | "copy|cp": |
|
3053 | "copy|cp": | |
3039 | (copy, |
|
3054 | (copy, | |
3040 | [('A', 'after', None, _('record a copy that has already occurred')), |
|
3055 | [('A', 'after', None, _('record a copy that has already occurred')), | |
3041 | ('f', 'force', None, |
|
3056 | ('f', 'force', None, | |
3042 | _('forcibly copy over an existing managed file')), |
|
3057 | _('forcibly copy over an existing managed file')), | |
3043 | ] + walkopts + dryrunopts, |
|
3058 | ] + walkopts + dryrunopts, | |
3044 | _('hg copy [OPTION]... [SOURCE]... DEST')), |
|
3059 | _('hg copy [OPTION]... [SOURCE]... DEST')), | |
3045 | "debugancestor": (debugancestor, [], |
|
3060 | "debugancestor": (debugancestor, [], | |
3046 | _('hg debugancestor [INDEX] REV1 REV2')), |
|
3061 | _('hg debugancestor [INDEX] REV1 REV2')), | |
3047 | "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')), |
|
3062 | "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')), | |
3048 | "debugcomplete": |
|
3063 | "debugcomplete": | |
3049 | (debugcomplete, |
|
3064 | (debugcomplete, | |
3050 | [('o', 'options', None, _('show the command options'))], |
|
3065 | [('o', 'options', None, _('show the command options'))], | |
3051 | _('hg debugcomplete [-o] CMD')), |
|
3066 | _('hg debugcomplete [-o] CMD')), | |
3052 | "debugdate": |
|
3067 | "debugdate": | |
3053 | (debugdate, |
|
3068 | (debugdate, | |
3054 | [('e', 'extended', None, _('try extended date formats'))], |
|
3069 | [('e', 'extended', None, _('try extended date formats'))], | |
3055 | _('hg debugdate [-e] DATE [RANGE]')), |
|
3070 | _('hg debugdate [-e] DATE [RANGE]')), | |
3056 | "debugdata": (debugdata, [], _('hg debugdata FILE REV')), |
|
3071 | "debugdata": (debugdata, [], _('hg debugdata FILE REV')), | |
3057 | "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')), |
|
3072 | "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')), | |
3058 | "debugindex": (debugindex, [], _('hg debugindex FILE')), |
|
3073 | "debugindex": (debugindex, [], _('hg debugindex FILE')), | |
3059 | "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')), |
|
3074 | "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')), | |
3060 | "debuginstall": (debuginstall, [], _('hg debuginstall')), |
|
3075 | "debuginstall": (debuginstall, [], _('hg debuginstall')), | |
3061 | "debugrawcommit|rawcommit": |
|
3076 | "debugrawcommit|rawcommit": | |
3062 | (rawcommit, |
|
3077 | (rawcommit, | |
3063 | [('p', 'parent', [], _('parent')), |
|
3078 | [('p', 'parent', [], _('parent')), | |
3064 | ('F', 'files', '', _('file list')) |
|
3079 | ('F', 'files', '', _('file list')) | |
3065 | ] + commitopts + commitopts2, |
|
3080 | ] + commitopts + commitopts2, | |
3066 | _('hg debugrawcommit [OPTION]... [FILE]...')), |
|
3081 | _('hg debugrawcommit [OPTION]... [FILE]...')), | |
3067 | "debugrebuildstate": |
|
3082 | "debugrebuildstate": | |
3068 | (debugrebuildstate, |
|
3083 | (debugrebuildstate, | |
3069 | [('r', 'rev', '', _('revision to rebuild to'))], |
|
3084 | [('r', 'rev', '', _('revision to rebuild to'))], | |
3070 | _('hg debugrebuildstate [-r REV] [REV]')), |
|
3085 | _('hg debugrebuildstate [-r REV] [REV]')), | |
3071 | "debugrename": |
|
3086 | "debugrename": | |
3072 | (debugrename, |
|
3087 | (debugrename, | |
3073 | [('r', 'rev', '', _('revision to debug'))], |
|
3088 | [('r', 'rev', '', _('revision to debug'))], | |
3074 | _('hg debugrename [-r REV] FILE')), |
|
3089 | _('hg debugrename [-r REV] FILE')), | |
3075 | "debugsetparents": |
|
3090 | "debugsetparents": | |
3076 | (debugsetparents, |
|
3091 | (debugsetparents, | |
3077 | [], |
|
3092 | [], | |
3078 | _('hg debugsetparents REV1 [REV2]')), |
|
3093 | _('hg debugsetparents REV1 [REV2]')), | |
3079 | "debugstate": |
|
3094 | "debugstate": | |
3080 | (debugstate, |
|
3095 | (debugstate, | |
3081 | [('', 'nodates', None, _('do not display the saved mtime'))], |
|
3096 | [('', 'nodates', None, _('do not display the saved mtime'))], | |
3082 | _('hg debugstate [OPTS]')), |
|
3097 | _('hg debugstate [OPTS]')), | |
3083 | "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')), |
|
3098 | "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')), | |
3084 | "^diff": |
|
3099 | "^diff": | |
3085 | (diff, |
|
3100 | (diff, | |
3086 | [('r', 'rev', [], _('revision')) |
|
3101 | [('r', 'rev', [], _('revision')) | |
3087 | ] + diffopts + diffopts2 + walkopts, |
|
3102 | ] + diffopts + diffopts2 + walkopts, | |
3088 | _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')), |
|
3103 | _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')), | |
3089 | "^export": |
|
3104 | "^export": | |
3090 | (export, |
|
3105 | (export, | |
3091 | [('o', 'output', '', _('print output to file with formatted name')), |
|
3106 | [('o', 'output', '', _('print output to file with formatted name')), | |
3092 | ('', 'switch-parent', None, _('diff against the second parent')) |
|
3107 | ('', 'switch-parent', None, _('diff against the second parent')) | |
3093 | ] + diffopts, |
|
3108 | ] + diffopts, | |
3094 | _('hg export [OPTION]... [-o OUTFILESPEC] REV...')), |
|
3109 | _('hg export [OPTION]... [-o OUTFILESPEC] REV...')), | |
3095 | "grep": |
|
3110 | "grep": | |
3096 | (grep, |
|
3111 | (grep, | |
3097 | [('0', 'print0', None, _('end fields with NUL')), |
|
3112 | [('0', 'print0', None, _('end fields with NUL')), | |
3098 | ('', 'all', None, _('print all revisions that match')), |
|
3113 | ('', 'all', None, _('print all revisions that match')), | |
3099 | ('f', 'follow', None, |
|
3114 | ('f', 'follow', None, | |
3100 | _('follow changeset history, or file history across copies and renames')), |
|
3115 | _('follow changeset history, or file history across copies and renames')), | |
3101 | ('i', 'ignore-case', None, _('ignore case when matching')), |
|
3116 | ('i', 'ignore-case', None, _('ignore case when matching')), | |
3102 | ('l', 'files-with-matches', None, |
|
3117 | ('l', 'files-with-matches', None, | |
3103 | _('print only filenames and revs that match')), |
|
3118 | _('print only filenames and revs that match')), | |
3104 | ('n', 'line-number', None, _('print matching line numbers')), |
|
3119 | ('n', 'line-number', None, _('print matching line numbers')), | |
3105 | ('r', 'rev', [], _('search in given revision range')), |
|
3120 | ('r', 'rev', [], _('search in given revision range')), | |
3106 | ('u', 'user', None, _('list the author (long with -v)')), |
|
3121 | ('u', 'user', None, _('list the author (long with -v)')), | |
3107 | ('d', 'date', None, _('list the date (short with -q)')), |
|
3122 | ('d', 'date', None, _('list the date (short with -q)')), | |
3108 | ] + walkopts, |
|
3123 | ] + walkopts, | |
3109 | _('hg grep [OPTION]... PATTERN [FILE]...')), |
|
3124 | _('hg grep [OPTION]... PATTERN [FILE]...')), | |
3110 | "heads": |
|
3125 | "heads": | |
3111 | (heads, |
|
3126 | (heads, | |
3112 | [('r', 'rev', '', _('show only heads which are descendants of rev')), |
|
3127 | [('r', 'rev', '', _('show only heads which are descendants of rev')), | |
3113 | ] + templateopts, |
|
3128 | ] + templateopts, | |
3114 | _('hg heads [-r REV] [REV]...')), |
|
3129 | _('hg heads [-r REV] [REV]...')), | |
3115 | "help": (help_, [], _('hg help [COMMAND]')), |
|
3130 | "help": (help_, [], _('hg help [COMMAND]')), | |
3116 | "identify|id": |
|
3131 | "identify|id": | |
3117 | (identify, |
|
3132 | (identify, | |
3118 | [('r', 'rev', '', _('identify the specified rev')), |
|
3133 | [('r', 'rev', '', _('identify the specified rev')), | |
3119 | ('n', 'num', None, _('show local revision number')), |
|
3134 | ('n', 'num', None, _('show local revision number')), | |
3120 | ('i', 'id', None, _('show global revision id')), |
|
3135 | ('i', 'id', None, _('show global revision id')), | |
3121 | ('b', 'branch', None, _('show branch')), |
|
3136 | ('b', 'branch', None, _('show branch')), | |
3122 | ('t', 'tags', None, _('show tags'))], |
|
3137 | ('t', 'tags', None, _('show tags'))], | |
3123 | _('hg identify [-nibt] [-r REV] [SOURCE]')), |
|
3138 | _('hg identify [-nibt] [-r REV] [SOURCE]')), | |
3124 | "import|patch": |
|
3139 | "import|patch": | |
3125 | (import_, |
|
3140 | (import_, | |
3126 | [('p', 'strip', 1, |
|
3141 | [('p', 'strip', 1, | |
3127 | _('directory strip option for patch. This has the same\n' |
|
3142 | _('directory strip option for patch. This has the same\n' | |
3128 | 'meaning as the corresponding patch option')), |
|
3143 | 'meaning as the corresponding patch option')), | |
3129 | ('b', 'base', '', _('base path')), |
|
3144 | ('b', 'base', '', _('base path')), | |
3130 | ('f', 'force', None, |
|
3145 | ('f', 'force', None, | |
3131 | _('skip check for outstanding uncommitted changes')), |
|
3146 | _('skip check for outstanding uncommitted changes')), | |
3132 | ('', 'no-commit', None, _("don't commit, just update the working directory")), |
|
3147 | ('', 'no-commit', None, _("don't commit, just update the working directory")), | |
3133 | ('', 'exact', None, |
|
3148 | ('', 'exact', None, | |
3134 | _('apply patch to the nodes from which it was generated')), |
|
3149 | _('apply patch to the nodes from which it was generated')), | |
3135 | ('', 'import-branch', None, |
|
3150 | ('', 'import-branch', None, | |
3136 | _('Use any branch information in patch (implied by --exact)'))] + |
|
3151 | _('Use any branch information in patch (implied by --exact)'))] + | |
3137 | commitopts + commitopts2, |
|
3152 | commitopts + commitopts2, | |
3138 | _('hg import [OPTION]... PATCH...')), |
|
3153 | _('hg import [OPTION]... PATCH...')), | |
3139 | "incoming|in": |
|
3154 | "incoming|in": | |
3140 | (incoming, |
|
3155 | (incoming, | |
3141 | [('f', 'force', None, |
|
3156 | [('f', 'force', None, | |
3142 | _('run even when remote repository is unrelated')), |
|
3157 | _('run even when remote repository is unrelated')), | |
3143 | ('n', 'newest-first', None, _('show newest record first')), |
|
3158 | ('n', 'newest-first', None, _('show newest record first')), | |
3144 | ('', 'bundle', '', _('file to store the bundles into')), |
|
3159 | ('', 'bundle', '', _('file to store the bundles into')), | |
3145 | ('r', 'rev', [], |
|
3160 | ('r', 'rev', [], | |
3146 | _('a specific revision up to which you would like to pull')), |
|
3161 | _('a specific revision up to which you would like to pull')), | |
3147 | ] + logopts + remoteopts, |
|
3162 | ] + logopts + remoteopts, | |
3148 | _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...' |
|
3163 | _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...' | |
3149 | ' [--bundle FILENAME] [SOURCE]')), |
|
3164 | ' [--bundle FILENAME] [SOURCE]')), | |
3150 | "^init": |
|
3165 | "^init": | |
3151 | (init, |
|
3166 | (init, | |
3152 | remoteopts, |
|
3167 | remoteopts, | |
3153 | _('hg init [-e CMD] [--remotecmd CMD] [DEST]')), |
|
3168 | _('hg init [-e CMD] [--remotecmd CMD] [DEST]')), | |
3154 | "locate": |
|
3169 | "locate": | |
3155 | (locate, |
|
3170 | (locate, | |
3156 | [('r', 'rev', '', _('search the repository as it stood at rev')), |
|
3171 | [('r', 'rev', '', _('search the repository as it stood at rev')), | |
3157 | ('0', 'print0', None, |
|
3172 | ('0', 'print0', None, | |
3158 | _('end filenames with NUL, for use with xargs')), |
|
3173 | _('end filenames with NUL, for use with xargs')), | |
3159 | ('f', 'fullpath', None, |
|
3174 | ('f', 'fullpath', None, | |
3160 | _('print complete paths from the filesystem root')), |
|
3175 | _('print complete paths from the filesystem root')), | |
3161 | ] + walkopts, |
|
3176 | ] + walkopts, | |
3162 | _('hg locate [OPTION]... [PATTERN]...')), |
|
3177 | _('hg locate [OPTION]... [PATTERN]...')), | |
3163 | "^log|history": |
|
3178 | "^log|history": | |
3164 | (log, |
|
3179 | (log, | |
3165 | [('f', 'follow', None, |
|
3180 | [('f', 'follow', None, | |
3166 | _('follow changeset history, or file history across copies and renames')), |
|
3181 | _('follow changeset history, or file history across copies and renames')), | |
3167 | ('', 'follow-first', None, |
|
3182 | ('', 'follow-first', None, | |
3168 | _('only follow the first parent of merge changesets')), |
|
3183 | _('only follow the first parent of merge changesets')), | |
3169 | ('d', 'date', '', _('show revs matching date spec')), |
|
3184 | ('d', 'date', '', _('show revs matching date spec')), | |
3170 | ('C', 'copies', None, _('show copied files')), |
|
3185 | ('C', 'copies', None, _('show copied files')), | |
3171 | ('k', 'keyword', [], _('do case-insensitive search for a keyword')), |
|
3186 | ('k', 'keyword', [], _('do case-insensitive search for a keyword')), | |
3172 | ('r', 'rev', [], _('show the specified revision or range')), |
|
3187 | ('r', 'rev', [], _('show the specified revision or range')), | |
3173 | ('', 'removed', None, _('include revs where files were removed')), |
|
3188 | ('', 'removed', None, _('include revs where files were removed')), | |
3174 | ('m', 'only-merges', None, _('show only merges')), |
|
3189 | ('m', 'only-merges', None, _('show only merges')), | |
3175 | ('b', 'only-branch', [], |
|
3190 | ('b', 'only-branch', [], | |
3176 | _('show only changesets within the given named branch')), |
|
3191 | _('show only changesets within the given named branch')), | |
3177 | ('P', 'prune', [], _('do not display revision or any of its ancestors')), |
|
3192 | ('P', 'prune', [], _('do not display revision or any of its ancestors')), | |
3178 | ] + logopts + walkopts, |
|
3193 | ] + logopts + walkopts, | |
3179 | _('hg log [OPTION]... [FILE]')), |
|
3194 | _('hg log [OPTION]... [FILE]')), | |
3180 | "manifest": |
|
3195 | "manifest": | |
3181 | (manifest, |
|
3196 | (manifest, | |
3182 | [('r', 'rev', '', _('revision to display'))], |
|
3197 | [('r', 'rev', '', _('revision to display'))], | |
3183 | _('hg manifest [-r REV]')), |
|
3198 | _('hg manifest [-r REV]')), | |
3184 | "^merge": |
|
3199 | "^merge": | |
3185 | (merge, |
|
3200 | (merge, | |
3186 | [('f', 'force', None, _('force a merge with outstanding changes')), |
|
3201 | [('f', 'force', None, _('force a merge with outstanding changes')), | |
3187 | ('r', 'rev', '', _('revision to merge')), |
|
3202 | ('r', 'rev', '', _('revision to merge')), | |
3188 | ], |
|
3203 | ], | |
3189 | _('hg merge [-f] [[-r] REV]')), |
|
3204 | _('hg merge [-f] [[-r] REV]')), | |
3190 | "outgoing|out": |
|
3205 | "outgoing|out": | |
3191 | (outgoing, |
|
3206 | (outgoing, | |
3192 | [('f', 'force', None, |
|
3207 | [('f', 'force', None, | |
3193 | _('run even when remote repository is unrelated')), |
|
3208 | _('run even when remote repository is unrelated')), | |
3194 | ('r', 'rev', [], |
|
3209 | ('r', 'rev', [], | |
3195 | _('a specific revision up to which you would like to push')), |
|
3210 | _('a specific revision up to which you would like to push')), | |
3196 | ('n', 'newest-first', None, _('show newest record first')), |
|
3211 | ('n', 'newest-first', None, _('show newest record first')), | |
3197 | ] + logopts + remoteopts, |
|
3212 | ] + logopts + remoteopts, | |
3198 | _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')), |
|
3213 | _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')), | |
3199 | "^parents": |
|
3214 | "^parents": | |
3200 | (parents, |
|
3215 | (parents, | |
3201 | [('r', 'rev', '', _('show parents from the specified rev')), |
|
3216 | [('r', 'rev', '', _('show parents from the specified rev')), | |
3202 | ] + templateopts, |
|
3217 | ] + templateopts, | |
3203 | _('hg parents [-r REV] [FILE]')), |
|
3218 | _('hg parents [-r REV] [FILE]')), | |
3204 | "paths": (paths, [], _('hg paths [NAME]')), |
|
3219 | "paths": (paths, [], _('hg paths [NAME]')), | |
3205 | "^pull": |
|
3220 | "^pull": | |
3206 | (pull, |
|
3221 | (pull, | |
3207 | [('u', 'update', None, |
|
3222 | [('u', 'update', None, | |
3208 | _('update to new tip if changesets were pulled')), |
|
3223 | _('update to new tip if changesets were pulled')), | |
3209 | ('f', 'force', None, |
|
3224 | ('f', 'force', None, | |
3210 | _('run even when remote repository is unrelated')), |
|
3225 | _('run even when remote repository is unrelated')), | |
3211 | ('r', 'rev', [], |
|
3226 | ('r', 'rev', [], | |
3212 | _('a specific revision up to which you would like to pull')), |
|
3227 | _('a specific revision up to which you would like to pull')), | |
3213 | ] + remoteopts, |
|
3228 | ] + remoteopts, | |
3214 | _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')), |
|
3229 | _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')), | |
3215 | "^push": |
|
3230 | "^push": | |
3216 | (push, |
|
3231 | (push, | |
3217 | [('f', 'force', None, _('force push')), |
|
3232 | [('f', 'force', None, _('force push')), | |
3218 | ('r', 'rev', [], |
|
3233 | ('r', 'rev', [], | |
3219 | _('a specific revision up to which you would like to push')), |
|
3234 | _('a specific revision up to which you would like to push')), | |
3220 | ] + remoteopts, |
|
3235 | ] + remoteopts, | |
3221 | _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), |
|
3236 | _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), | |
3222 | "recover": (recover, [], _('hg recover')), |
|
3237 | "recover": (recover, [], _('hg recover')), | |
3223 | "^remove|rm": |
|
3238 | "^remove|rm": | |
3224 | (remove, |
|
3239 | (remove, | |
3225 | [('A', 'after', None, _('record delete for missing files')), |
|
3240 | [('A', 'after', None, _('record delete for missing files')), | |
3226 | ('f', 'force', None, |
|
3241 | ('f', 'force', None, | |
3227 | _('remove (and delete) file even if added or modified')), |
|
3242 | _('remove (and delete) file even if added or modified')), | |
3228 | ] + walkopts, |
|
3243 | ] + walkopts, | |
3229 | _('hg remove [OPTION]... FILE...')), |
|
3244 | _('hg remove [OPTION]... FILE...')), | |
3230 | "rename|mv": |
|
3245 | "rename|mv": | |
3231 | (rename, |
|
3246 | (rename, | |
3232 | [('A', 'after', None, _('record a rename that has already occurred')), |
|
3247 | [('A', 'after', None, _('record a rename that has already occurred')), | |
3233 | ('f', 'force', None, |
|
3248 | ('f', 'force', None, | |
3234 | _('forcibly copy over an existing managed file')), |
|
3249 | _('forcibly copy over an existing managed file')), | |
3235 | ] + walkopts + dryrunopts, |
|
3250 | ] + walkopts + dryrunopts, | |
3236 | _('hg rename [OPTION]... SOURCE... DEST')), |
|
3251 | _('hg rename [OPTION]... SOURCE... DEST')), | |
3237 | "resolve": |
|
3252 | "resolve": | |
3238 | (resolve, |
|
3253 | (resolve, | |
3239 | [('l', 'list', None, _('list state of files needing merge')), |
|
3254 | [('l', 'list', None, _('list state of files needing merge')), | |
3240 | ('m', 'mark', None, _('mark files as resolved')), |
|
3255 | ('m', 'mark', None, _('mark files as resolved')), | |
3241 | ('u', 'unmark', None, _('unmark files as resolved'))], |
|
3256 | ('u', 'unmark', None, _('unmark files as resolved'))], | |
3242 | _('hg resolve [OPTION] [FILES...]')), |
|
3257 | _('hg resolve [OPTION] [FILES...]')), | |
3243 | "revert": |
|
3258 | "revert": | |
3244 | (revert, |
|
3259 | (revert, | |
3245 | [('a', 'all', None, _('revert all changes when no arguments given')), |
|
3260 | [('a', 'all', None, _('revert all changes when no arguments given')), | |
3246 | ('d', 'date', '', _('tipmost revision matching date')), |
|
3261 | ('d', 'date', '', _('tipmost revision matching date')), | |
3247 | ('r', 'rev', '', _('revision to revert to')), |
|
3262 | ('r', 'rev', '', _('revision to revert to')), | |
3248 | ('', 'no-backup', None, _('do not save backup copies of files')), |
|
3263 | ('', 'no-backup', None, _('do not save backup copies of files')), | |
3249 | ] + walkopts + dryrunopts, |
|
3264 | ] + walkopts + dryrunopts, | |
3250 | _('hg revert [OPTION]... [-r REV] [NAME]...')), |
|
3265 | _('hg revert [OPTION]... [-r REV] [NAME]...')), | |
3251 | "rollback": (rollback, [], _('hg rollback')), |
|
3266 | "rollback": (rollback, [], _('hg rollback')), | |
3252 | "root": (root, [], _('hg root')), |
|
3267 | "root": (root, [], _('hg root')), | |
3253 | "^serve": |
|
3268 | "^serve": | |
3254 | (serve, |
|
3269 | (serve, | |
3255 | [('A', 'accesslog', '', _('name of access log file to write to')), |
|
3270 | [('A', 'accesslog', '', _('name of access log file to write to')), | |
3256 | ('d', 'daemon', None, _('run server in background')), |
|
3271 | ('d', 'daemon', None, _('run server in background')), | |
3257 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), |
|
3272 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), | |
3258 | ('E', 'errorlog', '', _('name of error log file to write to')), |
|
3273 | ('E', 'errorlog', '', _('name of error log file to write to')), | |
3259 | ('p', 'port', 0, _('port to listen on (default: 8000)')), |
|
3274 | ('p', 'port', 0, _('port to listen on (default: 8000)')), | |
3260 | ('a', 'address', '', _('address to listen on (default: all interfaces)')), |
|
3275 | ('a', 'address', '', _('address to listen on (default: all interfaces)')), | |
3261 | ('', 'prefix', '', _('prefix path to serve from (default: server root)')), |
|
3276 | ('', 'prefix', '', _('prefix path to serve from (default: server root)')), | |
3262 | ('n', 'name', '', |
|
3277 | ('n', 'name', '', | |
3263 | _('name to show in web pages (default: working dir)')), |
|
3278 | _('name to show in web pages (default: working dir)')), | |
3264 | ('', 'webdir-conf', '', _('name of the webdir config file' |
|
3279 | ('', 'webdir-conf', '', _('name of the webdir config file' | |
3265 | ' (serve more than one repo)')), |
|
3280 | ' (serve more than one repo)')), | |
3266 | ('', 'pid-file', '', _('name of file to write process ID to')), |
|
3281 | ('', 'pid-file', '', _('name of file to write process ID to')), | |
3267 | ('', 'stdio', None, _('for remote clients')), |
|
3282 | ('', 'stdio', None, _('for remote clients')), | |
3268 | ('t', 'templates', '', _('web templates to use')), |
|
3283 | ('t', 'templates', '', _('web templates to use')), | |
3269 | ('', 'style', '', _('template style to use')), |
|
3284 | ('', 'style', '', _('template style to use')), | |
3270 | ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), |
|
3285 | ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), | |
3271 | ('', 'certificate', '', _('SSL certificate file'))], |
|
3286 | ('', 'certificate', '', _('SSL certificate file'))], | |
3272 | _('hg serve [OPTION]...')), |
|
3287 | _('hg serve [OPTION]...')), | |
3273 | "showconfig|debugconfig": |
|
3288 | "showconfig|debugconfig": | |
3274 | (showconfig, |
|
3289 | (showconfig, | |
3275 | [('u', 'untrusted', None, _('show untrusted configuration options'))], |
|
3290 | [('u', 'untrusted', None, _('show untrusted configuration options'))], | |
3276 | _('hg showconfig [-u] [NAME]...')), |
|
3291 | _('hg showconfig [-u] [NAME]...')), | |
3277 | "^status|st": |
|
3292 | "^status|st": | |
3278 | (status, |
|
3293 | (status, | |
3279 | [('A', 'all', None, _('show status of all files')), |
|
3294 | [('A', 'all', None, _('show status of all files')), | |
3280 | ('m', 'modified', None, _('show only modified files')), |
|
3295 | ('m', 'modified', None, _('show only modified files')), | |
3281 | ('a', 'added', None, _('show only added files')), |
|
3296 | ('a', 'added', None, _('show only added files')), | |
3282 | ('r', 'removed', None, _('show only removed files')), |
|
3297 | ('r', 'removed', None, _('show only removed files')), | |
3283 | ('d', 'deleted', None, _('show only deleted (but tracked) files')), |
|
3298 | ('d', 'deleted', None, _('show only deleted (but tracked) files')), | |
3284 | ('c', 'clean', None, _('show only files without changes')), |
|
3299 | ('c', 'clean', None, _('show only files without changes')), | |
3285 | ('u', 'unknown', None, _('show only unknown (not tracked) files')), |
|
3300 | ('u', 'unknown', None, _('show only unknown (not tracked) files')), | |
3286 | ('i', 'ignored', None, _('show only ignored files')), |
|
3301 | ('i', 'ignored', None, _('show only ignored files')), | |
3287 | ('n', 'no-status', None, _('hide status prefix')), |
|
3302 | ('n', 'no-status', None, _('hide status prefix')), | |
3288 | ('C', 'copies', None, _('show source of copied files')), |
|
3303 | ('C', 'copies', None, _('show source of copied files')), | |
3289 | ('0', 'print0', None, |
|
3304 | ('0', 'print0', None, | |
3290 | _('end filenames with NUL, for use with xargs')), |
|
3305 | _('end filenames with NUL, for use with xargs')), | |
3291 | ('', 'rev', [], _('show difference from revision')), |
|
3306 | ('', 'rev', [], _('show difference from revision')), | |
3292 | ] + walkopts, |
|
3307 | ] + walkopts, | |
3293 | _('hg status [OPTION]... [FILE]...')), |
|
3308 | _('hg status [OPTION]... [FILE]...')), | |
3294 | "tag": |
|
3309 | "tag": | |
3295 | (tag, |
|
3310 | (tag, | |
3296 | [('f', 'force', None, _('replace existing tag')), |
|
3311 | [('f', 'force', None, _('replace existing tag')), | |
3297 | ('l', 'local', None, _('make the tag local')), |
|
3312 | ('l', 'local', None, _('make the tag local')), | |
3298 | ('r', 'rev', '', _('revision to tag')), |
|
3313 | ('r', 'rev', '', _('revision to tag')), | |
3299 | ('', 'remove', None, _('remove a tag')), |
|
3314 | ('', 'remove', None, _('remove a tag')), | |
3300 | # -l/--local is already there, commitopts cannot be used |
|
3315 | # -l/--local is already there, commitopts cannot be used | |
3301 | ('m', 'message', '', _('use <text> as commit message')), |
|
3316 | ('m', 'message', '', _('use <text> as commit message')), | |
3302 | ] + commitopts2, |
|
3317 | ] + commitopts2, | |
3303 | _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')), |
|
3318 | _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')), | |
3304 | "tags": (tags, [], _('hg tags')), |
|
3319 | "tags": (tags, [], _('hg tags')), | |
3305 | "tip": |
|
3320 | "tip": | |
3306 | (tip, |
|
3321 | (tip, | |
3307 | [('p', 'patch', None, _('show patch')), |
|
3322 | [('p', 'patch', None, _('show patch')), | |
3308 | ] + templateopts, |
|
3323 | ] + templateopts, | |
3309 | _('hg tip [-p]')), |
|
3324 | _('hg tip [-p]')), | |
3310 | "unbundle": |
|
3325 | "unbundle": | |
3311 | (unbundle, |
|
3326 | (unbundle, | |
3312 | [('u', 'update', None, |
|
3327 | [('u', 'update', None, | |
3313 | _('update to new tip if changesets were unbundled'))], |
|
3328 | _('update to new tip if changesets were unbundled'))], | |
3314 | _('hg unbundle [-u] FILE...')), |
|
3329 | _('hg unbundle [-u] FILE...')), | |
3315 | "^update|up|checkout|co": |
|
3330 | "^update|up|checkout|co": | |
3316 | (update, |
|
3331 | (update, | |
3317 | [('C', 'clean', None, _('overwrite locally modified files (no backup)')), |
|
3332 | [('C', 'clean', None, _('overwrite locally modified files (no backup)')), | |
3318 | ('d', 'date', '', _('tipmost revision matching date')), |
|
3333 | ('d', 'date', '', _('tipmost revision matching date')), | |
3319 | ('r', 'rev', '', _('revision'))], |
|
3334 | ('r', 'rev', '', _('revision'))], | |
3320 | _('hg update [-C] [-d DATE] [[-r] REV]')), |
|
3335 | _('hg update [-C] [-d DATE] [[-r] REV]')), | |
3321 | "verify": (verify, [], _('hg verify')), |
|
3336 | "verify": (verify, [], _('hg verify')), | |
3322 | "version": (version_, [], _('hg version')), |
|
3337 | "version": (version_, [], _('hg version')), | |
3323 | } |
|
3338 | } | |
3324 |
|
3339 | |||
3325 | norepo = ("clone init version help debugcomplete debugdata" |
|
3340 | norepo = ("clone init version help debugcomplete debugdata" | |
3326 | " debugindex debugindexdot debugdate debuginstall debugfsinfo") |
|
3341 | " debugindex debugindexdot debugdate debuginstall debugfsinfo") | |
3327 | optionalrepo = ("identify paths serve showconfig debugancestor") |
|
3342 | optionalrepo = ("identify paths serve showconfig debugancestor") |
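
Each value in the table above is a (function, option list, synopsis) triple keyed by a name spec: a leading '^' marks a command shown in the short help listing, and '|' separates aliases, so "^update|up|checkout|co" lets "co" resolve to update. A minimal, hypothetical lookup sketch; the names here are invented for illustration, and Mercurial's real resolution (including unambiguous-prefix matching) lives in its dispatch code:

    # hypothetical sketch of alias lookup over a name-spec table
    table = {
        "^update|up|checkout|co": ("update_func", [], "hg update [-C] [-d DATE] [[-r] REV]"),
        "tags": ("tags_func", [], "hg tags"),
    }

    def find_command(name):
        for spec, entry in table.items():
            if name in spec.lstrip("^").split("|"):
                return entry
        raise KeyError(name)

    assert find_command("co")[2] == "hg update [-C] [-d DATE] [[-r] REV]"
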
@@ -1,584 +1,584 | |||||
1 | """ |
|
1 | """ | |
2 | dirstate.py - working directory tracking for mercurial |
|
2 | dirstate.py - working directory tracking for mercurial | |
3 |
|
3 | |||
4 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 |
|
5 | |||
6 | This software may be used and distributed according to the terms |
|
6 | This software may be used and distributed according to the terms | |
7 | of the GNU General Public License, incorporated herein by reference. |
|
7 | of the GNU General Public License, incorporated herein by reference. | |
8 | """ |
|
8 | """ | |
9 |
|
9 | |||
10 | from node import nullid |
|
10 | from node import nullid | |
11 | from i18n import _ |
|
11 | from i18n import _ | |
12 | import struct, os, stat, util, errno, ignore |
|
12 | import struct, os, stat, util, errno, ignore | |
13 | import cStringIO, osutil, sys, parsers |
|
13 | import cStringIO, osutil, sys, parsers | |
14 |
|
14 | |||
15 | _unknown = ('?', 0, 0, 0) |
|
15 | _unknown = ('?', 0, 0, 0) | |
16 | _format = ">cllll" |
|
16 | _format = ">cllll" | |
17 |
|
17 | |||
18 | def _finddirs(path): |
|
18 | def _finddirs(path): | |
19 | pos = path.rfind('/') |
|
19 | pos = path.rfind('/') | |
20 | while pos != -1: |
|
20 | while pos != -1: | |
21 | yield path[:pos] |
|
21 | yield path[:pos] | |
22 | pos = path.rfind('/', 0, pos) |
|
22 | pos = path.rfind('/', 0, pos) | |
23 |
|
23 | |||
24 | def _incdirs(dirs, path): |
|
24 | def _incdirs(dirs, path): | |
25 | for base in _finddirs(path): |
|
25 | for base in _finddirs(path): | |
26 | if base in dirs: |
|
26 | if base in dirs: | |
27 | dirs[base] += 1 |
|
27 | dirs[base] += 1 | |
28 | return |
|
28 | return | |
29 | dirs[base] = 1 |
|
29 | dirs[base] = 1 | |
30 |
|
30 | |||
31 | def _decdirs(dirs, path): |
|
31 | def _decdirs(dirs, path): | |
32 | for base in _finddirs(path): |
|
32 | for base in _finddirs(path): | |
33 | if dirs[base] > 1: |
|
33 | if dirs[base] > 1: | |
34 | dirs[base] -= 1 |
|
34 | dirs[base] -= 1 | |
35 | return |
|
35 | return | |
36 | del dirs[base] |
|
36 | del dirs[base] | |
37 |
|
37 | |||
38 | class dirstate(object): |
|
38 | class dirstate(object): | |
39 |
|
39 | |||
40 | def __init__(self, opener, ui, root): |
|
40 | def __init__(self, opener, ui, root): | |
41 | self._opener = opener |
|
41 | self._opener = opener | |
42 | self._root = root |
|
42 | self._root = root | |
43 | self._rootdir = os.path.join(root, '') |
|
43 | self._rootdir = os.path.join(root, '') | |
44 | self._dirty = False |
|
44 | self._dirty = False | |
45 | self._dirtypl = False |
|
45 | self._dirtypl = False | |
46 | self._ui = ui |
|
46 | self._ui = ui | |
47 |
|
47 | |||
48 | def __getattr__(self, name): |
|
48 | def __getattr__(self, name): | |
49 | if name == '_map': |
|
49 | if name == '_map': | |
50 | self._read() |
|
50 | self._read() | |
51 | return self._map |
|
51 | return self._map | |
52 | elif name == '_copymap': |
|
52 | elif name == '_copymap': | |
53 | self._read() |
|
53 | self._read() | |
54 | return self._copymap |
|
54 | return self._copymap | |
55 | elif name == '_foldmap': |
|
55 | elif name == '_foldmap': | |
56 | _foldmap = {} |
|
56 | _foldmap = {} | |
57 | for name in self._map: |
|
57 | for name in self._map: | |
58 | norm = os.path.normcase(name) |
|
58 | norm = os.path.normcase(name) | |
59 | _foldmap[norm] = name |
|
59 | _foldmap[norm] = name | |
60 | self._foldmap = _foldmap |
|
60 | self._foldmap = _foldmap | |
61 | return self._foldmap |
|
61 | return self._foldmap | |
62 | elif name == '_branch': |
|
62 | elif name == '_branch': | |
63 | try: |
|
63 | try: | |
64 | self._branch = (self._opener("branch").read().strip() |
|
64 | self._branch = (self._opener("branch").read().strip() | |
65 | or "default") |
|
65 | or "default") | |
66 | except IOError: |
|
66 | except IOError: | |
67 | self._branch = "default" |
|
67 | self._branch = "default" | |
68 | return self._branch |
|
68 | return self._branch | |
69 | elif name == '_pl': |
|
69 | elif name == '_pl': | |
70 | self._pl = [nullid, nullid] |
|
70 | self._pl = [nullid, nullid] | |
71 | try: |
|
71 | try: | |
72 | st = self._opener("dirstate").read(40) |
|
72 | st = self._opener("dirstate").read(40) | |
73 | if len(st) == 40: |
|
73 | if len(st) == 40: | |
74 | self._pl = st[:20], st[20:40] |
|
74 | self._pl = st[:20], st[20:40] | |
75 | except IOError, err: |
|
75 | except IOError, err: | |
76 | if err.errno != errno.ENOENT: raise |
|
76 | if err.errno != errno.ENOENT: raise | |
77 | return self._pl |
|
77 | return self._pl | |
78 | elif name == '_dirs': |
|
78 | elif name == '_dirs': | |
79 | dirs = {} |
|
79 | dirs = {} | |
80 | for f,s in self._map.iteritems(): |
|
80 | for f,s in self._map.iteritems(): | |
81 | if s[0] != 'r': |
|
81 | if s[0] != 'r': | |
82 | _incdirs(dirs, f) |
|
82 | _incdirs(dirs, f) | |
83 | self._dirs = dirs |
|
83 | self._dirs = dirs | |
84 | return self._dirs |
|
84 | return self._dirs | |
85 | elif name == '_ignore': |
|
85 | elif name == '_ignore': | |
86 | files = [self._join('.hgignore')] |
|
86 | files = [self._join('.hgignore')] | |
87 | for name, path in self._ui.configitems("ui"): |
|
87 | for name, path in self._ui.configitems("ui"): | |
88 | if name == 'ignore' or name.startswith('ignore.'): |
|
88 | if name == 'ignore' or name.startswith('ignore.'): | |
89 | files.append(os.path.expanduser(path)) |
|
89 | files.append(os.path.expanduser(path)) | |
90 | self._ignore = ignore.ignore(self._root, files, self._ui.warn) |
|
90 | self._ignore = ignore.ignore(self._root, files, self._ui.warn) | |
91 | return self._ignore |
|
91 | return self._ignore | |
92 | elif name == '_slash': |
|
92 | elif name == '_slash': | |
93 | self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/' |
|
93 | self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/' | |
94 | return self._slash |
|
94 | return self._slash | |
95 | elif name == '_checklink': |
|
95 | elif name == '_checklink': | |
96 | self._checklink = util.checklink(self._root) |
|
96 | self._checklink = util.checklink(self._root) | |
97 | return self._checklink |
|
97 | return self._checklink | |
98 | elif name == '_checkexec': |
|
98 | elif name == '_checkexec': | |
99 | self._checkexec = util.checkexec(self._root) |
|
99 | self._checkexec = util.checkexec(self._root) | |
100 | return self._checkexec |
|
100 | return self._checkexec | |
101 | elif name == '_checkcase': |
|
101 | elif name == '_checkcase': | |
102 | self._checkcase = not util.checkcase(self._join('.hg')) |
|
102 | self._checkcase = not util.checkcase(self._join('.hg')) | |
103 | return self._checkcase |
|
103 | return self._checkcase | |
104 | elif name == 'normalize': |
|
104 | elif name == 'normalize': | |
105 | if self._checkcase: |
|
105 | if self._checkcase: | |
106 | self.normalize = self._normalize |
|
106 | self.normalize = self._normalize | |
107 | else: |
|
107 | else: | |
108 | self.normalize = lambda x, y=False: x |
|
108 | self.normalize = lambda x, y=False: x | |
109 | return self.normalize |
|
109 | return self.normalize | |
110 | else: |
|
110 | else: | |
111 | raise AttributeError(name) |
|
111 | raise AttributeError(name) | |
112 |
|
112 | |||
113 | def _join(self, f): |
|
113 | def _join(self, f): | |
114 | # much faster than os.path.join() |
|
114 | # much faster than os.path.join() | |
115 | # it's safe because f is always a relative path |
|
115 | # it's safe because f is always a relative path | |
116 | return self._rootdir + f |
|
116 | return self._rootdir + f | |
117 |
|
117 | |||
118 | def flagfunc(self, fallback): |
|
118 | def flagfunc(self, fallback): | |
119 | if self._checklink: |
|
119 | if self._checklink: | |
120 | if self._checkexec: |
|
120 | if self._checkexec: | |
121 | def f(x): |
|
121 | def f(x): | |
122 | p = self._join(x) |
|
122 | p = self._join(x) | |
123 | if os.path.islink(p): |
|
123 | if os.path.islink(p): | |
124 | return 'l' |
|
124 | return 'l' | |
125 | if util.is_exec(p): |
|
125 | if util.is_exec(p): | |
126 | return 'x' |
|
126 | return 'x' | |
127 | return '' |
|
127 | return '' | |
128 | return f |
|
128 | return f | |
129 | def f(x): |
|
129 | def f(x): | |
130 | if os.path.islink(self._join(x)): |
|
130 | if os.path.islink(self._join(x)): | |
131 | return 'l' |
|
131 | return 'l' | |
132 | if 'x' in fallback(x): |
|
132 | if 'x' in fallback(x): | |
133 | return 'x' |
|
133 | return 'x' | |
134 | return '' |
|
134 | return '' | |
135 | return f |
|
135 | return f | |
136 | if self._checkexec: |
|
136 | if self._checkexec: | |
137 | def f(x): |
|
137 | def f(x): | |
138 | if 'l' in fallback(x): |
|
138 | if 'l' in fallback(x): | |
139 | return 'l' |
|
139 | return 'l' | |
140 | if util.is_exec(self._join(x)): |
|
140 | if util.is_exec(self._join(x)): | |
141 | return 'x' |
|
141 | return 'x' | |
142 | return '' |
|
142 | return '' | |
143 | return f |
|
143 | return f | |
144 | return fallback |
|
144 | return fallback | |
145 |
|
145 | |||
146 | def getcwd(self): |
|
146 | def getcwd(self): | |
147 | cwd = os.getcwd() |
|
147 | cwd = os.getcwd() | |
148 | if cwd == self._root: return '' |
|
148 | if cwd == self._root: return '' | |
149 | # self._root ends with a path separator if self._root is '/' or 'C:\' |
|
149 | # self._root ends with a path separator if self._root is '/' or 'C:\' | |
150 | rootsep = self._root |
|
150 | rootsep = self._root | |
151 | if not util.endswithsep(rootsep): |
|
151 | if not util.endswithsep(rootsep): | |
152 | rootsep += os.sep |
|
152 | rootsep += os.sep | |
153 | if cwd.startswith(rootsep): |
|
153 | if cwd.startswith(rootsep): | |
154 | return cwd[len(rootsep):] |
|
154 | return cwd[len(rootsep):] | |
155 | else: |
|
155 | else: | |
156 | # we're outside the repo. return an absolute path. |
|
156 | # we're outside the repo. return an absolute path. | |
157 | return cwd |
|
157 | return cwd | |
158 |
|
158 | |||
159 | def pathto(self, f, cwd=None): |
|
159 | def pathto(self, f, cwd=None): | |
160 | if cwd is None: |
|
160 | if cwd is None: | |
161 | cwd = self.getcwd() |
|
161 | cwd = self.getcwd() | |
162 | path = util.pathto(self._root, cwd, f) |
|
162 | path = util.pathto(self._root, cwd, f) | |
163 | if self._slash: |
|
163 | if self._slash: | |
164 | return util.normpath(path) |
|
164 | return util.normpath(path) | |
165 | return path |
|
165 | return path | |
166 |
|
166 | |||
167 | def __getitem__(self, key): |
|
167 | def __getitem__(self, key): | |
168 | ''' current states: |
|
168 | ''' current states: | |
169 | n normal |
|
169 | n normal | |
170 | m needs merging |
|
170 | m needs merging | |
171 | r marked for removal |
|
171 | r marked for removal | |
172 | a marked for addition |
|
172 | a marked for addition | |
173 | ? not tracked''' |
|
173 | ? not tracked''' | |
174 | return self._map.get(key, ("?",))[0] |
|
174 | return self._map.get(key, ("?",))[0] | |
175 |
|
175 | |||
176 | def __contains__(self, key): |
|
176 | def __contains__(self, key): | |
177 | return key in self._map |
|
177 | return key in self._map | |
178 |
|
178 | |||
179 | def __iter__(self): |
|
179 | def __iter__(self): | |
180 | for x in util.sort(self._map): |
|
180 | for x in util.sort(self._map): | |
181 | yield x |
|
181 | yield x | |
182 |
|
182 | |||
183 | def parents(self): |
|
183 | def parents(self): | |
184 | return self._pl |
|
184 | return self._pl | |
185 |
|
185 | |||
186 | def branch(self): |
|
186 | def branch(self): | |
187 | return self._branch |
|
187 | return self._branch | |
188 |
|
188 | |||
189 | def setparents(self, p1, p2=nullid): |
|
189 | def setparents(self, p1, p2=nullid): | |
190 | self._dirty = self._dirtypl = True |
|
190 | self._dirty = self._dirtypl = True | |
191 | self._pl = p1, p2 |
|
191 | self._pl = p1, p2 | |
192 |
|
192 | |||
193 | def setbranch(self, branch): |
|
193 | def setbranch(self, branch): | |
194 | self._branch = branch |
|
194 | self._branch = branch | |
195 | self._opener("branch", "w").write(branch + '\n') |
|
195 | self._opener("branch", "w").write(branch + '\n') | |
196 |
|
196 | |||
197 | def _read(self): |
|
197 | def _read(self): | |
198 | self._map = {} |
|
198 | self._map = {} | |
199 | self._copymap = {} |
|
199 | self._copymap = {} | |
200 | try: |
|
200 | try: | |
201 | st = self._opener("dirstate").read() |
|
201 | st = self._opener("dirstate").read() | |
202 | except IOError, err: |
|
202 | except IOError, err: | |
203 | if err.errno != errno.ENOENT: raise |
|
203 | if err.errno != errno.ENOENT: raise | |
204 | return |
|
204 | return | |
205 | if not st: |
|
205 | if not st: | |
206 | return |
|
206 | return | |
207 |
|
207 | |||
208 | p = parsers.parse_dirstate(self._map, self._copymap, st) |
 |
208 | p = parsers.parse_dirstate(self._map, self._copymap, st) | |
209 | if not self._dirtypl: |
|
209 | if not self._dirtypl: | |
210 | self._pl = p |
|
210 | self._pl = p | |
211 |
|
211 | |||
212 | def invalidate(self): |
|
212 | def invalidate(self): | |
213 | for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): |
|
213 | for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): | |
214 | if a in self.__dict__: |
|
214 | if a in self.__dict__: | |
215 | delattr(self, a) |
|
215 | delattr(self, a) | |
216 | self._dirty = False |
|
216 | self._dirty = False | |
217 |
|
217 | |||
218 | def copy(self, source, dest): |
|
218 | def copy(self, source, dest): | |
219 | if source == dest: |
|
219 | if source == dest: | |
220 | return |
|
220 | return | |
221 | self._dirty = True |
|
221 | self._dirty = True | |
222 | self._copymap[dest] = source |
|
222 | self._copymap[dest] = source | |
223 |
|
223 | |||
224 | def copied(self, file): |
|
224 | def copied(self, file): | |
225 | return self._copymap.get(file, None) |
|
225 | return self._copymap.get(file, None) | |
226 |
|
226 | |||
227 | def copies(self): |
|
227 | def copies(self): | |
228 | return self._copymap |
|
228 | return self._copymap | |
229 |
|
229 | |||
230 | def _droppath(self, f): |
|
230 | def _droppath(self, f): | |
231 | if self[f] not in "?r" and "_dirs" in self.__dict__: |
|
231 | if self[f] not in "?r" and "_dirs" in self.__dict__: | |
232 | _decdirs(self._dirs, f) |
|
232 | _decdirs(self._dirs, f) | |
233 |
|
233 | |||
234 | def _addpath(self, f, check=False): |
|
234 | def _addpath(self, f, check=False): | |
235 | oldstate = self[f] |
|
235 | oldstate = self[f] | |
236 | if check or oldstate == "r": |
|
236 | if check or oldstate == "r": | |
237 | if '\r' in f or '\n' in f: |
|
237 | if '\r' in f or '\n' in f: | |
238 | raise util.Abort( |
|
238 | raise util.Abort( | |
239 | _("'\\n' and '\\r' disallowed in filenames: %r") % f) |
|
239 | _("'\\n' and '\\r' disallowed in filenames: %r") % f) | |
240 | if f in self._dirs: |
|
240 | if f in self._dirs: | |
241 | raise util.Abort(_('directory %r already in dirstate') % f) |
|
241 | raise util.Abort(_('directory %r already in dirstate') % f) | |
242 | # shadows |
|
242 | # shadows | |
243 | for d in _finddirs(f): |
|
243 | for d in _finddirs(f): | |
244 | if d in self._dirs: |
|
244 | if d in self._dirs: | |
245 | break |
|
245 | break | |
246 | if d in self._map and self[d] != 'r': |
|
246 | if d in self._map and self[d] != 'r': | |
247 | raise util.Abort( |
|
247 | raise util.Abort( | |
248 | _('file %r in dirstate clashes with %r') % (d, f)) |
|
248 | _('file %r in dirstate clashes with %r') % (d, f)) | |
249 | if oldstate in "?r" and "_dirs" in self.__dict__: |
|
249 | if oldstate in "?r" and "_dirs" in self.__dict__: | |
250 | _incdirs(self._dirs, f) |
|
250 | _incdirs(self._dirs, f) | |
251 |
|
251 | |||
252 | def normal(self, f): |
|
252 | def normal(self, f): | |
253 | 'mark a file normal and clean' |
|
253 | 'mark a file normal and clean' | |
254 | self._dirty = True |
|
254 | self._dirty = True | |
255 | self._addpath(f) |
|
255 | self._addpath(f) | |
256 | s = os.lstat(self._join(f)) |
|
256 | s = os.lstat(self._join(f)) | |
257 | self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime)) |
|
257 | self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime)) | |
258 | if f in self._copymap: |
|
258 | if f in self._copymap: | |
259 | del self._copymap[f] |
|
259 | del self._copymap[f] | |
260 |
|
260 | |||
261 | def normallookup(self, f): |
|
261 | def normallookup(self, f): | |
262 | 'mark a file normal, but possibly dirty' |
|
262 | 'mark a file normal, but possibly dirty' | |
263 | if self._pl[1] != nullid and f in self._map: |
|
263 | if self._pl[1] != nullid and f in self._map: | |
264 | # if there is a merge going on and the file was either |
|
264 | # if there is a merge going on and the file was either | |
265 | # in state 'm' or dirty before being removed, restore that state. |
|
265 | # in state 'm' or dirty before being removed, restore that state. | |
266 | entry = self._map[f] |
|
266 | entry = self._map[f] | |
267 | if entry[0] == 'r' and entry[2] in (-1, -2): |
|
267 | if entry[0] == 'r' and entry[2] in (-1, -2): | |
268 | source = self._copymap.get(f) |
|
268 | source = self._copymap.get(f) | |
269 | if entry[2] == -1: |
|
269 | if entry[2] == -1: | |
270 | self.merge(f) |
|
270 | self.merge(f) | |
271 | elif entry[2] == -2: |
|
271 | elif entry[2] == -2: | |
272 | self.normaldirty(f) |
|
272 | self.normaldirty(f) | |
273 | if source: |
|
273 | if source: | |
274 | self.copy(source, f) |
|
274 | self.copy(source, f) | |
275 | return |
|
275 | return | |
276 | if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: |
|
276 | if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: | |
277 | return |
|
277 | return | |
278 | self._dirty = True |
|
278 | self._dirty = True | |
279 | self._addpath(f) |
|
279 | self._addpath(f) | |
280 | self._map[f] = ('n', 0, -1, -1) |
|
280 | self._map[f] = ('n', 0, -1, -1) | |
281 | if f in self._copymap: |
|
281 | if f in self._copymap: | |
282 | del self._copymap[f] |
|
282 | del self._copymap[f] | |
283 |
|
283 | |||
284 | def normaldirty(self, f): |
|
284 | def normaldirty(self, f): | |
285 | 'mark a file normal, but dirty' |
|
285 | 'mark a file normal, but dirty' | |
286 | self._dirty = True |
|
286 | self._dirty = True | |
287 | self._addpath(f) |
|
287 | self._addpath(f) | |
288 | self._map[f] = ('n', 0, -2, -1) |
|
288 | self._map[f] = ('n', 0, -2, -1) | |
289 | if f in self._copymap: |
|
289 | if f in self._copymap: | |
290 | del self._copymap[f] |
|
290 | del self._copymap[f] | |
291 |
|
291 | |||
292 | def add(self, f): |
|
292 | def add(self, f): | |
293 | 'mark a file added' |
|
293 | 'mark a file added' | |
294 | self._dirty = True |
|
294 | self._dirty = True | |
295 | self._addpath(f, True) |
|
295 | self._addpath(f, True) | |
296 | self._map[f] = ('a', 0, -1, -1) |
|
296 | self._map[f] = ('a', 0, -1, -1) | |
297 | if f in self._copymap: |
|
297 | if f in self._copymap: | |
298 | del self._copymap[f] |
|
298 | del self._copymap[f] | |
299 |
|
299 | |||
300 | def remove(self, f): |
|
300 | def remove(self, f): | |
301 | 'mark a file removed' |
|
301 | 'mark a file removed' | |
302 | self._dirty = True |
|
302 | self._dirty = True | |
303 | self._droppath(f) |
|
303 | self._droppath(f) | |
304 | size = 0 |
|
304 | size = 0 | |
305 | if self._pl[1] != nullid and f in self._map: |
|
305 | if self._pl[1] != nullid and f in self._map: | |
306 | entry = self._map[f] |
|
306 | entry = self._map[f] | |
307 | if entry[0] == 'm': |
|
307 | if entry[0] == 'm': | |
308 | size = -1 |
|
308 | size = -1 | |
309 | elif entry[0] == 'n' and entry[2] == -2: |
|
309 | elif entry[0] == 'n' and entry[2] == -2: | |
310 | size = -2 |
|
310 | size = -2 | |
311 | self._map[f] = ('r', 0, size, 0) |
|
311 | self._map[f] = ('r', 0, size, 0) | |
312 | if size == 0 and f in self._copymap: |
|
312 | if size == 0 and f in self._copymap: | |
313 | del self._copymap[f] |
|
313 | del self._copymap[f] | |
314 |
|
314 | |||
315 | def merge(self, f): |
|
315 | def merge(self, f): | |
316 | 'mark a file merged' |
|
316 | 'mark a file merged' | |
317 | self._dirty = True |
|
317 | self._dirty = True | |
318 | s = os.lstat(self._join(f)) |
|
318 | s = os.lstat(self._join(f)) | |
319 | self._addpath(f) |
|
319 | self._addpath(f) | |
320 | self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime)) |
|
320 | self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime)) | |
321 | if f in self._copymap: |
|
321 | if f in self._copymap: | |
322 | del self._copymap[f] |
|
322 | del self._copymap[f] | |
323 |
|
323 | |||
324 | def forget(self, f): |
|
324 | def forget(self, f): | |
325 | 'forget a file' |
|
325 | 'forget a file' | |
326 | self._dirty = True |
|
326 | self._dirty = True | |
327 | try: |
|
327 | try: | |
328 | self._droppath(f) |
|
328 | self._droppath(f) | |
329 | del self._map[f] |
|
329 | del self._map[f] | |
330 | except KeyError: |
|
330 | except KeyError: | |
331 | self._ui.warn(_("not in dirstate: %s\n") % f) |
|
331 | self._ui.warn(_("not in dirstate: %s\n") % f) | |
332 |
|
332 | |||
333 | def _normalize(self, path, knownpath=False): |
|
333 | def _normalize(self, path, knownpath=False): | |
334 | norm_path = os.path.normcase(path) |
|
334 | norm_path = os.path.normcase(path) | |
335 | fold_path = self._foldmap.get(norm_path, None) |
|
335 | fold_path = self._foldmap.get(norm_path, None) | |
336 | if fold_path is None: |
|
336 | if fold_path is None: | |
337 | if knownpath or not os.path.exists(os.path.join(self._root, path)): |
|
337 | if knownpath or not os.path.exists(os.path.join(self._root, path)): | |
338 | fold_path = path |
|
338 | fold_path = path | |
339 | else: |
|
339 | else: | |
340 | fold_path = self._foldmap.setdefault(norm_path, |
|
340 | fold_path = self._foldmap.setdefault(norm_path, | |
341 | util.fspath(path, self._root)) |
|
341 | util.fspath(path, self._root)) | |
342 | return fold_path |
|
342 | return fold_path | |
343 |
|
343 | |||
344 | def clear(self): |
|
344 | def clear(self): | |
345 | self._map = {} |
|
345 | self._map = {} | |
346 | if "_dirs" in self.__dict__: |
|
346 | if "_dirs" in self.__dict__: | |
347 | delattr(self, "_dirs"); |
|
347 | delattr(self, "_dirs"); | |
348 | self._copymap = {} |
|
348 | self._copymap = {} | |
349 | self._pl = [nullid, nullid] |
|
349 | self._pl = [nullid, nullid] | |
350 | self._dirty = True |
|
350 | self._dirty = True | |
351 |
|
351 | |||
352 | def rebuild(self, parent, files): |
|
352 | def rebuild(self, parent, files): | |
353 | self.clear() |
|
353 | self.clear() | |
354 | for f in files: |
|
354 | for f in files: | |
355 | if 'x' in files.flags(f): |
|
355 | if 'x' in files.flags(f): | |
356 | self._map[f] = ('n', 0777, -1, 0) |
|
356 | self._map[f] = ('n', 0777, -1, 0) | |
357 | else: |
|
357 | else: | |
358 | self._map[f] = ('n', 0666, -1, 0) |
|
358 | self._map[f] = ('n', 0666, -1, 0) | |
359 | self._pl = (parent, nullid) |
|
359 | self._pl = (parent, nullid) | |
360 | self._dirty = True |
|
360 | self._dirty = True | |
361 |
|
361 | |||
362 | def write(self): |
|
362 | def write(self): | |
363 | if not self._dirty: |
|
363 | if not self._dirty: | |
364 | return |
|
364 | return | |
365 | st = self._opener("dirstate", "w", atomictemp=True) |
|
365 | st = self._opener("dirstate", "w", atomictemp=True) | |
366 |
|
366 | |||
367 | try: |
|
367 | try: | |
368 | gran = int(self._ui.config('dirstate', 'granularity', 1)) |
|
368 | gran = int(self._ui.config('dirstate', 'granularity', 1)) | |
369 | except ValueError: |
|
369 | except ValueError: | |
370 | gran = 1 |
|
370 | gran = 1 | |
371 | limit = sys.maxint |
|
371 | limit = sys.maxint | |
372 | if gran > 0: |
|
372 | if gran > 0: | |
373 | limit = util.fstat(st).st_mtime - gran |
|
373 | limit = util.fstat(st).st_mtime - gran | |
374 |
|
374 | |||
375 | cs = cStringIO.StringIO() |
|
375 | cs = cStringIO.StringIO() | |
376 | copymap = self._copymap |
|
376 | copymap = self._copymap | |
377 | pack = struct.pack |
|
377 | pack = struct.pack | |
378 | write = cs.write |
|
378 | write = cs.write | |
379 | write("".join(self._pl)) |
|
379 | write("".join(self._pl)) | |
380 | for f, e in self._map.iteritems(): |
|
380 | for f, e in self._map.iteritems(): | |
381 | if f in copymap: |
|
381 | if f in copymap: | |
382 | f = "%s\0%s" % (f, copymap[f]) |
|
382 | f = "%s\0%s" % (f, copymap[f]) | |
383 | if e[3] > limit and e[0] == 'n': |
|
383 | if e[3] > limit and e[0] == 'n': | |
384 | e = (e[0], 0, -1, -1) |
|
384 | e = (e[0], 0, -1, -1) | |
385 | e = pack(_format, e[0], e[1], e[2], e[3], len(f)) |
|
385 | e = pack(_format, e[0], e[1], e[2], e[3], len(f)) | |
386 | write(e) |
|
386 | write(e) | |
387 | write(f) |
|
387 | write(f) | |
388 | st.write(cs.getvalue()) |
|
388 | st.write(cs.getvalue()) | |
389 | st.rename() |
|
389 | st.rename() | |
390 | self._dirty = self._dirtypl = False |
|
390 | self._dirty = self._dirtypl = False | |
391 |
|
391 | |||
392 | def _dirignore(self, f): |
|
392 | def _dirignore(self, f): | |
393 | if f == '.': |
|
393 | if f == '.': | |
394 | return False |
|
394 | return False | |
395 | if self._ignore(f): |
|
395 | if self._ignore(f): | |
396 | return True |
|
396 | return True | |
397 | for p in _finddirs(f): |
|
397 | for p in _finddirs(f): | |
398 | if self._ignore(p): |
|
398 | if self._ignore(p): | |
399 | return True |
|
399 | return True | |
400 | return False |
|
400 | return False | |
401 |
|
401 | |||
402 | def walk(self, match, unknown, ignored): |
|
402 | def walk(self, match, unknown, ignored): | |
403 | ''' |
|
403 | ''' | |
404 | walk recursively through the directory tree, finding all files |
|
404 | walk recursively through the directory tree, finding all files | |
405 | matched by the match function |
|
405 | matched by the match function | |
406 |
|
406 | |||
407 | results are yielded in a tuple (filename, stat), where stat |
|
407 | results are yielded in a tuple (filename, stat), where stat | |
408 | is the stat result if the file was found in the directory. |
 |
408 | is the stat result if the file was found in the directory. | |
409 | ''' |
|
409 | ''' | |
410 |
|
410 | |||
411 | def fwarn(f, msg): |
|
411 | def fwarn(f, msg): | |
412 | self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) |
|
412 | self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) | |
413 | return False |
|
413 | return False | |
414 | badfn = fwarn |
|
414 | badfn = fwarn | |
415 | if hasattr(match, 'bad'): |
|
415 | if hasattr(match, 'bad'): | |
416 | badfn = match.bad |
|
416 | badfn = match.bad | |
417 |
|
417 | |||
418 | def badtype(f, mode): |
|
418 | def badtype(f, mode): | |
419 | kind = 'unknown' |
|
419 | kind = 'unknown' | |
420 | if stat.S_ISCHR(mode): kind = _('character device') |
|
420 | if stat.S_ISCHR(mode): kind = _('character device') | |
421 | elif stat.S_ISBLK(mode): kind = _('block device') |
|
421 | elif stat.S_ISBLK(mode): kind = _('block device') | |
422 | elif stat.S_ISFIFO(mode): kind = _('fifo') |
|
422 | elif stat.S_ISFIFO(mode): kind = _('fifo') | |
423 | elif stat.S_ISSOCK(mode): kind = _('socket') |
|
423 | elif stat.S_ISSOCK(mode): kind = _('socket') | |
424 | elif stat.S_ISDIR(mode): kind = _('directory') |
|
424 | elif stat.S_ISDIR(mode): kind = _('directory') | |
425 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') |
|
425 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') | |
426 | % (self.pathto(f), kind)) |
|
426 | % (self.pathto(f), kind)) | |
427 |
|
427 | |||
428 | ignore = self._ignore |
|
428 | ignore = self._ignore | |
429 | dirignore = self._dirignore |
|
429 | dirignore = self._dirignore | |
430 | if ignored: |
|
430 | if ignored: | |
431 | ignore = util.never |
|
431 | ignore = util.never | |
432 | dirignore = util.never |
|
432 | dirignore = util.never | |
433 | elif not unknown: |
|
433 | elif not unknown: | |
434 | # if unknown and ignored are False, skip step 2 |
|
434 | # if unknown and ignored are False, skip step 2 | |
435 | ignore = util.always |
|
435 | ignore = util.always | |
436 | dirignore = util.always |
|
436 | dirignore = util.always | |
437 |
|
437 | |||
438 | matchfn = match.matchfn |
|
438 | matchfn = match.matchfn | |
439 | dmap = self._map |
|
439 | dmap = self._map | |
440 | normpath = util.normpath |
|
440 | normpath = util.normpath | |
441 | normalize = self.normalize |
|
441 | normalize = self.normalize | |
442 | listdir = osutil.listdir |
|
442 | listdir = osutil.listdir | |
443 | lstat = os.lstat |
|
443 | lstat = os.lstat | |
444 | pconvert = util.pconvert |
|
444 | pconvert = util.pconvert | |
445 | getkind = stat.S_IFMT |
|
445 | getkind = stat.S_IFMT | |
446 | dirkind = stat.S_IFDIR |
|
446 | dirkind = stat.S_IFDIR | |
447 | regkind = stat.S_IFREG |
|
447 | regkind = stat.S_IFREG | |
448 | lnkkind = stat.S_IFLNK |
|
448 | lnkkind = stat.S_IFLNK | |
449 | join = self._join |
|
449 | join = self._join | |
450 | work = [] |
|
450 | work = [] | |
451 | wadd = work.append |
|
451 | wadd = work.append | |
452 |
|
452 | |||
453 | files = util.unique(match.files()) |
|
453 | files = util.unique(match.files()) | |
454 | if not files or '.' in files: |
|
454 | if not files or '.' in files: | |
455 | files = [''] |
|
455 | files = [''] | |
456 | results = {'.hg': None} |
|
456 | results = {'.hg': None} | |
457 |
|
457 | |||
458 | # step 1: find all explicit files |
|
458 | # step 1: find all explicit files | |
459 | for ff in util.sort(files): |
|
459 | for ff in util.sort(files): | |
460 | nf = normalize(normpath(ff)) |
|
460 | nf = normalize(normpath(ff)) | |
461 | if nf in results: |
|
461 | if nf in results: | |
462 | continue |
|
462 | continue | |
463 |
|
463 | |||
464 | try: |
|
464 | try: | |
465 | st = lstat(join(nf)) |
|
465 | st = lstat(join(nf)) | |
466 | kind = getkind(st.st_mode) |
|
466 | kind = getkind(st.st_mode) | |
467 | if kind == dirkind: |
|
467 | if kind == dirkind: | |
468 | if not dirignore(nf): |
|
468 | if not dirignore(nf): | |
469 | wadd(nf) |
|
469 | wadd(nf) | |
470 | elif kind == regkind or kind == lnkkind: |
|
470 | elif kind == regkind or kind == lnkkind: | |
471 | results[nf] = st |
|
471 | results[nf] = st | |
472 | else: |
|
472 | else: | |
473 | badtype(ff, kind) |
|
473 | badtype(ff, kind) | |
474 | if nf in dmap: |
|
474 | if nf in dmap: | |
475 | results[nf] = None |
|
475 | results[nf] = None | |
476 | except OSError, inst: |
|
476 | except OSError, inst: | |
477 | keep = False |
|
477 | keep = False | |
478 | prefix = nf + "/" |
|
478 | prefix = nf + "/" | |
479 | for fn in dmap: |
|
479 | for fn in dmap: | |
480 | if nf == fn or fn.startswith(prefix): |
|
480 | if nf == fn or fn.startswith(prefix): | |
481 | keep = True |
|
481 | keep = True | |
482 | break |
|
482 | break | |
483 | if not keep: |
|
483 | if not keep: | |
484 | if inst.errno != errno.ENOENT: |
|
484 | if inst.errno != errno.ENOENT: | |
485 | fwarn(ff, inst.strerror) |
|
485 | fwarn(ff, inst.strerror) | |
486 | elif badfn(ff, inst.strerror): |
|
486 | elif badfn(ff, inst.strerror): | |
487 | if (nf in dmap or not ignore(nf)) and matchfn(nf): |
|
487 | if (nf in dmap or not ignore(nf)) and matchfn(nf): | |
488 | results[nf] = None |
|
488 | results[nf] = None | |
489 |
|
489 | |||
490 | # step 2: visit subdirectories |
|
490 | # step 2: visit subdirectories | |
491 | while work: |
|
491 | while work: | |
492 | nd = work.pop() |
|
492 | nd = work.pop() | |
493 | if hasattr(match, 'dir'): |
|
493 | if hasattr(match, 'dir'): | |
494 | match.dir(nd) |
|
494 | match.dir(nd) | |
495 | skip = None |
|
495 | skip = None | |
496 | if nd == '.': |
|
496 | if nd == '.': | |
497 | nd = '' |
|
497 | nd = '' | |
498 | else: |
|
498 | else: | |
499 | skip = '.hg' |
|
499 | skip = '.hg' | |
500 | try: |
|
500 | try: | |
501 | entries = listdir(join(nd), stat=True, skip=skip) |
|
501 | entries = listdir(join(nd), stat=True, skip=skip) | |
502 | except OSError, inst: |
|
502 | except OSError, inst: | |
503 | if inst.errno == errno.EACCES: |
|
503 | if inst.errno == errno.EACCES: | |
504 | fwarn(nd, inst.strerror) |
|
504 | fwarn(nd, inst.strerror) | |
505 | continue |
|
505 | continue | |
506 | raise |
|
506 | raise | |
507 | for f, kind, st in entries: |
|
507 | for f, kind, st in entries: | |
508 | nf = normalize(nd and (nd + "/" + f) or f, True) |
|
508 | nf = normalize(nd and (nd + "/" + f) or f, True) | |
509 | if nf not in results: |
|
509 | if nf not in results: | |
510 | if kind == dirkind: |
|
510 | if kind == dirkind: | |
511 | if not ignore(nf): |
|
511 | if not ignore(nf): | |
512 | wadd(nf) |
|
512 | wadd(nf) | |
513 | if nf in dmap and matchfn(nf): |
|
513 | if nf in dmap and matchfn(nf): | |
514 | results[nf] = None |
|
514 | results[nf] = None | |
515 | elif kind == regkind or kind == lnkkind: |
|
515 | elif kind == regkind or kind == lnkkind: | |
516 | if nf in dmap: |
|
516 | if nf in dmap: | |
517 | if matchfn(nf): |
|
517 | if matchfn(nf): | |
518 | results[nf] = st |
|
518 | results[nf] = st | |
519 | elif matchfn(nf) and not ignore(nf): |
|
519 | elif matchfn(nf) and not ignore(nf): | |
520 | results[nf] = st |
|
520 | results[nf] = st | |
521 | elif nf in dmap and matchfn(nf): |
|
521 | elif nf in dmap and matchfn(nf): | |
522 | results[nf] = None |
|
522 | results[nf] = None | |
523 |
|
523 | |||
524 | # step 3: report unseen items in the dmap hash |
|
524 | # step 3: report unseen items in the dmap hash | |
525 | visit = util.sort([f for f in dmap if f not in results and match(f)]) |
|
525 | visit = util.sort([f for f in dmap if f not in results and match(f)]) | |
526 | for nf, st in zip(visit, util.statfiles([join(i) for i in visit])): |
|
526 | for nf, st in zip(visit, util.statfiles([join(i) for i in visit])): | |
527 | if not st is None and not getkind(st.st_mode) in (regkind, lnkkind): |
|
527 | if not st is None and not getkind(st.st_mode) in (regkind, lnkkind): | |
528 | st = None |
|
528 | st = None | |
529 | results[nf] = st |
|
529 | results[nf] = st | |
530 |
|
530 | |||
531 | del results['.hg'] |
|
531 | del results['.hg'] | |
532 | return results |
|
532 | return results | |
533 |
|
533 | |||
534 | def status(self, match, ignored, clean, unknown): |
|
534 | def status(self, match, ignored, clean, unknown): | |
535 | listignored, listclean, listunknown = ignored, clean, unknown |
|
535 | listignored, listclean, listunknown = ignored, clean, unknown | |
536 | lookup, modified, added, unknown, ignored = [], [], [], [], [] |
|
536 | lookup, modified, added, unknown, ignored = [], [], [], [], [] | |
537 | removed, deleted, clean = [], [], [] |
|
537 | removed, deleted, clean = [], [], [] | |
538 |
|
538 | |||
539 | _join = self._join |
|
539 | _join = self._join | |
540 | lstat = os.lstat |
|
540 | lstat = os.lstat | |
541 | cmap = self._copymap |
|
541 | cmap = self._copymap | |
542 | dmap = self._map |
|
542 | dmap = self._map | |
543 | ladd = lookup.append |
|
543 | ladd = lookup.append | |
544 | madd = modified.append |
|
544 | madd = modified.append | |
545 | aadd = added.append |
|
545 | aadd = added.append | |
546 | uadd = unknown.append |
|
546 | uadd = unknown.append | |
547 | iadd = ignored.append |
|
547 | iadd = ignored.append | |
548 | radd = removed.append |
|
548 | radd = removed.append | |
549 | dadd = deleted.append |
|
549 | dadd = deleted.append | |
550 | cadd = clean.append |
|
550 | cadd = clean.append | |
551 |
|
551 | |||
552 | for fn, st in self.walk(match, listunknown, listignored).iteritems(): |
|
552 | for fn, st in self.walk(match, listunknown, listignored).iteritems(): | |
553 | if fn not in dmap: |
|
553 | if fn not in dmap: | |
554 | if (listignored or match.exact(fn)) and self._dirignore(fn): |
|
554 | if (listignored or match.exact(fn)) and self._dirignore(fn): | |
555 | if listignored: |
|
555 | if listignored: | |
556 | iadd(fn) |
|
556 | iadd(fn) | |
557 | elif listunknown: |
|
557 | elif listunknown: | |
558 | uadd(fn) |
|
558 | uadd(fn) | |
559 | continue |
|
559 | continue | |
560 |
|
560 | |||
561 | state, mode, size, time = dmap[fn] |
|
561 | state, mode, size, time = dmap[fn] | |
562 |
|
562 | |||
563 | if not st and state in "nma": |
|
563 | if not st and state in "nma": | |
564 | dadd(fn) |
|
564 | dadd(fn) | |
565 | elif state == 'n': |
|
565 | elif state == 'n': | |
566 | if (size >= 0 and |
|
566 | if (size >= 0 and | |
567 | (size != st.st_size |
|
567 | (size != st.st_size | |
568 | or ((mode ^ st.st_mode) & 0100 and self._checkexec)) |
|
568 | or ((mode ^ st.st_mode) & 0100 and self._checkexec)) | |
569 | or size == -2 |
|
569 | or size == -2 | |
570 | or fn in self._copymap): |
|
570 | or fn in self._copymap): | |
571 | madd(fn) |
|
571 | madd(fn) | |
572 | elif time != int(st.st_mtime): |
|
572 | elif time != int(st.st_mtime): | |
573 | ladd(fn) |
|
573 | ladd(fn) | |
574 | elif listclean: |
|
574 | elif listclean: | |
575 | cadd(fn) |
|
575 | cadd(fn) | |
576 | elif state == 'm': |
|
576 | elif state == 'm': | |
577 | madd(fn) |
|
577 | madd(fn) | |
578 | elif state == 'a': |
|
578 | elif state == 'a': | |
579 | aadd(fn) |
|
579 | aadd(fn) | |
580 | elif state == 'r': |
|
580 | elif state == 'r': | |
581 | radd(fn) |
|
581 | radd(fn) | |
582 |
|
582 | |||
583 | return (lookup, modified, added, removed, deleted, unknown, ignored, |
|
583 | return (lookup, modified, added, removed, deleted, unknown, ignored, | |
584 | clean) |
|
584 | clean) |
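Editor's note -- illustration only, not part of the diff: dirstate.write() above packs every entry with _format = ">cllll" (state, mode, size, mtime, filename length) and then appends the filename, with copies stored as "dest\0source", all after a 40-byte parent header. A minimal pure-Python reader for one such record is sketched below under those assumptions; the name read_entry and the data/offset parameters are invented for the example.

    import struct

    _format = ">cllll"                      # state, mode, size, mtime, name length
    _headersize = struct.calcsize(_format)  # 17 bytes, matching the C parser's cur += 17

    def read_entry(data, offset):
        # Illustration only: decode one record written by dirstate.write();
        # offset is assumed to point past the 40-byte parent header.
        state, mode, size, mtime, flen = struct.unpack_from(_format, data, offset)
        start = offset + _headersize
        fname = data[start:start + flen]
        copysource = None
        if "\0" in fname:                   # copy entries: "dest\0source"
            fname, copysource = fname.split("\0", 1)
        return (state, mode, size, mtime, fname, copysource), start + flen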
@@ -1,413 +1,414 | |||||
1 | /* |
|
1 | /* | |
2 | parsers.c - efficient content parsing |
|
2 | parsers.c - efficient content parsing | |
3 |
|
3 | |||
4 | Copyright 2008 Matt Mackall <mpm@selenic.com> and others |
|
4 | Copyright 2008 Matt Mackall <mpm@selenic.com> and others | |
5 |
|
5 | |||
6 | This software may be used and distributed according to the terms of |
|
6 | This software may be used and distributed according to the terms of | |
7 | the GNU General Public License, incorporated herein by reference. |
|
7 | the GNU General Public License, incorporated herein by reference. | |
8 | */ |
|
8 | */ | |
9 |
|
9 | |||
10 | #include <Python.h> |
|
10 | #include <Python.h> | |
11 | #include <ctype.h> |
|
11 | #include <ctype.h> | |
12 | #include <string.h> |
|
12 | #include <string.h> | |
13 |
|
13 | |||
14 | static int hexdigit(char c) |
|
14 | static int hexdigit(char c) | |
15 | { |
|
15 | { | |
16 | if (c >= '0' && c <= '9') |
|
16 | if (c >= '0' && c <= '9') | |
17 | return c - '0'; |
|
17 | return c - '0'; | |
18 | if (c >= 'a' && c <= 'f') |
|
18 | if (c >= 'a' && c <= 'f') | |
19 | return c - 'a' + 10; |
|
19 | return c - 'a' + 10; | |
20 | if (c >= 'A' && c <= 'F') |
|
20 | if (c >= 'A' && c <= 'F') | |
21 | return c - 'A' + 10; |
|
21 | return c - 'A' + 10; | |
22 |
|
22 | |||
23 | PyErr_SetString(PyExc_ValueError, "input contains non-hex character"); |
|
23 | PyErr_SetString(PyExc_ValueError, "input contains non-hex character"); | |
24 | return 0; |
|
24 | return 0; | |
25 | } |
|
25 | } | |
26 |
|
26 | |||
27 | /* |
|
27 | /* | |
28 | * Turn a hex-encoded string into binary. |
|
28 | * Turn a hex-encoded string into binary. | |
29 | */ |
|
29 | */ | |
30 | static PyObject *unhexlify(const char *str, int len) |
|
30 | static PyObject *unhexlify(const char *str, int len) | |
31 | { |
|
31 | { | |
32 | PyObject *ret; |
|
32 | PyObject *ret; | |
33 | const char *c; |
|
33 | const char *c; | |
34 | char *d; |
|
34 | char *d; | |
35 |
|
35 | |||
36 | ret = PyString_FromStringAndSize(NULL, len / 2); |
|
36 | ret = PyString_FromStringAndSize(NULL, len / 2); | |
37 | if (!ret) |
|
37 | if (!ret) | |
38 | return NULL; |
|
38 | return NULL; | |
39 |
|
39 | |||
40 | d = PyString_AS_STRING(ret); |
|
40 | d = PyString_AS_STRING(ret); | |
41 | for (c = str; c < str + len;) { |
|
41 | for (c = str; c < str + len;) { | |
42 | int hi = hexdigit(*c++); |
|
42 | int hi = hexdigit(*c++); | |
43 | int lo = hexdigit(*c++); |
|
43 | int lo = hexdigit(*c++); | |
44 | *d++ = (hi << 4) | lo; |
|
44 | *d++ = (hi << 4) | lo; | |
45 | } |
|
45 | } | |
46 |
|
46 | |||
47 | return ret; |
|
47 | return ret; | |
48 | } |
|
48 | } | |
49 |
|
49 | |||
50 | /* |
|
50 | /* | |
51 | * This code assumes that a manifest is stitched together with newline |
|
51 | * This code assumes that a manifest is stitched together with newline | |
52 | * ('\n') characters. |
|
52 | * ('\n') characters. | |
53 | */ |
|
53 | */ | |
54 | static PyObject *parse_manifest(PyObject *self, PyObject *args) |
|
54 | static PyObject *parse_manifest(PyObject *self, PyObject *args) | |
55 | { |
|
55 | { | |
56 | PyObject *mfdict, *fdict; |
|
56 | PyObject *mfdict, *fdict; | |
57 | char *str, *cur, *start, *zero; |
|
57 | char *str, *cur, *start, *zero; | |
58 | int len; |
|
58 | int len; | |
59 |
|
59 | |||
60 | if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest", |
|
60 | if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest", | |
61 | &PyDict_Type, &mfdict, |
|
61 | &PyDict_Type, &mfdict, | |
62 | &PyDict_Type, &fdict, |
|
62 | &PyDict_Type, &fdict, | |
63 | &str, &len)) |
|
63 | &str, &len)) | |
64 | goto quit; |
|
64 | goto quit; | |
65 |
|
65 | |||
66 | for (start = cur = str, zero = NULL; cur < str + len; cur++) { |
|
66 | for (start = cur = str, zero = NULL; cur < str + len; cur++) { | |
67 | PyObject *file = NULL, *node = NULL; |
|
67 | PyObject *file = NULL, *node = NULL; | |
68 | PyObject *flags = NULL; |
|
68 | PyObject *flags = NULL; | |
69 | int nlen; |
|
69 | int nlen; | |
70 |
|
70 | |||
71 | if (!*cur) { |
|
71 | if (!*cur) { | |
72 | zero = cur; |
|
72 | zero = cur; | |
73 | continue; |
|
73 | continue; | |
74 | } |
|
74 | } | |
75 | else if (*cur != '\n') |
|
75 | else if (*cur != '\n') | |
76 | continue; |
|
76 | continue; | |
77 |
|
77 | |||
78 | if (!zero) { |
|
78 | if (!zero) { | |
79 | PyErr_SetString(PyExc_ValueError, |
|
79 | PyErr_SetString(PyExc_ValueError, | |
80 | "manifest entry has no separator"); |
|
80 | "manifest entry has no separator"); | |
81 | goto quit; |
|
81 | goto quit; | |
82 | } |
|
82 | } | |
83 |
|
83 | |||
84 | file = PyString_FromStringAndSize(start, zero - start); |
|
84 | file = PyString_FromStringAndSize(start, zero - start); | |
85 | if (!file) |
|
85 | if (!file) | |
86 | goto bail; |
|
86 | goto bail; | |
87 |
|
87 | |||
88 | nlen = cur - zero - 1; |
|
88 | nlen = cur - zero - 1; | |
89 |
|
89 | |||
90 | node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen); |
|
90 | node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen); | |
91 | if (!node) |
|
91 | if (!node) | |
92 | goto bail; |
|
92 | goto bail; | |
93 |
|
93 | |||
94 | if (nlen > 40) { |
|
94 | if (nlen > 40) { | |
95 | PyObject *flags; |
|
95 | PyObject *flags; | |
96 |
|
96 | |||
97 | flags = PyString_FromStringAndSize(zero + 41, |
|
97 | flags = PyString_FromStringAndSize(zero + 41, | |
98 | nlen - 40); |
|
98 | nlen - 40); | |
99 | if (!flags) |
|
99 | if (!flags) | |
100 | goto bail; |
|
100 | goto bail; | |
101 |
|
101 | |||
102 | if (PyDict_SetItem(fdict, file, flags) == -1) |
|
102 | if (PyDict_SetItem(fdict, file, flags) == -1) | |
103 | goto bail; |
|
103 | goto bail; | |
104 | } |
|
104 | } | |
105 |
|
105 | |||
106 | if (PyDict_SetItem(mfdict, file, node) == -1) |
|
106 | if (PyDict_SetItem(mfdict, file, node) == -1) | |
107 | goto bail; |
|
107 | goto bail; | |
108 |
|
108 | |||
109 | start = cur + 1; |
|
109 | start = cur + 1; | |
110 | zero = NULL; |
|
110 | zero = NULL; | |
111 |
|
111 | |||
112 | Py_XDECREF(flags); |
|
112 | Py_XDECREF(flags); | |
113 | Py_XDECREF(node); |
|
113 | Py_XDECREF(node); | |
114 | Py_XDECREF(file); |
|
114 | Py_XDECREF(file); | |
115 | continue; |
|
115 | continue; | |
116 | bail: |
|
116 | bail: | |
117 | Py_XDECREF(flags); |
|
117 | Py_XDECREF(flags); | |
118 | Py_XDECREF(node); |
|
118 | Py_XDECREF(node); | |
119 | Py_XDECREF(file); |
|
119 | Py_XDECREF(file); | |
120 | goto quit; |
|
120 | goto quit; | |
121 | } |
|
121 | } | |
122 |
|
122 | |||
123 | if (len > 0 && *(cur - 1) != '\n') { |
|
123 | if (len > 0 && *(cur - 1) != '\n') { | |
124 | PyErr_SetString(PyExc_ValueError, |
|
124 | PyErr_SetString(PyExc_ValueError, | |
125 | "manifest contains trailing garbage"); |
|
125 | "manifest contains trailing garbage"); | |
126 | goto quit; |
|
126 | goto quit; | |
127 | } |
|
127 | } | |
128 |
|
128 | |||
129 | Py_INCREF(Py_None); |
|
129 | Py_INCREF(Py_None); | |
130 | return Py_None; |
|
130 | return Py_None; | |
131 | quit: |
|
131 | quit: | |
132 | return NULL; |
|
132 | return NULL; | |
133 | } |
|
133 | } | |
134 |
|
134 | |||
135 | #ifdef _WIN32 |
|
135 | #ifdef _WIN32 | |
136 | # ifdef _MSC_VER |
|
136 | # ifdef _MSC_VER | |
137 | /* msvc 6.0 has problems */ |
|
137 | /* msvc 6.0 has problems */ | |
138 | # define inline __inline |
|
138 | # define inline __inline | |
139 | typedef unsigned long uint32_t; |
|
139 | typedef unsigned long uint32_t; | |
|
140 | typedef unsigned __int64 uint64_t; | |||
140 | # else |
|
141 | # else | |
141 | # include <stdint.h> |
|
142 | # include <stdint.h> | |
142 | # endif |
|
143 | # endif | |
143 | static uint32_t ntohl(uint32_t x) |
|
144 | static uint32_t ntohl(uint32_t x) | |
144 | { |
|
145 | { | |
145 | return ((x & 0x000000ffUL) << 24) | |
|
146 | return ((x & 0x000000ffUL) << 24) | | |
146 | ((x & 0x0000ff00UL) << 8) | |
|
147 | ((x & 0x0000ff00UL) << 8) | | |
147 | ((x & 0x00ff0000UL) >> 8) | |
|
148 | ((x & 0x00ff0000UL) >> 8) | | |
148 | ((x & 0xff000000UL) >> 24); |
|
149 | ((x & 0xff000000UL) >> 24); | |
149 | } |
|
150 | } | |
150 | #else |
|
151 | #else | |
151 | /* not windows */ |
|
152 | /* not windows */ | |
152 | # include <sys/types.h> |
|
153 | # include <sys/types.h> | |
153 | # if defined __BEOS__ && !defined __HAIKU__ |
|
154 | # if defined __BEOS__ && !defined __HAIKU__ | |
154 | # include <ByteOrder.h> |
|
155 | # include <ByteOrder.h> | |
155 | # else |
|
156 | # else | |
156 | # include <arpa/inet.h> |
|
157 | # include <arpa/inet.h> | |
157 | # endif |
|
158 | # endif | |
158 | # include <inttypes.h> |
|
159 | # include <inttypes.h> | |
159 | #endif |
|
160 | #endif | |
160 |
|
161 | |||
161 | static PyObject *parse_dirstate(PyObject *self, PyObject *args) |
|
162 | static PyObject *parse_dirstate(PyObject *self, PyObject *args) | |
162 | { |
|
163 | { | |
163 | PyObject *dmap, *cmap, *parents = NULL, *ret = NULL; |
|
164 | PyObject *dmap, *cmap, *parents = NULL, *ret = NULL; | |
164 | PyObject *fname = NULL, *cname = NULL, *entry = NULL; |
|
165 | PyObject *fname = NULL, *cname = NULL, *entry = NULL; | |
165 | char *str, *cur, *end, *cpos; |
|
166 | char *str, *cur, *end, *cpos; | |
166 | int state, mode, size, mtime, flen; |
|
167 | int state, mode, size, mtime, flen; | |
167 | int len; |
|
168 | int len; | |
168 | char decode[16]; /* for alignment */ |
|
169 | char decode[16]; /* for alignment */ | |
169 |
|
170 | |||
170 | if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate", |
|
171 | if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate", | |
171 | &PyDict_Type, &dmap, |
|
172 | &PyDict_Type, &dmap, | |
172 | &PyDict_Type, &cmap, |
|
173 | &PyDict_Type, &cmap, | |
173 | &str, &len)) |
|
174 | &str, &len)) | |
174 | goto quit; |
|
175 | goto quit; | |
175 |
|
176 | |||
176 | /* read parents */ |
|
177 | /* read parents */ | |
177 | if (len < 40) |
|
178 | if (len < 40) | |
178 | goto quit; |
|
179 | goto quit; | |
179 |
|
180 | |||
180 | parents = Py_BuildValue("s#s#", str, 20, str + 20, 20); |
|
181 | parents = Py_BuildValue("s#s#", str, 20, str + 20, 20); | |
181 | if (!parents) |
|
182 | if (!parents) | |
182 | goto quit; |
|
183 | goto quit; | |
183 |
|
184 | |||
184 | /* read filenames */ |
|
185 | /* read filenames */ | |
185 | cur = str + 40; |
|
186 | cur = str + 40; | |
186 | end = str + len; |
|
187 | end = str + len; | |
187 |
|
188 | |||
188 | while (cur < end - 17) { |
|
189 | while (cur < end - 17) { | |
189 | /* unpack header */ |
|
190 | /* unpack header */ | |
190 | state = *cur; |
|
191 | state = *cur; | |
191 | memcpy(decode, cur + 1, 16); |
|
192 | memcpy(decode, cur + 1, 16); | |
192 | mode = ntohl(*(uint32_t *)(decode)); |
|
193 | mode = ntohl(*(uint32_t *)(decode)); | |
193 | size = ntohl(*(uint32_t *)(decode + 4)); |
|
194 | size = ntohl(*(uint32_t *)(decode + 4)); | |
194 | mtime = ntohl(*(uint32_t *)(decode + 8)); |
|
195 | mtime = ntohl(*(uint32_t *)(decode + 8)); | |
195 | flen = ntohl(*(uint32_t *)(decode + 12)); |
|
196 | flen = ntohl(*(uint32_t *)(decode + 12)); | |
196 | cur += 17; |
|
197 | cur += 17; | |
197 | if (cur + flen > end) |
|
198 | if (cur + flen > end) | |
198 | goto quit; |
|
199 | goto quit; | |
199 |
|
200 | |||
200 | entry = Py_BuildValue("ciii", state, mode, size, mtime); |
|
201 | entry = Py_BuildValue("ciii", state, mode, size, mtime); | |
201 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ |
|
202 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
202 | if (!entry) |
|
203 | if (!entry) | |
203 | goto quit; |
|
204 | goto quit; | |
204 |
|
205 | |||
205 | cpos = memchr(cur, 0, flen); |
|
206 | cpos = memchr(cur, 0, flen); | |
206 | if (cpos) { |
|
207 | if (cpos) { | |
207 | fname = PyString_FromStringAndSize(cur, cpos - cur); |
|
208 | fname = PyString_FromStringAndSize(cur, cpos - cur); | |
208 | cname = PyString_FromStringAndSize(cpos + 1, |
|
209 | cname = PyString_FromStringAndSize(cpos + 1, | |
209 | flen - (cpos - cur) - 1); |
|
210 | flen - (cpos - cur) - 1); | |
210 | if (!fname || !cname || |
|
211 | if (!fname || !cname || | |
211 | PyDict_SetItem(cmap, fname, cname) == -1 || |
|
212 | PyDict_SetItem(cmap, fname, cname) == -1 || | |
212 | PyDict_SetItem(dmap, fname, entry) == -1) |
|
213 | PyDict_SetItem(dmap, fname, entry) == -1) | |
213 | goto quit; |
|
214 | goto quit; | |
214 | Py_DECREF(cname); |
|
215 | Py_DECREF(cname); | |
215 | } else { |
|
216 | } else { | |
216 | fname = PyString_FromStringAndSize(cur, flen); |
|
217 | fname = PyString_FromStringAndSize(cur, flen); | |
217 | if (!fname || |
|
218 | if (!fname || | |
218 | PyDict_SetItem(dmap, fname, entry) == -1) |
|
219 | PyDict_SetItem(dmap, fname, entry) == -1) | |
219 | goto quit; |
|
220 | goto quit; | |
220 | } |
|
221 | } | |
221 | cur += flen; |
|
222 | cur += flen; | |
222 | Py_DECREF(fname); |
|
223 | Py_DECREF(fname); | |
223 | Py_DECREF(entry); |
|
224 | Py_DECREF(entry); | |
224 | fname = cname = entry = NULL; |
|
225 | fname = cname = entry = NULL; | |
225 | } |
|
226 | } | |
226 |
|
227 | |||
227 | ret = parents; |
|
228 | ret = parents; | |
228 | Py_INCREF(ret); |
|
229 | Py_INCREF(ret); | |
229 | quit: |
|
230 | quit: | |
230 | Py_XDECREF(fname); |
|
231 | Py_XDECREF(fname); | |
231 | Py_XDECREF(cname); |
|
232 | Py_XDECREF(cname); | |
232 | Py_XDECREF(entry); |
|
233 | Py_XDECREF(entry); | |
233 | Py_XDECREF(parents); |
|
234 | Py_XDECREF(parents); | |
234 | return ret; |
|
235 | return ret; | |
235 | } |
|
236 | } | |
236 |
|
237 | |||
237 | const char nullid[20]; |
|
238 | const char nullid[20]; | |
238 | const int nullrev = -1; |
|
239 | const int nullrev = -1; | |
239 |
|
240 | |||
240 | /* RevlogNG format (all in big endian, data may be inlined): |
|
241 | /* RevlogNG format (all in big endian, data may be inlined): | |
241 | * 6 bytes: offset |
|
242 | * 6 bytes: offset | |
242 | * 2 bytes: flags |
|
243 | * 2 bytes: flags | |
243 | * 4 bytes: compressed length |
|
244 | * 4 bytes: compressed length | |
244 | * 4 bytes: uncompressed length |
|
245 | * 4 bytes: uncompressed length | |
245 | * 4 bytes: base revision |
|
246 | * 4 bytes: base revision | |
246 | * 4 bytes: link revision |
|
247 | * 4 bytes: link revision | |
247 | * 4 bytes: parent 1 revision |
|
248 | * 4 bytes: parent 1 revision | |
248 | * 4 bytes: parent 2 revision |
|
249 | * 4 bytes: parent 2 revision | |
249 | * 32 bytes: nodeid (only 20 bytes used) |
|
250 | * 32 bytes: nodeid (only 20 bytes used) | |
250 | */ |
|
251 | */ | |
251 | static int _parse_index_ng (const char *data, int size, int inlined, |
|
252 | static int _parse_index_ng (const char *data, int size, int inlined, | |
252 | PyObject *index, PyObject *nodemap) |
|
253 | PyObject *index, PyObject *nodemap) | |
253 | { |
|
254 | { | |
254 | PyObject *entry = NULL, *node_id = NULL, *n_obj = NULL; |
|
255 | PyObject *entry = NULL, *node_id = NULL, *n_obj = NULL; | |
255 | PyObject *nullrev_obj = NULL, *nullid_obj = NULL; |
|
256 | PyObject *nullrev_obj = NULL, *nullid_obj = NULL; | |
256 | int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; |
|
257 | int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; | |
257 | uint64_t offset_flags; |
|
258 | uint64_t offset_flags; | |
258 | int n = 0; |
|
259 | int n = 0; | |
259 | const char *end = data + size; |
|
260 | const char *end = data + size; | |
260 |
|
261 | |||
261 | while (data < end) { |
|
262 | while (data < end) { | |
262 | offset_flags = ntohl(*((uint32_t *) (data + 4))); |
|
263 | offset_flags = ntohl(*((uint32_t *) (data + 4))); | |
263 | if (n == 0) /* mask out version number for the first entry */ |
|
264 | if (n == 0) /* mask out version number for the first entry */ | |
264 | offset_flags &= 0xFFFF; |
|
265 | offset_flags &= 0xFFFF; | |
265 | else { |
|
266 | else { | |
266 | uint32_t offset_high = ntohl(*((uint32_t *) data)); |
|
267 | uint32_t offset_high = ntohl(*((uint32_t *) data)); | |
267 | offset_flags |= ((uint64_t) offset_high) << 32; |
|
268 | offset_flags |= ((uint64_t) offset_high) << 32; | |
268 | } |
|
269 | } | |
269 |
|
270 | |||
270 |
|
271 | |||
271 | comp_len = ntohl(*((uint32_t *) (data + 8))); |
|
272 | comp_len = ntohl(*((uint32_t *) (data + 8))); | |
272 | uncomp_len = ntohl(*((uint32_t *) (data + 12))); |
|
273 | uncomp_len = ntohl(*((uint32_t *) (data + 12))); | |
273 | base_rev = ntohl(*((uint32_t *) (data + 16))); |
|
274 | base_rev = ntohl(*((uint32_t *) (data + 16))); | |
274 | link_rev = ntohl(*((uint32_t *) (data + 20))); |
|
275 | link_rev = ntohl(*((uint32_t *) (data + 20))); | |
275 | parent_1 = ntohl(*((uint32_t *) (data + 24))); |
|
276 | parent_1 = ntohl(*((uint32_t *) (data + 24))); | |
276 | parent_2 = ntohl(*((uint32_t *) (data + 28))); |
|
277 | parent_2 = ntohl(*((uint32_t *) (data + 28))); | |
277 | node_id = PyString_FromStringAndSize(data + 32, 20); |
|
278 | node_id = PyString_FromStringAndSize(data + 32, 20); | |
278 | n_obj = PyInt_FromLong(n); |
|
279 | n_obj = PyInt_FromLong(n); | |
279 | if (!node_id || !n_obj || |
|
280 | if (!node_id || !n_obj || | |
280 | PyDict_SetItem(nodemap, node_id, n_obj) != 0) |
|
281 | PyDict_SetItem(nodemap, node_id, n_obj) != 0) | |
281 | goto quit; |
|
282 | goto quit; | |
282 | Py_DECREF(n_obj); |
|
283 | Py_DECREF(n_obj); | |
283 |
|
284 | |||
284 | entry = Py_BuildValue("LiiiiiiN", offset_flags, comp_len, |
|
285 | entry = Py_BuildValue("LiiiiiiN", offset_flags, comp_len, | |
285 | uncomp_len, base_rev, link_rev, |
|
286 | uncomp_len, base_rev, link_rev, | |
286 | parent_1, parent_2, node_id); |
|
287 | parent_1, parent_2, node_id); | |
287 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ |
|
288 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
288 | if (!entry) |
|
289 | if (!entry) | |
289 | goto quit; |
|
290 | goto quit; | |
290 |
|
291 | |||
291 | /* append to or set value in the index list */ |
|
292 | /* append to or set value in the index list */ | |
292 | if (inlined) { |
|
293 | if (inlined) { | |
293 | if (PyList_Append(index, entry) != 0) |
|
294 | if (PyList_Append(index, entry) != 0) | |
294 | goto quit; |
|
295 | goto quit; | |
295 | Py_DECREF(entry); |
|
296 | Py_DECREF(entry); | |
296 | } else { |
|
297 | } else { | |
297 | PyList_SET_ITEM(index, n, entry); /* steals reference */ |
|
298 | PyList_SET_ITEM(index, n, entry); /* steals reference */ | |
298 | } |
|
299 | } | |
299 |
|
300 | |||
300 | data += 64 + (inlined ? comp_len : 0); |
|
301 | data += 64 + (inlined ? comp_len : 0); | |
301 | n++; |
|
302 | n++; | |
302 | } |
|
303 | } | |
303 | if (data > end) { |
|
304 | if (data > end) { | |
304 | if (!PyErr_Occurred()) |
|
305 | if (!PyErr_Occurred()) | |
305 | PyErr_SetString(PyExc_ValueError, "corrupt index file"); |
|
306 | PyErr_SetString(PyExc_ValueError, "corrupt index file"); | |
306 | goto quit; |
|
307 | goto quit; | |
307 | } |
|
308 | } | |
308 |
|
309 | |||
309 | /* create the nullid/nullrev entry in the nodemap and the |
|
310 | /* create the nullid/nullrev entry in the nodemap and the | |
310 | * magic nullid entry in the index at [-1] */ |
|
311 | * magic nullid entry in the index at [-1] */ | |
311 | nullid_obj = PyString_FromStringAndSize(nullid, 20); |
|
312 | nullid_obj = PyString_FromStringAndSize(nullid, 20); | |
312 | nullrev_obj = PyInt_FromLong(nullrev); |
|
313 | nullrev_obj = PyInt_FromLong(nullrev); | |
313 | if (!nodemap || !nullid_obj || !nullrev_obj || |
|
314 | if (!nodemap || !nullid_obj || !nullrev_obj || | |
314 | PyDict_SetItem(nodemap, nullid_obj, nullrev_obj) != 0) |
|
315 | PyDict_SetItem(nodemap, nullid_obj, nullrev_obj) != 0) | |
315 | goto quit; |
|
316 | goto quit; | |
316 | Py_DECREF(nullrev_obj); |
|
317 | Py_DECREF(nullrev_obj); | |
317 |
|
318 | |||
318 | entry = Py_BuildValue("iiiiiiiN", 0, 0, 0, -1, -1, -1, -1, nullid_obj); |
|
319 | entry = Py_BuildValue("iiiiiiiN", 0, 0, 0, -1, -1, -1, -1, nullid_obj); | |
319 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ |
|
320 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
320 | if (!entry) |
|
321 | if (!entry) | |
321 | goto quit; |
|
322 | goto quit; | |
322 | if (inlined) { |
|
323 | if (inlined) { | |
323 | if (PyList_Append(index, entry) != 0) |
|
324 | if (PyList_Append(index, entry) != 0) | |
324 | goto quit; |
|
325 | goto quit; | |
325 | Py_DECREF(entry); |
|
326 | Py_DECREF(entry); | |
326 | } else { |
|
327 | } else { | |
327 | PyList_SET_ITEM(index, n, entry); /* steals reference */ |
|
328 | PyList_SET_ITEM(index, n, entry); /* steals reference */ | |
328 | } |
|
329 | } | |
329 |
|
330 | |||
330 | return 1; |
|
331 | return 1; | |
331 |
|
332 | |||
332 | quit: |
|
333 | quit: | |
333 | Py_XDECREF(n_obj); |
|
334 | Py_XDECREF(n_obj); | |
334 | Py_XDECREF(node_id); |
|
335 | Py_XDECREF(node_id); | |
335 | Py_XDECREF(entry); |
|
336 | Py_XDECREF(entry); | |
336 | Py_XDECREF(nullrev_obj); |
|
337 | Py_XDECREF(nullrev_obj); | |
337 | Py_XDECREF(nullid_obj); |
|
338 | Py_XDECREF(nullid_obj); | |
338 | return 0; |
|
339 | return 0; | |
339 | } |
|
340 | } | |
340 |
|
341 | |||
341 |
|
342 | |||
342 |
|
343 | |||
343 | /* This function parses a index file and returns a Python tuple of the |
|
344 | /* This function parses a index file and returns a Python tuple of the | |
344 | * following format: (index, nodemap, cache) |
|
345 | * following format: (index, nodemap, cache) | |
345 | * |
|
346 | * | |
346 | * index: a list of tuples containing the RevlogNG records |
|
347 | * index: a list of tuples containing the RevlogNG records | |
347 | * nodemap: a dict mapping node ids to indices in the index list |
|
348 | * nodemap: a dict mapping node ids to indices in the index list | |
348 | * cache: if data is inlined, a tuple (index_file_content, 0) else None |
|
349 | * cache: if data is inlined, a tuple (index_file_content, 0) else None | |
349 | */ |
|
350 | */ | |
350 | static PyObject *parse_index(PyObject *self, PyObject *args) |
|
351 | static PyObject *parse_index(PyObject *self, PyObject *args) | |
351 | { |
|
352 | { | |
352 | const char *data; |
|
353 | const char *data; | |
353 | int size, inlined; |
|
354 | int size, inlined; | |
354 | PyObject *rval = NULL, *index = NULL, *nodemap = NULL, *cache = NULL; |
|
355 | PyObject *rval = NULL, *index = NULL, *nodemap = NULL, *cache = NULL; | |
355 | PyObject *data_obj = NULL, *inlined_obj; |
|
356 | PyObject *data_obj = NULL, *inlined_obj; | |
356 |
|
357 | |||
357 | if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj)) |
|
358 | if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj)) | |
358 | return NULL; |
|
359 | return NULL; | |
359 | inlined = inlined_obj && PyObject_IsTrue(inlined_obj); |
|
360 | inlined = inlined_obj && PyObject_IsTrue(inlined_obj); | |
360 |
|
361 | |||
361 | /* If no data is inlined, we know the size of the index list in |
|
362 | /* If no data is inlined, we know the size of the index list in | |
362 | * advance: size divided by the size of one revlog record (64 bytes) |
|
363 | * advance: size divided by the size of one revlog record (64 bytes) | |
363 | * plus one for the nullid */ |
|
364 | * plus one for the nullid */ | |
364 | index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1); |
|
365 | index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1); | |
365 | if (!index) |
|
366 | if (!index) | |
366 | goto quit; |
|
367 | goto quit; | |
367 |
|
368 | |||
368 | nodemap = PyDict_New(); |
|
369 | nodemap = PyDict_New(); | |
369 |
|
370 | |||
370 | /* set up the cache return value */ |
|
371 | /* set up the cache return value */ | |
371 | if (inlined) { |
|
372 | if (inlined) { | |
372 | /* Note that the reference to data_obj is only borrowed */ |
|
373 | /* Note that the reference to data_obj is only borrowed */ | |
373 | data_obj = PyTuple_GET_ITEM(args, 0); |
|
374 | data_obj = PyTuple_GET_ITEM(args, 0); | |
374 | cache = Py_BuildValue("iO", 0, data_obj); |
|
375 | cache = Py_BuildValue("iO", 0, data_obj); | |
375 | if (!cache) |
|
376 | if (!cache) | |
376 | goto quit; |
|
377 | goto quit; | |
377 | } else { |
|
378 | } else { | |
378 | cache = Py_None; |
|
379 | cache = Py_None; | |
379 | Py_INCREF(Py_None); |
|
380 | Py_INCREF(Py_None); | |
380 | } |
|
381 | } | |
381 |
|
382 | |||
382 | /* actually populate the index and the nodemap with data */ |
|
383 | /* actually populate the index and the nodemap with data */ | |
383 | if (!_parse_index_ng (data, size, inlined, index, nodemap)) |
|
384 | if (!_parse_index_ng (data, size, inlined, index, nodemap)) | |
384 | goto quit; |
|
385 | goto quit; | |
385 |
|
386 | |||
386 | rval = Py_BuildValue("NNN", index, nodemap, cache); |
|
387 | rval = Py_BuildValue("NNN", index, nodemap, cache); | |
387 | if (!rval) |
|
388 | if (!rval) | |
388 | goto quit; |
|
389 | goto quit; | |
389 | return rval; |
|
390 | return rval; | |
390 |
|
391 | |||
391 | quit: |
|
392 | quit: | |
392 | Py_XDECREF(index); |
|
393 | Py_XDECREF(index); | |
393 | Py_XDECREF(nodemap); |
|
394 | Py_XDECREF(nodemap); | |
394 | Py_XDECREF(cache); |
|
395 | Py_XDECREF(cache); | |
395 | Py_XDECREF(rval); |
|
396 | Py_XDECREF(rval); | |
396 | Py_XDECREF(data_obj); |
|
397 | Py_XDECREF(data_obj); | |
397 | return NULL; |
|
398 | return NULL; | |
398 | } |
|
399 | } | |
399 |
|
400 | |||
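The comment above parse_index documents the (index, nodemap, cache) return value. A minimal sketch of consuming it from Python follows; the import name matches initparsers below, but the index file name, the 20-byte nullid constant and the assumption that the data is not inlined are illustrative only.

    # hypothetical driver, not part of the extension itself
    import parsers

    nullid = '\x00' * 20                        # assumed: the 20-byte null node used above
    data = open('00changelog.i', 'rb').read()   # hypothetical RevlogNG index file
    index, nodemap, cache = parsers.parse_index(data, False)

    print len(index) - 1, 'revisions'           # index[-1] is the magic nullid entry
    print nodemap[nullid]                       # nullrev
    print cache                                 # None, since the data was not inlined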
400 |
|
401 | |||
401 | static char parsers_doc[] = "Efficient content parsing."; |
|
402 | static char parsers_doc[] = "Efficient content parsing."; | |
402 |
|
403 | |||
403 | static PyMethodDef methods[] = { |
|
404 | static PyMethodDef methods[] = { | |
404 | {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, |
|
405 | {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, | |
405 | {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, |
|
406 | {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, | |
406 | {"parse_index", parse_index, METH_VARARGS, "parse a revlog index\n"}, |
|
407 | {"parse_index", parse_index, METH_VARARGS, "parse a revlog index\n"}, | |
407 | {NULL, NULL} |
|
408 | {NULL, NULL} | |
408 | }; |
|
409 | }; | |
409 |
|
410 | |||
410 | PyMODINIT_FUNC initparsers(void) |
|
411 | PyMODINIT_FUNC initparsers(void) | |
411 | { |
|
412 | { | |
412 | Py_InitModule3("parsers", methods, parsers_doc); |
|
413 | Py_InitModule3("parsers", methods, parsers_doc); | |
413 | } |
|
414 | } |
@@ -1,1939 +1,1943 | |||||
1 | """ |
|
1 | """ | |
2 | util.py - Mercurial utility functions and platform-specific implementations |
|
2 | util.py - Mercurial utility functions and platform-specific implementations | |
3 |
|
3 | |||
4 | Copyright 2005 K. Thananchayan <thananck@yahoo.com> |
|
4 | Copyright 2005 K. Thananchayan <thananck@yahoo.com> | |
5 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
6 | Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
6 | Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
7 |
|
7 | |||
8 | This software may be used and distributed according to the terms |
|
8 | This software may be used and distributed according to the terms | |
9 | of the GNU General Public License, incorporated herein by reference. |
|
9 | of the GNU General Public License, incorporated herein by reference. | |
10 |
|
10 | |||
11 | This contains helper routines that are independent of the SCM core and hide |
|
11 | This contains helper routines that are independent of the SCM core and hide | |
12 | platform-specific details from the core. |
|
12 | platform-specific details from the core. | |
13 | """ |
|
13 | """ | |
14 |
|
14 | |||
15 | from i18n import _ |
|
15 | from i18n import _ | |
16 | import cStringIO, errno, getpass, re, shutil, sys, tempfile |
|
16 | import cStringIO, errno, getpass, re, shutil, sys, tempfile | |
17 | import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil |
|
17 | import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil | |
18 | import imp, urlparse |
|
18 | import imp, urlparse | |
19 |
|
19 | |||
20 | # Python compatibility |
|
20 | # Python compatibility | |
21 |
|
21 | |||
22 | try: |
|
22 | try: | |
23 | set = set |
|
23 | set = set | |
24 | frozenset = frozenset |
|
24 | frozenset = frozenset | |
25 | except NameError: |
|
25 | except NameError: | |
26 | from sets import Set as set, ImmutableSet as frozenset |
|
26 | from sets import Set as set, ImmutableSet as frozenset | |
27 |
|
27 | |||
28 | _md5 = None |
|
28 | _md5 = None | |
29 | def md5(s): |
|
29 | def md5(s): | |
30 | global _md5 |
|
30 | global _md5 | |
31 | if _md5 is None: |
|
31 | if _md5 is None: | |
32 | try: |
|
32 | try: | |
33 | import hashlib |
|
33 | import hashlib | |
34 | _md5 = hashlib.md5 |
|
34 | _md5 = hashlib.md5 | |
35 | except ImportError: |
|
35 | except ImportError: | |
36 | import md5 |
|
36 | import md5 | |
37 | _md5 = md5.md5 |
|
37 | _md5 = md5.md5 | |
38 | return _md5(s) |
|
38 | return _md5(s) | |
39 |
|
39 | |||
40 | _sha1 = None |
|
40 | _sha1 = None | |
41 | def sha1(s): |
|
41 | def sha1(s): | |
42 | global _sha1 |
|
42 | global _sha1 | |
43 | if _sha1 is None: |
|
43 | if _sha1 is None: | |
44 | try: |
|
44 | try: | |
45 | import hashlib |
|
45 | import hashlib | |
46 | _sha1 = hashlib.sha1 |
|
46 | _sha1 = hashlib.sha1 | |
47 | except ImportError: |
|
47 | except ImportError: | |
48 | import sha |
|
48 | import sha | |
49 | _sha1 = sha.sha |
|
49 | _sha1 = sha.sha | |
50 | return _sha1(s) |
|
50 | return _sha1(s) | |
51 |
|
51 | |||
52 | try: |
|
52 | try: | |
53 | import subprocess |
|
53 | import subprocess | |
|
54 | closefds = os.name == 'posix' | |||
54 | def popen2(cmd, mode='t', bufsize=-1): |
|
55 | def popen2(cmd, mode='t', bufsize=-1): | |
55 |
p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, |
|
56 | p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, | |
|
57 | close_fds=closefds, | |||
56 | stdin=subprocess.PIPE, stdout=subprocess.PIPE) |
|
58 | stdin=subprocess.PIPE, stdout=subprocess.PIPE) | |
57 | return p.stdin, p.stdout |
|
59 | return p.stdin, p.stdout | |
58 | def popen3(cmd, mode='t', bufsize=-1): |
|
60 | def popen3(cmd, mode='t', bufsize=-1): | |
59 |
p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, |
|
61 | p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, | |
|
62 | close_fds=closefds, | |||
60 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
63 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | |
61 | stderr=subprocess.PIPE) |
|
64 | stderr=subprocess.PIPE) | |
62 | return p.stdin, p.stdout, p.stderr |
|
65 | return p.stdin, p.stdout, p.stderr | |
63 | def Popen3(cmd, capturestderr=False, bufsize=-1): |
|
66 | def Popen3(cmd, capturestderr=False, bufsize=-1): | |
64 | stderr = capturestderr and subprocess.PIPE or None |
|
67 | stderr = capturestderr and subprocess.PIPE or None | |
65 |
p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, |
|
68 | p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, | |
|
69 | close_fds=closefds, | |||
66 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
70 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | |
67 | stderr=stderr) |
|
71 | stderr=stderr) | |
68 | p.fromchild = p.stdout |
|
72 | p.fromchild = p.stdout | |
69 | p.tochild = p.stdin |
|
73 | p.tochild = p.stdin | |
70 | p.childerr = p.stderr |
|
74 | p.childerr = p.stderr | |
71 | return p |
|
75 | return p | |
72 | except ImportError: |
|
76 | except ImportError: | |
73 | subprocess = None |
|
77 | subprocess = None | |
74 | import popen2 as _popen2 |
|
78 | import popen2 as _popen2 | |
75 | popen2 = _popen2.popen2 |
|
79 | popen2 = _popen2.popen2 | |
76 | Popen3 = _popen2.Popen3 |
|
80 | Popen3 = _popen2.Popen3 | |
77 |
|
81 | |||
78 |
|
82 | |||
79 | try: |
|
83 | try: | |
80 | _encoding = os.environ.get("HGENCODING") |
|
84 | _encoding = os.environ.get("HGENCODING") | |
81 | if sys.platform == 'darwin' and not _encoding: |
|
85 | if sys.platform == 'darwin' and not _encoding: | |
82 | # On darwin, getpreferredencoding ignores the locale environment and |
|
86 | # On darwin, getpreferredencoding ignores the locale environment and | |
83 | # always returns mac-roman. We override this if the environment is |
|
87 | # always returns mac-roman. We override this if the environment is | |
84 | # not C (has been customized by the user). |
|
88 | # not C (has been customized by the user). | |
85 | locale.setlocale(locale.LC_CTYPE, '') |
|
89 | locale.setlocale(locale.LC_CTYPE, '') | |
86 | _encoding = locale.getlocale()[1] |
|
90 | _encoding = locale.getlocale()[1] | |
87 | if not _encoding: |
|
91 | if not _encoding: | |
88 | _encoding = locale.getpreferredencoding() or 'ascii' |
|
92 | _encoding = locale.getpreferredencoding() or 'ascii' | |
89 | except locale.Error: |
|
93 | except locale.Error: | |
90 | _encoding = 'ascii' |
|
94 | _encoding = 'ascii' | |
91 | _encodingmode = os.environ.get("HGENCODINGMODE", "strict") |
|
95 | _encodingmode = os.environ.get("HGENCODINGMODE", "strict") | |
92 | _fallbackencoding = 'ISO-8859-1' |
|
96 | _fallbackencoding = 'ISO-8859-1' | |
93 |
|
97 | |||
94 | def tolocal(s): |
|
98 | def tolocal(s): | |
95 | """ |
|
99 | """ | |
96 | Convert a string from internal UTF-8 to local encoding |
|
100 | Convert a string from internal UTF-8 to local encoding | |
97 |
|
101 | |||
98 | All internal strings should be UTF-8 but some repos before the |
|
102 | All internal strings should be UTF-8 but some repos before the | |
99 | implementation of locale support may contain latin1 or possibly |
|
103 | implementation of locale support may contain latin1 or possibly | |
100 | other character sets. We attempt to decode everything strictly |
|
104 | other character sets. We attempt to decode everything strictly | |
101 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and |
|
105 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and | |
102 | replace unknown characters. |
|
106 | replace unknown characters. | |
103 | """ |
|
107 | """ | |
104 | for e in ('UTF-8', _fallbackencoding): |
|
108 | for e in ('UTF-8', _fallbackencoding): | |
105 | try: |
|
109 | try: | |
106 | u = s.decode(e) # attempt strict decoding |
|
110 | u = s.decode(e) # attempt strict decoding | |
107 | return u.encode(_encoding, "replace") |
|
111 | return u.encode(_encoding, "replace") | |
108 | except LookupError, k: |
|
112 | except LookupError, k: | |
109 | raise Abort(_("%s, please check your locale settings") % k) |
|
113 | raise Abort(_("%s, please check your locale settings") % k) | |
110 | except UnicodeDecodeError: |
|
114 | except UnicodeDecodeError: | |
111 | pass |
|
115 | pass | |
112 | u = s.decode("utf-8", "replace") # last ditch |
|
116 | u = s.decode("utf-8", "replace") # last ditch | |
113 | return u.encode(_encoding, "replace") |
|
117 | return u.encode(_encoding, "replace") | |
114 |
|
118 | |||
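A small sketch of the strict-then-fallback decoding described above, assuming tolocal is in scope (e.g. from util import tolocal) and leaving the final encode to whatever the platform default resolves to.

    utf8_bytes = 'caf\xc3\xa9'     # valid UTF-8: decoded strictly on the first attempt
    latin1_bytes = 'caf\xe9'       # invalid UTF-8: falls back to _fallbackencoding (ISO-8859-1)
    print tolocal(utf8_bytes)
    print tolocal(latin1_bytes)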
115 | def fromlocal(s): |
|
119 | def fromlocal(s): | |
116 | """ |
|
120 | """ | |
117 | Convert a string from the local character encoding to UTF-8 |
|
121 | Convert a string from the local character encoding to UTF-8 | |
118 |
|
122 | |||
119 | We attempt to decode strings using the encoding mode set by |
|
123 | We attempt to decode strings using the encoding mode set by | |
120 | HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown |
|
124 | HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown | |
121 | characters will cause an error message. Other modes include |
|
125 | characters will cause an error message. Other modes include | |
122 | 'replace', which replaces unknown characters with a special |
|
126 | 'replace', which replaces unknown characters with a special | |
123 | Unicode character, and 'ignore', which drops the character. |
|
127 | Unicode character, and 'ignore', which drops the character. | |
124 | """ |
|
128 | """ | |
125 | try: |
|
129 | try: | |
126 | return s.decode(_encoding, _encodingmode).encode("utf-8") |
|
130 | return s.decode(_encoding, _encodingmode).encode("utf-8") | |
127 | except UnicodeDecodeError, inst: |
|
131 | except UnicodeDecodeError, inst: | |
128 | sub = s[max(0, inst.start-10):inst.start+10] |
|
132 | sub = s[max(0, inst.start-10):inst.start+10] | |
129 | raise Abort("decoding near '%s': %s!" % (sub, inst)) |
|
133 | raise Abort("decoding near '%s': %s!" % (sub, inst)) | |
130 | except LookupError, k: |
|
134 | except LookupError, k: | |
131 | raise Abort(_("%s, please check your locale settings") % k) |
|
135 | raise Abort(_("%s, please check your locale settings") % k) | |
132 |
|
136 | |||
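The effect of HGENCODINGMODE described above, sketched under the assumption that the local encoding resolves to UTF-8 and that the variables are set before this module is first imported (they are read once at import time).

    # hypothetical driver script, not part of this module
    import os
    os.environ['HGENCODING'] = 'UTF-8'
    os.environ['HGENCODINGMODE'] = 'replace'   # 'strict' (the default) would raise Abort below
    import util

    print repr(util.fromlocal('caf\xe9'))      # '\xe9' is not valid UTF-8 -> replacement character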
133 | def locallen(s): |
|
137 | def locallen(s): | |
134 | """Find the length in characters of a local string""" |
|
138 | """Find the length in characters of a local string""" | |
135 | return len(s.decode(_encoding, "replace")) |
|
139 | return len(s.decode(_encoding, "replace")) | |
136 |
|
140 | |||
137 | # used by parsedate |
|
141 | # used by parsedate | |
138 | defaultdateformats = ( |
|
142 | defaultdateformats = ( | |
139 | '%Y-%m-%d %H:%M:%S', |
|
143 | '%Y-%m-%d %H:%M:%S', | |
140 | '%Y-%m-%d %I:%M:%S%p', |
|
144 | '%Y-%m-%d %I:%M:%S%p', | |
141 | '%Y-%m-%d %H:%M', |
|
145 | '%Y-%m-%d %H:%M', | |
142 | '%Y-%m-%d %I:%M%p', |
|
146 | '%Y-%m-%d %I:%M%p', | |
143 | '%Y-%m-%d', |
|
147 | '%Y-%m-%d', | |
144 | '%m-%d', |
|
148 | '%m-%d', | |
145 | '%m/%d', |
|
149 | '%m/%d', | |
146 | '%m/%d/%y', |
|
150 | '%m/%d/%y', | |
147 | '%m/%d/%Y', |
|
151 | '%m/%d/%Y', | |
148 | '%a %b %d %H:%M:%S %Y', |
|
152 | '%a %b %d %H:%M:%S %Y', | |
149 | '%a %b %d %I:%M:%S%p %Y', |
|
153 | '%a %b %d %I:%M:%S%p %Y', | |
150 | '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" |
|
154 | '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" | |
151 | '%b %d %H:%M:%S %Y', |
|
155 | '%b %d %H:%M:%S %Y', | |
152 | '%b %d %I:%M:%S%p %Y', |
|
156 | '%b %d %I:%M:%S%p %Y', | |
153 | '%b %d %H:%M:%S', |
|
157 | '%b %d %H:%M:%S', | |
154 | '%b %d %I:%M:%S%p', |
|
158 | '%b %d %I:%M:%S%p', | |
155 | '%b %d %H:%M', |
|
159 | '%b %d %H:%M', | |
156 | '%b %d %I:%M%p', |
|
160 | '%b %d %I:%M%p', | |
157 | '%b %d %Y', |
|
161 | '%b %d %Y', | |
158 | '%b %d', |
|
162 | '%b %d', | |
159 | '%H:%M:%S', |
|
163 | '%H:%M:%S', | |
160 | '%I:%M:%SP', |
|
164 | '%I:%M:%SP', | |
161 | '%H:%M', |
|
165 | '%H:%M', | |
162 | '%I:%M%p', |
|
166 | '%I:%M%p', | |
163 | ) |
|
167 | ) | |
164 |
|
168 | |||
165 | extendeddateformats = defaultdateformats + ( |
|
169 | extendeddateformats = defaultdateformats + ( | |
166 | "%Y", |
|
170 | "%Y", | |
167 | "%Y-%m", |
|
171 | "%Y-%m", | |
168 | "%b", |
|
172 | "%b", | |
169 | "%b %Y", |
|
173 | "%b %Y", | |
170 | ) |
|
174 | ) | |
171 |
|
175 | |||
172 | class SignalInterrupt(Exception): |
|
176 | class SignalInterrupt(Exception): | |
173 | """Exception raised on SIGTERM and SIGHUP.""" |
|
177 | """Exception raised on SIGTERM and SIGHUP.""" | |
174 |
|
178 | |||
175 | # differences from SafeConfigParser: |
|
179 | # differences from SafeConfigParser: | |
176 | # - case-sensitive keys |
|
180 | # - case-sensitive keys | |
177 | # - allows values that are not strings (this means that you may not |
|
181 | # - allows values that are not strings (this means that you may not | |
178 | # be able to save the configuration to a file) |
|
182 | # be able to save the configuration to a file) | |
179 | class configparser(ConfigParser.SafeConfigParser): |
|
183 | class configparser(ConfigParser.SafeConfigParser): | |
180 | def optionxform(self, optionstr): |
|
184 | def optionxform(self, optionstr): | |
181 | return optionstr |
|
185 | return optionstr | |
182 |
|
186 | |||
183 | def set(self, section, option, value): |
|
187 | def set(self, section, option, value): | |
184 | return ConfigParser.ConfigParser.set(self, section, option, value) |
|
188 | return ConfigParser.ConfigParser.set(self, section, option, value) | |
185 |
|
189 | |||
186 | def _interpolate(self, section, option, rawval, vars): |
|
190 | def _interpolate(self, section, option, rawval, vars): | |
187 | if not isinstance(rawval, basestring): |
|
191 | if not isinstance(rawval, basestring): | |
188 | return rawval |
|
192 | return rawval | |
189 | return ConfigParser.SafeConfigParser._interpolate(self, section, |
|
193 | return ConfigParser.SafeConfigParser._interpolate(self, section, | |
190 | option, rawval, vars) |
|
194 | option, rawval, vars) | |
191 |
|
195 | |||
192 | def cachefunc(func): |
|
196 | def cachefunc(func): | |
193 | '''cache the result of function calls''' |
|
197 | '''cache the result of function calls''' | |
194 | # XXX doesn't handle keyword args |
|
198 | # XXX doesn't handle keyword args | |
195 | cache = {} |
|
199 | cache = {} | |
196 | if func.func_code.co_argcount == 1: |
|
200 | if func.func_code.co_argcount == 1: | |
197 | # we gain a small amount of time because |
|
201 | # we gain a small amount of time because | |
198 | # we don't need to pack/unpack the list |
|
202 | # we don't need to pack/unpack the list | |
199 | def f(arg): |
|
203 | def f(arg): | |
200 | if arg not in cache: |
|
204 | if arg not in cache: | |
201 | cache[arg] = func(arg) |
|
205 | cache[arg] = func(arg) | |
202 | return cache[arg] |
|
206 | return cache[arg] | |
203 | else: |
|
207 | else: | |
204 | def f(*args): |
|
208 | def f(*args): | |
205 | if args not in cache: |
|
209 | if args not in cache: | |
206 | cache[args] = func(*args) |
|
210 | cache[args] = func(*args) | |
207 | return cache[args] |
|
211 | return cache[args] | |
208 |
|
212 | |||
209 | return f |
|
213 | return f | |
210 |
|
214 | |||
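A minimal usage sketch for cachefunc above: the wrapped function is evaluated once per distinct argument and later calls are answered from the cache (assumes cachefunc is in scope).

    calls = []
    def slow_double(x):
        calls.append(x)                      # record every real evaluation
        return x * 2

    fast_double = cachefunc(slow_double)
    print fast_double(21), fast_double(21)   # 42 42
    print calls                              # [21] -- computed only once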
211 | def pipefilter(s, cmd): |
|
215 | def pipefilter(s, cmd): | |
212 | '''filter string S through command CMD, returning its output''' |
|
216 | '''filter string S through command CMD, returning its output''' | |
213 | (pin, pout) = popen2(cmd, 'b') |
|
217 | (pin, pout) = popen2(cmd, 'b') | |
214 | def writer(): |
|
218 | def writer(): | |
215 | try: |
|
219 | try: | |
216 | pin.write(s) |
|
220 | pin.write(s) | |
217 | pin.close() |
|
221 | pin.close() | |
218 | except IOError, inst: |
|
222 | except IOError, inst: | |
219 | if inst.errno != errno.EPIPE: |
|
223 | if inst.errno != errno.EPIPE: | |
220 | raise |
|
224 | raise | |
221 |
|
225 | |||
222 | # we should use select instead on UNIX, but this will work on most |
|
226 | # we should use select instead on UNIX, but this will work on most | |
223 | # systems, including Windows |
|
227 | # systems, including Windows | |
224 | w = threading.Thread(target=writer) |
|
228 | w = threading.Thread(target=writer) | |
225 | w.start() |
|
229 | w.start() | |
226 | f = pout.read() |
|
230 | f = pout.read() | |
227 | pout.close() |
|
231 | pout.close() | |
228 | w.join() |
|
232 | w.join() | |
229 | return f |
|
233 | return f | |
230 |
|
234 | |||
231 | def tempfilter(s, cmd): |
|
235 | def tempfilter(s, cmd): | |
232 | '''filter string S through a pair of temporary files with CMD. |
|
236 | '''filter string S through a pair of temporary files with CMD. | |
233 | CMD is used as a template to create the real command to be run, |
|
237 | CMD is used as a template to create the real command to be run, | |
234 | with the strings INFILE and OUTFILE replaced by the real names of |
|
238 | with the strings INFILE and OUTFILE replaced by the real names of | |
235 | the temporary files generated.''' |
|
239 | the temporary files generated.''' | |
236 | inname, outname = None, None |
|
240 | inname, outname = None, None | |
237 | try: |
|
241 | try: | |
238 | infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') |
|
242 | infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') | |
239 | fp = os.fdopen(infd, 'wb') |
|
243 | fp = os.fdopen(infd, 'wb') | |
240 | fp.write(s) |
|
244 | fp.write(s) | |
241 | fp.close() |
|
245 | fp.close() | |
242 | outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') |
|
246 | outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') | |
243 | os.close(outfd) |
|
247 | os.close(outfd) | |
244 | cmd = cmd.replace('INFILE', inname) |
|
248 | cmd = cmd.replace('INFILE', inname) | |
245 | cmd = cmd.replace('OUTFILE', outname) |
|
249 | cmd = cmd.replace('OUTFILE', outname) | |
246 | code = os.system(cmd) |
|
250 | code = os.system(cmd) | |
247 | if sys.platform == 'OpenVMS' and code & 1: |
|
251 | if sys.platform == 'OpenVMS' and code & 1: | |
248 | code = 0 |
|
252 | code = 0 | |
249 | if code: raise Abort(_("command '%s' failed: %s") % |
|
253 | if code: raise Abort(_("command '%s' failed: %s") % | |
250 | (cmd, explain_exit(code))) |
|
254 | (cmd, explain_exit(code))) | |
251 | return open(outname, 'rb').read() |
|
255 | return open(outname, 'rb').read() | |
252 | finally: |
|
256 | finally: | |
253 | try: |
|
257 | try: | |
254 | if inname: os.unlink(inname) |
|
258 | if inname: os.unlink(inname) | |
255 | except: pass |
|
259 | except: pass | |
256 | try: |
|
260 | try: | |
257 | if outname: os.unlink(outname) |
|
261 | if outname: os.unlink(outname) | |
258 | except: pass |
|
262 | except: pass | |
259 |
|
263 | |||
260 | filtertable = { |
|
264 | filtertable = { | |
261 | 'tempfile:': tempfilter, |
|
265 | 'tempfile:': tempfilter, | |
262 | 'pipe:': pipefilter, |
|
266 | 'pipe:': pipefilter, | |
263 | } |
|
267 | } | |
264 |
|
268 | |||
265 | def filter(s, cmd): |
|
269 | def filter(s, cmd): | |
266 | "filter a string through a command that transforms its input to its output" |
|
270 | "filter a string through a command that transforms its input to its output" | |
267 | for name, fn in filtertable.iteritems(): |
|
271 | for name, fn in filtertable.iteritems(): | |
268 | if cmd.startswith(name): |
|
272 | if cmd.startswith(name): | |
269 | return fn(s, cmd[len(name):].lstrip()) |
|
273 | return fn(s, cmd[len(name):].lstrip()) | |
270 | return pipefilter(s, cmd) |
|
274 | return pipefilter(s, cmd) | |
271 |
|
275 | |||
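How the filter table above is selected by prefix, sketched with ordinary POSIX commands; any shell filter that reads stdin or INFILE and writes stdout or OUTFILE would do (assumes filter is in scope).

    print filter('b\na\n', 'pipe: sort')       # explicit pipefilter
    print filter('hi\n', 'tempfile: tr a-z A-Z < INFILE > OUTFILE')
    print filter('x\ny\n', 'wc -l')            # no recognised prefix -> pipefilter by default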
272 | def binary(s): |
|
276 | def binary(s): | |
273 | """return true if a string is binary data""" |
|
277 | """return true if a string is binary data""" | |
274 | if s and '\0' in s: |
|
278 | if s and '\0' in s: | |
275 | return True |
|
279 | return True | |
276 | return False |
|
280 | return False | |
277 |
|
281 | |||
278 | def unique(g): |
|
282 | def unique(g): | |
279 | """return the uniq elements of iterable g""" |
|
283 | """return the uniq elements of iterable g""" | |
280 | return dict.fromkeys(g).keys() |
|
284 | return dict.fromkeys(g).keys() | |
281 |
|
285 | |||
282 | def sort(l): |
|
286 | def sort(l): | |
283 | if not isinstance(l, list): |
|
287 | if not isinstance(l, list): | |
284 | l = list(l) |
|
288 | l = list(l) | |
285 | l.sort() |
|
289 | l.sort() | |
286 | return l |
|
290 | return l | |
287 |
|
291 | |||
288 | class Abort(Exception): |
|
292 | class Abort(Exception): | |
289 | """Raised if a command needs to print an error and exit.""" |
|
293 | """Raised if a command needs to print an error and exit.""" | |
290 |
|
294 | |||
291 | class UnexpectedOutput(Abort): |
|
295 | class UnexpectedOutput(Abort): | |
292 | """Raised to print an error with part of output and exit.""" |
|
296 | """Raised to print an error with part of output and exit.""" | |
293 |
|
297 | |||
294 | def always(fn): return True |
|
298 | def always(fn): return True | |
295 | def never(fn): return False |
|
299 | def never(fn): return False | |
296 |
|
300 | |||
297 | def expand_glob(pats): |
|
301 | def expand_glob(pats): | |
298 | '''On Windows, expand the implicit globs in a list of patterns''' |
|
302 | '''On Windows, expand the implicit globs in a list of patterns''' | |
299 | if os.name != 'nt': |
|
303 | if os.name != 'nt': | |
300 | return list(pats) |
|
304 | return list(pats) | |
301 | ret = [] |
|
305 | ret = [] | |
302 | for p in pats: |
|
306 | for p in pats: | |
303 | kind, name = patkind(p, None) |
|
307 | kind, name = patkind(p, None) | |
304 | if kind is None: |
|
308 | if kind is None: | |
305 | globbed = glob.glob(name) |
|
309 | globbed = glob.glob(name) | |
306 | if globbed: |
|
310 | if globbed: | |
307 | ret.extend(globbed) |
|
311 | ret.extend(globbed) | |
308 | continue |
|
312 | continue | |
309 | # if we couldn't expand the glob, just keep it around |
|
313 | # if we couldn't expand the glob, just keep it around | |
310 | ret.append(p) |
|
314 | ret.append(p) | |
311 | return ret |
|
315 | return ret | |
312 |
|
316 | |||
313 | def patkind(name, default): |
|
317 | def patkind(name, default): | |
314 | """Split a string into an optional pattern kind prefix and the |
|
318 | """Split a string into an optional pattern kind prefix and the | |
315 | actual pattern.""" |
|
319 | actual pattern.""" | |
316 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': |
|
320 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': | |
317 | if name.startswith(prefix + ':'): return name.split(':', 1) |
|
321 | if name.startswith(prefix + ':'): return name.split(':', 1) | |
318 | return default, name |
|
322 | return default, name | |
319 |
|
323 | |||
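patkind simply splits off a recognised kind prefix; anything else falls back to the supplied default, as this small sketch shows (assumes patkind is in scope).

    print patkind('glob:*.py', None)    # ['glob', '*.py']
    print patkind('re:^foo$', None)     # ['re', '^foo$']
    print patkind('README', 'glob')     # ('glob', 'README') -- no prefix, so the default kind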
320 | def globre(pat, head='^', tail='$'): |
|
324 | def globre(pat, head='^', tail='$'): | |
321 | "convert a glob pattern into a regexp" |
|
325 | "convert a glob pattern into a regexp" | |
322 | i, n = 0, len(pat) |
|
326 | i, n = 0, len(pat) | |
323 | res = '' |
|
327 | res = '' | |
324 | group = 0 |
|
328 | group = 0 | |
325 | def peek(): return i < n and pat[i] |
|
329 | def peek(): return i < n and pat[i] | |
326 | while i < n: |
|
330 | while i < n: | |
327 | c = pat[i] |
|
331 | c = pat[i] | |
328 | i = i+1 |
|
332 | i = i+1 | |
329 | if c == '*': |
|
333 | if c == '*': | |
330 | if peek() == '*': |
|
334 | if peek() == '*': | |
331 | i += 1 |
|
335 | i += 1 | |
332 | res += '.*' |
|
336 | res += '.*' | |
333 | else: |
|
337 | else: | |
334 | res += '[^/]*' |
|
338 | res += '[^/]*' | |
335 | elif c == '?': |
|
339 | elif c == '?': | |
336 | res += '.' |
|
340 | res += '.' | |
337 | elif c == '[': |
|
341 | elif c == '[': | |
338 | j = i |
|
342 | j = i | |
339 | if j < n and pat[j] in '!]': |
|
343 | if j < n and pat[j] in '!]': | |
340 | j += 1 |
|
344 | j += 1 | |
341 | while j < n and pat[j] != ']': |
|
345 | while j < n and pat[j] != ']': | |
342 | j += 1 |
|
346 | j += 1 | |
343 | if j >= n: |
|
347 | if j >= n: | |
344 | res += '\\[' |
|
348 | res += '\\[' | |
345 | else: |
|
349 | else: | |
346 | stuff = pat[i:j].replace('\\','\\\\') |
|
350 | stuff = pat[i:j].replace('\\','\\\\') | |
347 | i = j + 1 |
|
351 | i = j + 1 | |
348 | if stuff[0] == '!': |
|
352 | if stuff[0] == '!': | |
349 | stuff = '^' + stuff[1:] |
|
353 | stuff = '^' + stuff[1:] | |
350 | elif stuff[0] == '^': |
|
354 | elif stuff[0] == '^': | |
351 | stuff = '\\' + stuff |
|
355 | stuff = '\\' + stuff | |
352 | res = '%s[%s]' % (res, stuff) |
|
356 | res = '%s[%s]' % (res, stuff) | |
353 | elif c == '{': |
|
357 | elif c == '{': | |
354 | group += 1 |
|
358 | group += 1 | |
355 | res += '(?:' |
|
359 | res += '(?:' | |
356 | elif c == '}' and group: |
|
360 | elif c == '}' and group: | |
357 | res += ')' |
|
361 | res += ')' | |
358 | group -= 1 |
|
362 | group -= 1 | |
359 | elif c == ',' and group: |
|
363 | elif c == ',' and group: | |
360 | res += '|' |
|
364 | res += '|' | |
361 | elif c == '\\': |
|
365 | elif c == '\\': | |
362 | p = peek() |
|
366 | p = peek() | |
363 | if p: |
|
367 | if p: | |
364 | i += 1 |
|
368 | i += 1 | |
365 | res += re.escape(p) |
|
369 | res += re.escape(p) | |
366 | else: |
|
370 | else: | |
367 | res += re.escape(c) |
|
371 | res += re.escape(c) | |
368 | else: |
|
372 | else: | |
369 | res += re.escape(c) |
|
373 | res += re.escape(c) | |
370 | return head + res + tail |
|
374 | return head + res + tail | |
371 |
|
375 | |||
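A sketch of the glob-to-regexp translation above: '*' stays within one path component, '**' crosses '/', and '{a,b}' becomes an alternation (assumes globre is in scope).

    import re
    print globre('*.py')                                        # ^[^/]*\.py$
    print bool(re.match(globre('foo/{a,b}.txt'), 'foo/b.txt'))  # True
    print bool(re.match(globre('**.c'), 'src/x.c'))             # True -- '**' matches across '/'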
372 | _globchars = {'[': 1, '{': 1, '*': 1, '?': 1} |
|
376 | _globchars = {'[': 1, '{': 1, '*': 1, '?': 1} | |
373 |
|
377 | |||
374 | def pathto(root, n1, n2): |
|
378 | def pathto(root, n1, n2): | |
375 | '''return the relative path from one place to another. |
|
379 | '''return the relative path from one place to another. | |
376 | root should use os.sep to separate directories |
|
380 | root should use os.sep to separate directories | |
377 | n1 should use os.sep to separate directories |
|
381 | n1 should use os.sep to separate directories | |
378 | n2 should use "/" to separate directories |
|
382 | n2 should use "/" to separate directories | |
379 | returns an os.sep-separated path. |
|
383 | returns an os.sep-separated path. | |
380 |
|
384 | |||
381 | If n1 is a relative path, it's assumed it's |
|
385 | If n1 is a relative path, it's assumed it's | |
382 | relative to root. |
|
386 | relative to root. | |
383 | n2 should always be relative to root. |
|
387 | n2 should always be relative to root. | |
384 | ''' |
|
388 | ''' | |
385 | if not n1: return localpath(n2) |
|
389 | if not n1: return localpath(n2) | |
386 | if os.path.isabs(n1): |
|
390 | if os.path.isabs(n1): | |
387 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: |
|
391 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: | |
388 | return os.path.join(root, localpath(n2)) |
|
392 | return os.path.join(root, localpath(n2)) | |
389 | n2 = '/'.join((pconvert(root), n2)) |
|
393 | n2 = '/'.join((pconvert(root), n2)) | |
390 | a, b = splitpath(n1), n2.split('/') |
|
394 | a, b = splitpath(n1), n2.split('/') | |
391 | a.reverse() |
|
395 | a.reverse() | |
392 | b.reverse() |
|
396 | b.reverse() | |
393 | while a and b and a[-1] == b[-1]: |
|
397 | while a and b and a[-1] == b[-1]: | |
394 | a.pop() |
|
398 | a.pop() | |
395 | b.pop() |
|
399 | b.pop() | |
396 | b.reverse() |
|
400 | b.reverse() | |
397 | return os.sep.join((['..'] * len(a)) + b) or '.' |
|
401 | return os.sep.join((['..'] * len(a)) + b) or '.' | |
398 |
|
402 | |||
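A POSIX-flavoured sketch of pathto above (os.sep == '/'): n1 is where we currently are inside the tree, n2 is the target relative to root, and the result climbs with '..' as needed. Paths are made up for illustration.

    print pathto('/repo', 'src/lib', 'doc/README')   # ../../doc/README
    print pathto('/repo', 'src', 'src/util.py')      # util.py
    print pathto('/repo', '', 'doc/README')          # doc/README -- empty n1 means localpath(n2)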
399 | def canonpath(root, cwd, myname): |
|
403 | def canonpath(root, cwd, myname): | |
400 | """return the canonical path of myname, given cwd and root""" |
|
404 | """return the canonical path of myname, given cwd and root""" | |
401 | if root == os.sep: |
|
405 | if root == os.sep: | |
402 | rootsep = os.sep |
|
406 | rootsep = os.sep | |
403 | elif endswithsep(root): |
|
407 | elif endswithsep(root): | |
404 | rootsep = root |
|
408 | rootsep = root | |
405 | else: |
|
409 | else: | |
406 | rootsep = root + os.sep |
|
410 | rootsep = root + os.sep | |
407 | name = myname |
|
411 | name = myname | |
408 | if not os.path.isabs(name): |
|
412 | if not os.path.isabs(name): | |
409 | name = os.path.join(root, cwd, name) |
|
413 | name = os.path.join(root, cwd, name) | |
410 | name = os.path.normpath(name) |
|
414 | name = os.path.normpath(name) | |
411 | audit_path = path_auditor(root) |
|
415 | audit_path = path_auditor(root) | |
412 | if name != rootsep and name.startswith(rootsep): |
|
416 | if name != rootsep and name.startswith(rootsep): | |
413 | name = name[len(rootsep):] |
|
417 | name = name[len(rootsep):] | |
414 | audit_path(name) |
|
418 | audit_path(name) | |
415 | return pconvert(name) |
|
419 | return pconvert(name) | |
416 | elif name == root: |
|
420 | elif name == root: | |
417 | return '' |
|
421 | return '' | |
418 | else: |
|
422 | else: | |
419 | # Determine whether `name' is in the hierarchy at or beneath `root', |
|
423 | # Determine whether `name' is in the hierarchy at or beneath `root', | |
420 | # by iterating name=dirname(name) until that causes no change (can't |
|
424 | # by iterating name=dirname(name) until that causes no change (can't | |
421 | # check name == '/', because that doesn't work on windows). For each |
|
425 | # check name == '/', because that doesn't work on windows). For each | |
422 | # `name', compare dev/inode numbers. If they match, the list `rel' |
|
426 | # `name', compare dev/inode numbers. If they match, the list `rel' | |
423 | # holds the reversed list of components making up the relative file |
|
427 | # holds the reversed list of components making up the relative file | |
424 | # name we want. |
|
428 | # name we want. | |
425 | root_st = os.stat(root) |
|
429 | root_st = os.stat(root) | |
426 | rel = [] |
|
430 | rel = [] | |
427 | while True: |
|
431 | while True: | |
428 | try: |
|
432 | try: | |
429 | name_st = os.stat(name) |
|
433 | name_st = os.stat(name) | |
430 | except OSError: |
|
434 | except OSError: | |
431 | break |
|
435 | break | |
432 | if samestat(name_st, root_st): |
|
436 | if samestat(name_st, root_st): | |
433 | if not rel: |
|
437 | if not rel: | |
434 | # name was actually the same as root (maybe a symlink) |
|
438 | # name was actually the same as root (maybe a symlink) | |
435 | return '' |
|
439 | return '' | |
436 | rel.reverse() |
|
440 | rel.reverse() | |
437 | name = os.path.join(*rel) |
|
441 | name = os.path.join(*rel) | |
438 | audit_path(name) |
|
442 | audit_path(name) | |
439 | return pconvert(name) |
|
443 | return pconvert(name) | |
440 | dirname, basename = os.path.split(name) |
|
444 | dirname, basename = os.path.split(name) | |
441 | rel.append(basename) |
|
445 | rel.append(basename) | |
442 | if dirname == name: |
|
446 | if dirname == name: | |
443 | break |
|
447 | break | |
444 | name = dirname |
|
448 | name = dirname | |
445 |
|
449 | |||
446 | raise Abort('%s not under root' % myname) |
|
450 | raise Abort('%s not under root' % myname) | |
447 |
|
451 | |||
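canonpath in a nutshell, assuming a POSIX layout and an existing /repo directory used purely for illustration: the result is always expressed relative to root, and names that escape the root raise Abort.

    print canonpath('/repo', 'src', 'util.py')        # src/util.py
    print canonpath('/repo', '', '/repo/doc/x.txt')   # doc/x.txt
    canonpath('/repo', '', '/etc/passwd')             # raises Abort: not under root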
448 | def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'): |
|
452 | def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'): | |
449 | """build a function to match a set of file patterns |
|
453 | """build a function to match a set of file patterns | |
450 |
|
454 | |||
451 | arguments: |
|
455 | arguments: | |
452 | canonroot - the canonical root of the tree you're matching against |
|
456 | canonroot - the canonical root of the tree you're matching against | |
453 | cwd - the current working directory, if relevant |
|
457 | cwd - the current working directory, if relevant | |
454 | names - patterns to find |
|
458 | names - patterns to find | |
455 | inc - patterns to include |
|
459 | inc - patterns to include | |
456 | exc - patterns to exclude |
|
460 | exc - patterns to exclude | |
457 | dflt_pat - if a pattern in names has no explicit type, assume this one |
|
461 | dflt_pat - if a pattern in names has no explicit type, assume this one | |
458 | src - where these patterns came from (e.g. .hgignore) |
|
462 | src - where these patterns came from (e.g. .hgignore) | |
459 |
|
463 | |||
460 | a pattern is one of: |
|
464 | a pattern is one of: | |
461 | 'glob:<glob>' - a glob relative to cwd |
|
465 | 'glob:<glob>' - a glob relative to cwd | |
462 | 're:<regexp>' - a regular expression |
|
466 | 're:<regexp>' - a regular expression | |
463 | 'path:<path>' - a path relative to canonroot |
|
467 | 'path:<path>' - a path relative to canonroot | |
464 | 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs) |
|
468 | 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs) | |
465 | 'relpath:<path>' - a path relative to cwd |
|
469 | 'relpath:<path>' - a path relative to cwd | |
466 | 'relre:<regexp>' - a regexp that doesn't have to match the start of a name |
|
470 | 'relre:<regexp>' - a regexp that doesn't have to match the start of a name | |
467 | '<something>' - one of the cases above, selected by the dflt_pat argument |
|
471 | '<something>' - one of the cases above, selected by the dflt_pat argument | |
468 |
|
472 | |||
469 | returns: |
|
473 | returns: | |
470 | a 3-tuple containing |
|
474 | a 3-tuple containing | |
471 | - list of roots (places where one should start a recursive walk of the fs); |
|
475 | - list of roots (places where one should start a recursive walk of the fs); | |
472 | this often matches the explicit non-pattern names passed in, but also |
|
476 | this often matches the explicit non-pattern names passed in, but also | |
473 | includes the initial part of glob: patterns that has no glob characters |
|
477 | includes the initial part of glob: patterns that has no glob characters | |
474 | - a bool match(filename) function |
|
478 | - a bool match(filename) function | |
475 | - a bool indicating if any patterns were passed in |
|
479 | - a bool indicating if any patterns were passed in | |
476 | """ |
|
480 | """ | |
477 |
|
481 | |||
478 | # a common case: no patterns at all |
|
482 | # a common case: no patterns at all | |
479 | if not names and not inc and not exc: |
|
483 | if not names and not inc and not exc: | |
480 | return [], always, False |
|
484 | return [], always, False | |
481 |
|
485 | |||
482 | def contains_glob(name): |
|
486 | def contains_glob(name): | |
483 | for c in name: |
|
487 | for c in name: | |
484 | if c in _globchars: return True |
|
488 | if c in _globchars: return True | |
485 | return False |
|
489 | return False | |
486 |
|
490 | |||
487 | def regex(kind, name, tail): |
|
491 | def regex(kind, name, tail): | |
488 | '''convert a pattern into a regular expression''' |
|
492 | '''convert a pattern into a regular expression''' | |
489 | if not name: |
|
493 | if not name: | |
490 | return '' |
|
494 | return '' | |
491 | if kind == 're': |
|
495 | if kind == 're': | |
492 | return name |
|
496 | return name | |
493 | elif kind == 'path': |
|
497 | elif kind == 'path': | |
494 | return '^' + re.escape(name) + '(?:/|$)' |
|
498 | return '^' + re.escape(name) + '(?:/|$)' | |
495 | elif kind == 'relglob': |
|
499 | elif kind == 'relglob': | |
496 | return globre(name, '(?:|.*/)', tail) |
|
500 | return globre(name, '(?:|.*/)', tail) | |
497 | elif kind == 'relpath': |
|
501 | elif kind == 'relpath': | |
498 | return re.escape(name) + '(?:/|$)' |
|
502 | return re.escape(name) + '(?:/|$)' | |
499 | elif kind == 'relre': |
|
503 | elif kind == 'relre': | |
500 | if name.startswith('^'): |
|
504 | if name.startswith('^'): | |
501 | return name |
|
505 | return name | |
502 | return '.*' + name |
|
506 | return '.*' + name | |
503 | return globre(name, '', tail) |
|
507 | return globre(name, '', tail) | |
504 |
|
508 | |||
505 | def matchfn(pats, tail): |
|
509 | def matchfn(pats, tail): | |
506 | """build a matching function from a set of patterns""" |
|
510 | """build a matching function from a set of patterns""" | |
507 | if not pats: |
|
511 | if not pats: | |
508 | return |
|
512 | return | |
509 | try: |
|
513 | try: | |
510 | pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats]) |
|
514 | pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats]) | |
511 | if len(pat) > 20000: |
|
515 | if len(pat) > 20000: | |
512 | raise OverflowError() |
|
516 | raise OverflowError() | |
513 | return re.compile(pat).match |
|
517 | return re.compile(pat).match | |
514 | except OverflowError: |
|
518 | except OverflowError: | |
515 | # We're using a Python with a tiny regex engine and we |
|
519 | # We're using a Python with a tiny regex engine and we | |
516 | # made it explode, so we'll divide the pattern list in two |
|
520 | # made it explode, so we'll divide the pattern list in two | |
517 | # until it works |
|
521 | # until it works | |
518 | l = len(pats) |
|
522 | l = len(pats) | |
519 | if l < 2: |
|
523 | if l < 2: | |
520 | raise |
|
524 | raise | |
521 | a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail) |
|
525 | a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail) | |
522 | return lambda s: a(s) or b(s) |
|
526 | return lambda s: a(s) or b(s) | |
523 | except re.error: |
|
527 | except re.error: | |
524 | for k, p in pats: |
|
528 | for k, p in pats: | |
525 | try: |
|
529 | try: | |
526 | re.compile('(?:%s)' % regex(k, p, tail)) |
|
530 | re.compile('(?:%s)' % regex(k, p, tail)) | |
527 | except re.error: |
|
531 | except re.error: | |
528 | if src: |
|
532 | if src: | |
529 | raise Abort("%s: invalid pattern (%s): %s" % |
|
533 | raise Abort("%s: invalid pattern (%s): %s" % | |
530 | (src, k, p)) |
|
534 | (src, k, p)) | |
531 | else: |
|
535 | else: | |
532 | raise Abort("invalid pattern (%s): %s" % (k, p)) |
|
536 | raise Abort("invalid pattern (%s): %s" % (k, p)) | |
533 | raise Abort("invalid pattern") |
|
537 | raise Abort("invalid pattern") | |
534 |
|
538 | |||
535 | def globprefix(pat): |
|
539 | def globprefix(pat): | |
536 | '''return the non-glob prefix of a path, e.g. foo/* -> foo''' |
|
540 | '''return the non-glob prefix of a path, e.g. foo/* -> foo''' | |
537 | root = [] |
|
541 | root = [] | |
538 | for p in pat.split('/'): |
|
542 | for p in pat.split('/'): | |
539 | if contains_glob(p): break |
|
543 | if contains_glob(p): break | |
540 | root.append(p) |
|
544 | root.append(p) | |
541 | return '/'.join(root) or '.' |
|
545 | return '/'.join(root) or '.' | |
542 |
|
546 | |||
543 | def normalizepats(names, default): |
|
547 | def normalizepats(names, default): | |
544 | pats = [] |
|
548 | pats = [] | |
545 | roots = [] |
|
549 | roots = [] | |
546 | anypats = False |
|
550 | anypats = False | |
547 | for kind, name in [patkind(p, default) for p in names]: |
|
551 | for kind, name in [patkind(p, default) for p in names]: | |
548 | if kind in ('glob', 'relpath'): |
|
552 | if kind in ('glob', 'relpath'): | |
549 | name = canonpath(canonroot, cwd, name) |
|
553 | name = canonpath(canonroot, cwd, name) | |
550 | elif kind in ('relglob', 'path'): |
|
554 | elif kind in ('relglob', 'path'): | |
551 | name = normpath(name) |
|
555 | name = normpath(name) | |
552 |
|
556 | |||
553 | pats.append((kind, name)) |
|
557 | pats.append((kind, name)) | |
554 |
|
558 | |||
555 | if kind in ('glob', 're', 'relglob', 'relre'): |
|
559 | if kind in ('glob', 're', 'relglob', 'relre'): | |
556 | anypats = True |
|
560 | anypats = True | |
557 |
|
561 | |||
558 | if kind == 'glob': |
|
562 | if kind == 'glob': | |
559 | root = globprefix(name) |
|
563 | root = globprefix(name) | |
560 | roots.append(root) |
|
564 | roots.append(root) | |
561 | elif kind in ('relpath', 'path'): |
|
565 | elif kind in ('relpath', 'path'): | |
562 | roots.append(name or '.') |
|
566 | roots.append(name or '.') | |
563 | elif kind == 'relglob': |
|
567 | elif kind == 'relglob': | |
564 | roots.append('.') |
|
568 | roots.append('.') | |
565 | return roots, pats, anypats |
|
569 | return roots, pats, anypats | |
566 |
|
570 | |||
567 | roots, pats, anypats = normalizepats(names, dflt_pat) |
|
571 | roots, pats, anypats = normalizepats(names, dflt_pat) | |
568 |
|
572 | |||
569 | patmatch = matchfn(pats, '$') or always |
|
573 | patmatch = matchfn(pats, '$') or always | |
570 | incmatch = always |
|
574 | incmatch = always | |
571 | if inc: |
|
575 | if inc: | |
572 | dummy, inckinds, dummy = normalizepats(inc, 'glob') |
|
576 | dummy, inckinds, dummy = normalizepats(inc, 'glob') | |
573 | incmatch = matchfn(inckinds, '(?:/|$)') |
|
577 | incmatch = matchfn(inckinds, '(?:/|$)') | |
574 | excmatch = lambda fn: False |
|
578 | excmatch = lambda fn: False | |
575 | if exc: |
|
579 | if exc: | |
576 | dummy, exckinds, dummy = normalizepats(exc, 'glob') |
|
580 | dummy, exckinds, dummy = normalizepats(exc, 'glob') | |
577 | excmatch = matchfn(exckinds, '(?:/|$)') |
|
581 | excmatch = matchfn(exckinds, '(?:/|$)') | |
578 |
|
582 | |||
579 | if not names and inc and not exc: |
|
583 | if not names and inc and not exc: | |
580 | # common case: hgignore patterns |
|
584 | # common case: hgignore patterns | |
581 | match = incmatch |
|
585 | match = incmatch | |
582 | else: |
|
586 | else: | |
583 | match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn) |
|
587 | match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn) | |
584 |
|
588 | |||
585 | return (roots, match, (inc or exc or anypats) and True) |
|
589 | return (roots, match, (inc or exc or anypats) and True) | |
586 |
|
590 | |||
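A sketch of the 3-tuple the matcher docstring above describes, using a glob name pattern and a regexp exclude; the repository path and patterns are made up for illustration.

    roots, match, anypats = matcher('/repo', '', ['glob:src/*.py'],
                                    exc=[r're:.*_test\.py$'])
    print roots                             # ['src'] -- non-glob prefix of the glob pattern
    print bool(match('src/util.py'))        # True
    print bool(match('src/util_test.py'))   # False, excluded
    print bool(match('doc/README'))         # False, never matched the name pattern
    print anypats                           # True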
587 | _hgexecutable = None |
|
591 | _hgexecutable = None | |
588 |
|
592 | |||
589 | def main_is_frozen(): |
|
593 | def main_is_frozen(): | |
590 | """return True if we are a frozen executable. |
|
594 | """return True if we are a frozen executable. | |
591 |
|
595 | |||
592 | The code supports py2exe (most common, Windows only) and tools/freeze |
|
596 | The code supports py2exe (most common, Windows only) and tools/freeze | |
593 | (portable, not much used). |
|
597 | (portable, not much used). | |
594 | """ |
|
598 | """ | |
595 | return (hasattr(sys, "frozen") or # new py2exe |
|
599 | return (hasattr(sys, "frozen") or # new py2exe | |
596 | hasattr(sys, "importers") or # old py2exe |
|
600 | hasattr(sys, "importers") or # old py2exe | |
597 | imp.is_frozen("__main__")) # tools/freeze |
|
601 | imp.is_frozen("__main__")) # tools/freeze | |
598 |
|
602 | |||
599 | def hgexecutable(): |
|
603 | def hgexecutable(): | |
600 | """return location of the 'hg' executable. |
|
604 | """return location of the 'hg' executable. | |
601 |
|
605 | |||
602 | Defaults to $HG or 'hg' in the search path. |
|
606 | Defaults to $HG or 'hg' in the search path. | |
603 | """ |
|
607 | """ | |
604 | if _hgexecutable is None: |
|
608 | if _hgexecutable is None: | |
605 | hg = os.environ.get('HG') |
|
609 | hg = os.environ.get('HG') | |
606 | if hg: |
|
610 | if hg: | |
607 | set_hgexecutable(hg) |
|
611 | set_hgexecutable(hg) | |
608 | elif main_is_frozen(): |
|
612 | elif main_is_frozen(): | |
609 | set_hgexecutable(sys.executable) |
|
613 | set_hgexecutable(sys.executable) | |
610 | else: |
|
614 | else: | |
611 | set_hgexecutable(find_exe('hg', 'hg')) |
|
615 | set_hgexecutable(find_exe('hg', 'hg')) | |
612 | return _hgexecutable |
|
616 | return _hgexecutable | |
613 |
|
617 | |||
614 | def set_hgexecutable(path): |
|
618 | def set_hgexecutable(path): | |
615 | """set location of the 'hg' executable""" |
|
619 | """set location of the 'hg' executable""" | |
616 | global _hgexecutable |
|
620 | global _hgexecutable | |
617 | _hgexecutable = path |
|
621 | _hgexecutable = path | |
618 |
|
622 | |||
619 | def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None): |
|
623 | def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None): | |
620 | '''enhanced shell command execution. |
|
624 | '''enhanced shell command execution. | |
621 | run with environment maybe modified, maybe in different dir. |
|
625 | run with environment maybe modified, maybe in different dir. | |
622 |
|
626 | |||
623 | if command fails and onerr is None, return status. if onerr is a ui object, |
|
627 | if command fails and onerr is None, return status. if onerr is a ui object, | |
624 | print error message and return status, else raise onerr object as |
|
628 | print error message and return status, else raise onerr object as | |
625 | exception.''' |
|
629 | exception.''' | |
626 | def py2shell(val): |
|
630 | def py2shell(val): | |
627 | 'convert a python object into a string that is useful to the shell' |
|
631 | 'convert a python object into a string that is useful to the shell' | |
628 | if val in (None, False): |
|
632 | if val in (None, False): | |
629 | return '0' |
|
633 | return '0' | |
630 | if val == True: |
|
634 | if val == True: | |
631 | return '1' |
|
635 | return '1' | |
632 | return str(val) |
|
636 | return str(val) | |
633 | oldenv = {} |
|
637 | oldenv = {} | |
634 | for k in environ: |
|
638 | for k in environ: | |
635 | oldenv[k] = os.environ.get(k) |
|
639 | oldenv[k] = os.environ.get(k) | |
636 | if cwd is not None: |
|
640 | if cwd is not None: | |
637 | oldcwd = os.getcwd() |
|
641 | oldcwd = os.getcwd() | |
638 | origcmd = cmd |
|
642 | origcmd = cmd | |
639 | if os.name == 'nt': |
|
643 | if os.name == 'nt': | |
640 | cmd = '"%s"' % cmd |
|
644 | cmd = '"%s"' % cmd | |
641 | try: |
|
645 | try: | |
642 | for k, v in environ.iteritems(): |
|
646 | for k, v in environ.iteritems(): | |
643 | os.environ[k] = py2shell(v) |
|
647 | os.environ[k] = py2shell(v) | |
644 | os.environ['HG'] = hgexecutable() |
|
648 | os.environ['HG'] = hgexecutable() | |
645 | if cwd is not None and oldcwd != cwd: |
|
649 | if cwd is not None and oldcwd != cwd: | |
646 | os.chdir(cwd) |
|
650 | os.chdir(cwd) | |
647 | rc = os.system(cmd) |
|
651 | rc = os.system(cmd) | |
648 | if sys.platform == 'OpenVMS' and rc & 1: |
|
652 | if sys.platform == 'OpenVMS' and rc & 1: | |
649 | rc = 0 |
|
653 | rc = 0 | |
650 | if rc and onerr: |
|
654 | if rc and onerr: | |
651 | errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), |
|
655 | errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), | |
652 | explain_exit(rc)[0]) |
|
656 | explain_exit(rc)[0]) | |
653 | if errprefix: |
|
657 | if errprefix: | |
654 | errmsg = '%s: %s' % (errprefix, errmsg) |
|
658 | errmsg = '%s: %s' % (errprefix, errmsg) | |
655 | try: |
|
659 | try: | |
656 | onerr.warn(errmsg + '\n') |
|
660 | onerr.warn(errmsg + '\n') | |
657 | except AttributeError: |
|
661 | except AttributeError: | |
658 | raise onerr(errmsg) |
|
662 | raise onerr(errmsg) | |
659 | return rc |
|
663 | return rc | |
660 | finally: |
|
664 | finally: | |
661 | for k, v in oldenv.iteritems(): |
|
665 | for k, v in oldenv.iteritems(): | |
662 | if v is None: |
|
666 | if v is None: | |
663 | del os.environ[k] |
|
667 | del os.environ[k] | |
664 | else: |
|
668 | else: | |
665 | os.environ[k] = v |
|
669 | os.environ[k] = v | |
666 | if cwd is not None and oldcwd != cwd: |
|
670 | if cwd is not None and oldcwd != cwd: | |
667 | os.chdir(oldcwd) |
|
671 | os.chdir(oldcwd) | |
668 |
|
672 | |||
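Two ways of calling system() above, as a sketch: with onerr left as None the exit status is simply returned, while passing an exception class (or a ui object) turns a failure into an error report. The command strings and the working directory are illustrative only.

    rc = system('grep -q TODO util.py', environ={'LC_ALL': 'C'}, cwd='/repo')
    if rc:
        print 'grep exited with status', rc

    system('false', onerr=Abort, errprefix='example')   # raises Abort('example: ...')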
669 | # os.path.lexists is not available on python2.3 |
|
673 | # os.path.lexists is not available on python2.3 | |
670 | def lexists(filename): |
|
674 | def lexists(filename): | |
671 | "test whether a file with this name exists. does not follow symlinks" |
|
675 | "test whether a file with this name exists. does not follow symlinks" | |
672 | try: |
|
676 | try: | |
673 | os.lstat(filename) |
|
677 | os.lstat(filename) | |
674 | except: |
|
678 | except: | |
675 | return False |
|
679 | return False | |
676 | return True |
|
680 | return True | |
677 |
|
681 | |||
678 | def rename(src, dst): |
|
682 | def rename(src, dst): | |
679 | """forcibly rename a file""" |
|
683 | """forcibly rename a file""" | |
680 | try: |
|
684 | try: | |
681 | os.rename(src, dst) |
|
685 | os.rename(src, dst) | |
682 | except OSError, err: # FIXME: check err (EEXIST ?) |
|
686 | except OSError, err: # FIXME: check err (EEXIST ?) | |
683 | # on windows, rename to existing file is not allowed, so we |
|
687 | # on windows, rename to existing file is not allowed, so we | |
684 | # must delete destination first. but if file is open, unlink |
|
688 | # must delete destination first. but if file is open, unlink | |
685 | # schedules it for delete but does not delete it. rename |
|
689 | # schedules it for delete but does not delete it. rename | |
686 | # happens immediately even for open files, so we create |
|
690 | # happens immediately even for open files, so we create | |
687 | # temporary file, delete it, rename destination to that name, |
|
691 | # temporary file, delete it, rename destination to that name, | |
688 | # then delete that. then rename is safe to do. |
|
692 | # then delete that. then rename is safe to do. | |
689 | fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.') |
|
693 | fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.') | |
690 | os.close(fd) |
|
694 | os.close(fd) | |
691 | os.unlink(temp) |
|
695 | os.unlink(temp) | |
692 | os.rename(dst, temp) |
|
696 | os.rename(dst, temp) | |
693 | os.unlink(temp) |
|
697 | os.unlink(temp) | |
694 | os.rename(src, dst) |
|
698 | os.rename(src, dst) | |
695 |
|
699 | |||
696 | def unlink(f): |
|
700 | def unlink(f): | |
697 | """unlink and remove the directory if it is empty""" |
|
701 | """unlink and remove the directory if it is empty""" | |
698 | os.unlink(f) |
|
702 | os.unlink(f) | |
699 | # try removing directories that might now be empty |
|
703 | # try removing directories that might now be empty | |
700 | try: |
|
704 | try: | |
701 | os.removedirs(os.path.dirname(f)) |
|
705 | os.removedirs(os.path.dirname(f)) | |
702 | except OSError: |
|
706 | except OSError: | |
703 | pass |
|
707 | pass | |
704 |
|
708 | |||
705 | def copyfile(src, dest): |
|
709 | def copyfile(src, dest): | |
706 | "copy a file, preserving mode" |
|
710 | "copy a file, preserving mode" | |
707 | if os.path.islink(src): |
|
711 | if os.path.islink(src): | |
708 | try: |
|
712 | try: | |
709 | os.unlink(dest) |
|
713 | os.unlink(dest) | |
710 | except: |
|
714 | except: | |
711 | pass |
|
715 | pass | |
712 | os.symlink(os.readlink(src), dest) |
|
716 | os.symlink(os.readlink(src), dest) | |
713 | else: |
|
717 | else: | |
714 | try: |
|
718 | try: | |
715 | shutil.copyfile(src, dest) |
|
719 | shutil.copyfile(src, dest) | |
716 | shutil.copymode(src, dest) |
|
720 | shutil.copymode(src, dest) | |
717 | except shutil.Error, inst: |
|
721 | except shutil.Error, inst: | |
718 | raise Abort(str(inst)) |
|
722 | raise Abort(str(inst)) | |
719 |
|
723 | |||
720 | def copyfiles(src, dst, hardlink=None): |
|
724 | def copyfiles(src, dst, hardlink=None): | |
721 | """Copy a directory tree using hardlinks if possible""" |
|
725 | """Copy a directory tree using hardlinks if possible""" | |
722 |
|
726 | |||
723 | if hardlink is None: |
|
727 | if hardlink is None: | |
724 | hardlink = (os.stat(src).st_dev == |
|
728 | hardlink = (os.stat(src).st_dev == | |
725 | os.stat(os.path.dirname(dst)).st_dev) |
|
729 | os.stat(os.path.dirname(dst)).st_dev) | |
726 |
|
730 | |||
727 | if os.path.isdir(src): |
|
731 | if os.path.isdir(src): | |
728 | os.mkdir(dst) |
|
732 | os.mkdir(dst) | |
729 | for name, kind in osutil.listdir(src): |
|
733 | for name, kind in osutil.listdir(src): | |
730 | srcname = os.path.join(src, name) |
|
734 | srcname = os.path.join(src, name) | |
731 | dstname = os.path.join(dst, name) |
|
735 | dstname = os.path.join(dst, name) | |
732 | copyfiles(srcname, dstname, hardlink) |
|
736 | copyfiles(srcname, dstname, hardlink) | |
733 | else: |
|
737 | else: | |
734 | if hardlink: |
|
738 | if hardlink: | |
735 | try: |
|
739 | try: | |
736 | os_link(src, dst) |
|
740 | os_link(src, dst) | |
737 | except (IOError, OSError): |
|
741 | except (IOError, OSError): | |
738 | hardlink = False |
|
742 | hardlink = False | |
739 | shutil.copy(src, dst) |
|
743 | shutil.copy(src, dst) | |
740 | else: |
|
744 | else: | |
741 | shutil.copy(src, dst) |
|
745 | shutil.copy(src, dst) | |
742 |
|
746 | |||
743 | class path_auditor(object): |
|
747 | class path_auditor(object): | |
744 | '''ensure that a filesystem path contains no banned components. |
|
748 | '''ensure that a filesystem path contains no banned components. | |
745 | the following properties of a path are checked: |
|
749 | the following properties of a path are checked: | |
746 |
|
750 | |||
747 | - under top-level .hg |
|
751 | - under top-level .hg | |
748 | - starts at the root of a windows drive |
|
752 | - starts at the root of a windows drive | |
749 | - contains ".." |
|
753 | - contains ".." | |
750 | - traverses a symlink (e.g. a/symlink_here/b) |
|
754 | - traverses a symlink (e.g. a/symlink_here/b) | |
751 | - inside a nested repository''' |
|
755 | - inside a nested repository''' | |
752 |
|
756 | |||
753 | def __init__(self, root): |
|
757 | def __init__(self, root): | |
754 | self.audited = set() |
|
758 | self.audited = set() | |
755 | self.auditeddir = set() |
|
759 | self.auditeddir = set() | |
756 | self.root = root |
|
760 | self.root = root | |
757 |
|
761 | |||
758 | def __call__(self, path): |
|
762 | def __call__(self, path): | |
759 | if path in self.audited: |
|
763 | if path in self.audited: | |
760 | return |
|
764 | return | |
761 | normpath = os.path.normcase(path) |
|
765 | normpath = os.path.normcase(path) | |
762 | parts = splitpath(normpath) |
|
766 | parts = splitpath(normpath) | |
763 | if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') |
|
767 | if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') | |
764 | or os.pardir in parts): |
|
768 | or os.pardir in parts): | |
765 | raise Abort(_("path contains illegal component: %s") % path) |
|
769 | raise Abort(_("path contains illegal component: %s") % path) | |
766 | def check(prefix): |
|
770 | def check(prefix): | |
767 | curpath = os.path.join(self.root, prefix) |
|
771 | curpath = os.path.join(self.root, prefix) | |
768 | try: |
|
772 | try: | |
769 | st = os.lstat(curpath) |
|
773 | st = os.lstat(curpath) | |
770 | except OSError, err: |
|
774 | except OSError, err: | |
771 | # EINVAL can be raised as invalid path syntax under win32. |
|
775 | # EINVAL can be raised as invalid path syntax under win32. | |
772 | # They must be ignored so that patterns can be checked too. |
|
776 | # They must be ignored so that patterns can be checked too. | |
773 | if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): |
|
777 | if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): | |
774 | raise |
|
778 | raise | |
775 | else: |
|
779 | else: | |
776 | if stat.S_ISLNK(st.st_mode): |
|
780 | if stat.S_ISLNK(st.st_mode): | |
777 | raise Abort(_('path %r traverses symbolic link %r') % |
|
781 | raise Abort(_('path %r traverses symbolic link %r') % | |
778 | (path, prefix)) |
|
782 | (path, prefix)) | |
779 | elif (stat.S_ISDIR(st.st_mode) and |
|
783 | elif (stat.S_ISDIR(st.st_mode) and | |
780 | os.path.isdir(os.path.join(curpath, '.hg'))): |
|
784 | os.path.isdir(os.path.join(curpath, '.hg'))): | |
781 | raise Abort(_('path %r is inside repo %r') % |
|
785 | raise Abort(_('path %r is inside repo %r') % | |
782 | (path, prefix)) |
|
786 | (path, prefix)) | |
783 | parts.pop() |
|
787 | parts.pop() | |
784 | prefixes = [] |
|
788 | prefixes = [] | |
785 | for n in range(len(parts)): |
|
789 | for n in range(len(parts)): | |
786 | prefix = os.sep.join(parts) |
|
790 | prefix = os.sep.join(parts) | |
787 | if prefix in self.auditeddir: |
|
791 | if prefix in self.auditeddir: | |
788 | break |
|
792 | break | |
789 | check(prefix) |
|
793 | check(prefix) | |
790 | prefixes.append(prefix) |
|
794 | prefixes.append(prefix) | |
791 | parts.pop() |
|
795 | parts.pop() | |
792 |
|
796 | |||
793 | self.audited.add(path) |
|
797 | self.audited.add(path) | |
794 | # only add prefixes to the cache after checking everything: we don't |
|
798 | # only add prefixes to the cache after checking everything: we don't | |
795 | # want to add "foo/bar/baz" before checking if there's a "foo/.hg" |
|
799 | # want to add "foo/bar/baz" before checking if there's a "foo/.hg" | |
796 | self.auditeddir.update(prefixes) |
|
800 | self.auditeddir.update(prefixes) | |
797 |
|
801 | |||
798 | def _makelock_file(info, pathname): |
|
802 | def _makelock_file(info, pathname): | |
799 | ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) |
|
803 | ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) | |
800 | os.write(ld, info) |
|
804 | os.write(ld, info) | |
801 | os.close(ld) |
|
805 | os.close(ld) | |
802 |
|
806 | |||
803 | def _readlock_file(pathname): |
|
807 | def _readlock_file(pathname): | |
804 | return posixfile(pathname).read() |
|
808 | return posixfile(pathname).read() | |
805 |
|
809 | |||
806 | def nlinks(pathname): |
|
810 | def nlinks(pathname): | |
807 | """Return number of hardlinks for the given file.""" |
|
811 | """Return number of hardlinks for the given file.""" | |
808 | return os.lstat(pathname).st_nlink |
|
812 | return os.lstat(pathname).st_nlink | |
809 |
|
813 | |||
810 | if hasattr(os, 'link'): |
|
814 | if hasattr(os, 'link'): | |
811 | os_link = os.link |
|
815 | os_link = os.link | |
812 | else: |
|
816 | else: | |
813 | def os_link(src, dst): |
|
817 | def os_link(src, dst): | |
814 | raise OSError(0, _("Hardlinks not supported")) |
|
818 | raise OSError(0, _("Hardlinks not supported")) | |
815 |
|
819 | |||
816 | def fstat(fp): |
|
820 | def fstat(fp): | |
817 | '''stat a file object that may not have a fileno method.''' |
|
821 | '''stat a file object that may not have a fileno method.''' | |
818 | try: |
|
822 | try: | |
819 | return os.fstat(fp.fileno()) |
|
823 | return os.fstat(fp.fileno()) | |
820 | except AttributeError: |
|
824 | except AttributeError: | |
821 | return os.stat(fp.name) |
|
825 | return os.stat(fp.name) | |
822 |
|
826 | |||
823 | posixfile = file |
|
827 | posixfile = file | |
824 |
|
828 | |||
825 | def openhardlinks(): |
|
829 | def openhardlinks(): | |
826 | '''return true if it is safe to hold open file handles to hardlinks''' |
|
830 | '''return true if it is safe to hold open file handles to hardlinks''' | |
827 | return True |
|
831 | return True | |
828 |
|
832 | |||
829 | def _statfiles(files): |
|
833 | def _statfiles(files): | |
830 | 'Stat each file in files and yield stat or None if file does not exist.' |
|
834 | 'Stat each file in files and yield stat or None if file does not exist.' | |
831 | lstat = os.lstat |
|
835 | lstat = os.lstat | |
832 | for nf in files: |
|
836 | for nf in files: | |
833 | try: |
|
837 | try: | |
834 | st = lstat(nf) |
|
838 | st = lstat(nf) | |
835 | except OSError, err: |
|
839 | except OSError, err: | |
836 | if err.errno not in (errno.ENOENT, errno.ENOTDIR): |
|
840 | if err.errno not in (errno.ENOENT, errno.ENOTDIR): | |
837 | raise |
|
841 | raise | |
838 | st = None |
|
842 | st = None | |
839 | yield st |
|
843 | yield st | |
840 |
|
844 | |||
841 | def _statfiles_clustered(files): |
|
845 | def _statfiles_clustered(files): | |
842 | '''Stat each file in files and yield stat or None if file does not exist. |
|
846 | '''Stat each file in files and yield stat or None if file does not exist. | |
843 | Cluster and cache stat per directory to minimize number of OS stat calls.''' |
|
847 | Cluster and cache stat per directory to minimize number of OS stat calls.''' | |
844 | lstat = os.lstat |
|
848 | lstat = os.lstat | |
845 | ncase = os.path.normcase |
|
849 | ncase = os.path.normcase | |
846 | sep = os.sep |
|
850 | sep = os.sep | |
847 | dircache = {} # dirname -> filename -> status | None if file does not exist |
|
851 | dircache = {} # dirname -> filename -> status | None if file does not exist | |
848 | for nf in files: |
|
852 | for nf in files: | |
849 | nf = ncase(nf) |
|
853 | nf = ncase(nf) | |
850 | pos = nf.rfind(sep) |
|
854 | pos = nf.rfind(sep) | |
851 | if pos == -1: |
|
855 | if pos == -1: | |
852 | dir, base = '.', nf |
|
856 | dir, base = '.', nf | |
853 | else: |
|
857 | else: | |
854 | dir, base = nf[:pos], nf[pos+1:] |
|
858 | dir, base = nf[:pos], nf[pos+1:] | |
855 | cache = dircache.get(dir, None) |
|
859 | cache = dircache.get(dir, None) | |
856 | if cache is None: |
|
860 | if cache is None: | |
857 | try: |
|
861 | try: | |
858 | dmap = dict([(ncase(n), s) |
|
862 | dmap = dict([(ncase(n), s) | |
859 | for n, k, s in osutil.listdir(dir, True)]) |
|
863 | for n, k, s in osutil.listdir(dir, True)]) | |
860 | except OSError, err: |
|
864 | except OSError, err: | |
861 | # handle directory not found in Python versions prior to 2.5 |
|
865 | # handle directory not found in Python versions prior to 2.5 | |
862 | # Python <= 2.4 returns native Windows code 3 in errno |
|
866 | # Python <= 2.4 returns native Windows code 3 in errno | |
863 | # Python >= 2.5 returns ENOENT and adds winerror field |
|
867 | # Python >= 2.5 returns ENOENT and adds winerror field | |
864 | if err.errno not in (3, errno.ENOENT, errno.ENOTDIR): |
|
868 | if err.errno not in (3, errno.ENOENT, errno.ENOTDIR): | |
865 | raise |
|
869 | raise | |
866 | dmap = {} |
|
870 | dmap = {} | |
867 | cache = dircache.setdefault(dir, dmap) |
|
871 | cache = dircache.setdefault(dir, dmap) | |
868 | yield cache.get(base, None) |
|
872 | yield cache.get(base, None) | |
869 |
|
873 | |||
870 | if sys.platform == 'win32': |
|
874 | if sys.platform == 'win32': | |
871 | statfiles = _statfiles_clustered |
|
875 | statfiles = _statfiles_clustered | |
872 | else: |
|
876 | else: | |
873 | statfiles = _statfiles |
|
877 | statfiles = _statfiles | |
874 |
|
878 | |||
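The two statfiles variants above yield one result per input path: a stat object, or None for a missing file. A sketch (Python 2, like the surrounding code, with an assumed module name "util" and hypothetical paths):

    import util

    paths = ['a/b.txt', 'a/c.txt', 'missing.txt']
    for path, st in zip(paths, util.statfiles(paths)):
        if st is None:
            print '%s: does not exist' % path
        else:
            print '%s: %d bytes' % (path, st.st_size)

On Windows the clustered variant issues one osutil.listdir() per directory instead of one lstat per file, which is why results are grouped and cached by dirname.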
875 | getuser_fallback = None |
|
879 | getuser_fallback = None | |
876 |
|
880 | |||
877 | def getuser(): |
|
881 | def getuser(): | |
878 | '''return name of current user''' |
|
882 | '''return name of current user''' | |
879 | try: |
|
883 | try: | |
880 | return getpass.getuser() |
|
884 | return getpass.getuser() | |
881 | except ImportError: |
|
885 | except ImportError: | |
882 | # import of pwd will fail on windows - try fallback |
|
886 | # import of pwd will fail on windows - try fallback | |
883 | if getuser_fallback: |
|
887 | if getuser_fallback: | |
884 | return getuser_fallback() |
|
888 | return getuser_fallback() | |
885 | # raised if win32api not available |
|
889 | # raised if win32api not available | |
886 | raise Abort(_('user name not available - set USERNAME ' |
|
890 | raise Abort(_('user name not available - set USERNAME ' | |
887 | 'environment variable')) |
|
891 | 'environment variable')) | |
888 |
|
892 | |||
889 | def username(uid=None): |
|
893 | def username(uid=None): | |
890 | """Return the name of the user with the given uid. |
|
894 | """Return the name of the user with the given uid. | |
891 |
|
895 | |||
892 | If uid is None, return the name of the current user.""" |
|
896 | If uid is None, return the name of the current user.""" | |
893 | try: |
|
897 | try: | |
894 | import pwd |
|
898 | import pwd | |
895 | if uid is None: |
|
899 | if uid is None: | |
896 | uid = os.getuid() |
|
900 | uid = os.getuid() | |
897 | try: |
|
901 | try: | |
898 | return pwd.getpwuid(uid)[0] |
|
902 | return pwd.getpwuid(uid)[0] | |
899 | except KeyError: |
|
903 | except KeyError: | |
900 | return str(uid) |
|
904 | return str(uid) | |
901 | except ImportError: |
|
905 | except ImportError: | |
902 | return None |
|
906 | return None | |
903 |
|
907 | |||
904 | def groupname(gid=None): |
|
908 | def groupname(gid=None): | |
905 | """Return the name of the group with the given gid. |
|
909 | """Return the name of the group with the given gid. | |
906 |
|
910 | |||
907 | If gid is None, return the name of the current group.""" |
|
911 | If gid is None, return the name of the current group.""" | |
908 | try: |
|
912 | try: | |
909 | import grp |
|
913 | import grp | |
910 | if gid is None: |
|
914 | if gid is None: | |
911 | gid = os.getgid() |
|
915 | gid = os.getgid() | |
912 | try: |
|
916 | try: | |
913 | return grp.getgrgid(gid)[0] |
|
917 | return grp.getgrgid(gid)[0] | |
914 | except KeyError: |
|
918 | except KeyError: | |
915 | return str(gid) |
|
919 | return str(gid) | |
916 | except ImportError: |
|
920 | except ImportError: | |
917 | return None |
|
921 | return None | |
918 |
|
922 | |||
919 | # File system features |
|
923 | # File system features | |
920 |
|
924 | |||
921 | def checkcase(path): |
|
925 | def checkcase(path): | |
922 | """ |
|
926 | """ | |
923 | Check whether the given path is on a case-sensitive filesystem |
|
927 | Check whether the given path is on a case-sensitive filesystem | |
924 |
|
928 | |||
925 | Requires a path (like /foo/.hg) ending with a foldable final |
|
929 | Requires a path (like /foo/.hg) ending with a foldable final | |
926 | directory component. |
|
930 | directory component. | |
927 | """ |
|
931 | """ | |
928 | s1 = os.stat(path) |
|
932 | s1 = os.stat(path) | |
929 | d, b = os.path.split(path) |
|
933 | d, b = os.path.split(path) | |
930 | p2 = os.path.join(d, b.upper()) |
|
934 | p2 = os.path.join(d, b.upper()) | |
931 | if path == p2: |
|
935 | if path == p2: | |
932 | p2 = os.path.join(d, b.lower()) |
|
936 | p2 = os.path.join(d, b.lower()) | |
933 | try: |
|
937 | try: | |
934 | s2 = os.stat(p2) |
|
938 | s2 = os.stat(p2) | |
935 | if s2 == s1: |
|
939 | if s2 == s1: | |
936 | return False |
|
940 | return False | |
937 | return True |
|
941 | return True | |
938 | except: |
|
942 | except: | |
939 | return True |
|
943 | return True | |
940 |
|
944 | |||
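checkcase reports True when the case-swapped sibling of the final component does not stat to the same file (including when that sibling does not exist), i.e. the filesystem is case-sensitive; the path itself must exist. A hedged usage sketch with an assumed repository path and module name:

    import util

    if util.checkcase('/path/to/repo/.hg'):     # '.hg' is a foldable component
        print 'case-sensitive filesystem'
    else:
        print 'case-insensitive filesystem'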
941 | _fspathcache = {} |
|
945 | _fspathcache = {} | |
942 | def fspath(name, root): |
|
946 | def fspath(name, root): | |
943 | '''Get name in the case stored in the filesystem |
|
947 | '''Get name in the case stored in the filesystem | |
944 |
|
948 | |||
945 | The name is either relative to root, or it is an absolute path starting |
|
949 | The name is either relative to root, or it is an absolute path starting | |
946 | with root. Note that this function is unnecessary, and should not be |
|
950 | with root. Note that this function is unnecessary, and should not be | |
947 | called, for case-sensitive filesystems (simply because it's expensive). |
|
951 | called, for case-sensitive filesystems (simply because it's expensive). | |
948 | ''' |
|
952 | ''' | |
949 | # If name is absolute, make it relative |
|
953 | # If name is absolute, make it relative | |
950 | if name.lower().startswith(root.lower()): |
|
954 | if name.lower().startswith(root.lower()): | |
951 | l = len(root) |
|
955 | l = len(root) | |
952 | if name[l] == os.sep or name[l] == os.altsep: |
|
956 | if name[l] == os.sep or name[l] == os.altsep: | |
953 | l = l + 1 |
|
957 | l = l + 1 | |
954 | name = name[l:] |
|
958 | name = name[l:] | |
955 |
|
959 | |||
956 | if not os.path.exists(os.path.join(root, name)): |
|
960 | if not os.path.exists(os.path.join(root, name)): | |
957 | return None |
|
961 | return None | |
958 |
|
962 | |||
959 | seps = os.sep |
|
963 | seps = os.sep | |
960 | if os.altsep: |
|
964 | if os.altsep: | |
961 | seps = seps + os.altsep |
|
965 | seps = seps + os.altsep | |
962 | # Protect backslashes. This gets silly very quickly. |
|
966 | # Protect backslashes. This gets silly very quickly. | |
963 | seps = seps.replace('\\', '\\\\') |
|
967 | seps = seps.replace('\\', '\\\\') | |
964 | pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) |
|
968 | pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) | |
965 | dir = os.path.normcase(os.path.normpath(root)) |
|
969 | dir = os.path.normcase(os.path.normpath(root)) | |
966 | result = [] |
|
970 | result = [] | |
967 | for part, sep in pattern.findall(name): |
|
971 | for part, sep in pattern.findall(name): | |
968 | if sep: |
|
972 | if sep: | |
969 | result.append(sep) |
|
973 | result.append(sep) | |
970 | continue |
|
974 | continue | |
971 |
|
975 | |||
972 | if dir not in _fspathcache: |
|
976 | if dir not in _fspathcache: | |
973 | _fspathcache[dir] = os.listdir(dir) |
|
977 | _fspathcache[dir] = os.listdir(dir) | |
974 | contents = _fspathcache[dir] |
|
978 | contents = _fspathcache[dir] | |
975 |
|
979 | |||
976 | lpart = part.lower() |
|
980 | lpart = part.lower() | |
977 | for n in contents: |
|
981 | for n in contents: | |
978 | if n.lower() == lpart: |
|
982 | if n.lower() == lpart: | |
979 | result.append(n) |
|
983 | result.append(n) | |
980 | break |
|
984 | break | |
981 | else: |
|
985 | else: | |
982 | # Cannot happen, as the file exists! |
|
986 | # Cannot happen, as the file exists! | |
983 | result.append(part) |
|
987 | result.append(part) | |
984 | dir = os.path.join(dir, lpart) |
|
988 | dir = os.path.join(dir, lpart) | |
985 |
|
989 | |||
986 | return ''.join(result) |
|
990 | return ''.join(result) | |
987 |
|
991 | |||
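A sketch of fspath on a case-insensitive filesystem; the paths and return values are illustrative only, and the module name "util" is an assumption:

    import util

    # Recover the on-disk spelling of a user-supplied name:
    util.fspath('readme.TXT', '/path/to/repo')    # e.g. 'README.txt'
    util.fspath('missing.txt', '/path/to/repo')   # None: file does not exist

Directory listings are memoized in _fspathcache, so repeated lookups under the same directory avoid extra os.listdir() calls.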
988 | def checkexec(path): |
|
992 | def checkexec(path): | |
989 | """ |
|
993 | """ | |
990 | Check whether the given path is on a filesystem with UNIX-like exec flags |
|
994 | Check whether the given path is on a filesystem with UNIX-like exec flags | |
991 |
|
995 | |||
992 | Requires a directory (like /foo/.hg) |
|
996 | Requires a directory (like /foo/.hg) | |
993 | """ |
|
997 | """ | |
994 |
|
998 | |||
995 | # VFAT on some Linux versions can flip mode but it doesn't persist |
|
999 | # VFAT on some Linux versions can flip mode but it doesn't persist | |
996 | # across a FS remount. Frequently we can detect it if files are created |
|
1000 | # across a FS remount. Frequently we can detect it if files are created | |
997 | # with the exec bit on. |
|
1001 | # with the exec bit on. | |
998 |
|
1002 | |||
999 | try: |
|
1003 | try: | |
1000 | EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
|
1004 | EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | |
1001 | fh, fn = tempfile.mkstemp("", "", path) |
|
1005 | fh, fn = tempfile.mkstemp("", "", path) | |
1002 | try: |
|
1006 | try: | |
1003 | os.close(fh) |
|
1007 | os.close(fh) | |
1004 | m = os.stat(fn).st_mode & 0777 |
|
1008 | m = os.stat(fn).st_mode & 0777 | |
1005 | new_file_has_exec = m & EXECFLAGS |
|
1009 | new_file_has_exec = m & EXECFLAGS | |
1006 | os.chmod(fn, m ^ EXECFLAGS) |
|
1010 | os.chmod(fn, m ^ EXECFLAGS) | |
1007 | exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) |
|
1011 | exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) | |
1008 | finally: |
|
1012 | finally: | |
1009 | os.unlink(fn) |
|
1013 | os.unlink(fn) | |
1010 | except (IOError, OSError): |
|
1014 | except (IOError, OSError): | |
1011 | # we don't care, the user probably won't be able to commit anyway |
|
1015 | # we don't care, the user probably won't be able to commit anyway | |
1012 | return False |
|
1016 | return False | |
1013 | return not (new_file_has_exec or exec_flags_cannot_flip) |
|
1017 | return not (new_file_has_exec or exec_flags_cannot_flip) | |
1014 |
|
1018 | |||
1015 | def checklink(path): |
|
1019 | def checklink(path): | |
1016 | """check whether the given path is on a symlink-capable filesystem""" |
|
1020 | """check whether the given path is on a symlink-capable filesystem""" | |
1017 | # mktemp is not racy because symlink creation will fail if the |
|
1021 | # mktemp is not racy because symlink creation will fail if the | |
1018 | # file already exists |
|
1022 | # file already exists | |
1019 | name = tempfile.mktemp(dir=path) |
|
1023 | name = tempfile.mktemp(dir=path) | |
1020 | try: |
|
1024 | try: | |
1021 | os.symlink(".", name) |
|
1025 | os.symlink(".", name) | |
1022 | os.unlink(name) |
|
1026 | os.unlink(name) | |
1023 | return True |
|
1027 | return True | |
1024 | except (OSError, AttributeError): |
|
1028 | except (OSError, AttributeError): | |
1025 | return False |
|
1029 | return False | |
1026 |
|
1030 | |||
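Both probes above work by creating a throwaway entry inside the given directory, so they are typically pointed at a writable directory such as the repository's .hg. A sketch with an assumed path and module name:

    import util

    repodir = '/path/to/repo/.hg'
    print 'exec bit supported:', util.checkexec(repodir)
    print 'symlinks supported:', util.checklink(repodir)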
1027 | _umask = os.umask(0) |
|
1031 | _umask = os.umask(0) | |
1028 | os.umask(_umask) |
|
1032 | os.umask(_umask) | |
1029 |
|
1033 | |||
1030 | def needbinarypatch(): |
|
1034 | def needbinarypatch(): | |
1031 | """return True if patches should be applied in binary mode by default.""" |
|
1035 | """return True if patches should be applied in binary mode by default.""" | |
1032 | return os.name == 'nt' |
|
1036 | return os.name == 'nt' | |
1033 |
|
1037 | |||
1034 | def endswithsep(path): |
|
1038 | def endswithsep(path): | |
1035 | '''Check whether path ends with os.sep or os.altsep.''' |
|
1039 | '''Check whether path ends with os.sep or os.altsep.''' | |
1036 | return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) |
|
1040 | return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) | |
1037 |
|
1041 | |||
1038 | def splitpath(path): |
|
1042 | def splitpath(path): | |
1039 | '''Split path by os.sep. |
|
1043 | '''Split path by os.sep. | |
1040 | Note that this function does not use os.altsep because it is |
|
1044 | Note that this function does not use os.altsep because it is | |
1041 | intended as a drop-in replacement for a simple "xxx.split(os.sep)". |
|
1045 | intended as a drop-in replacement for a simple "xxx.split(os.sep)". | |
1042 | It is recommended to use os.path.normpath() before using this |
|
1046 | It is recommended to use os.path.normpath() before using this | |
1043 | function if needed.''' |
|
1047 | function if needed.''' | |
1044 | return path.split(os.sep) |
|
1048 | return path.split(os.sep) | |
1045 |
|
1049 | |||
1046 | def gui(): |
|
1050 | def gui(): | |
1047 | '''Are we running in a GUI?''' |
|
1051 | '''Are we running in a GUI?''' | |
1048 | return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") |
|
1052 | return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") | |
1049 |
|
1053 | |||
1050 | def lookup_reg(key, name=None, scope=None): |
|
1054 | def lookup_reg(key, name=None, scope=None): | |
1051 | return None |
|
1055 | return None | |
1052 |
|
1056 | |||
1053 | # Platform specific variants |
|
1057 | # Platform specific variants | |
1054 | if os.name == 'nt': |
|
1058 | if os.name == 'nt': | |
1055 | import msvcrt |
|
1059 | import msvcrt | |
1056 | nulldev = 'NUL:' |
|
1060 | nulldev = 'NUL:' | |
1057 |
|
1061 | |||
1058 | class winstdout: |
|
1062 | class winstdout: | |
1059 | '''stdout on windows misbehaves if sent through a pipe''' |
|
1063 | '''stdout on windows misbehaves if sent through a pipe''' | |
1060 |
|
1064 | |||
1061 | def __init__(self, fp): |
|
1065 | def __init__(self, fp): | |
1062 | self.fp = fp |
|
1066 | self.fp = fp | |
1063 |
|
1067 | |||
1064 | def __getattr__(self, key): |
|
1068 | def __getattr__(self, key): | |
1065 | return getattr(self.fp, key) |
|
1069 | return getattr(self.fp, key) | |
1066 |
|
1070 | |||
1067 | def close(self): |
|
1071 | def close(self): | |
1068 | try: |
|
1072 | try: | |
1069 | self.fp.close() |
|
1073 | self.fp.close() | |
1070 | except: pass |
|
1074 | except: pass | |
1071 |
|
1075 | |||
1072 | def write(self, s): |
|
1076 | def write(self, s): | |
1073 | try: |
|
1077 | try: | |
1074 | # This is a workaround for the "Not enough space" error when |
|
1078 | # This is a workaround for the "Not enough space" error when | |
1075 | # writing a large amount of data to the console. |
|
1079 | # writing a large amount of data to the console. | |
1076 | limit = 16000 |
|
1080 | limit = 16000 | |
1077 | l = len(s) |
|
1081 | l = len(s) | |
1078 | start = 0 |
|
1082 | start = 0 | |
1079 | while start < l: |
|
1083 | while start < l: | |
1080 | end = start + limit |
|
1084 | end = start + limit | |
1081 | self.fp.write(s[start:end]) |
|
1085 | self.fp.write(s[start:end]) | |
1082 | start = end |
|
1086 | start = end | |
1083 | except IOError, inst: |
|
1087 | except IOError, inst: | |
1084 | if inst.errno != 0: raise |
|
1088 | if inst.errno != 0: raise | |
1085 | self.close() |
|
1089 | self.close() | |
1086 | raise IOError(errno.EPIPE, 'Broken pipe') |
|
1090 | raise IOError(errno.EPIPE, 'Broken pipe') | |
1087 |
|
1091 | |||
1088 | def flush(self): |
|
1092 | def flush(self): | |
1089 | try: |
|
1093 | try: | |
1090 | return self.fp.flush() |
|
1094 | return self.fp.flush() | |
1091 | except IOError, inst: |
|
1095 | except IOError, inst: | |
1092 | if inst.errno != errno.EINVAL: raise |
|
1096 | if inst.errno != errno.EINVAL: raise | |
1093 | self.close() |
|
1097 | self.close() | |
1094 | raise IOError(errno.EPIPE, 'Broken pipe') |
|
1098 | raise IOError(errno.EPIPE, 'Broken pipe') | |
1095 |
|
1099 | |||
1096 | sys.stdout = winstdout(sys.stdout) |
|
1100 | sys.stdout = winstdout(sys.stdout) | |
1097 |
|
1101 | |||
1098 | def _is_win_9x(): |
|
1102 | def _is_win_9x(): | |
1099 | '''return true if running on Windows 95, 98 or ME.''' |
|
1103 | '''return true if running on Windows 95, 98 or ME.''' | |
1100 | try: |
|
1104 | try: | |
1101 | return sys.getwindowsversion()[3] == 1 |
|
1105 | return sys.getwindowsversion()[3] == 1 | |
1102 | except AttributeError: |
|
1106 | except AttributeError: | |
1103 | return 'command' in os.environ.get('comspec', '') |
|
1107 | return 'command' in os.environ.get('comspec', '') | |
1104 |
|
1108 | |||
1105 | def openhardlinks(): |
|
1109 | def openhardlinks(): | |
1106 | return not _is_win_9x() and "win32api" in globals() |
|
1110 | return not _is_win_9x() and "win32api" in globals() | |
1107 |
|
1111 | |||
1108 | def system_rcpath(): |
|
1112 | def system_rcpath(): | |
1109 | try: |
|
1113 | try: | |
1110 | return system_rcpath_win32() |
|
1114 | return system_rcpath_win32() | |
1111 | except: |
|
1115 | except: | |
1112 | return [r'c:\mercurial\mercurial.ini'] |
|
1116 | return [r'c:\mercurial\mercurial.ini'] | |
1113 |
|
1117 | |||
1114 | def user_rcpath(): |
|
1118 | def user_rcpath(): | |
1115 | '''return os-specific hgrc search path to the user dir''' |
|
1119 | '''return os-specific hgrc search path to the user dir''' | |
1116 | try: |
|
1120 | try: | |
1117 | path = user_rcpath_win32() |
|
1121 | path = user_rcpath_win32() | |
1118 | except: |
|
1122 | except: | |
1119 | home = os.path.expanduser('~') |
|
1123 | home = os.path.expanduser('~') | |
1120 | path = [os.path.join(home, 'mercurial.ini'), |
|
1124 | path = [os.path.join(home, 'mercurial.ini'), | |
1121 | os.path.join(home, '.hgrc')] |
|
1125 | os.path.join(home, '.hgrc')] | |
1122 | userprofile = os.environ.get('USERPROFILE') |
|
1126 | userprofile = os.environ.get('USERPROFILE') | |
1123 | if userprofile: |
|
1127 | if userprofile: | |
1124 | path.append(os.path.join(userprofile, 'mercurial.ini')) |
|
1128 | path.append(os.path.join(userprofile, 'mercurial.ini')) | |
1125 | path.append(os.path.join(userprofile, '.hgrc')) |
|
1129 | path.append(os.path.join(userprofile, '.hgrc')) | |
1126 | return path |
|
1130 | return path | |
1127 |
|
1131 | |||
1128 | def parse_patch_output(output_line): |
|
1132 | def parse_patch_output(output_line): | |
1129 | """parses the output produced by patch and returns the file name""" |
|
1133 | """parses the output produced by patch and returns the file name""" | |
1130 | pf = output_line[14:] |
|
1134 | pf = output_line[14:] | |
1131 | if pf[0] == '`': |
|
1135 | if pf[0] == '`': | |
1132 | pf = pf[1:-1] # Remove the quotes |
|
1136 | pf = pf[1:-1] # Remove the quotes | |
1133 | return pf |
|
1137 | return pf | |
1134 |
|
1138 | |||
1135 | def sshargs(sshcmd, host, user, port): |
|
1139 | def sshargs(sshcmd, host, user, port): | |
1136 | '''Build argument list for ssh or Plink''' |
|
1140 | '''Build argument list for ssh or Plink''' | |
1137 | pflag = 'plink' in sshcmd.lower() and '-P' or '-p' |
|
1141 | pflag = 'plink' in sshcmd.lower() and '-P' or '-p' | |
1138 | args = user and ("%s@%s" % (user, host)) or host |
|
1142 | args = user and ("%s@%s" % (user, host)) or host | |
1139 | return port and ("%s %s %s" % (args, pflag, port)) or args |
|
1143 | return port and ("%s %s %s" % (args, pflag, port)) or args | |
1140 |
|
1144 | |||
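sshargs picks -P for Plink and -p for OpenSSH-style clients; a few worked examples (hypothetical host, user and port):

    sshargs('plink.exe', 'example.com', 'alice', '2222')  # 'alice@example.com -P 2222'
    sshargs('ssh', 'example.com', 'alice', '2222')        # 'alice@example.com -p 2222'
    sshargs('ssh', 'example.com', None, None)             # 'example.com'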
1141 | def testpid(pid): |
|
1145 | def testpid(pid): | |
1142 | '''return False if pid dead, True if running or not known''' |
|
1146 | '''return False if pid dead, True if running or not known''' | |
1143 | return True |
|
1147 | return True | |
1144 |
|
1148 | |||
1145 | def set_flags(f, l, x): |
|
1149 | def set_flags(f, l, x): | |
1146 | pass |
|
1150 | pass | |
1147 |
|
1151 | |||
1148 | def set_binary(fd): |
|
1152 | def set_binary(fd): | |
1149 | # When run without a console, pipes may expose an invalid |
|
1153 | # When run without a console, pipes may expose an invalid | |
1150 | # fileno(), usually set to -1. |
|
1154 | # fileno(), usually set to -1. | |
1151 | if hasattr(fd, 'fileno') and fd.fileno() >= 0: |
|
1155 | if hasattr(fd, 'fileno') and fd.fileno() >= 0: | |
1152 | msvcrt.setmode(fd.fileno(), os.O_BINARY) |
|
1156 | msvcrt.setmode(fd.fileno(), os.O_BINARY) | |
1153 |
|
1157 | |||
1154 | def pconvert(path): |
|
1158 | def pconvert(path): | |
1155 | return '/'.join(splitpath(path)) |
|
1159 | return '/'.join(splitpath(path)) | |
1156 |
|
1160 | |||
1157 | def localpath(path): |
|
1161 | def localpath(path): | |
1158 | return path.replace('/', '\\') |
|
1162 | return path.replace('/', '\\') | |
1159 |
|
1163 | |||
1160 | def normpath(path): |
|
1164 | def normpath(path): | |
1161 | return pconvert(os.path.normpath(path)) |
|
1165 | return pconvert(os.path.normpath(path)) | |
1162 |
|
1166 | |||
1163 | makelock = _makelock_file |
|
1167 | makelock = _makelock_file | |
1164 | readlock = _readlock_file |
|
1168 | readlock = _readlock_file | |
1165 |
|
1169 | |||
1166 | def samestat(s1, s2): |
|
1170 | def samestat(s1, s2): | |
1167 | return False |
|
1171 | return False | |
1168 |
|
1172 | |||
1169 | # A sequence of backslashes is special iff it precedes a double quote: |
|
1173 | # A sequence of backslashes is special iff it precedes a double quote: | |
1170 | # - if there's an even number of backslashes, the double quote is not |
|
1174 | # - if there's an even number of backslashes, the double quote is not | |
1171 | # quoted (i.e. it ends the quoted region) |
|
1175 | # quoted (i.e. it ends the quoted region) | |
1172 | # - if there's an odd number of backslashes, the double quote is quoted |
|
1176 | # - if there's an odd number of backslashes, the double quote is quoted | |
1173 | # - in both cases, every pair of backslashes is unquoted into a single |
|
1177 | # - in both cases, every pair of backslashes is unquoted into a single | |
1174 | # backslash |
|
1178 | # backslash | |
1175 | # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) |
|
1179 | # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) | |
1176 | # So, to quote a string, we must surround it in double quotes, double |
|
1180 | # So, to quote a string, we must surround it in double quotes, double | |
1177 | # the number of backslashes that precede double quotes and add another |
|
1181 | # the number of backslashes that precede double quotes and add another | |
1178 | # backslash before every double quote (being careful with the double |
|
1182 | # backslash before every double quote (being careful with the double | |
1179 | # quote we've appended to the end) |
|
1183 | # quote we've appended to the end) | |
1180 | _quotere = None |
|
1184 | _quotere = None | |
1181 | def shellquote(s): |
|
1185 | def shellquote(s): | |
1182 | global _quotere |
|
1186 | global _quotere | |
1183 | if _quotere is None: |
|
1187 | if _quotere is None: | |
1184 | _quotere = re.compile(r'(\\*)("|\\$)') |
|
1188 | _quotere = re.compile(r'(\\*)("|\\$)') | |
1185 | return '"%s"' % _quotere.sub(r'\1\1\\\2', s) |
|
1189 | return '"%s"' % _quotere.sub(r'\1\1\\\2', s) | |
1186 |
|
1190 | |||
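A couple of worked examples of the Windows quoting rule described in the comment above (results shown as Python string literals):

    shellquote('he said "hi"')   # '"he said \\"hi\\""'
    shellquote('C:\\dir\\')      # '"C:\\dir\\\\"'  (trailing backslash doubled)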
1187 | def quotecommand(cmd): |
|
1191 | def quotecommand(cmd): | |
1188 | """Build a command string suitable for os.popen* calls.""" |
|
1192 | """Build a command string suitable for os.popen* calls.""" | |
1189 | # The extra quotes are needed because popen* runs the command |
|
1193 | # The extra quotes are needed because popen* runs the command | |
1190 | # through the current COMSPEC. cmd.exe suppress enclosing quotes. |
|
1194 | # through the current COMSPEC. cmd.exe suppress enclosing quotes. | |
1191 | return '"' + cmd + '"' |
|
1195 | return '"' + cmd + '"' | |
1192 |
|
1196 | |||
1193 | def popen(command, mode='r'): |
|
1197 | def popen(command, mode='r'): | |
1194 | # Work around "popen spawned process may not write to stdout |
|
1198 | # Work around "popen spawned process may not write to stdout | |
1195 | # under windows" |
|
1199 | # under windows" | |
1196 | # http://bugs.python.org/issue1366 |
|
1200 | # http://bugs.python.org/issue1366 | |
1197 | command += " 2> %s" % nulldev |
|
1201 | command += " 2> %s" % nulldev | |
1198 | return os.popen(quotecommand(command), mode) |
|
1202 | return os.popen(quotecommand(command), mode) | |
1199 |
|
1203 | |||
1200 | def explain_exit(code): |
|
1204 | def explain_exit(code): | |
1201 | return _("exited with status %d") % code, code |
|
1205 | return _("exited with status %d") % code, code | |
1202 |
|
1206 | |||
1203 | # if you change this stub into a real check, please try to implement the |
|
1207 | # if you change this stub into a real check, please try to implement the | |
1204 | # username and groupname functions above, too. |
|
1208 | # username and groupname functions above, too. | |
1205 | def isowner(fp, st=None): |
|
1209 | def isowner(fp, st=None): | |
1206 | return True |
|
1210 | return True | |
1207 |
|
1211 | |||
1208 | def find_in_path(name, path, default=None): |
|
1212 | def find_in_path(name, path, default=None): | |
1209 | '''find name in the search path. path can be a string (will be split |
|
1213 | '''find name in the search path. path can be a string (will be split | |
1210 | with os.pathsep) or an iterable of strings. if name is |
|
1214 | with os.pathsep) or an iterable of strings. if name is | |
1211 | found, return the path to name; else return default. name is looked up |
|
1215 | found, return the path to name; else return default. name is looked up | |
1212 | using cmd.exe rules, honoring PATHEXT.''' |
|
1216 | using cmd.exe rules, honoring PATHEXT.''' | |
1213 | if isinstance(path, str): |
|
1217 | if isinstance(path, str): | |
1214 | path = path.split(os.pathsep) |
|
1218 | path = path.split(os.pathsep) | |
1215 |
|
1219 | |||
1216 | pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') |
|
1220 | pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') | |
1217 | pathext = pathext.lower().split(os.pathsep) |
|
1221 | pathext = pathext.lower().split(os.pathsep) | |
1218 | isexec = os.path.splitext(name)[1].lower() in pathext |
|
1222 | isexec = os.path.splitext(name)[1].lower() in pathext | |
1219 |
|
1223 | |||
1220 | for p in path: |
|
1224 | for p in path: | |
1221 | p_name = os.path.join(p, name) |
|
1225 | p_name = os.path.join(p, name) | |
1222 |
|
1226 | |||
1223 | if isexec and os.path.exists(p_name): |
|
1227 | if isexec and os.path.exists(p_name): | |
1224 | return p_name |
|
1228 | return p_name | |
1225 |
|
1229 | |||
1226 | for ext in pathext: |
|
1230 | for ext in pathext: | |
1227 | p_name_ext = p_name + ext |
|
1231 | p_name_ext = p_name + ext | |
1228 | if os.path.exists(p_name_ext): |
|
1232 | if os.path.exists(p_name_ext): | |
1229 | return p_name_ext |
|
1233 | return p_name_ext | |
1230 | return default |
|
1234 | return default | |
1231 |
|
1235 | |||
1232 | def set_signal_handler(): |
|
1236 | def set_signal_handler(): | |
1233 | try: |
|
1237 | try: | |
1234 | set_signal_handler_win32() |
|
1238 | set_signal_handler_win32() | |
1235 | except NameError: |
|
1239 | except NameError: | |
1236 | pass |
|
1240 | pass | |
1237 |
|
1241 | |||
1238 | try: |
|
1242 | try: | |
1239 | # override functions with win32 versions if possible |
|
1243 | # override functions with win32 versions if possible | |
1240 | from util_win32 import * |
|
1244 | from util_win32 import * | |
1241 | if not _is_win_9x(): |
|
1245 | if not _is_win_9x(): | |
1242 | posixfile = posixfile_nt |
|
1246 | posixfile = posixfile_nt | |
1243 | except ImportError: |
|
1247 | except ImportError: | |
1244 | pass |
|
1248 | pass | |
1245 |
|
1249 | |||
1246 | else: |
|
1250 | else: | |
1247 | nulldev = '/dev/null' |
|
1251 | nulldev = '/dev/null' | |
1248 |
|
1252 | |||
1249 | def rcfiles(path): |
|
1253 | def rcfiles(path): | |
1250 | rcs = [os.path.join(path, 'hgrc')] |
|
1254 | rcs = [os.path.join(path, 'hgrc')] | |
1251 | rcdir = os.path.join(path, 'hgrc.d') |
|
1255 | rcdir = os.path.join(path, 'hgrc.d') | |
1252 | try: |
|
1256 | try: | |
1253 | rcs.extend([os.path.join(rcdir, f) |
|
1257 | rcs.extend([os.path.join(rcdir, f) | |
1254 | for f, kind in osutil.listdir(rcdir) |
|
1258 | for f, kind in osutil.listdir(rcdir) | |
1255 | if f.endswith(".rc")]) |
|
1259 | if f.endswith(".rc")]) | |
1256 | except OSError: |
|
1260 | except OSError: | |
1257 | pass |
|
1261 | pass | |
1258 | return rcs |
|
1262 | return rcs | |
1259 |
|
1263 | |||
1260 | def system_rcpath(): |
|
1264 | def system_rcpath(): | |
1261 | path = [] |
|
1265 | path = [] | |
1262 | # old mod_python does not set sys.argv |
|
1266 | # old mod_python does not set sys.argv | |
1263 | if len(getattr(sys, 'argv', [])) > 0: |
|
1267 | if len(getattr(sys, 'argv', [])) > 0: | |
1264 | path.extend(rcfiles(os.path.dirname(sys.argv[0]) + |
|
1268 | path.extend(rcfiles(os.path.dirname(sys.argv[0]) + | |
1265 | '/../etc/mercurial')) |
|
1269 | '/../etc/mercurial')) | |
1266 | path.extend(rcfiles('/etc/mercurial')) |
|
1270 | path.extend(rcfiles('/etc/mercurial')) | |
1267 | return path |
|
1271 | return path | |
1268 |
|
1272 | |||
1269 | def user_rcpath(): |
|
1273 | def user_rcpath(): | |
1270 | return [os.path.expanduser('~/.hgrc')] |
|
1274 | return [os.path.expanduser('~/.hgrc')] | |
1271 |
|
1275 | |||
1272 | def parse_patch_output(output_line): |
|
1276 | def parse_patch_output(output_line): | |
1273 | """parses the output produced by patch and returns the file name""" |
|
1277 | """parses the output produced by patch and returns the file name""" | |
1274 | pf = output_line[14:] |
|
1278 | pf = output_line[14:] | |
1275 | if os.sys.platform == 'OpenVMS': |
|
1279 | if os.sys.platform == 'OpenVMS': | |
1276 | if pf[0] == '`': |
|
1280 | if pf[0] == '`': | |
1277 | pf = pf[1:-1] # Remove the quotes |
|
1281 | pf = pf[1:-1] # Remove the quotes | |
1278 | else: |
|
1282 | else: | |
1279 | if pf.startswith("'") and pf.endswith("'") and " " in pf: |
|
1283 | if pf.startswith("'") and pf.endswith("'") and " " in pf: | |
1280 | pf = pf[1:-1] # Remove the quotes |
|
1284 | pf = pf[1:-1] # Remove the quotes | |
1281 | return pf |
|
1285 | return pf | |
1282 |
|
1286 | |||
1283 | def sshargs(sshcmd, host, user, port): |
|
1287 | def sshargs(sshcmd, host, user, port): | |
1284 | '''Build argument list for ssh''' |
|
1288 | '''Build argument list for ssh''' | |
1285 | args = user and ("%s@%s" % (user, host)) or host |
|
1289 | args = user and ("%s@%s" % (user, host)) or host | |
1286 | return port and ("%s -p %s" % (args, port)) or args |
|
1290 | return port and ("%s -p %s" % (args, port)) or args | |
1287 |
|
1291 | |||
1288 | def is_exec(f): |
|
1292 | def is_exec(f): | |
1289 | """check whether a file is executable""" |
|
1293 | """check whether a file is executable""" | |
1290 | return (os.lstat(f).st_mode & 0100 != 0) |
|
1294 | return (os.lstat(f).st_mode & 0100 != 0) | |
1291 |
|
1295 | |||
1292 | def set_flags(f, l, x): |
|
1296 | def set_flags(f, l, x): | |
1293 | s = os.lstat(f).st_mode |
|
1297 | s = os.lstat(f).st_mode | |
1294 | if l: |
|
1298 | if l: | |
1295 | if not stat.S_ISLNK(s): |
|
1299 | if not stat.S_ISLNK(s): | |
1296 | # switch file to link |
|
1300 | # switch file to link | |
1297 | data = file(f).read() |
|
1301 | data = file(f).read() | |
1298 | os.unlink(f) |
|
1302 | os.unlink(f) | |
1299 | try: |
|
1303 | try: | |
1300 | os.symlink(data, f) |
|
1304 | os.symlink(data, f) | |
1301 | except: |
|
1305 | except: | |
1302 | # failed to make a link, rewrite file |
|
1306 | # failed to make a link, rewrite file | |
1303 | file(f, "w").write(data) |
|
1307 | file(f, "w").write(data) | |
1304 | # no chmod needed at this point |
|
1308 | # no chmod needed at this point | |
1305 | return |
|
1309 | return | |
1306 | if stat.S_ISLNK(s): |
|
1310 | if stat.S_ISLNK(s): | |
1307 | # switch link to file |
|
1311 | # switch link to file | |
1308 | data = os.readlink(f) |
|
1312 | data = os.readlink(f) | |
1309 | os.unlink(f) |
|
1313 | os.unlink(f) | |
1310 | file(f, "w").write(data) |
|
1314 | file(f, "w").write(data) | |
1311 | s = 0666 & ~_umask # avoid restatting for chmod |
|
1315 | s = 0666 & ~_umask # avoid restatting for chmod | |
1312 |
|
1316 | |||
1313 | sx = s & 0100 |
|
1317 | sx = s & 0100 | |
1314 | if x and not sx: |
|
1318 | if x and not sx: | |
1315 | # Turn on +x for every +r bit when making a file executable |
|
1319 | # Turn on +x for every +r bit when making a file executable | |
1316 | # and obey umask. |
|
1320 | # and obey umask. | |
1317 | os.chmod(f, s | (s & 0444) >> 2 & ~_umask) |
|
1321 | os.chmod(f, s | (s & 0444) >> 2 & ~_umask) | |
1318 | elif not x and sx: |
|
1322 | elif not x and sx: | |
1319 | # Turn off all +x bits |
|
1323 | # Turn off all +x bits | |
1320 | os.chmod(f, s & 0666) |
|
1324 | os.chmod(f, s & 0666) | |
1321 |
|
1325 | |||
1322 | def set_binary(fd): |
|
1326 | def set_binary(fd): | |
1323 | pass |
|
1327 | pass | |
1324 |
|
1328 | |||
1325 | def pconvert(path): |
|
1329 | def pconvert(path): | |
1326 | return path |
|
1330 | return path | |
1327 |
|
1331 | |||
1328 | def localpath(path): |
|
1332 | def localpath(path): | |
1329 | return path |
|
1333 | return path | |
1330 |
|
1334 | |||
1331 | normpath = os.path.normpath |
|
1335 | normpath = os.path.normpath | |
1332 | samestat = os.path.samestat |
|
1336 | samestat = os.path.samestat | |
1333 |
|
1337 | |||
1334 | def makelock(info, pathname): |
|
1338 | def makelock(info, pathname): | |
1335 | try: |
|
1339 | try: | |
1336 | os.symlink(info, pathname) |
|
1340 | os.symlink(info, pathname) | |
1337 | except OSError, why: |
|
1341 | except OSError, why: | |
1338 | if why.errno == errno.EEXIST: |
|
1342 | if why.errno == errno.EEXIST: | |
1339 | raise |
|
1343 | raise | |
1340 | else: |
|
1344 | else: | |
1341 | _makelock_file(info, pathname) |
|
1345 | _makelock_file(info, pathname) | |
1342 |
|
1346 | |||
1343 | def readlock(pathname): |
|
1347 | def readlock(pathname): | |
1344 | try: |
|
1348 | try: | |
1345 | return os.readlink(pathname) |
|
1349 | return os.readlink(pathname) | |
1346 | except OSError, why: |
|
1350 | except OSError, why: | |
1347 | if why.errno in (errno.EINVAL, errno.ENOSYS): |
|
1351 | if why.errno in (errno.EINVAL, errno.ENOSYS): | |
1348 | return _readlock_file(pathname) |
|
1352 | return _readlock_file(pathname) | |
1349 | else: |
|
1353 | else: | |
1350 | raise |
|
1354 | raise | |
1351 |
|
1355 | |||
1352 | def shellquote(s): |
|
1356 | def shellquote(s): | |
1353 | if os.sys.platform == 'OpenVMS': |
|
1357 | if os.sys.platform == 'OpenVMS': | |
1354 | return '"%s"' % s |
|
1358 | return '"%s"' % s | |
1355 | else: |
|
1359 | else: | |
1356 | return "'%s'" % s.replace("'", "'\\''") |
|
1360 | return "'%s'" % s.replace("'", "'\\''") | |
1357 |
|
1361 | |||
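On POSIX (and anything that is not OpenVMS), quoting wraps the string in single quotes and splices embedded single quotes out of the quoted region; for example (results shown as Python string literals):

    shellquote("don't")   # "'don'\\''t'", i.e. the shell sees 'don'\''t'
    shellquote('a;b c')   # "'a;b c'"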
1358 | def quotecommand(cmd): |
|
1362 | def quotecommand(cmd): | |
1359 | return cmd |
|
1363 | return cmd | |
1360 |
|
1364 | |||
1361 | def popen(command, mode='r'): |
|
1365 | def popen(command, mode='r'): | |
1362 | return os.popen(command, mode) |
|
1366 | return os.popen(command, mode) | |
1363 |
|
1367 | |||
1364 | def testpid(pid): |
|
1368 | def testpid(pid): | |
1365 | '''return False if pid dead, True if running or not sure''' |
|
1369 | '''return False if pid dead, True if running or not sure''' | |
1366 | if os.sys.platform == 'OpenVMS': |
|
1370 | if os.sys.platform == 'OpenVMS': | |
1367 | return True |
|
1371 | return True | |
1368 | try: |
|
1372 | try: | |
1369 | os.kill(pid, 0) |
|
1373 | os.kill(pid, 0) | |
1370 | return True |
|
1374 | return True | |
1371 | except OSError, inst: |
|
1375 | except OSError, inst: | |
1372 | return inst.errno != errno.ESRCH |
|
1376 | return inst.errno != errno.ESRCH | |
1373 |
|
1377 | |||
1374 | def explain_exit(code): |
|
1378 | def explain_exit(code): | |
1375 | """return a 2-tuple (desc, code) describing a process's status""" |
|
1379 | """return a 2-tuple (desc, code) describing a process's status""" | |
1376 | if os.WIFEXITED(code): |
|
1380 | if os.WIFEXITED(code): | |
1377 | val = os.WEXITSTATUS(code) |
|
1381 | val = os.WEXITSTATUS(code) | |
1378 | return _("exited with status %d") % val, val |
|
1382 | return _("exited with status %d") % val, val | |
1379 | elif os.WIFSIGNALED(code): |
|
1383 | elif os.WIFSIGNALED(code): | |
1380 | val = os.WTERMSIG(code) |
|
1384 | val = os.WTERMSIG(code) | |
1381 | return _("killed by signal %d") % val, val |
|
1385 | return _("killed by signal %d") % val, val | |
1382 | elif os.WIFSTOPPED(code): |
|
1386 | elif os.WIFSTOPPED(code): | |
1383 | val = os.WSTOPSIG(code) |
|
1387 | val = os.WSTOPSIG(code) | |
1384 | return _("stopped by signal %d") % val, val |
|
1388 | return _("stopped by signal %d") % val, val | |
1385 | raise ValueError(_("invalid exit code")) |
|
1389 | raise ValueError(_("invalid exit code")) | |
1386 |
|
1390 | |||
1387 | def isowner(fp, st=None): |
|
1391 | def isowner(fp, st=None): | |
1388 | """Return True if the file object f belongs to the current user. |
|
1392 | """Return True if the file object f belongs to the current user. | |
1389 |
|
1393 | |||
1390 | The return value of a util.fstat(f) may be passed as the st argument. |
|
1394 | The return value of a util.fstat(f) may be passed as the st argument. | |
1391 | """ |
|
1395 | """ | |
1392 | if st is None: |
|
1396 | if st is None: | |
1393 | st = fstat(fp) |
|
1397 | st = fstat(fp) | |
1394 | return st.st_uid == os.getuid() |
|
1398 | return st.st_uid == os.getuid() | |
1395 |
|
1399 | |||
1396 | def find_in_path(name, path, default=None): |
|
1400 | def find_in_path(name, path, default=None): | |
1397 | '''find name in the search path. path can be a string (will be split |
|
1401 | '''find name in the search path. path can be a string (will be split | |
1398 | with os.pathsep) or an iterable of strings. if name is |
|
1402 | with os.pathsep) or an iterable of strings. if name is | |
1399 | found, return the path to name; else return default.''' |
|
1403 | found, return the path to name; else return default.''' | |
1400 | if isinstance(path, str): |
|
1404 | if isinstance(path, str): | |
1401 | path = path.split(os.pathsep) |
|
1405 | path = path.split(os.pathsep) | |
1402 | for p in path: |
|
1406 | for p in path: | |
1403 | p_name = os.path.join(p, name) |
|
1407 | p_name = os.path.join(p, name) | |
1404 | if os.path.exists(p_name): |
|
1408 | if os.path.exists(p_name): | |
1405 | return p_name |
|
1409 | return p_name | |
1406 | return default |
|
1410 | return default | |
1407 |
|
1411 | |||
1408 | def set_signal_handler(): |
|
1412 | def set_signal_handler(): | |
1409 | pass |
|
1413 | pass | |
1410 |
|
1414 | |||
1411 | def find_exe(name, default=None): |
|
1415 | def find_exe(name, default=None): | |
1412 | '''find path of an executable. |
|
1416 | '''find path of an executable. | |
1413 | if name contains a path component, return it as is. otherwise, |
|
1417 | if name contains a path component, return it as is. otherwise, | |
1414 | use normal executable search path.''' |
|
1418 | use normal executable search path.''' | |
1415 |
|
1419 | |||
1416 | if os.sep in name or sys.platform == 'OpenVMS': |
|
1420 | if os.sep in name or sys.platform == 'OpenVMS': | |
1417 | # don't check the executable bit. if the file isn't |
|
1421 | # don't check the executable bit. if the file isn't | |
1418 | # executable, whoever tries to actually run it will give a |
|
1422 | # executable, whoever tries to actually run it will give a | |
1419 | # much more useful error message. |
|
1423 | # much more useful error message. | |
1420 | return name |
|
1424 | return name | |
1421 | return find_in_path(name, os.environ.get('PATH', ''), default=default) |
|
1425 | return find_in_path(name, os.environ.get('PATH', ''), default=default) | |
1422 |
|
1426 | |||
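A usage sketch for the POSIX executable lookup above, assuming the module is importable as "util"; the results are illustrative:

    import util

    util.find_exe('vi')                  # e.g. '/usr/bin/vi', or None if not on PATH
    util.find_exe('/opt/custom/tool')    # contains os.sep, so returned unchanged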
1423 | def mktempcopy(name, emptyok=False, createmode=None): |
|
1427 | def mktempcopy(name, emptyok=False, createmode=None): | |
1424 | """Create a temporary file with the same contents from name |
|
1428 | """Create a temporary file with the same contents from name | |
1425 |
|
1429 | |||
1426 | The permission bits are copied from the original file. |
|
1430 | The permission bits are copied from the original file. | |
1427 |
|
1431 | |||
1428 | If the temporary file is going to be truncated immediately, you |
|
1432 | If the temporary file is going to be truncated immediately, you | |
1429 | can use emptyok=True as an optimization. |
|
1433 | can use emptyok=True as an optimization. | |
1430 |
|
1434 | |||
1431 | Returns the name of the temporary file. |
|
1435 | Returns the name of the temporary file. | |
1432 | """ |
|
1436 | """ | |
1433 | d, fn = os.path.split(name) |
|
1437 | d, fn = os.path.split(name) | |
1434 | fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) |
|
1438 | fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) | |
1435 | os.close(fd) |
|
1439 | os.close(fd) | |
1436 | # Temporary files are created with mode 0600, which is usually not |
|
1440 | # Temporary files are created with mode 0600, which is usually not | |
1437 | # what we want. If the original file already exists, just copy |
|
1441 | # what we want. If the original file already exists, just copy | |
1438 | # its mode. Otherwise, manually obey umask. |
|
1442 | # its mode. Otherwise, manually obey umask. | |
1439 | try: |
|
1443 | try: | |
1440 | st_mode = os.lstat(name).st_mode & 0777 |
|
1444 | st_mode = os.lstat(name).st_mode & 0777 | |
1441 | except OSError, inst: |
|
1445 | except OSError, inst: | |
1442 | if inst.errno != errno.ENOENT: |
|
1446 | if inst.errno != errno.ENOENT: | |
1443 | raise |
|
1447 | raise | |
1444 | st_mode = createmode |
|
1448 | st_mode = createmode | |
1445 | if st_mode is None: |
|
1449 | if st_mode is None: | |
1446 | st_mode = ~_umask |
|
1450 | st_mode = ~_umask | |
1447 | st_mode &= 0666 |
|
1451 | st_mode &= 0666 | |
1448 | os.chmod(temp, st_mode) |
|
1452 | os.chmod(temp, st_mode) | |
1449 | if emptyok: |
|
1453 | if emptyok: | |
1450 | return temp |
|
1454 | return temp | |
1451 | try: |
|
1455 | try: | |
1452 | try: |
|
1456 | try: | |
1453 | ifp = posixfile(name, "rb") |
|
1457 | ifp = posixfile(name, "rb") | |
1454 | except IOError, inst: |
|
1458 | except IOError, inst: | |
1455 | if inst.errno == errno.ENOENT: |
|
1459 | if inst.errno == errno.ENOENT: | |
1456 | return temp |
|
1460 | return temp | |
1457 | if not getattr(inst, 'filename', None): |
|
1461 | if not getattr(inst, 'filename', None): | |
1458 | inst.filename = name |
|
1462 | inst.filename = name | |
1459 | raise |
|
1463 | raise | |
1460 | ofp = posixfile(temp, "wb") |
|
1464 | ofp = posixfile(temp, "wb") | |
1461 | for chunk in filechunkiter(ifp): |
|
1465 | for chunk in filechunkiter(ifp): | |
1462 | ofp.write(chunk) |
|
1466 | ofp.write(chunk) | |
1463 | ifp.close() |
|
1467 | ifp.close() | |
1464 | ofp.close() |
|
1468 | ofp.close() | |
1465 | except: |
|
1469 | except: | |
1466 | try: os.unlink(temp) |
|
1470 | try: os.unlink(temp) | |
1467 | except: pass |
|
1471 | except: pass | |
1468 | raise |
|
1472 | raise | |
1469 | return temp |
|
1473 | return temp | |
1470 |
|
1474 | |||
1471 | class atomictempfile(posixfile): |
|
1475 | class atomictempfile(posixfile): | |
1472 | """file-like object that atomically updates a file |
|
1476 | """file-like object that atomically updates a file | |
1473 |
|
1477 | |||
1474 | All writes will be redirected to a temporary copy of the original |
|
1478 | All writes will be redirected to a temporary copy of the original | |
1475 | file. When rename is called, the copy is renamed to the original |
|
1479 | file. When rename is called, the copy is renamed to the original | |
1476 | name, making the changes visible. |
|
1480 | name, making the changes visible. | |
1477 | """ |
|
1481 | """ | |
1478 | def __init__(self, name, mode, createmode): |
|
1482 | def __init__(self, name, mode, createmode): | |
1479 | self.__name = name |
|
1483 | self.__name = name | |
1480 | self.temp = mktempcopy(name, emptyok=('w' in mode), |
|
1484 | self.temp = mktempcopy(name, emptyok=('w' in mode), | |
1481 | createmode=createmode) |
|
1485 | createmode=createmode) | |
1482 | posixfile.__init__(self, self.temp, mode) |
|
1486 | posixfile.__init__(self, self.temp, mode) | |
1483 |
|
1487 | |||
1484 | def rename(self): |
|
1488 | def rename(self): | |
1485 | if not self.closed: |
|
1489 | if not self.closed: | |
1486 | posixfile.close(self) |
|
1490 | posixfile.close(self) | |
1487 | rename(self.temp, localpath(self.__name)) |
|
1491 | rename(self.temp, localpath(self.__name)) | |
1488 |
|
1492 | |||
1489 | def __del__(self): |
|
1493 | def __del__(self): | |
1490 | if not self.closed: |
|
1494 | if not self.closed: | |
1491 | try: |
|
1495 | try: | |
1492 | os.unlink(self.temp) |
|
1496 | os.unlink(self.temp) | |
1493 | except: pass |
|
1497 | except: pass | |
1494 | posixfile.close(self) |
|
1498 | posixfile.close(self) | |
1495 |
|
1499 | |||
1496 | def makedirs(name, mode=None): |
|
1500 | def makedirs(name, mode=None): | |
1497 | """recursive directory creation with parent mode inheritance""" |
|
1501 | """recursive directory creation with parent mode inheritance""" | |
1498 | try: |
|
1502 | try: | |
1499 | os.mkdir(name) |
|
1503 | os.mkdir(name) | |
1500 | if mode is not None: |
|
1504 | if mode is not None: | |
1501 | os.chmod(name, mode) |
|
1505 | os.chmod(name, mode) | |
1502 | return |
|
1506 | return | |
1503 | except OSError, err: |
|
1507 | except OSError, err: | |
1504 | if err.errno == errno.EEXIST: |
|
1508 | if err.errno == errno.EEXIST: | |
1505 | return |
|
1509 | return | |
1506 | if err.errno != errno.ENOENT: |
|
1510 | if err.errno != errno.ENOENT: | |
1507 | raise |
|
1511 | raise | |
1508 | parent = os.path.abspath(os.path.dirname(name)) |
|
1512 | parent = os.path.abspath(os.path.dirname(name)) | |
1509 | makedirs(parent, mode) |
|
1513 | makedirs(parent, mode) | |
1510 | makedirs(name, mode) |
|
1514 | makedirs(name, mode) | |
1511 |
|
1515 | |||
1512 | class opener(object): |
|
1516 | class opener(object): | |
1513 | """Open files relative to a base directory |
|
1517 | """Open files relative to a base directory | |
1514 |
|
1518 | |||
1515 | This class is used to hide the details of COW semantics and |
|
1519 | This class is used to hide the details of COW semantics and | |
1516 | remote file access from higher level code. |
|
1520 | remote file access from higher level code. | |
1517 | """ |
|
1521 | """ | |
1518 | def __init__(self, base, audit=True): |
|
1522 | def __init__(self, base, audit=True): | |
1519 | self.base = base |
|
1523 | self.base = base | |
1520 | if audit: |
|
1524 | if audit: | |
1521 | self.audit_path = path_auditor(base) |
|
1525 | self.audit_path = path_auditor(base) | |
1522 | else: |
|
1526 | else: | |
1523 | self.audit_path = always |
|
1527 | self.audit_path = always | |
1524 | self.createmode = None |
|
1528 | self.createmode = None | |
1525 |
|
1529 | |||
1526 | def __getattr__(self, name): |
|
1530 | def __getattr__(self, name): | |
1527 | if name == '_can_symlink': |
|
1531 | if name == '_can_symlink': | |
1528 | self._can_symlink = checklink(self.base) |
|
1532 | self._can_symlink = checklink(self.base) | |
1529 | return self._can_symlink |
|
1533 | return self._can_symlink | |
1530 | raise AttributeError(name) |
|
1534 | raise AttributeError(name) | |
1531 |
|
1535 | |||
1532 | def _fixfilemode(self, name): |
|
1536 | def _fixfilemode(self, name): | |
1533 | if self.createmode is None: |
|
1537 | if self.createmode is None: | |
1534 | return |
|
1538 | return | |
1535 | os.chmod(name, self.createmode & 0666) |
|
1539 | os.chmod(name, self.createmode & 0666) | |
1536 |
|
1540 | |||
1537 | def __call__(self, path, mode="r", text=False, atomictemp=False): |
|
1541 | def __call__(self, path, mode="r", text=False, atomictemp=False): | |
1538 | self.audit_path(path) |
|
1542 | self.audit_path(path) | |
1539 | f = os.path.join(self.base, path) |
|
1543 | f = os.path.join(self.base, path) | |
1540 |
|
1544 | |||
1541 | if not text and "b" not in mode: |
|
1545 | if not text and "b" not in mode: | |
1542 | mode += "b" # for that other OS |
|
1546 | mode += "b" # for that other OS | |
1543 |
|
1547 | |||
1544 | nlink = -1 |
|
1548 | nlink = -1 | |
1545 | if mode not in ("r", "rb"): |
|
1549 | if mode not in ("r", "rb"): | |
1546 | try: |
|
1550 | try: | |
1547 | nlink = nlinks(f) |
|
1551 | nlink = nlinks(f) | |
1548 | except OSError: |
|
1552 | except OSError: | |
1549 | nlink = 0 |
|
1553 | nlink = 0 | |
1550 | d = os.path.dirname(f) |
|
1554 | d = os.path.dirname(f) | |
1551 | if not os.path.isdir(d): |
|
1555 | if not os.path.isdir(d): | |
1552 | makedirs(d, self.createmode) |
|
1556 | makedirs(d, self.createmode) | |
1553 | if atomictemp: |
|
1557 | if atomictemp: | |
1554 | return atomictempfile(f, mode, self.createmode) |
|
1558 | return atomictempfile(f, mode, self.createmode) | |
1555 | if nlink > 1: |
|
1559 | if nlink > 1: | |
1556 | rename(mktempcopy(f), f) |
|
1560 | rename(mktempcopy(f), f) | |
1557 | fp = posixfile(f, mode) |
|
1561 | fp = posixfile(f, mode) | |
1558 | if nlink == 0: |
|
1562 | if nlink == 0: | |
1559 | self._fixfilemode(f) |
|
1563 | self._fixfilemode(f) | |
1560 | return fp |
|
1564 | return fp | |
1561 |
|
1565 | |||
1562 | def symlink(self, src, dst): |
|
1566 | def symlink(self, src, dst): | |
1563 | self.audit_path(dst) |
|
1567 | self.audit_path(dst) | |
1564 | linkname = os.path.join(self.base, dst) |
|
1568 | linkname = os.path.join(self.base, dst) | |
1565 | try: |
|
1569 | try: | |
1566 | os.unlink(linkname) |
|
1570 | os.unlink(linkname) | |
1567 | except OSError: |
|
1571 | except OSError: | |
1568 | pass |
|
1572 | pass | |
1569 |
|
1573 | |||
1570 | dirname = os.path.dirname(linkname) |
|
1574 | dirname = os.path.dirname(linkname) | |
1571 | if not os.path.exists(dirname): |
|
1575 | if not os.path.exists(dirname): | |
1572 | makedirs(dirname, self.createmode) |
|
1576 | makedirs(dirname, self.createmode) | |
1573 |
|
1577 | |||
1574 | if self._can_symlink: |
|
1578 | if self._can_symlink: | |
1575 | try: |
|
1579 | try: | |
1576 | os.symlink(src, linkname) |
|
1580 | os.symlink(src, linkname) | |
1577 | except OSError, err: |
|
1581 | except OSError, err: | |
1578 | raise OSError(err.errno, _('could not symlink to %r: %s') % |
|
1582 | raise OSError(err.errno, _('could not symlink to %r: %s') % | |
1579 | (src, err.strerror), linkname) |
|
1583 | (src, err.strerror), linkname) | |
1580 | else: |
|
1584 | else: | |
1581 | f = self(dst, "w") |
|
1585 | f = self(dst, "w") | |
1582 | f.write(src) |
|
1586 | f.write(src) | |
1583 | f.close() |
|
1587 | f.close() | |
1584 | self._fixfilemode(dst) |
|
1588 | self._fixfilemode(dst) | |
1585 |
|
1589 | |||
1586 | class chunkbuffer(object): |
|
1590 | class chunkbuffer(object): | |
1587 | """Allow arbitrary sized chunks of data to be efficiently read from an |
|
1591 | """Allow arbitrary sized chunks of data to be efficiently read from an | |
1588 | iterator over chunks of arbitrary size.""" |
|
1592 | iterator over chunks of arbitrary size.""" | |
1589 |
|
1593 | |||
1590 | def __init__(self, in_iter): |
|
1594 | def __init__(self, in_iter): | |
1591 | """in_iter is the iterator that's iterating over the input chunks. |
|
1595 | """in_iter is the iterator that's iterating over the input chunks. | |
1592 | targetsize is how big a buffer to try to maintain.""" |
|
1596 | targetsize is how big a buffer to try to maintain.""" | |
1593 | self.iter = iter(in_iter) |
|
1597 | self.iter = iter(in_iter) | |
1594 | self.buf = '' |
|
1598 | self.buf = '' | |
1595 | self.targetsize = 2**16 |
|
1599 | self.targetsize = 2**16 | |
1596 |
|
1600 | |||
1597 | def read(self, l): |
|
1601 | def read(self, l): | |
1598 | """Read L bytes of data from the iterator of chunks of data. |
|
1602 | """Read L bytes of data from the iterator of chunks of data. | |
1599 | Returns less than L bytes if the iterator runs dry.""" |
|
1603 | Returns less than L bytes if the iterator runs dry.""" | |
1600 | if l > len(self.buf) and self.iter: |
|
1604 | if l > len(self.buf) and self.iter: | |
1601 | # Clamp to a multiple of self.targetsize |
|
1605 | # Clamp to a multiple of self.targetsize | |
1602 | targetsize = max(l, self.targetsize) |
|
1606 | targetsize = max(l, self.targetsize) | |
1603 | collector = cStringIO.StringIO() |
|
1607 | collector = cStringIO.StringIO() | |
1604 | collector.write(self.buf) |
|
1608 | collector.write(self.buf) | |
1605 | collected = len(self.buf) |
|
1609 | collected = len(self.buf) | |
1606 | for chunk in self.iter: |
|
1610 | for chunk in self.iter: | |
1607 | collector.write(chunk) |
|
1611 | collector.write(chunk) | |
1608 | collected += len(chunk) |
|
1612 | collected += len(chunk) | |
1609 | if collected >= targetsize: |
|
1613 | if collected >= targetsize: | |
1610 | break |
|
1614 | break | |
1611 | if collected < targetsize: |
|
1615 | if collected < targetsize: | |
1612 | self.iter = False |
|
1616 | self.iter = False | |
1613 | self.buf = collector.getvalue() |
|
1617 | self.buf = collector.getvalue() | |
1614 | if len(self.buf) == l: |
|
1618 | if len(self.buf) == l: | |
1615 | s, self.buf = str(self.buf), '' |
|
1619 | s, self.buf = str(self.buf), '' | |
1616 | else: |
|
1620 | else: | |
1617 | s, self.buf = self.buf[:l], buffer(self.buf, l) |
|
1621 | s, self.buf = self.buf[:l], buffer(self.buf, l) | |
1618 | return s |
|
1622 | return s | |
1619 |
|
1623 | |||
1620 | def filechunkiter(f, size=65536, limit=None): |
|
1624 | def filechunkiter(f, size=65536, limit=None): | |
1621 | """Create a generator that produces the data in the file size |
|
1625 | """Create a generator that produces the data in the file size | |
1622 | (default 65536) bytes at a time, up to optional limit (default is |
|
1626 | (default 65536) bytes at a time, up to optional limit (default is | |
1623 | to read all data). Chunks may be less than size bytes if the |
|
1627 | to read all data). Chunks may be less than size bytes if the | |
1624 | chunk is the last chunk in the file, or the file is a socket or |
|
1628 | chunk is the last chunk in the file, or the file is a socket or | |
1625 | some other type of file that sometimes reads less data than is |
|
1629 | some other type of file that sometimes reads less data than is | |
1626 | requested.""" |
|
1630 | requested.""" | |
1627 | assert size >= 0 |
|
1631 | assert size >= 0 | |
1628 | assert limit is None or limit >= 0 |
|
1632 | assert limit is None or limit >= 0 | |
1629 | while True: |
|
1633 | while True: | |
1630 | if limit is None: nbytes = size |
|
1634 | if limit is None: nbytes = size | |
1631 | else: nbytes = min(limit, size) |
|
1635 | else: nbytes = min(limit, size) | |
1632 | s = nbytes and f.read(nbytes) |
|
1636 | s = nbytes and f.read(nbytes) | |
1633 | if not s: break |
|
1637 | if not s: break | |
1634 | if limit: limit -= len(s) |
|
1638 | if limit: limit -= len(s) | |
1635 | yield s |
|
1639 | yield s | |
1636 |
|
1640 | |||
1637 | def makedate(): |
|
1641 | def makedate(): | |
1638 | lt = time.localtime() |
|
1642 | lt = time.localtime() | |
1639 | if lt[8] == 1 and time.daylight: |
|
1643 | if lt[8] == 1 and time.daylight: | |
1640 | tz = time.altzone |
|
1644 | tz = time.altzone | |
1641 | else: |
|
1645 | else: | |
1642 | tz = time.timezone |
|
1646 | tz = time.timezone | |
1643 | return time.mktime(lt), tz |
|
1647 | return time.mktime(lt), tz | |
1644 |
|
1648 | |||
1645 | def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): |
|
1649 | def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): | |
1646 | """represent a (unixtime, offset) tuple as a localized time. |
|
1650 | """represent a (unixtime, offset) tuple as a localized time. | |
1647 | unixtime is seconds since the epoch, and offset is the time zone's |
|
1651 | unixtime is seconds since the epoch, and offset is the time zone's | |
1648 | number of seconds away from UTC. if timezone is false, do not |
|
1652 | number of seconds away from UTC. if timezone is false, do not | |
1649 | append time zone to string.""" |
|
1653 | append time zone to string.""" | |
1650 | t, tz = date or makedate() |
|
1654 | t, tz = date or makedate() | |
1651 | if "%1" in format or "%2" in format: |
|
1655 | if "%1" in format or "%2" in format: | |
1652 | sign = (tz > 0) and "-" or "+" |
|
1656 | sign = (tz > 0) and "-" or "+" | |
1653 | minutes = abs(tz) / 60 |
|
1657 | minutes = abs(tz) / 60 | |
1654 | format = format.replace("%1", "%c%02d" % (sign, minutes / 60)) |
|
1658 | format = format.replace("%1", "%c%02d" % (sign, minutes / 60)) | |
1655 | format = format.replace("%2", "%02d" % (minutes % 60)) |
|
1659 | format = format.replace("%2", "%02d" % (minutes % 60)) | |
1656 | s = time.strftime(format, time.gmtime(float(t) - tz)) |
|
1660 | s = time.strftime(format, time.gmtime(float(t) - tz)) | |
1657 | return s |
|
1661 | return s | |
1658 |
|
1662 | |||
1659 | def shortdate(date=None): |
|
1663 | def shortdate(date=None): | |
1660 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" |
|
1664 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" | |
1661 | return datestr(date, format='%Y-%m-%d') |
|
1665 | return datestr(date, format='%Y-%m-%d') | |
1662 |
|
1666 | |||
1663 | def strdate(string, format, defaults=[]): |
|
1667 | def strdate(string, format, defaults=[]): | |
1664 | """parse a localized time string and return a (unixtime, offset) tuple. |
|
1668 | """parse a localized time string and return a (unixtime, offset) tuple. | |
1665 | if the string cannot be parsed, ValueError is raised.""" |
|
1669 | if the string cannot be parsed, ValueError is raised.""" | |
1666 | def timezone(string): |
|
1670 | def timezone(string): | |
1667 | tz = string.split()[-1] |
|
1671 | tz = string.split()[-1] | |
1668 | if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): |
|
1672 | if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): | |
1669 | sign = (tz[0] == "+") and 1 or -1 |
|
1673 | sign = (tz[0] == "+") and 1 or -1 | |
1670 | hours = int(tz[1:3]) |
|
1674 | hours = int(tz[1:3]) | |
1671 | minutes = int(tz[3:5]) |
|
1675 | minutes = int(tz[3:5]) | |
1672 | return -sign * (hours * 60 + minutes) * 60 |
|
1676 | return -sign * (hours * 60 + minutes) * 60 | |
1673 | if tz == "GMT" or tz == "UTC": |
|
1677 | if tz == "GMT" or tz == "UTC": | |
1674 | return 0 |
|
1678 | return 0 | |
1675 | return None |
|
1679 | return None | |
1676 |
|
1680 | |||
1677 | # NOTE: unixtime = localunixtime + offset |
|
1681 | # NOTE: unixtime = localunixtime + offset | |
1678 | offset, date = timezone(string), string |
|
1682 | offset, date = timezone(string), string | |
1679 | if offset != None: |
|
1683 | if offset != None: | |
1680 | date = " ".join(string.split()[:-1]) |
|
1684 | date = " ".join(string.split()[:-1]) | |
1681 |
|
1685 | |||
1682 | # add missing elements from defaults |
|
1686 | # add missing elements from defaults | |
1683 | for part in defaults: |
|
1687 | for part in defaults: | |
1684 | found = [True for p in part if ("%"+p) in format] |
|
1688 | found = [True for p in part if ("%"+p) in format] | |
1685 | if not found: |
|
1689 | if not found: | |
1686 | date += "@" + defaults[part] |
|
1690 | date += "@" + defaults[part] | |
1687 | format += "@%" + part[0] |
|
1691 | format += "@%" + part[0] | |
1688 |
|
1692 | |||
1689 | timetuple = time.strptime(date, format) |
|
1693 | timetuple = time.strptime(date, format) | |
1690 | localunixtime = int(calendar.timegm(timetuple)) |
|
1694 | localunixtime = int(calendar.timegm(timetuple)) | |
1691 | if offset is None: |
|
1695 | if offset is None: | |
1692 | # local timezone |
|
1696 | # local timezone | |
1693 | unixtime = int(time.mktime(timetuple)) |
|
1697 | unixtime = int(time.mktime(timetuple)) | |
1694 | offset = unixtime - localunixtime |
|
1698 | offset = unixtime - localunixtime | |
1695 | else: |
|
1699 | else: | |
1696 | unixtime = localunixtime + offset |
|
1700 | unixtime = localunixtime + offset | |
1697 | return unixtime, offset |
|
1701 | return unixtime, offset | |
1698 |
|
1702 | |||
1699 | def parsedate(date, formats=None, defaults=None): |
|
1703 | def parsedate(date, formats=None, defaults=None): | |
1700 | """parse a localized date/time string and return a (unixtime, offset) tuple. |
|
1704 | """parse a localized date/time string and return a (unixtime, offset) tuple. | |
1701 |
|
1705 | |||
1702 | The date may be a "unixtime offset" string or in one of the specified |
|
1706 | The date may be a "unixtime offset" string or in one of the specified | |
1703 | formats. If the date already is a (unixtime, offset) tuple, it is returned. |
|
1707 | formats. If the date already is a (unixtime, offset) tuple, it is returned. | |
1704 | """ |
|
1708 | """ | |
1705 | if not date: |
|
1709 | if not date: | |
1706 | return 0, 0 |
|
1710 | return 0, 0 | |
1707 | if isinstance(date, tuple) and len(date) == 2: |
|
1711 | if isinstance(date, tuple) and len(date) == 2: | |
1708 | return date |
|
1712 | return date | |
1709 | if not formats: |
|
1713 | if not formats: | |
1710 | formats = defaultdateformats |
|
1714 | formats = defaultdateformats | |
1711 | date = date.strip() |
|
1715 | date = date.strip() | |
1712 | try: |
|
1716 | try: | |
1713 | when, offset = map(int, date.split(' ')) |
|
1717 | when, offset = map(int, date.split(' ')) | |
1714 | except ValueError: |
|
1718 | except ValueError: | |
1715 | # fill out defaults |
|
1719 | # fill out defaults | |
1716 | if not defaults: |
|
1720 | if not defaults: | |
1717 | defaults = {} |
|
1721 | defaults = {} | |
1718 | now = makedate() |
|
1722 | now = makedate() | |
1719 | for part in "d mb yY HI M S".split(): |
|
1723 | for part in "d mb yY HI M S".split(): | |
1720 | if part not in defaults: |
|
1724 | if part not in defaults: | |
1721 | if part[0] in "HMS": |
|
1725 | if part[0] in "HMS": | |
1722 | defaults[part] = "00" |
|
1726 | defaults[part] = "00" | |
1723 | else: |
|
1727 | else: | |
1724 | defaults[part] = datestr(now, "%" + part[0]) |
|
1728 | defaults[part] = datestr(now, "%" + part[0]) | |
1725 |
|
1729 | |||
1726 | for format in formats: |
|
1730 | for format in formats: | |
1727 | try: |
|
1731 | try: | |
1728 | when, offset = strdate(date, format, defaults) |
|
1732 | when, offset = strdate(date, format, defaults) | |
1729 | except (ValueError, OverflowError): |
|
1733 | except (ValueError, OverflowError): | |
1730 | pass |
|
1734 | pass | |
1731 | else: |
|
1735 | else: | |
1732 | break |
|
1736 | break | |
1733 | else: |
|
1737 | else: | |
1734 | raise Abort(_('invalid date: %r ') % date) |
|
1738 | raise Abort(_('invalid date: %r ') % date) | |
1735 | # validate explicit (probably user-specified) date and |
|
1739 | # validate explicit (probably user-specified) date and | |
1736 | # time zone offset. values must fit in signed 32 bits for |
|
1740 | # time zone offset. values must fit in signed 32 bits for | |
1737 | # current 32-bit linux runtimes. timezones go from UTC-12 |
|
1741 | # current 32-bit linux runtimes. timezones go from UTC-12 | |
1738 | # to UTC+14 |
|
1742 | # to UTC+14 | |
1739 | if abs(when) > 0x7fffffff: |
|
1743 | if abs(when) > 0x7fffffff: | |
1740 | raise Abort(_('date exceeds 32 bits: %d') % when) |
|
1744 | raise Abort(_('date exceeds 32 bits: %d') % when) | |
1741 | if offset < -50400 or offset > 43200: |
|
1745 | if offset < -50400 or offset > 43200: | |
1742 | raise Abort(_('impossible time zone offset: %d') % offset) |
|
1746 | raise Abort(_('impossible time zone offset: %d') % offset) | |
1743 | return when, offset |
|
1747 | return when, offset | |
1744 |
|
1748 | |||
1745 | def matchdate(date): |
|
1749 | def matchdate(date): | |
1746 | """Return a function that matches a given date match specifier |
|
1750 | """Return a function that matches a given date match specifier | |
1747 |
|
1751 | |||
1748 | Formats include: |
|
1752 | Formats include: | |
1749 |
|
1753 | |||
1750 | '{date}' match a given date to the accuracy provided |
|
1754 | '{date}' match a given date to the accuracy provided | |
1751 |
|
1755 | |||
1752 | '<{date}' on or before a given date |
|
1756 | '<{date}' on or before a given date | |
1753 |
|
1757 | |||
1754 | '>{date}' on or after a given date |
|
1758 | '>{date}' on or after a given date | |
1755 |
|
1759 | |||
1756 | """ |
|
1760 | """ | |
1757 |
|
1761 | |||
1758 | def lower(date): |
|
1762 | def lower(date): | |
1759 | d = dict(mb="1", d="1") |
|
1763 | d = dict(mb="1", d="1") | |
1760 | return parsedate(date, extendeddateformats, d)[0] |
|
1764 | return parsedate(date, extendeddateformats, d)[0] | |
1761 |
|
1765 | |||
1762 | def upper(date): |
|
1766 | def upper(date): | |
1763 | d = dict(mb="12", HI="23", M="59", S="59") |
|
1767 | d = dict(mb="12", HI="23", M="59", S="59") | |
1764 | for days in "31 30 29".split(): |
|
1768 | for days in "31 30 29".split(): | |
1765 | try: |
|
1769 | try: | |
1766 | d["d"] = days |
|
1770 | d["d"] = days | |
1767 | return parsedate(date, extendeddateformats, d)[0] |
|
1771 | return parsedate(date, extendeddateformats, d)[0] | |
1768 | except: |
|
1772 | except: | |
1769 | pass |
|
1773 | pass | |
1770 | d["d"] = "28" |
|
1774 | d["d"] = "28" | |
1771 | return parsedate(date, extendeddateformats, d)[0] |
|
1775 | return parsedate(date, extendeddateformats, d)[0] | |
1772 |
|
1776 | |||
1773 | if date[0] == "<": |
|
1777 | if date[0] == "<": | |
1774 | when = upper(date[1:]) |
|
1778 | when = upper(date[1:]) | |
1775 | return lambda x: x <= when |
|
1779 | return lambda x: x <= when | |
1776 | elif date[0] == ">": |
|
1780 | elif date[0] == ">": | |
1777 | when = lower(date[1:]) |
|
1781 | when = lower(date[1:]) | |
1778 | return lambda x: x >= when |
|
1782 | return lambda x: x >= when | |
1779 | elif date[0] == "-": |
|
1783 | elif date[0] == "-": | |
1780 | try: |
|
1784 | try: | |
1781 | days = int(date[1:]) |
|
1785 | days = int(date[1:]) | |
1782 | except ValueError: |
|
1786 | except ValueError: | |
1783 | raise Abort(_("invalid day spec: %s") % date[1:]) |
|
1787 | raise Abort(_("invalid day spec: %s") % date[1:]) | |
1784 | when = makedate()[0] - days * 3600 * 24 |
|
1788 | when = makedate()[0] - days * 3600 * 24 | |
1785 | return lambda x: x >= when |
|
1789 | return lambda x: x >= when | |
1786 | elif " to " in date: |
|
1790 | elif " to " in date: | |
1787 | a, b = date.split(" to ") |
|
1791 | a, b = date.split(" to ") | |
1788 | start, stop = lower(a), upper(b) |
|
1792 | start, stop = lower(a), upper(b) | |
1789 | return lambda x: x >= start and x <= stop |
|
1793 | return lambda x: x >= start and x <= stop | |
1790 | else: |
|
1794 | else: | |
1791 | start, stop = lower(date), upper(date) |
|
1795 | start, stop = lower(date), upper(date) | |
1792 | return lambda x: x >= start and x <= stop |
|
1796 | return lambda x: x >= start and x <= stop | |
1793 |
|
1797 | |||
1794 | def shortuser(user): |
|
1798 | def shortuser(user): | |
1795 | """Return a short representation of a user name or email address.""" |
|
1799 | """Return a short representation of a user name or email address.""" | |
1796 | f = user.find('@') |
|
1800 | f = user.find('@') | |
1797 | if f >= 0: |
|
1801 | if f >= 0: | |
1798 | user = user[:f] |
|
1802 | user = user[:f] | |
1799 | f = user.find('<') |
|
1803 | f = user.find('<') | |
1800 | if f >= 0: |
|
1804 | if f >= 0: | |
1801 | user = user[f+1:] |
|
1805 | user = user[f+1:] | |
1802 | f = user.find(' ') |
|
1806 | f = user.find(' ') | |
1803 | if f >= 0: |
|
1807 | if f >= 0: | |
1804 | user = user[:f] |
|
1808 | user = user[:f] | |
1805 | f = user.find('.') |
|
1809 | f = user.find('.') | |
1806 | if f >= 0: |
|
1810 | if f >= 0: | |
1807 | user = user[:f] |
|
1811 | user = user[:f] | |
1808 | return user |
|
1812 | return user | |
1809 |
|
1813 | |||
1810 | def email(author): |
|
1814 | def email(author): | |
1811 | '''get email of author.''' |
|
1815 | '''get email of author.''' | |
1812 | r = author.find('>') |
|
1816 | r = author.find('>') | |
1813 | if r == -1: r = None |
|
1817 | if r == -1: r = None | |
1814 | return author[author.find('<')+1:r] |
|
1818 | return author[author.find('<')+1:r] | |
1815 |
|
1819 | |||
1816 | def ellipsis(text, maxlength=400): |
|
1820 | def ellipsis(text, maxlength=400): | |
1817 | """Trim string to at most maxlength (default: 400) characters.""" |
|
1821 | """Trim string to at most maxlength (default: 400) characters.""" | |
1818 | if len(text) <= maxlength: |
|
1822 | if len(text) <= maxlength: | |
1819 | return text |
|
1823 | return text | |
1820 | else: |
|
1824 | else: | |
1821 | return "%s..." % (text[:maxlength-3]) |
|
1825 | return "%s..." % (text[:maxlength-3]) | |
1822 |
|
1826 | |||
1823 | def walkrepos(path, followsym=False, seen_dirs=None): |
|
1827 | def walkrepos(path, followsym=False, seen_dirs=None): | |
1824 | '''yield every hg repository under path, recursively.''' |
|
1828 | '''yield every hg repository under path, recursively.''' | |
1825 | def errhandler(err): |
|
1829 | def errhandler(err): | |
1826 | if err.filename == path: |
|
1830 | if err.filename == path: | |
1827 | raise err |
|
1831 | raise err | |
1828 | if followsym and hasattr(os.path, 'samestat'): |
|
1832 | if followsym and hasattr(os.path, 'samestat'): | |
1829 | def _add_dir_if_not_there(dirlst, dirname): |
|
1833 | def _add_dir_if_not_there(dirlst, dirname): | |
1830 | match = False |
|
1834 | match = False | |
1831 | samestat = os.path.samestat |
|
1835 | samestat = os.path.samestat | |
1832 | dirstat = os.stat(dirname) |
|
1836 | dirstat = os.stat(dirname) | |
1833 | for lstdirstat in dirlst: |
|
1837 | for lstdirstat in dirlst: | |
1834 | if samestat(dirstat, lstdirstat): |
|
1838 | if samestat(dirstat, lstdirstat): | |
1835 | match = True |
|
1839 | match = True | |
1836 | break |
|
1840 | break | |
1837 | if not match: |
|
1841 | if not match: | |
1838 | dirlst.append(dirstat) |
|
1842 | dirlst.append(dirstat) | |
1839 | return not match |
|
1843 | return not match | |
1840 | else: |
|
1844 | else: | |
1841 | followsym = False |
|
1845 | followsym = False | |
1842 |
|
1846 | |||
1843 | if (seen_dirs is None) and followsym: |
|
1847 | if (seen_dirs is None) and followsym: | |
1844 | seen_dirs = [] |
|
1848 | seen_dirs = [] | |
1845 | _add_dir_if_not_there(seen_dirs, path) |
|
1849 | _add_dir_if_not_there(seen_dirs, path) | |
1846 | for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): |
|
1850 | for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): | |
1847 | if '.hg' in dirs: |
|
1851 | if '.hg' in dirs: | |
1848 | dirs[:] = [] # don't descend further |
|
1852 | dirs[:] = [] # don't descend further | |
1849 | yield root # found a repository |
|
1853 | yield root # found a repository | |
1850 | qroot = os.path.join(root, '.hg', 'patches') |
|
1854 | qroot = os.path.join(root, '.hg', 'patches') | |
1851 | if os.path.isdir(os.path.join(qroot, '.hg')): |
|
1855 | if os.path.isdir(os.path.join(qroot, '.hg')): | |
1852 | yield qroot # we have a patch queue repo here |
|
1856 | yield qroot # we have a patch queue repo here | |
1853 | elif followsym: |
|
1857 | elif followsym: | |
1854 | newdirs = [] |
|
1858 | newdirs = [] | |
1855 | for d in dirs: |
|
1859 | for d in dirs: | |
1856 | fname = os.path.join(root, d) |
|
1860 | fname = os.path.join(root, d) | |
1857 | if _add_dir_if_not_there(seen_dirs, fname): |
|
1861 | if _add_dir_if_not_there(seen_dirs, fname): | |
1858 | if os.path.islink(fname): |
|
1862 | if os.path.islink(fname): | |
1859 | for hgname in walkrepos(fname, True, seen_dirs): |
|
1863 | for hgname in walkrepos(fname, True, seen_dirs): | |
1860 | yield hgname |
|
1864 | yield hgname | |
1861 | else: |
|
1865 | else: | |
1862 | newdirs.append(d) |
|
1866 | newdirs.append(d) | |
1863 | dirs[:] = newdirs |
|
1867 | dirs[:] = newdirs | |
1864 |
|
1868 | |||
1865 | _rcpath = None |
|
1869 | _rcpath = None | |
1866 |
|
1870 | |||
1867 | def os_rcpath(): |
|
1871 | def os_rcpath(): | |
1868 | '''return default os-specific hgrc search path''' |
|
1872 | '''return default os-specific hgrc search path''' | |
1869 | path = system_rcpath() |
|
1873 | path = system_rcpath() | |
1870 | path.extend(user_rcpath()) |
|
1874 | path.extend(user_rcpath()) | |
1871 | path = [os.path.normpath(f) for f in path] |
|
1875 | path = [os.path.normpath(f) for f in path] | |
1872 | return path |
|
1876 | return path | |
1873 |
|
1877 | |||
1874 | def rcpath(): |
|
1878 | def rcpath(): | |
1875 | '''return hgrc search path. if env var HGRCPATH is set, use it. |
|
1879 | '''return hgrc search path. if env var HGRCPATH is set, use it. | |
1876 | for each item in path, if directory, use files ending in .rc, |
|
1880 | for each item in path, if directory, use files ending in .rc, | |
1877 | else use item. |
|
1881 | else use item. | |
1878 | make HGRCPATH empty to only look in .hg/hgrc of current repo. |
|
1882 | make HGRCPATH empty to only look in .hg/hgrc of current repo. | |
1879 | if no HGRCPATH, use default os-specific path.''' |
|
1883 | if no HGRCPATH, use default os-specific path.''' | |
1880 | global _rcpath |
|
1884 | global _rcpath | |
1881 | if _rcpath is None: |
|
1885 | if _rcpath is None: | |
1882 | if 'HGRCPATH' in os.environ: |
|
1886 | if 'HGRCPATH' in os.environ: | |
1883 | _rcpath = [] |
|
1887 | _rcpath = [] | |
1884 | for p in os.environ['HGRCPATH'].split(os.pathsep): |
|
1888 | for p in os.environ['HGRCPATH'].split(os.pathsep): | |
1885 | if not p: continue |
|
1889 | if not p: continue | |
1886 | if os.path.isdir(p): |
|
1890 | if os.path.isdir(p): | |
1887 | for f, kind in osutil.listdir(p): |
|
1891 | for f, kind in osutil.listdir(p): | |
1888 | if f.endswith('.rc'): |
|
1892 | if f.endswith('.rc'): | |
1889 | _rcpath.append(os.path.join(p, f)) |
|
1893 | _rcpath.append(os.path.join(p, f)) | |
1890 | else: |
|
1894 | else: | |
1891 | _rcpath.append(p) |
|
1895 | _rcpath.append(p) | |
1892 | else: |
|
1896 | else: | |
1893 | _rcpath = os_rcpath() |
|
1897 | _rcpath = os_rcpath() | |
1894 | return _rcpath |
|
1898 | return _rcpath | |
1895 |
|
1899 | |||
1896 | def bytecount(nbytes): |
|
1900 | def bytecount(nbytes): | |
1897 | '''return byte count formatted as readable string, with units''' |
|
1901 | '''return byte count formatted as readable string, with units''' | |
1898 |
|
1902 | |||
1899 | units = ( |
|
1903 | units = ( | |
1900 | (100, 1<<30, _('%.0f GB')), |
|
1904 | (100, 1<<30, _('%.0f GB')), | |
1901 | (10, 1<<30, _('%.1f GB')), |
|
1905 | (10, 1<<30, _('%.1f GB')), | |
1902 | (1, 1<<30, _('%.2f GB')), |
|
1906 | (1, 1<<30, _('%.2f GB')), | |
1903 | (100, 1<<20, _('%.0f MB')), |
|
1907 | (100, 1<<20, _('%.0f MB')), | |
1904 | (10, 1<<20, _('%.1f MB')), |
|
1908 | (10, 1<<20, _('%.1f MB')), | |
1905 | (1, 1<<20, _('%.2f MB')), |
|
1909 | (1, 1<<20, _('%.2f MB')), | |
1906 | (100, 1<<10, _('%.0f KB')), |
|
1910 | (100, 1<<10, _('%.0f KB')), | |
1907 | (10, 1<<10, _('%.1f KB')), |
|
1911 | (10, 1<<10, _('%.1f KB')), | |
1908 | (1, 1<<10, _('%.2f KB')), |
|
1912 | (1, 1<<10, _('%.2f KB')), | |
1909 | (1, 1, _('%.0f bytes')), |
|
1913 | (1, 1, _('%.0f bytes')), | |
1910 | ) |
|
1914 | ) | |
1911 |
|
1915 | |||
1912 | for multiplier, divisor, format in units: |
|
1916 | for multiplier, divisor, format in units: | |
1913 | if nbytes >= divisor * multiplier: |
|
1917 | if nbytes >= divisor * multiplier: | |
1914 | return format % (nbytes / float(divisor)) |
|
1918 | return format % (nbytes / float(divisor)) | |
1915 | return units[-1][2] % nbytes |
|
1919 | return units[-1][2] % nbytes | |
1916 |
|
1920 | |||
1917 | def drop_scheme(scheme, path): |
|
1921 | def drop_scheme(scheme, path): | |
1918 | sc = scheme + ':' |
|
1922 | sc = scheme + ':' | |
1919 | if path.startswith(sc): |
|
1923 | if path.startswith(sc): | |
1920 | path = path[len(sc):] |
|
1924 | path = path[len(sc):] | |
1921 | if path.startswith('//'): |
|
1925 | if path.startswith('//'): | |
1922 | path = path[2:] |
|
1926 | path = path[2:] | |
1923 | return path |
|
1927 | return path | |
1924 |
|
1928 | |||
1925 | def uirepr(s): |
|
1929 | def uirepr(s): | |
1926 | # Avoid double backslash in Windows path repr() |
|
1930 | # Avoid double backslash in Windows path repr() | |
1927 | return repr(s).replace('\\\\', '\\') |
|
1931 | return repr(s).replace('\\\\', '\\') | |
1928 |
|
1932 | |||
1929 | def hidepassword(url): |
|
1933 | def hidepassword(url): | |
1930 | '''hide user credential in a url string''' |
|
1934 | '''hide user credential in a url string''' | |
1931 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) |
|
1935 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
1932 | netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) |
|
1936 | netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) | |
1933 | return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) |
|
1937 | return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) | |
1934 |
|
1938 | |||
1935 | def removeauth(url): |
|
1939 | def removeauth(url): | |
1936 | '''remove all authentication information from a url string''' |
|
1940 | '''remove all authentication information from a url string''' | |
1937 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) |
|
1941 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
1938 | netloc = netloc[netloc.find('@')+1:] |
|
1942 | netloc = netloc[netloc.find('@')+1:] | |
1939 | return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) |
|
1943 | return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) |
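
The util.py hunk above is almost entirely unchanged context (every function simply shifts down by four lines), but the chunkbuffer/filechunkiter pair it carries is a compact pull-based streaming reader. Below is a minimal standalone sketch of that pattern, for illustration only: it is not the Mercurial code itself, it drops the cStringIO collector and the optional limit argument, and the file name in the usage comment is invented.

    # An iterator yields raw chunks of whatever size the source produces;
    # a small buffer re-slices them into reads of exactly the requested
    # length, returning less only when the source runs dry (EOF).

    def filechunks(f, size=65536):
        # simplified mirror of filechunkiter() above, without the limit arg
        while True:
            s = f.read(size)
            if not s:
                break
            yield s

    class ChunkBuffer(object):
        def __init__(self, in_iter):
            self.iter = iter(in_iter)
            self.buf = b''

        def read(self, l):
            # pull chunks until the request can be satisfied or input ends
            while len(self.buf) < l:
                try:
                    self.buf += next(self.iter)
                except StopIteration:
                    break
            s, self.buf = self.buf[:l], self.buf[l:]
            return s

    # usage sketch (hypothetical file name):
    # with open('some-bundle', 'rb') as f:
    #     cb = ChunkBuffer(filechunks(f, 8192))
    #     header = cb.read(16)
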
@@ -1,113 +1,111 | |||||
1 | {header} |
|
1 | {header} | |
2 | <title>{repo|escape}: revision graph</title> |
|
2 | <title>{repo|escape}: revision graph</title> | |
3 | <link rel="alternate" type="application/atom+xml" |
|
3 | <link rel="alternate" type="application/atom+xml" | |
4 | href="{url}atom-log" title="Atom feed for {repo|escape}: log"> |
|
4 | href="{url}atom-log" title="Atom feed for {repo|escape}: log"> | |
5 | <link rel="alternate" type="application/rss+xml" |
|
5 | <link rel="alternate" type="application/rss+xml" | |
6 | href="{url}rss-log" title="RSS feed for {repo|escape}: log"> |
|
6 | href="{url}rss-log" title="RSS feed for {repo|escape}: log"> | |
7 | <!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> |
|
7 | <!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> | |
8 | </head> |
|
8 | </head> | |
9 | <body> |
|
9 | <body> | |
10 |
|
10 | |||
11 | <div class="container"> |
|
11 | <div class="container"> | |
12 | <div class="menu"> |
|
12 | <div class="menu"> | |
13 | <div class="logo"> |
|
13 | <div class="logo"> | |
14 | <a href="http://www.selenic.com/mercurial/"> |
|
14 | <a href="http://www.selenic.com/mercurial/"> | |
15 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> |
|
15 | <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> | |
16 | </div> |
|
16 | </div> | |
17 | <ul> |
|
17 | <ul> | |
18 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> |
|
18 | <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> | |
19 | <li class="active">graph</li> |
|
19 | <li class="active">graph</li> | |
20 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> |
|
20 | <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> | |
21 | </ul> |
|
21 | </ul> | |
22 | <ul> |
|
22 | <ul> | |
23 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> |
|
23 | <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> | |
24 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> |
|
24 | <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> | |
25 | </ul> |
|
25 | </ul> | |
26 | </div> |
|
26 | </div> | |
27 |
|
27 | |||
28 | <div class="main"> |
|
28 | <div class="main"> | |
29 | <h2>{repo|escape}</h2> |
|
29 | <h2>{repo|escape}</h2> | |
30 | <h3>graph</h3> |
|
30 | <h3>graph</h3> | |
31 |
|
31 | |||
32 | <form class="search" action="{url}log"> |
|
32 | <form class="search" action="{url}log"> | |
33 | {sessionvars%hiddenformentry} |
|
33 | {sessionvars%hiddenformentry} | |
34 | <p><input name="rev" id="search1" type="text" size="30"></p> |
|
34 | <p><input name="rev" id="search1" type="text" size="30"></p> | |
35 | </form> |
|
35 | </form> | |
36 |
|
36 | |||
37 | <div class="navigate"> |
|
37 | <div class="navigate"> | |
38 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> |
|
38 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |
39 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> |
|
39 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |
40 | | {changenav%navgraphentry} |
|
40 | | {changenav%navgraphentry} | |
41 | </div> |
|
41 | </div> | |
42 |
|
42 | |||
43 |
< |
|
43 | <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript> | |
44 |
|
44 | |||
45 | <div id="wrapper"> |
|
45 | <div id="wrapper"> | |
46 | <ul id="nodebgs"></ul> |
|
46 | <ul id="nodebgs"></ul> | |
47 | <canvas id="graph" width="224" height="{canvasheight}"></canvas> |
|
47 | <canvas id="graph" width="224" height="{canvasheight}"></canvas> | |
48 | <ul id="graphnodes"></ul> |
|
48 | <ul id="graphnodes"></ul> | |
49 | </div> |
|
49 | </div> | |
50 |
|
50 | |||
51 | <script type="text/javascript" src="{staticurl}graph.js"></script> |
|
51 | <script type="text/javascript" src="{staticurl}graph.js"></script> | |
52 | <script type="text/javascript"> |
|
52 | <script type="text/javascript"> | |
53 | <!-- hide script content |
|
53 | <!-- hide script content | |
54 |
|
54 | |||
55 | document.getElementById('noscript').style.display = 'none'; |
|
|||
56 |
|
||||
57 | var data = {jsdata|json}; |
|
55 | var data = {jsdata|json}; | |
58 | var graph = new Graph(); |
|
56 | var graph = new Graph(); | |
59 | graph.scale({bg_height}); |
|
57 | graph.scale({bg_height}); | |
60 |
|
58 | |||
61 | graph.edge = function(x0, y0, x1, y1, color) { |
|
59 | graph.edge = function(x0, y0, x1, y1, color) { | |
62 |
|
60 | |||
63 | this.setColor(color, 0.0, 0.65); |
|
61 | this.setColor(color, 0.0, 0.65); | |
64 | this.ctx.beginPath(); |
|
62 | this.ctx.beginPath(); | |
65 | this.ctx.moveTo(x0, y0); |
|
63 | this.ctx.moveTo(x0, y0); | |
66 | this.ctx.lineTo(x1, y1); |
|
64 | this.ctx.lineTo(x1, y1); | |
67 | this.ctx.stroke(); |
|
65 | this.ctx.stroke(); | |
68 |
|
66 | |||
69 | } |
|
67 | } | |
70 |
|
68 | |||
71 | var revlink = '<li style="_STYLE"><span class="desc">'; |
|
69 | var revlink = '<li style="_STYLE"><span class="desc">'; | |
72 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; |
|
70 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; | |
73 | revlink += '</span><span class="tag">_TAGS</span>'; |
|
71 | revlink += '</span><span class="tag">_TAGS</span>'; | |
74 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; |
|
72 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; | |
75 |
|
73 | |||
76 | graph.vertex = function(x, y, color, parity, cur) { |
|
74 | graph.vertex = function(x, y, color, parity, cur) { | |
77 |
|
75 | |||
78 | this.ctx.beginPath(); |
|
76 | this.ctx.beginPath(); | |
79 | color = this.setColor(color, 0.25, 0.75); |
|
77 | color = this.setColor(color, 0.25, 0.75); | |
80 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); |
|
78 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |
81 | this.ctx.fill(); |
|
79 | this.ctx.fill(); | |
82 |
|
80 | |||
83 | var bg = '<li class="bg parity' + parity + '"></li>'; |
|
81 | var bg = '<li class="bg parity' + parity + '"></li>'; | |
84 | var left = (this.columns + 1) * this.bg_height; |
|
82 | var left = (this.columns + 1) * this.bg_height; | |
85 | var nstyle = 'padding-left: ' + left + 'px;'; |
|
83 | var nstyle = 'padding-left: ' + left + 'px;'; | |
86 | var item = revlink.replace(/_STYLE/, nstyle); |
|
84 | var item = revlink.replace(/_STYLE/, nstyle); | |
87 | item = item.replace(/_PARITY/, 'parity' + parity); |
|
85 | item = item.replace(/_PARITY/, 'parity' + parity); | |
88 | item = item.replace(/_NODEID/, cur[0]); |
|
86 | item = item.replace(/_NODEID/, cur[0]); | |
89 | item = item.replace(/_NODEID/, cur[0]); |
|
87 | item = item.replace(/_NODEID/, cur[0]); | |
90 | item = item.replace(/_DESC/, cur[3]); |
|
88 | item = item.replace(/_DESC/, cur[3]); | |
91 | item = item.replace(/_USER/, cur[4]); |
|
89 | item = item.replace(/_USER/, cur[4]); | |
92 | item = item.replace(/_DATE/, cur[5]); |
|
90 | item = item.replace(/_DATE/, cur[5]); | |
93 | item = item.replace(/_TAGS/, cur[7].join(' ')); |
|
91 | item = item.replace(/_TAGS/, cur[7].join(' ')); | |
94 |
|
92 | |||
95 | return [bg, item]; |
|
93 | return [bg, item]; | |
96 |
|
94 | |||
97 | } |
|
95 | } | |
98 |
|
96 | |||
99 | graph.render(data); |
|
97 | graph.render(data); | |
100 |
|
98 | |||
101 | // stop hiding script --> |
|
99 | // stop hiding script --> | |
102 | </script> |
|
100 | </script> | |
103 |
|
101 | |||
104 | <div class="navigate"> |
|
102 | <div class="navigate"> | |
105 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> |
|
103 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |
106 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> |
|
104 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |
107 | | {changenav%navgraphentry} |
|
105 | | {changenav%navgraphentry} | |
108 | </div> |
|
106 | </div> | |
109 |
|
107 | |||
110 | </div> |
|
108 | </div> | |
111 | </div> |
|
109 | </div> | |
112 |
|
110 | |||
113 | {footer} |
|
111 | {footer} |
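
This graph template, and the two gitweb-style ones that follow, make the same two edits: the placeholder element that the inline script used to hide is replaced by a plain <noscript> element, and the now-unneeded document.getElementById('noscript') call is dropped. The inline script also documents, implicitly, the shape of each row in the {jsdata|json} array: the code above reads cur[0] as the node hash (used for both the href and the title), cur[3] as the description, cur[4] as the user, cur[5] as the age string, cur[6] as a [branch, flag] pair, and cur[7] as a list of tags. The sketch below merely restates that layout in Python for reference; positions 1 and 2 are left as placeholders because this excerpt never shows how graph.render() consumes them, and the example values are invented.

    # Per-revision row layout implied by the graph.js snippets above.
    def graphrow(node, desc, user, age, branch, iscurrent, tags):
        return [
            node,                 # cur[0] -> _NODEID (href and title)
            None,                 # cur[1]   consumed by graph.render(), not shown here
            None,                 # cur[2]   consumed by graph.render(), not shown here
            desc,                 # cur[3] -> _DESC
            user,                 # cur[4] -> _USER
            age,                  # cur[5] -> "_DATE ago"
            [branch, iscurrent],  # cur[6] -> branchtag / inbranchtag span
            tags,                 # cur[7] -> joined into _TAGS
        ]

    # invented example row:
    # graphrow('1e4e1b8f71e0', 'add graph page', 'alice', '3 days',
    #          'default', False, ['tip'])
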
@@ -1,121 +1,119 | |||||
1 | #header# |
|
1 | #header# | |
2 | <title>#repo|escape#: Graph</title> |
|
2 | <title>#repo|escape#: Graph</title> | |
3 | <link rel="alternate" type="application/atom+xml" |
|
3 | <link rel="alternate" type="application/atom+xml" | |
4 | href="{url}atom-log" title="Atom feed for #repo|escape#"/> |
|
4 | href="{url}atom-log" title="Atom feed for #repo|escape#"/> | |
5 | <link rel="alternate" type="application/rss+xml" |
|
5 | <link rel="alternate" type="application/rss+xml" | |
6 | href="{url}rss-log" title="RSS feed for #repo|escape#"/> |
|
6 | href="{url}rss-log" title="RSS feed for #repo|escape#"/> | |
7 | </head> |
|
7 | </head> | |
8 | <body> |
|
8 | <body> | |
9 |
|
9 | |||
10 | <div class="page_header"> |
|
10 | <div class="page_header"> | |
11 | <a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / graph |
|
11 | <a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / graph | |
12 | </div> |
|
12 | </div> | |
13 |
|
13 | |||
14 | <form action="{url}log"> |
|
14 | <form action="{url}log"> | |
15 | {sessionvars%hiddenformentry} |
|
15 | {sessionvars%hiddenformentry} | |
16 | <div class="search"> |
|
16 | <div class="search"> | |
17 | <input type="text" name="rev" /> |
|
17 | <input type="text" name="rev" /> | |
18 | </div> |
|
18 | </div> | |
19 | </form> |
|
19 | </form> | |
20 | <div class="page_nav"> |
|
20 | <div class="page_nav"> | |
21 | <a href="{url}summary{sessionvars%urlparameter}">summary</a> | |
|
21 | <a href="{url}summary{sessionvars%urlparameter}">summary</a> | | |
22 | <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | |
|
22 | <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | | |
23 | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | |
|
23 | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | | |
24 | graph | |
|
24 | graph | | |
25 | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | |
|
25 | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | | |
26 | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a> |
|
26 | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a> | |
27 | <br/> |
|
27 | <br/> | |
28 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> |
|
28 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |
29 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> |
|
29 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |
30 | | #changenav%navgraphentry#<br/> |
|
30 | | #changenav%navgraphentry#<br/> | |
31 | </div> |
|
31 | </div> | |
32 |
|
32 | |||
33 | <div class="title"> </div> |
|
33 | <div class="title"> </div> | |
34 |
|
34 | |||
35 |
< |
|
35 | <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript> | |
36 |
|
36 | |||
37 | <div id="wrapper"> |
|
37 | <div id="wrapper"> | |
38 | <ul id="nodebgs"></ul> |
|
38 | <ul id="nodebgs"></ul> | |
39 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> |
|
39 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> | |
40 | <ul id="graphnodes"></ul> |
|
40 | <ul id="graphnodes"></ul> | |
41 | </div> |
|
41 | </div> | |
42 |
|
42 | |||
43 | <script type="text/javascript" src="#staticurl#graph.js"></script> |
|
43 | <script type="text/javascript" src="#staticurl#graph.js"></script> | |
44 | <script> |
|
44 | <script> | |
45 | <!-- hide script content |
|
45 | <!-- hide script content | |
46 |
|
46 | |||
47 | document.getElementById('noscript').style.display = 'none'; |
|
|||
48 |
|
||||
49 | var data = {jsdata|json}; |
|
47 | var data = {jsdata|json}; | |
50 | var graph = new Graph(); |
|
48 | var graph = new Graph(); | |
51 | graph.scale({bg_height}); |
|
49 | graph.scale({bg_height}); | |
52 |
|
50 | |||
53 | graph.edge = function(x0, y0, x1, y1, color) { |
|
51 | graph.edge = function(x0, y0, x1, y1, color) { | |
54 |
|
52 | |||
55 | this.setColor(color, 0.0, 0.65); |
|
53 | this.setColor(color, 0.0, 0.65); | |
56 | this.ctx.beginPath(); |
|
54 | this.ctx.beginPath(); | |
57 | this.ctx.moveTo(x0, y0); |
|
55 | this.ctx.moveTo(x0, y0); | |
58 | this.ctx.lineTo(x1, y1); |
|
56 | this.ctx.lineTo(x1, y1); | |
59 | this.ctx.stroke(); |
|
57 | this.ctx.stroke(); | |
60 |
|
58 | |||
61 | } |
|
59 | } | |
62 |
|
60 | |||
63 | var revlink = '<li style="_STYLE"><span class="desc">'; |
|
61 | var revlink = '<li style="_STYLE"><span class="desc">'; | |
64 | revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>'; |
|
62 | revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>'; | |
65 | revlink += '</span> _TAGS'; |
|
63 | revlink += '</span> _TAGS'; | |
66 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; |
|
64 | revlink += '<span class="info">_DATE ago, by _USER</span></li>'; | |
67 |
|
65 | |||
68 | graph.vertex = function(x, y, color, parity, cur) { |
|
66 | graph.vertex = function(x, y, color, parity, cur) { | |
69 |
|
67 | |||
70 | this.ctx.beginPath(); |
|
68 | this.ctx.beginPath(); | |
71 | color = this.setColor(color, 0.25, 0.75); |
|
69 | color = this.setColor(color, 0.25, 0.75); | |
72 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); |
|
70 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |
73 | this.ctx.fill(); |
|
71 | this.ctx.fill(); | |
74 |
|
72 | |||
75 | var bg = '<li class="bg parity' + parity + '"></li>'; |
|
73 | var bg = '<li class="bg parity' + parity + '"></li>'; | |
76 | var left = (this.columns + 1) * this.bg_height; |
|
74 | var left = (this.columns + 1) * this.bg_height; | |
77 | var nstyle = 'padding-left: ' + left + 'px;'; |
|
75 | var nstyle = 'padding-left: ' + left + 'px;'; | |
78 | var item = revlink.replace(/_STYLE/, nstyle); |
|
76 | var item = revlink.replace(/_STYLE/, nstyle); | |
79 | item = item.replace(/_PARITY/, 'parity' + parity); |
|
77 | item = item.replace(/_PARITY/, 'parity' + parity); | |
80 | item = item.replace(/_NODEID/, cur[0]); |
|
78 | item = item.replace(/_NODEID/, cur[0]); | |
81 | item = item.replace(/_NODEID/, cur[0]); |
|
79 | item = item.replace(/_NODEID/, cur[0]); | |
82 | item = item.replace(/_DESC/, cur[3]); |
|
80 | item = item.replace(/_DESC/, cur[3]); | |
83 | item = item.replace(/_USER/, cur[4]); |
|
81 | item = item.replace(/_USER/, cur[4]); | |
84 | item = item.replace(/_DATE/, cur[5]); |
|
82 | item = item.replace(/_DATE/, cur[5]); | |
85 |
|
83 | |||
86 | var tagspan = ''; |
|
84 | var tagspan = ''; | |
87 | if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) { |
|
85 | if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) { | |
88 | tagspan = '<span class="logtags">'; |
|
86 | tagspan = '<span class="logtags">'; | |
89 | if (cur[6][1]) { |
|
87 | if (cur[6][1]) { | |
90 | tagspan += '<span class="branchtag" title="' + cur[6][0] + '">'; |
|
88 | tagspan += '<span class="branchtag" title="' + cur[6][0] + '">'; | |
91 | tagspan += cur[6][0] + '</span> '; |
|
89 | tagspan += cur[6][0] + '</span> '; | |
92 | } else if (!cur[6][1] && cur[6][0] != 'default') { |
|
90 | } else if (!cur[6][1] && cur[6][0] != 'default') { | |
93 | tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">'; |
|
91 | tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">'; | |
94 | tagspan += cur[6][0] + '</span> '; |
|
92 | tagspan += cur[6][0] + '</span> '; | |
95 | } |
|
93 | } | |
96 | if (cur[7].length) { |
|
94 | if (cur[7].length) { | |
97 | for (var t in cur[7]) { |
|
95 | for (var t in cur[7]) { | |
98 | var tag = cur[7][t]; |
|
96 | var tag = cur[7][t]; | |
99 | tagspan += '<span class="tagtag">' + tag + '</span> '; |
|
97 | tagspan += '<span class="tagtag">' + tag + '</span> '; | |
100 | } |
|
98 | } | |
101 | } |
|
99 | } | |
102 | tagspan += '</span>'; |
|
100 | tagspan += '</span>'; | |
103 | } |
|
101 | } | |
104 |
|
102 | |||
105 | item = item.replace(/_TAGS/, tagspan); |
|
103 | item = item.replace(/_TAGS/, tagspan); | |
106 | return [bg, item]; |
|
104 | return [bg, item]; | |
107 |
|
105 | |||
108 | } |
|
106 | } | |
109 |
|
107 | |||
110 | graph.render(data); |
|
108 | graph.render(data); | |
111 |
|
109 | |||
112 | // stop hiding script --> |
|
110 | // stop hiding script --> | |
113 | </script> |
|
111 | </script> | |
114 |
|
112 | |||
115 | <div class="page_nav"> |
|
113 | <div class="page_nav"> | |
116 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> |
|
114 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a> | |
117 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> |
|
115 | <a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a> | |
118 | | {changenav%navgraphentry} |
|
116 | | {changenav%navgraphentry} | |
119 | </div> |
|
117 | </div> | |
120 |
|
118 | |||
121 | #footer# |
|
119 | #footer# |
@@ -1,97 +1,95 | |||||
1 | #header# |
|
1 | #header# | |
2 | <title>#repo|escape#: graph</title> |
|
2 | <title>#repo|escape#: graph</title> | |
3 | <link rel="alternate" type="application/atom+xml" |
|
3 | <link rel="alternate" type="application/atom+xml" | |
4 | href="#url#atom-tags" title="Atom feed for #repo|escape#: tags"> |
|
4 | href="#url#atom-tags" title="Atom feed for #repo|escape#: tags"> | |
5 | <link rel="alternate" type="application/rss+xml" |
|
5 | <link rel="alternate" type="application/rss+xml" | |
6 | href="#url#rss-tags" title="RSS feed for #repo|escape#: tags"> |
|
6 | href="#url#rss-tags" title="RSS feed for #repo|escape#: tags"> | |
7 | <!--[if IE]><script type="text/javascript" src="#staticurl#excanvas.js"></script><![endif]--> |
|
7 | <!--[if IE]><script type="text/javascript" src="#staticurl#excanvas.js"></script><![endif]--> | |
8 | </head> |
|
8 | </head> | |
9 | <body> |
|
9 | <body> | |
10 |
|
10 | |||
11 | <div class="buttons"> |
|
11 | <div class="buttons"> | |
12 | <a href="#url#log{sessionvars%urlparameter}">changelog</a> |
|
12 | <a href="#url#log{sessionvars%urlparameter}">changelog</a> | |
13 | <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a> |
|
13 | <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a> | |
14 | <a href="#url#tags{sessionvars%urlparameter}">tags</a> |
|
14 | <a href="#url#tags{sessionvars%urlparameter}">tags</a> | |
15 | <a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a> |
|
15 | <a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a> | |
16 | </div> |
|
16 | </div> | |
17 |
|
17 | |||
18 | <h2>graph</h2> |
|
18 | <h2>graph</h2> | |
19 |
|
19 | |||
20 | <form action="#url#log"> |
|
20 | <form action="#url#log"> | |
21 | {sessionvars%hiddenformentry} |
|
21 | {sessionvars%hiddenformentry} | |
22 | <p> |
|
22 | <p> | |
23 | <label for="search1">search:</label> |
|
23 | <label for="search1">search:</label> | |
24 | <input name="rev" id="search1" type="text" size="30"> |
|
24 | <input name="rev" id="search1" type="text" size="30"> | |
25 | navigate: <small class="navigate">#changenav%navgraphentry#</small> |
|
25 | navigate: <small class="navigate">#changenav%navgraphentry#</small> | |
26 | </p> |
|
26 | </p> | |
27 | </form> |
|
27 | </form> | |
28 |
|
28 | |||
29 |
< |
|
29 | <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript> | |
30 |
|
30 | |||
31 | <div id="wrapper"> |
|
31 | <div id="wrapper"> | |
32 | <ul id="nodebgs"></ul> |
|
32 | <ul id="nodebgs"></ul> | |
33 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> |
|
33 | <canvas id="graph" width="224" height="#canvasheight#"></canvas> | |
34 | <ul id="graphnodes"></ul> |
|
34 | <ul id="graphnodes"></ul> | |
35 | </div> |
|
35 | </div> | |
36 |
|
36 | |||
37 | <script type="text/javascript" src="#staticurl#graph.js"></script> |
|
37 | <script type="text/javascript" src="#staticurl#graph.js"></script> | |
38 | <script type="text/javascript"> |
|
38 | <script type="text/javascript"> | |
39 | <!-- hide script content |
|
39 | <!-- hide script content | |
40 |
|
40 | |||
41 | document.getElementById('noscript').style.display = 'none'; |
|
|||
42 |
|
||||
43 | var data = {jsdata|json}; |
|
41 | var data = {jsdata|json}; | |
44 | var graph = new Graph(); |
|
42 | var graph = new Graph(); | |
45 | graph.scale({bg_height}); |
|
43 | graph.scale({bg_height}); | |
46 |
|
44 | |||
47 | graph.edge = function(x0, y0, x1, y1, color) { |
|
45 | graph.edge = function(x0, y0, x1, y1, color) { | |
48 |
|
46 | |||
49 | this.setColor(color, 0.0, 0.65); |
|
47 | this.setColor(color, 0.0, 0.65); | |
50 | this.ctx.beginPath(); |
|
48 | this.ctx.beginPath(); | |
51 | this.ctx.moveTo(x0, y0); |
|
49 | this.ctx.moveTo(x0, y0); | |
52 | this.ctx.lineTo(x1, y1); |
|
50 | this.ctx.lineTo(x1, y1); | |
53 | this.ctx.stroke(); |
|
51 | this.ctx.stroke(); | |
54 |
|
52 | |||
55 | } |
|
53 | } | |
56 |
|
54 | |||
57 | var revlink = '<li style="_STYLE"><span class="desc">'; |
|
55 | var revlink = '<li style="_STYLE"><span class="desc">'; | |
58 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; |
|
56 | revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; | |
59 | revlink += '</span><span class="info">_DATE ago, by _USER</span></li>'; |
|
57 | revlink += '</span><span class="info">_DATE ago, by _USER</span></li>'; | |
60 |
|
58 | |||
61 | graph.vertex = function(x, y, color, parity, cur) { |
|
59 | graph.vertex = function(x, y, color, parity, cur) { | |
62 |
|
60 | |||
63 | this.ctx.beginPath(); |
|
61 | this.ctx.beginPath(); | |
64 | color = this.setColor(color, 0.25, 0.75); |
|
62 | color = this.setColor(color, 0.25, 0.75); | |
65 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); |
|
63 | this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); | |
66 | this.ctx.fill(); |
|
64 | this.ctx.fill(); | |
67 |
|
65 | |||
68 | var bg = '<li class="bg parity' + parity + '"></li>'; |
|
66 | var bg = '<li class="bg parity' + parity + '"></li>'; | |
69 | var left = (this.columns + 1) * this.bg_height; |
|
67 | var left = (this.columns + 1) * this.bg_height; | |
70 | var nstyle = 'padding-left: ' + left + 'px;'; |
|
68 | var nstyle = 'padding-left: ' + left + 'px;'; | |
71 | var item = revlink.replace(/_STYLE/, nstyle); |
|
69 | var item = revlink.replace(/_STYLE/, nstyle); | |
72 | item = item.replace(/_PARITY/, 'parity' + parity); |
|
70 | item = item.replace(/_PARITY/, 'parity' + parity); | |
73 | item = item.replace(/_NODEID/, cur[0]); |
|
71 | item = item.replace(/_NODEID/, cur[0]); | |
74 | item = item.replace(/_NODEID/, cur[0]); |
|
72 | item = item.replace(/_NODEID/, cur[0]); | |
75 | item = item.replace(/_DESC/, cur[3]); |
|
73 | item = item.replace(/_DESC/, cur[3]); | |
76 | item = item.replace(/_USER/, cur[4]); |
|
74 | item = item.replace(/_USER/, cur[4]); | |
77 | item = item.replace(/_DATE/, cur[5]); |
|
75 | item = item.replace(/_DATE/, cur[5]); | |
78 |
|
76 | |||
79 | return [bg, item]; |
|
77 | return [bg, item]; | |
80 |
|
78 | |||
81 | } |
|
79 | } | |
82 |
|
80 | |||
83 | graph.render(data); |
|
81 | graph.render(data); | |
84 |
|
82 | |||
85 | // stop hiding script --> |
|
83 | // stop hiding script --> | |
86 | </script> |
|
84 | </script> | |
87 |
|
85 | |||
88 | <form action="#url#log"> |
|
86 | <form action="#url#log"> | |
89 | {sessionvars%hiddenformentry} |
|
87 | {sessionvars%hiddenformentry} | |
90 | <p> |
|
88 | <p> | |
91 | <label for="search1">search:</label> |
|
89 | <label for="search1">search:</label> | |
92 | <input name="rev" id="search1" type="text" size="30"> |
|
90 | <input name="rev" id="search1" type="text" size="30"> | |
93 | navigate: <small class="navigate">#changenav%navgraphentry#</small> |
|
91 | navigate: <small class="navigate">#changenav%navgraphentry#</small> | |
94 | </p> |
|
92 | </p> | |
95 | </form> |
|
93 | </form> | |
96 |
|
94 | |||
97 | #footer# |
|
95 | #footer# |
@@ -1,63 +1,67 | |||||
1 | uisetup called |
|
1 | uisetup called | |
2 | ui.parentui isnot None |
|
2 | ui.parentui isnot None | |
3 | reposetup called for a |
|
3 | reposetup called for a | |
4 | ui == repo.ui |
|
4 | ui == repo.ui | |
5 | Foo |
|
5 | Foo | |
6 | uisetup called |
|
6 | uisetup called | |
7 | ui.parentui is None |
|
7 | ui.parentui is None | |
8 | reposetup called for a |
|
8 | reposetup called for a | |
9 | ui == repo.ui |
|
9 | ui == repo.ui | |
10 | reposetup called for b |
|
10 | reposetup called for b | |
11 | ui == repo.ui |
|
11 | ui == repo.ui | |
12 | updating working directory |
|
12 | updating working directory | |
13 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
13 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
14 | uisetup called |
|
14 | uisetup called | |
15 | ui.parentui is None |
|
15 | ui.parentui is None | |
16 | Bar |
|
16 | Bar | |
17 | % module/__init__.py-style |
|
17 | % module/__init__.py-style | |
18 | uisetup called |
|
18 | uisetup called | |
19 | ui.parentui isnot None |
|
19 | ui.parentui isnot None | |
20 | reposetup called for a |
|
20 | reposetup called for a | |
21 | ui == repo.ui |
|
21 | ui == repo.ui | |
22 | Foo |
|
22 | Foo | |
23 | empty extension - empty cmdtable |
|
23 | empty extension - empty cmdtable | |
24 |
|
24 | |||
25 | no commands defined |
|
25 | no commands defined | |
26 | debugextension extension - only debugcommands |
|
26 | debugextension extension - only debugcommands | |
27 |
|
27 | |||
28 | no commands defined |
|
28 | no commands defined | |
29 | debugextension extension - only debugcommands |
|
29 | debugextension extension - only debugcommands | |
30 |
|
30 | |||
31 | list of commands: |
|
31 | list of commands: | |
32 |
|
32 | |||
33 | debugfoobar: |
|
33 | debugfoobar: | |
34 | yet another debug command |
|
34 | yet another debug command | |
35 |
|
35 | |||
|
36 | enabled extensions: | |||
|
37 | ||||
|
38 | debugextension only debugcommands | |||
|
39 | ||||
36 | special help topics: |
|
40 | special help topics: | |
37 | dates Date Formats |
|
41 | dates Date Formats | |
38 | patterns File Name Patterns |
|
42 | patterns File Name Patterns | |
39 | environment, env Environment Variables |
|
43 | environment, env Environment Variables | |
40 | revs, revisions Specifying Single Revisions |
|
44 | revs, revisions Specifying Single Revisions | |
41 | mrevs, multirevs Specifying Multiple Revisions |
|
45 | mrevs, multirevs Specifying Multiple Revisions | |
42 |
|
46 | |||
43 | global options: |
|
47 | global options: | |
44 | -R --repository repository root directory or symbolic path name |
|
48 | -R --repository repository root directory or symbolic path name | |
45 | --cwd change working directory |
|
49 | --cwd change working directory | |
46 | -y --noninteractive do not prompt, assume 'yes' for any required answers |
|
50 | -y --noninteractive do not prompt, assume 'yes' for any required answers | |
47 | -q --quiet suppress output |
|
51 | -q --quiet suppress output | |
48 | -v --verbose enable additional output |
|
52 | -v --verbose enable additional output | |
49 | --config set/override config option |
|
53 | --config set/override config option | |
50 | --debug enable debugging output |
|
54 | --debug enable debugging output | |
51 | --debugger start debugger |
|
55 | --debugger start debugger | |
52 | --encoding set the charset encoding (default: ascii) |
|
56 | --encoding set the charset encoding (default: ascii) | |
53 | --encodingmode set the charset encoding mode (default: strict) |
|
57 | --encodingmode set the charset encoding mode (default: strict) | |
54 | --lsprof print improved command execution profile |
|
58 | --lsprof print improved command execution profile | |
55 | --traceback print traceback on exception |
|
59 | --traceback print traceback on exception | |
56 | --time time how long the command takes |
|
60 | --time time how long the command takes | |
57 | --profile print command execution profile |
|
61 | --profile print command execution profile | |
58 | --version output version information and exit |
|
62 | --version output version information and exit | |
59 | -h --help display help and exit |
|
63 | -h --help display help and exit | |
60 | % issue811 |
|
64 | % issue811 | |
61 | % show extensions |
|
65 | % show extensions | |
62 | debugissue811 |
|
66 | debugissue811 | |
63 | mq |
|
67 | mq |
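The test-extension.out expectations above exercise Mercurial's extension hooks: uisetup() runs once while an extension is loaded, reposetup() runs for each repository, and commands registered in cmdtable appear in the help listings (together with the new "enabled extensions:" block added by this change). For orientation only, a minimal sketch of an extension that would produce output along these lines is given below; it is an illustrative assumption, not the actual test fixture, and the basename() call and function bodies are guesses at what the fixture does.

    # debugextension.py - illustrative sketch of a Mercurial 1.x extension
    '''only debugcommands'''
    import os

    def uisetup(ui):
        # called once while the extension is being loaded
        ui.write("uisetup called\n")

    def reposetup(ui, repo):
        # called for every repository the enabled extension sees
        ui.write("reposetup called for %s\n" % os.path.basename(repo.root))

    def debugfoobar(ui, repo, *pats, **opts):
        """yet another debug command"""
        pass

    cmdtable = {'debugfoobar': (debugfoobar, [], 'hg debugfoobar')}

Because the only command starts with "debug", plain "hg help" reports "no commands defined" while the debug listing shows debugfoobar, which matches the expectations recorded above.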
NO CONTENT: modified file, binary diff hidden
@@ -1,496 +1,502 @@
1 | % help |
|
1 | % help | |
2 | keyword extension - keyword expansion in local repositories |
|
2 | keyword extension - keyword expansion in local repositories | |
3 |
|
3 | |||
4 | This extension expands RCS/CVS-like or self-customized $Keywords$ |
|
4 | This extension expands RCS/CVS-like or self-customized $Keywords$ | |
5 | in tracked text files selected by your configuration. |
|
5 | in tracked text files selected by your configuration. | |
6 |
|
6 | |||
7 | Keywords are only expanded in local repositories and not stored in |
|
7 | Keywords are only expanded in local repositories and not stored in | |
8 | the change history. The mechanism can be regarded as a convenience |
|
8 | the change history. The mechanism can be regarded as a convenience | |
9 | for the current user or for archive distribution. |
|
9 | for the current user or for archive distribution. | |
10 |
|
10 | |||
11 | Configuration is done in the [keyword] and [keywordmaps] sections |
|
11 | Configuration is done in the [keyword] and [keywordmaps] sections | |
12 | of hgrc files. |
|
12 | of hgrc files. | |
13 |
|
13 | |||
14 | Example: |
|
14 | Example: | |
15 |
|
15 | |||
16 | [keyword] |
|
16 | [keyword] | |
17 | # expand keywords in every python file except those matching "x*" |
|
17 | # expand keywords in every python file except those matching "x*" | |
18 | **.py = |
|
18 | **.py = | |
19 | x* = ignore |
|
19 | x* = ignore | |
20 |
|
20 | |||
21 | Note: the more specific you are in your filename patterns |
|
21 | Note: the more specific you are in your filename patterns | |
22 | the less you lose speed in huge repos. |
|
22 | the less you lose speed in huge repos. | |
23 |
|
23 | |||
24 | For [keywordmaps] template mapping and expansion demonstration and |
|
24 | For [keywordmaps] template mapping and expansion demonstration and | |
25 | control run "hg kwdemo". |
|
25 | control run "hg kwdemo". | |
26 |
|
26 | |||
27 | An additional date template filter {date|utcdate} is provided. |
|
27 | An additional date template filter {date|utcdate} is provided. | |
28 |
|
28 | |||
29 | The default template mappings (view with "hg kwdemo -d") can be replaced |
|
29 | The default template mappings (view with "hg kwdemo -d") can be replaced | |
30 | with customized keywords and templates. |
|
30 | with customized keywords and templates. | |
31 | Again, run "hg kwdemo" to control the results of your config changes. |
|
31 | Again, run "hg kwdemo" to control the results of your config changes. | |
32 |
|
32 | |||
33 | Before changing/disabling active keywords, run "hg kwshrink" to avoid |
|
33 | Before changing/disabling active keywords, run "hg kwshrink" to avoid | |
34 | the risk of inadvertedly storing expanded keywords in the change history. |
|
34 | the risk of inadvertedly storing expanded keywords in the change history. | |
35 |
|
35 | |||
36 | To force expansion after enabling it, or a configuration change, run |
|
36 | To force expansion after enabling it, or a configuration change, run | |
37 | "hg kwexpand". |
|
37 | "hg kwexpand". | |
38 |
|
38 | |||
39 | Also, when committing with the record extension or using mq's qrecord, be aware |
|
39 | Also, when committing with the record extension or using mq's qrecord, be aware | |
40 | that keywords cannot be updated. Again, run "hg kwexpand" on the files in |
|
40 | that keywords cannot be updated. Again, run "hg kwexpand" on the files in | |
41 | question to update keyword expansions after all changes have been checked in. |
|
41 | question to update keyword expansions after all changes have been checked in. | |
42 |
|
42 | |||
43 | Expansions spanning more than one line and incremental expansions, |
|
43 | Expansions spanning more than one line and incremental expansions, | |
44 | like CVS' $Log$, are not supported. A keyword template map |
|
44 | like CVS' $Log$, are not supported. A keyword template map | |
45 | "Log = {desc}" expands to the first line of the changeset description. |
|
45 | "Log = {desc}" expands to the first line of the changeset description. | |
46 |
|
46 | |||
47 | list of commands: |
|
47 | list of commands: | |
48 |
|
48 | |||
49 | kwdemo print [keywordmaps] configuration and an expansion example |
|
49 | kwdemo print [keywordmaps] configuration and an expansion example | |
50 | kwexpand expand keywords in working directory |
|
50 | kwexpand expand keywords in working directory | |
51 | kwfiles print files currently configured for keyword expansion |
|
51 | kwfiles print files currently configured for keyword expansion | |
52 | kwshrink revert expanded keywords in working directory |
|
52 | kwshrink revert expanded keywords in working directory | |
53 |
|
53 | |||
|
54 | enabled extensions: | |||
|
55 | ||||
|
56 | keyword keyword expansion in local repositories | |||
|
57 | mq patch management and development | |||
|
58 | notify hook extension to email notifications on commits/pushes | |||
|
59 | ||||
54 | use "hg -v help keyword" to show aliases and global options |
|
60 | use "hg -v help keyword" to show aliases and global options | |
55 | % hg kwdemo |
|
61 | % hg kwdemo | |
56 | [extensions] |
|
62 | [extensions] | |
57 | hgext.keyword = |
|
63 | hgext.keyword = | |
58 | [keyword] |
|
64 | [keyword] | |
59 | * = |
|
65 | * = | |
60 | b = ignore |
|
66 | b = ignore | |
61 | demo.txt = |
|
67 | demo.txt = | |
62 | [keywordmaps] |
|
68 | [keywordmaps] | |
63 | RCSFile = {file|basename},v |
|
69 | RCSFile = {file|basename},v | |
64 | Author = {author|user} |
|
70 | Author = {author|user} | |
65 | Header = {root}/{file},v {node|short} {date|utcdate} {author|user} |
|
71 | Header = {root}/{file},v {node|short} {date|utcdate} {author|user} | |
66 | Source = {root}/{file},v |
|
72 | Source = {root}/{file},v | |
67 | Date = {date|utcdate} |
|
73 | Date = {date|utcdate} | |
68 | Id = {file|basename},v {node|short} {date|utcdate} {author|user} |
|
74 | Id = {file|basename},v {node|short} {date|utcdate} {author|user} | |
69 | Revision = {node|short} |
|
75 | Revision = {node|short} | |
70 | $RCSFile: demo.txt,v $ |
|
76 | $RCSFile: demo.txt,v $ | |
71 | $Author: test $ |
|
77 | $Author: test $ | |
72 | $Header: /TMP/demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ |
|
78 | $Header: /TMP/demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ | |
73 | $Source: /TMP/demo.txt,v $ |
|
79 | $Source: /TMP/demo.txt,v $ | |
74 | $Date: 2000/00/00 00:00:00 $ |
|
80 | $Date: 2000/00/00 00:00:00 $ | |
75 | $Id: demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ |
|
81 | $Id: demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ | |
76 | $Revision: xxxxxxxxxxxx $ |
|
82 | $Revision: xxxxxxxxxxxx $ | |
77 | [extensions] |
|
83 | [extensions] | |
78 | hgext.keyword = |
|
84 | hgext.keyword = | |
79 | [keyword] |
|
85 | [keyword] | |
80 | * = |
|
86 | * = | |
81 | b = ignore |
|
87 | b = ignore | |
82 | demo.txt = |
|
88 | demo.txt = | |
83 | [keywordmaps] |
|
89 | [keywordmaps] | |
84 | Branch = {branches} |
|
90 | Branch = {branches} | |
85 | $Branch: demobranch $ |
|
91 | $Branch: demobranch $ | |
86 | % kwshrink should exit silently in empty/invalid repo |
|
92 | % kwshrink should exit silently in empty/invalid repo | |
87 | pulling from test-keyword.hg |
|
93 | pulling from test-keyword.hg | |
88 | requesting all changes |
|
94 | requesting all changes | |
89 | adding changesets |
|
95 | adding changesets | |
90 | adding manifests |
|
96 | adding manifests | |
91 | adding file changes |
|
97 | adding file changes | |
92 | added 1 changesets with 1 changes to 1 files |
|
98 | added 1 changesets with 1 changes to 1 files | |
93 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
99 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
94 | % cat |
|
100 | % cat | |
95 | expand $Id$ |
|
101 | expand $Id$ | |
96 | do not process $Id: |
|
102 | do not process $Id: | |
97 | xxx $ |
|
103 | xxx $ | |
98 | ignore $Id$ |
|
104 | ignore $Id$ | |
99 | % addremove |
|
105 | % addremove | |
100 | adding a |
|
106 | adding a | |
101 | adding b |
|
107 | adding b | |
102 | % status |
|
108 | % status | |
103 | A a |
|
109 | A a | |
104 | A b |
|
110 | A b | |
105 | % default keyword expansion including commit hook |
|
111 | % default keyword expansion including commit hook | |
106 | % interrupted commit should not change state or run commit hook |
|
112 | % interrupted commit should not change state or run commit hook | |
107 | a |
|
113 | a | |
108 | b |
|
114 | b | |
109 | transaction abort! |
|
115 | transaction abort! | |
110 | rollback completed |
|
116 | rollback completed | |
111 | abort: empty commit message |
|
117 | abort: empty commit message | |
112 | % status |
|
118 | % status | |
113 | A a |
|
119 | A a | |
114 | A b |
|
120 | A b | |
115 | % commit |
|
121 | % commit | |
116 | a |
|
122 | a | |
117 | b |
|
123 | b | |
118 | overwriting a expanding keywords |
|
124 | overwriting a expanding keywords | |
119 | running hook commit.test: cp a hooktest |
|
125 | running hook commit.test: cp a hooktest | |
120 | committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9 |
|
126 | committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9 | |
121 | % status |
|
127 | % status | |
122 | ? hooktest |
|
128 | ? hooktest | |
123 | % identify |
|
129 | % identify | |
124 | ef63ca68695b |
|
130 | ef63ca68695b | |
125 | % cat |
|
131 | % cat | |
126 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
132 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
127 | do not process $Id: |
|
133 | do not process $Id: | |
128 | xxx $ |
|
134 | xxx $ | |
129 | ignore $Id$ |
|
135 | ignore $Id$ | |
130 | % hg cat |
|
136 | % hg cat | |
131 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
137 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
132 | do not process $Id: |
|
138 | do not process $Id: | |
133 | xxx $ |
|
139 | xxx $ | |
134 | ignore $Id$ |
|
140 | ignore $Id$ | |
135 | a |
|
141 | a | |
136 | % diff a hooktest |
|
142 | % diff a hooktest | |
137 | % removing commit hook from config |
|
143 | % removing commit hook from config | |
138 | % bundle |
|
144 | % bundle | |
139 | 2 changesets found |
|
145 | 2 changesets found | |
140 | % notify on pull to check whether keywords stay as is in email |
|
146 | % notify on pull to check whether keywords stay as is in email | |
141 | % ie. if patch.diff wrapper acts as it should |
|
147 | % ie. if patch.diff wrapper acts as it should | |
142 | % pull from bundle |
|
148 | % pull from bundle | |
143 | pulling from ../kw.hg |
|
149 | pulling from ../kw.hg | |
144 | requesting all changes |
|
150 | requesting all changes | |
145 | adding changesets |
|
151 | adding changesets | |
146 | adding manifests |
|
152 | adding manifests | |
147 | adding file changes |
|
153 | adding file changes | |
148 | added 2 changesets with 3 changes to 3 files |
|
154 | added 2 changesets with 3 changes to 3 files | |
149 |
|
155 | |||
150 | diff -r 000000000000 -r a2392c293916 sym |
|
156 | diff -r 000000000000 -r a2392c293916 sym | |
151 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
157 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
152 | +++ b/sym Sat Feb 09 20:25:47 2008 +0100 |
|
158 | +++ b/sym Sat Feb 09 20:25:47 2008 +0100 | |
153 | @@ -0,0 +1,1 @@ |
|
159 | @@ -0,0 +1,1 @@ | |
154 | +a |
|
160 | +a | |
155 | \ No newline at end of file |
|
161 | \ No newline at end of file | |
156 |
|
162 | |||
157 | diff -r a2392c293916 -r ef63ca68695b a |
|
163 | diff -r a2392c293916 -r ef63ca68695b a | |
158 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
164 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
159 | +++ b/a Thu Jan 01 00:00:00 1970 +0000 |
|
165 | +++ b/a Thu Jan 01 00:00:00 1970 +0000 | |
160 | @@ -0,0 +1,3 @@ |
|
166 | @@ -0,0 +1,3 @@ | |
161 | +expand $Id$ |
|
167 | +expand $Id$ | |
162 | +do not process $Id: |
|
168 | +do not process $Id: | |
163 | +xxx $ |
|
169 | +xxx $ | |
164 | diff -r a2392c293916 -r ef63ca68695b b |
|
170 | diff -r a2392c293916 -r ef63ca68695b b | |
165 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
171 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
166 | +++ b/b Thu Jan 01 00:00:00 1970 +0000 |
|
172 | +++ b/b Thu Jan 01 00:00:00 1970 +0000 | |
167 | @@ -0,0 +1,1 @@ |
|
173 | @@ -0,0 +1,1 @@ | |
168 | +ignore $Id$ |
|
174 | +ignore $Id$ | |
169 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
175 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
170 | % remove notify config |
|
176 | % remove notify config | |
171 | % touch |
|
177 | % touch | |
172 | % status |
|
178 | % status | |
173 | % update |
|
179 | % update | |
174 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
180 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
175 | % cat |
|
181 | % cat | |
176 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
182 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
177 | do not process $Id: |
|
183 | do not process $Id: | |
178 | xxx $ |
|
184 | xxx $ | |
179 | ignore $Id$ |
|
185 | ignore $Id$ | |
180 | % check whether expansion is filewise |
|
186 | % check whether expansion is filewise | |
181 | % commit c |
|
187 | % commit c | |
182 | adding c |
|
188 | adding c | |
183 | % force expansion |
|
189 | % force expansion | |
184 | overwriting a expanding keywords |
|
190 | overwriting a expanding keywords | |
185 | overwriting c expanding keywords |
|
191 | overwriting c expanding keywords | |
186 | % compare changenodes in a c |
|
192 | % compare changenodes in a c | |
187 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
193 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
188 | do not process $Id: |
|
194 | do not process $Id: | |
189 | xxx $ |
|
195 | xxx $ | |
190 | $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ |
|
196 | $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ | |
191 | tests for different changenodes |
|
197 | tests for different changenodes | |
192 | % qinit -c |
|
198 | % qinit -c | |
193 | % qimport |
|
199 | % qimport | |
194 | % qcommit |
|
200 | % qcommit | |
195 | % keywords should not be expanded in patch |
|
201 | % keywords should not be expanded in patch | |
196 | # HG changeset patch |
|
202 | # HG changeset patch | |
197 | # User User Name <user@example.com> |
|
203 | # User User Name <user@example.com> | |
198 | # Date 1 0 |
|
204 | # Date 1 0 | |
199 | # Node ID 40a904bbbe4cd4ab0a1f28411e35db26341a40ad |
|
205 | # Node ID 40a904bbbe4cd4ab0a1f28411e35db26341a40ad | |
200 | # Parent ef63ca68695bc9495032c6fda1350c71e6d256e9 |
|
206 | # Parent ef63ca68695bc9495032c6fda1350c71e6d256e9 | |
201 | cndiff |
|
207 | cndiff | |
202 |
|
208 | |||
203 | diff -r ef63ca68695b -r 40a904bbbe4c c |
|
209 | diff -r ef63ca68695b -r 40a904bbbe4c c | |
204 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
210 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
205 | +++ b/c Thu Jan 01 00:00:01 1970 +0000 |
|
211 | +++ b/c Thu Jan 01 00:00:01 1970 +0000 | |
206 | @@ -0,0 +1,2 @@ |
|
212 | @@ -0,0 +1,2 @@ | |
207 | +$Id$ |
|
213 | +$Id$ | |
208 | +tests for different changenodes |
|
214 | +tests for different changenodes | |
209 | % qpop |
|
215 | % qpop | |
210 | Patch queue now empty |
|
216 | Patch queue now empty | |
211 | % qgoto - should imply qpush |
|
217 | % qgoto - should imply qpush | |
212 | applying mqtest.diff |
|
218 | applying mqtest.diff | |
213 | Now at: mqtest.diff |
|
219 | Now at: mqtest.diff | |
214 | % cat |
|
220 | % cat | |
215 | $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ |
|
221 | $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ | |
216 | tests for different changenodes |
|
222 | tests for different changenodes | |
217 | % qpop and move on |
|
223 | % qpop and move on | |
218 | Patch queue now empty |
|
224 | Patch queue now empty | |
219 | % copy |
|
225 | % copy | |
220 | % kwfiles added |
|
226 | % kwfiles added | |
221 | a |
|
227 | a | |
222 | c |
|
228 | c | |
223 | % commit |
|
229 | % commit | |
224 | c |
|
230 | c | |
225 | c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292 |
|
231 | c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292 | |
226 | overwriting c expanding keywords |
|
232 | overwriting c expanding keywords | |
227 | committed changeset 2:e22d299ac0c2bd8897b3df5114374b9e4d4ca62f |
|
233 | committed changeset 2:e22d299ac0c2bd8897b3df5114374b9e4d4ca62f | |
228 | % cat a c |
|
234 | % cat a c | |
229 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
235 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
230 | do not process $Id: |
|
236 | do not process $Id: | |
231 | xxx $ |
|
237 | xxx $ | |
232 | expand $Id: c,v e22d299ac0c2 1970/01/01 00:00:01 user $ |
|
238 | expand $Id: c,v e22d299ac0c2 1970/01/01 00:00:01 user $ | |
233 | do not process $Id: |
|
239 | do not process $Id: | |
234 | xxx $ |
|
240 | xxx $ | |
235 | % touch copied c |
|
241 | % touch copied c | |
236 | % status |
|
242 | % status | |
237 | % kwfiles |
|
243 | % kwfiles | |
238 | a |
|
244 | a | |
239 | c |
|
245 | c | |
240 | % diff --rev |
|
246 | % diff --rev | |
241 | diff -r ef63ca68695b c |
|
247 | diff -r ef63ca68695b c | |
242 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
248 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
243 | @@ -0,0 +1,3 @@ |
|
249 | @@ -0,0 +1,3 @@ | |
244 | +expand $Id$ |
|
250 | +expand $Id$ | |
245 | +do not process $Id: |
|
251 | +do not process $Id: | |
246 | +xxx $ |
|
252 | +xxx $ | |
247 | % rollback |
|
253 | % rollback | |
248 | rolling back last transaction |
|
254 | rolling back last transaction | |
249 | % status |
|
255 | % status | |
250 | A c |
|
256 | A c | |
251 | % update -C |
|
257 | % update -C | |
252 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
258 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
253 | % custom keyword expansion |
|
259 | % custom keyword expansion | |
254 | % try with kwdemo |
|
260 | % try with kwdemo | |
255 | [extensions] |
|
261 | [extensions] | |
256 | hgext.keyword = |
|
262 | hgext.keyword = | |
257 | [keyword] |
|
263 | [keyword] | |
258 | * = |
|
264 | * = | |
259 | b = ignore |
|
265 | b = ignore | |
260 | demo.txt = |
|
266 | demo.txt = | |
261 | [keywordmaps] |
|
267 | [keywordmaps] | |
262 | Xinfo = {author}: {desc} |
|
268 | Xinfo = {author}: {desc} | |
263 | $Xinfo: test: hg keyword config and expansion example $ |
|
269 | $Xinfo: test: hg keyword config and expansion example $ | |
264 | % cat |
|
270 | % cat | |
265 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ |
|
271 | expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ | |
266 | do not process $Id: |
|
272 | do not process $Id: | |
267 | xxx $ |
|
273 | xxx $ | |
268 | ignore $Id$ |
|
274 | ignore $Id$ | |
269 | % hg cat |
|
275 | % hg cat | |
270 | expand $Id: a ef63ca68695b Thu, 01 Jan 1970 00:00:00 +0000 user $ |
|
276 | expand $Id: a ef63ca68695b Thu, 01 Jan 1970 00:00:00 +0000 user $ | |
271 | do not process $Id: |
|
277 | do not process $Id: | |
272 | xxx $ |
|
278 | xxx $ | |
273 | ignore $Id$ |
|
279 | ignore $Id$ | |
274 | a |
|
280 | a | |
275 | % interrupted commit should not change state |
|
281 | % interrupted commit should not change state | |
276 | transaction abort! |
|
282 | transaction abort! | |
277 | rollback completed |
|
283 | rollback completed | |
278 | abort: empty commit message |
|
284 | abort: empty commit message | |
279 | % status |
|
285 | % status | |
280 | M a |
|
286 | M a | |
281 | ? log |
|
287 | ? log | |
282 | % commit |
|
288 | % commit | |
283 | a |
|
289 | a | |
284 | overwriting a expanding keywords |
|
290 | overwriting a expanding keywords | |
285 | committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83 |
|
291 | committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83 | |
286 | % status |
|
292 | % status | |
287 | % verify |
|
293 | % verify | |
288 | checking changesets |
|
294 | checking changesets | |
289 | checking manifests |
|
295 | checking manifests | |
290 | crosschecking files in changesets and manifests |
|
296 | crosschecking files in changesets and manifests | |
291 | checking files |
|
297 | checking files | |
292 | 3 files, 3 changesets, 4 total revisions |
|
298 | 3 files, 3 changesets, 4 total revisions | |
293 | % cat |
|
299 | % cat | |
294 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ |
|
300 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ | |
295 | do not process $Id: |
|
301 | do not process $Id: | |
296 | xxx $ |
|
302 | xxx $ | |
297 | $Xinfo: User Name <user@example.com>: firstline $ |
|
303 | $Xinfo: User Name <user@example.com>: firstline $ | |
298 | ignore $Id$ |
|
304 | ignore $Id$ | |
299 | % hg cat |
|
305 | % hg cat | |
300 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ |
|
306 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ | |
301 | do not process $Id: |
|
307 | do not process $Id: | |
302 | xxx $ |
|
308 | xxx $ | |
303 | $Xinfo: User Name <user@example.com>: firstline $ |
|
309 | $Xinfo: User Name <user@example.com>: firstline $ | |
304 | ignore $Id$ |
|
310 | ignore $Id$ | |
305 | a |
|
311 | a | |
306 | % annotate |
|
312 | % annotate | |
307 | 1: expand $Id$ |
|
313 | 1: expand $Id$ | |
308 | 1: do not process $Id: |
|
314 | 1: do not process $Id: | |
309 | 1: xxx $ |
|
315 | 1: xxx $ | |
310 | 2: $Xinfo$ |
|
316 | 2: $Xinfo$ | |
311 | % remove |
|
317 | % remove | |
312 | committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012 |
|
318 | committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012 | |
313 | % status |
|
319 | % status | |
314 | % rollback |
|
320 | % rollback | |
315 | rolling back last transaction |
|
321 | rolling back last transaction | |
316 | % status |
|
322 | % status | |
317 | R a |
|
323 | R a | |
318 | % revert a |
|
324 | % revert a | |
319 | % cat a |
|
325 | % cat a | |
320 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ |
|
326 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ | |
321 | do not process $Id: |
|
327 | do not process $Id: | |
322 | xxx $ |
|
328 | xxx $ | |
323 | $Xinfo: User Name <user@example.com>: firstline $ |
|
329 | $Xinfo: User Name <user@example.com>: firstline $ | |
324 | % clone to test incoming |
|
330 | % clone to test incoming | |
325 | requesting all changes |
|
331 | requesting all changes | |
326 | adding changesets |
|
332 | adding changesets | |
327 | adding manifests |
|
333 | adding manifests | |
328 | adding file changes |
|
334 | adding file changes | |
329 | added 2 changesets with 3 changes to 3 files |
|
335 | added 2 changesets with 3 changes to 3 files | |
330 | updating working directory |
|
336 | updating working directory | |
331 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
337 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
332 | % incoming |
|
338 | % incoming | |
333 | comparing with test-keyword/Test |
|
339 | comparing with test-keyword/Test | |
334 | searching for changes |
|
340 | searching for changes | |
335 | changeset: 2:bb948857c743 |
|
341 | changeset: 2:bb948857c743 | |
336 | tag: tip |
|
342 | tag: tip | |
337 | user: User Name <user@example.com> |
|
343 | user: User Name <user@example.com> | |
338 | date: Thu Jan 01 00:00:02 1970 +0000 |
|
344 | date: Thu Jan 01 00:00:02 1970 +0000 | |
339 | summary: firstline |
|
345 | summary: firstline | |
340 |
|
346 | |||
341 | % commit rejecttest |
|
347 | % commit rejecttest | |
342 | a |
|
348 | a | |
343 | overwriting a expanding keywords |
|
349 | overwriting a expanding keywords | |
344 | committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082 |
|
350 | committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082 | |
345 | % export |
|
351 | % export | |
346 | % import |
|
352 | % import | |
347 | applying ../rejecttest.diff |
|
353 | applying ../rejecttest.diff | |
348 | % cat |
|
354 | % cat | |
349 | expand $Id: a 4e0994474d25 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest |
|
355 | expand $Id: a 4e0994474d25 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest | |
350 | do not process $Id: rejecttest |
|
356 | do not process $Id: rejecttest | |
351 | xxx $ |
|
357 | xxx $ | |
352 | $Xinfo: User Name <user@example.com>: rejects? $ |
|
358 | $Xinfo: User Name <user@example.com>: rejects? $ | |
353 | ignore $Id$ |
|
359 | ignore $Id$ | |
354 |
|
360 | |||
355 | % rollback |
|
361 | % rollback | |
356 | rolling back last transaction |
|
362 | rolling back last transaction | |
357 | % clean update |
|
363 | % clean update | |
358 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
364 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
359 | % kwexpand/kwshrink on selected files |
|
365 | % kwexpand/kwshrink on selected files | |
360 | % copy a x/a |
|
366 | % copy a x/a | |
361 | % kwexpand a |
|
367 | % kwexpand a | |
362 | overwriting a expanding keywords |
|
368 | overwriting a expanding keywords | |
363 | % kwexpand x/a should abort |
|
369 | % kwexpand x/a should abort | |
364 | abort: outstanding uncommitted changes |
|
370 | abort: outstanding uncommitted changes | |
365 | x/a |
|
371 | x/a | |
366 | x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e |
|
372 | x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e | |
367 | overwriting x/a expanding keywords |
|
373 | overwriting x/a expanding keywords | |
368 | committed changeset 3:cfa68229c1167443337266ebac453c73b1d5d16e |
|
374 | committed changeset 3:cfa68229c1167443337266ebac453c73b1d5d16e | |
369 | % cat a |
|
375 | % cat a | |
370 | expand $Id: x/a cfa68229c116 Thu, 01 Jan 1970 00:00:03 +0000 user $ |
|
376 | expand $Id: x/a cfa68229c116 Thu, 01 Jan 1970 00:00:03 +0000 user $ | |
371 | do not process $Id: |
|
377 | do not process $Id: | |
372 | xxx $ |
|
378 | xxx $ | |
373 | $Xinfo: User Name <user@example.com>: xa $ |
|
379 | $Xinfo: User Name <user@example.com>: xa $ | |
374 | % kwshrink a inside directory x |
|
380 | % kwshrink a inside directory x | |
375 | overwriting x/a shrinking keywords |
|
381 | overwriting x/a shrinking keywords | |
376 | % cat a |
|
382 | % cat a | |
377 | expand $Id$ |
|
383 | expand $Id$ | |
378 | do not process $Id: |
|
384 | do not process $Id: | |
379 | xxx $ |
|
385 | xxx $ | |
380 | $Xinfo$ |
|
386 | $Xinfo$ | |
381 | % kwexpand nonexistent |
|
387 | % kwexpand nonexistent | |
382 | nonexistent: |
|
388 | nonexistent: | |
383 | % hg serve |
|
389 | % hg serve | |
384 | % expansion |
|
390 | % expansion | |
385 | % hgweb file |
|
391 | % hgweb file | |
386 | 200 Script output follows |
|
392 | 200 Script output follows | |
387 |
|
393 | |||
388 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ |
|
394 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ | |
389 | do not process $Id: |
|
395 | do not process $Id: | |
390 | xxx $ |
|
396 | xxx $ | |
391 | $Xinfo: User Name <user@example.com>: firstline $ |
|
397 | $Xinfo: User Name <user@example.com>: firstline $ | |
392 | % no expansion |
|
398 | % no expansion | |
393 | % hgweb annotate |
|
399 | % hgweb annotate | |
394 | 200 Script output follows |
|
400 | 200 Script output follows | |
395 |
|
401 | |||
396 |
|
402 | |||
397 | user@1: expand $Id$ |
|
403 | user@1: expand $Id$ | |
398 | user@1: do not process $Id: |
|
404 | user@1: do not process $Id: | |
399 | user@1: xxx $ |
|
405 | user@1: xxx $ | |
400 | user@2: $Xinfo$ |
|
406 | user@2: $Xinfo$ | |
401 |
|
407 | |||
402 |
|
408 | |||
403 |
|
409 | |||
404 |
|
410 | |||
405 | % hgweb changeset |
|
411 | % hgweb changeset | |
406 | 200 Script output follows |
|
412 | 200 Script output follows | |
407 |
|
413 | |||
408 |
|
414 | |||
409 | # HG changeset patch |
|
415 | # HG changeset patch | |
410 | # User User Name <user@example.com> |
|
416 | # User User Name <user@example.com> | |
411 | # Date 3 0 |
|
417 | # Date 3 0 | |
412 | # Node ID cfa68229c1167443337266ebac453c73b1d5d16e |
|
418 | # Node ID cfa68229c1167443337266ebac453c73b1d5d16e | |
413 | # Parent bb948857c743469b22bbf51f7ec8112279ca5d83 |
|
419 | # Parent bb948857c743469b22bbf51f7ec8112279ca5d83 | |
414 | xa |
|
420 | xa | |
415 |
|
421 | |||
416 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
|
422 | --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
417 | +++ b/x/a Thu Jan 01 00:00:03 1970 +0000 |
|
423 | +++ b/x/a Thu Jan 01 00:00:03 1970 +0000 | |
418 | @@ -0,0 +1,4 @@ |
|
424 | @@ -0,0 +1,4 @@ | |
419 | +expand $Id$ |
|
425 | +expand $Id$ | |
420 | +do not process $Id: |
|
426 | +do not process $Id: | |
421 | +xxx $ |
|
427 | +xxx $ | |
422 | +$Xinfo$ |
|
428 | +$Xinfo$ | |
423 |
|
429 | |||
424 | % hgweb filediff |
|
430 | % hgweb filediff | |
425 | 200 Script output follows |
|
431 | 200 Script output follows | |
426 |
|
432 | |||
427 |
|
433 | |||
428 | --- a/a Thu Jan 01 00:00:00 1970 +0000 |
|
434 | --- a/a Thu Jan 01 00:00:00 1970 +0000 | |
429 | +++ b/a Thu Jan 01 00:00:02 1970 +0000 |
|
435 | +++ b/a Thu Jan 01 00:00:02 1970 +0000 | |
430 | @@ -1,3 +1,4 @@ |
|
436 | @@ -1,3 +1,4 @@ | |
431 | expand $Id$ |
|
437 | expand $Id$ | |
432 | do not process $Id: |
|
438 | do not process $Id: | |
433 | xxx $ |
|
439 | xxx $ | |
434 | +$Xinfo$ |
|
440 | +$Xinfo$ | |
435 |
|
441 | |||
436 |
|
442 | |||
437 |
|
443 | |||
438 |
|
444 | |||
439 | % errors encountered |
|
445 | % errors encountered | |
440 | % merge/resolve |
|
446 | % merge/resolve | |
441 | % simplemerge |
|
447 | % simplemerge | |
442 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
448 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
443 | created new head |
|
449 | created new head | |
444 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
450 | 0 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
445 | (branch merge, don't forget to commit) |
|
451 | (branch merge, don't forget to commit) | |
446 | $Id: m 8731e1dadc99 Thu, 01 Jan 1970 00:00:00 +0000 test $ |
|
452 | $Id: m 8731e1dadc99 Thu, 01 Jan 1970 00:00:00 +0000 test $ | |
447 | foo |
|
453 | foo | |
448 | % conflict |
|
454 | % conflict | |
449 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
455 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
450 | created new head |
|
456 | created new head | |
451 | merging m |
|
457 | merging m | |
452 | warning: conflicts during merge. |
|
458 | warning: conflicts during merge. | |
453 | merging m failed! |
|
459 | merging m failed! | |
454 | 0 files updated, 0 files merged, 0 files removed, 1 files unresolved |
|
460 | 0 files updated, 0 files merged, 0 files removed, 1 files unresolved | |
455 | use 'hg resolve' to retry unresolved file merges |
|
461 | use 'hg resolve' to retry unresolved file merges | |
456 | % keyword stays outside conflict zone |
|
462 | % keyword stays outside conflict zone | |
457 | $Id$ |
|
463 | $Id$ | |
458 | <<<<<<< local |
|
464 | <<<<<<< local | |
459 | bar |
|
465 | bar | |
460 | ======= |
|
466 | ======= | |
461 | foo |
|
467 | foo | |
462 | >>>>>>> other |
|
468 | >>>>>>> other | |
463 | % resolve to local |
|
469 | % resolve to local | |
464 | $Id: m 43dfd2854b5b Thu, 01 Jan 1970 00:00:00 +0000 test $ |
|
470 | $Id: m 43dfd2854b5b Thu, 01 Jan 1970 00:00:00 +0000 test $ | |
465 | bar |
|
471 | bar | |
466 | % switch off expansion |
|
472 | % switch off expansion | |
467 | % kwshrink with unknown file u |
|
473 | % kwshrink with unknown file u | |
468 | overwriting a shrinking keywords |
|
474 | overwriting a shrinking keywords | |
469 | overwriting m shrinking keywords |
|
475 | overwriting m shrinking keywords | |
470 | overwriting x/a shrinking keywords |
|
476 | overwriting x/a shrinking keywords | |
471 | % cat |
|
477 | % cat | |
472 | expand $Id$ |
|
478 | expand $Id$ | |
473 | do not process $Id: |
|
479 | do not process $Id: | |
474 | xxx $ |
|
480 | xxx $ | |
475 | $Xinfo$ |
|
481 | $Xinfo$ | |
476 | ignore $Id$ |
|
482 | ignore $Id$ | |
477 | % hg cat |
|
483 | % hg cat | |
478 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ |
|
484 | expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ | |
479 | do not process $Id: |
|
485 | do not process $Id: | |
480 | xxx $ |
|
486 | xxx $ | |
481 | $Xinfo: User Name <user@example.com>: firstline $ |
|
487 | $Xinfo: User Name <user@example.com>: firstline $ | |
482 | ignore $Id$ |
|
488 | ignore $Id$ | |
483 | a |
|
489 | a | |
484 | % cat |
|
490 | % cat | |
485 | expand $Id$ |
|
491 | expand $Id$ | |
486 | do not process $Id: |
|
492 | do not process $Id: | |
487 | xxx $ |
|
493 | xxx $ | |
488 | $Xinfo$ |
|
494 | $Xinfo$ | |
489 | ignore $Id$ |
|
495 | ignore $Id$ | |
490 | % hg cat |
|
496 | % hg cat | |
491 | expand $Id$ |
|
497 | expand $Id$ | |
492 | do not process $Id: |
|
498 | do not process $Id: | |
493 | xxx $ |
|
499 | xxx $ | |
494 | $Xinfo$ |
|
500 | $Xinfo$ | |
495 | ignore $Id$ |
|
501 | ignore $Id$ | |
496 | a |
|
502 | a |
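The keyword help text reproduced above is a configuration how-to: expansion is driven by the [keyword] (file patterns) and [keywordmaps] (template mappings) sections of an hgrc file, and the kwdemo output earlier in these expectations shows a working setup. Pulling those pieces together, a minimal hgrc along the lines already shown in the test would be:

    [extensions]
    hgext.keyword =

    [keyword]
    # expand keywords in every tracked file except those matching "b"
    * =
    b = ignore

    [keywordmaps]
    # replace the default maps with a single custom keyword
    Xinfo = {author}: {desc}

With this in place the "$Xinfo$" marker expands to the author followed by the first line of the changeset description, exactly as the cat/hg cat expectations above record.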
@@ -1,554 +1,558 @@
1 | % help |
|
1 | % help | |
2 | mq extension - patch management and development |
|
2 | mq extension - patch management and development | |
3 |
|
3 | |||
4 | This extension lets you work with a stack of patches in a Mercurial |
|
4 | This extension lets you work with a stack of patches in a Mercurial | |
5 | repository. It manages two stacks of patches - all known patches, and |
|
5 | repository. It manages two stacks of patches - all known patches, and | |
6 | applied patches (subset of known patches). |
|
6 | applied patches (subset of known patches). | |
7 |
|
7 | |||
8 | Known patches are represented as patch files in the .hg/patches |
|
8 | Known patches are represented as patch files in the .hg/patches | |
9 | directory. Applied patches are both patch files and changesets. |
|
9 | directory. Applied patches are both patch files and changesets. | |
10 |
|
10 | |||
11 | Common tasks (use "hg help command" for more details): |
|
11 | Common tasks (use "hg help command" for more details): | |
12 |
|
12 | |||
13 | prepare repository to work with patches qinit |
|
13 | prepare repository to work with patches qinit | |
14 | create new patch qnew |
|
14 | create new patch qnew | |
15 | import existing patch qimport |
|
15 | import existing patch qimport | |
16 |
|
16 | |||
17 | print patch series qseries |
|
17 | print patch series qseries | |
18 | print applied patches qapplied |
|
18 | print applied patches qapplied | |
19 | print name of top applied patch qtop |
|
19 | print name of top applied patch qtop | |
20 |
|
20 | |||
21 | add known patch to applied stack qpush |
|
21 | add known patch to applied stack qpush | |
22 | remove patch from applied stack qpop |
|
22 | remove patch from applied stack qpop | |
23 | refresh contents of top applied patch qrefresh |
|
23 | refresh contents of top applied patch qrefresh | |
24 |
|
24 | |||
25 | list of commands: |
|
25 | list of commands: | |
26 |
|
26 | |||
27 | qapplied print the patches already applied |
|
27 | qapplied print the patches already applied | |
28 | qclone clone main and patch repository at same time |
|
28 | qclone clone main and patch repository at same time | |
29 | qcommit commit changes in the queue repository |
|
29 | qcommit commit changes in the queue repository | |
30 | qdelete remove patches from queue |
|
30 | qdelete remove patches from queue | |
31 | qdiff diff of the current patch and subsequent modifications |
|
31 | qdiff diff of the current patch and subsequent modifications | |
32 | qfinish move applied patches into repository history |
|
32 | qfinish move applied patches into repository history | |
33 | qfold fold the named patches into the current patch |
|
33 | qfold fold the named patches into the current patch | |
34 | qgoto push or pop patches until named patch is at top of stack |
|
34 | qgoto push or pop patches until named patch is at top of stack | |
35 | qguard set or print guards for a patch |
|
35 | qguard set or print guards for a patch | |
36 | qheader Print the header of the topmost or specified patch |
|
36 | qheader Print the header of the topmost or specified patch | |
37 | qimport import a patch |
|
37 | qimport import a patch | |
38 | qinit init a new queue repository |
|
38 | qinit init a new queue repository | |
39 | qnew create a new patch |
|
39 | qnew create a new patch | |
40 | qnext print the name of the next patch |
|
40 | qnext print the name of the next patch | |
41 | qpop pop the current patch off the stack |
|
41 | qpop pop the current patch off the stack | |
42 | qprev print the name of the previous patch |
|
42 | qprev print the name of the previous patch | |
43 | qpush push the next patch onto the stack |
|
43 | qpush push the next patch onto the stack | |
44 | qrefresh update the current patch |
|
44 | qrefresh update the current patch | |
45 | qrename rename a patch |
|
45 | qrename rename a patch | |
46 | qrestore restore the queue state saved by a rev |
|
46 | qrestore restore the queue state saved by a rev | |
47 | qsave save current queue state |
|
47 | qsave save current queue state | |
48 | qselect set or print guarded patches to push |
|
48 | qselect set or print guarded patches to push | |
49 | qseries print the entire series file |
|
49 | qseries print the entire series file | |
50 | qtop print the name of the current patch |
|
50 | qtop print the name of the current patch | |
51 | qunapplied print the patches not yet applied |
|
51 | qunapplied print the patches not yet applied | |
52 | strip strip a revision and all its descendants from the repository |
|
52 | strip strip a revision and all its descendants from the repository | |
53 |
|
53 | |||
|
54 | enabled extensions: | |||
|
55 | ||||
|
56 | mq patch management and development | |||
|
57 | ||||
54 | use "hg -v help mq" to show aliases and global options |
|
58 | use "hg -v help mq" to show aliases and global options | |
55 | adding a |
|
59 | adding a | |
56 | updating working directory |
|
60 | updating working directory | |
57 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
61 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
58 | adding b/z |
|
62 | adding b/z | |
59 | % qinit |
|
63 | % qinit | |
60 | % -R qinit |
|
64 | % -R qinit | |
61 | % qinit -c |
|
65 | % qinit -c | |
62 | A .hgignore |
|
66 | A .hgignore | |
63 | A series |
|
67 | A series | |
64 | % qnew should refuse bad patch names |
|
68 | % qnew should refuse bad patch names | |
65 | abort: "series" cannot be used as the name of a patch |
|
69 | abort: "series" cannot be used as the name of a patch | |
66 | abort: "status" cannot be used as the name of a patch |
|
70 | abort: "status" cannot be used as the name of a patch | |
67 | abort: "guards" cannot be used as the name of a patch |
|
71 | abort: "guards" cannot be used as the name of a patch | |
68 | abort: ".hgignore" cannot be used as the name of a patch |
|
72 | abort: ".hgignore" cannot be used as the name of a patch | |
69 | % qnew implies add |
|
73 | % qnew implies add | |
70 | A .hgignore |
|
74 | A .hgignore | |
71 | A series |
|
75 | A series | |
72 | A test.patch |
|
76 | A test.patch | |
73 | % qinit; qinit -c |
|
77 | % qinit; qinit -c | |
74 | .hgignore: |
|
78 | .hgignore: | |
75 | ^\.hg |
|
79 | ^\.hg | |
76 | ^\.mq |
|
80 | ^\.mq | |
77 | syntax: glob |
|
81 | syntax: glob | |
78 | status |
|
82 | status | |
79 | guards |
|
83 | guards | |
80 | series: |
|
84 | series: | |
81 | abort: repository already exists! |
|
85 | abort: repository already exists! | |
82 | % qinit; <stuff>; qinit -c |
|
86 | % qinit; <stuff>; qinit -c | |
83 | adding .hg/patches/A |
|
87 | adding .hg/patches/A | |
84 | adding .hg/patches/B |
|
88 | adding .hg/patches/B | |
85 | A .hgignore |
|
89 | A .hgignore | |
86 | A A |
|
90 | A A | |
87 | A B |
|
91 | A B | |
88 | A series |
|
92 | A series | |
89 | .hgignore: |
|
93 | .hgignore: | |
90 | status |
|
94 | status | |
91 | bleh |
|
95 | bleh | |
92 | series: |
|
96 | series: | |
93 | A |
|
97 | A | |
94 | B |
|
98 | B | |
95 | % qnew with uncommitted changes |
|
99 | % qnew with uncommitted changes | |
96 | abort: local changes found, refresh first |
|
100 | abort: local changes found, refresh first | |
97 | A somefile |
|
101 | A somefile | |
98 | % qnew with uncommitted changes and missing file (issue 803) |
|
102 | % qnew with uncommitted changes and missing file (issue 803) | |
99 | someotherfile: No such file or directory |
|
103 | someotherfile: No such file or directory | |
100 | someotherfile: No such file or directory |
|
104 | someotherfile: No such file or directory | |
101 | A somefile |
|
105 | A somefile | |
102 | issue803.patch |
|
106 | issue803.patch | |
103 | Patch queue now empty |
|
107 | Patch queue now empty | |
104 | % qnew -m |
|
108 | % qnew -m | |
105 | foo bar |
|
109 | foo bar | |
106 | % qrefresh |
|
110 | % qrefresh | |
107 | foo bar |
|
111 | foo bar | |
108 |
|
112 | |||
109 | diff -r xa |
|
113 | diff -r xa | |
110 | --- a/a |
|
114 | --- a/a | |
111 | +++ b/a |
|
115 | +++ b/a | |
112 | @@ -1,1 +1,2 @@ |
|
116 | @@ -1,1 +1,2 @@ | |
113 | a |
|
117 | a | |
114 | +a |
|
118 | +a | |
115 | % empty qrefresh |
|
119 | % empty qrefresh | |
116 | revision: |
|
120 | revision: | |
117 | patch: |
|
121 | patch: | |
118 | foo bar |
|
122 | foo bar | |
119 |
|
123 | |||
120 | working dir diff: |
|
124 | working dir diff: | |
121 | --- a/a |
|
125 | --- a/a | |
122 | +++ b/a |
|
126 | +++ b/a | |
123 | @@ -1,1 +1,2 @@ |
|
127 | @@ -1,1 +1,2 @@ | |
124 | a |
|
128 | a | |
125 | +a |
|
129 | +a | |
126 | % qpop |
|
130 | % qpop | |
127 | Patch queue now empty |
|
131 | Patch queue now empty | |
128 | % qpush |
|
132 | % qpush | |
129 | applying test.patch |
|
133 | applying test.patch | |
130 | Now at: test.patch |
|
134 | Now at: test.patch | |
131 | % pop/push outside repo |
|
135 | % pop/push outside repo | |
132 | Patch queue now empty |
|
136 | Patch queue now empty | |
133 | applying test.patch |
|
137 | applying test.patch | |
134 | Now at: test.patch |
|
138 | Now at: test.patch | |
135 | % qrefresh in subdir |
|
139 | % qrefresh in subdir | |
136 | % pop/push -a in subdir |
|
140 | % pop/push -a in subdir | |
137 | Patch queue now empty |
|
141 | Patch queue now empty | |
138 | applying test.patch |
|
142 | applying test.patch | |
139 | applying test2.patch |
|
143 | applying test2.patch | |
140 | Now at: test2.patch |
|
144 | Now at: test2.patch | |
141 | % qseries |
|
145 | % qseries | |
142 | test.patch |
|
146 | test.patch | |
143 | test2.patch |
|
147 | test2.patch | |
144 | Now at: test.patch |
|
148 | Now at: test.patch | |
145 | 0 A test.patch: foo bar |
|
149 | 0 A test.patch: foo bar | |
146 | 1 U test2.patch: |
|
150 | 1 U test2.patch: | |
147 | applying test2.patch |
|
151 | applying test2.patch | |
148 | Now at: test2.patch |
|
152 | Now at: test2.patch | |
149 | % qapplied |
|
153 | % qapplied | |
150 | test.patch |
|
154 | test.patch | |
151 | test2.patch |
|
155 | test2.patch | |
152 | % qtop |
|
156 | % qtop | |
153 | test2.patch |
|
157 | test2.patch | |
154 | % qprev |
|
158 | % qprev | |
155 | test.patch |
|
159 | test.patch | |
156 | % qnext |
|
160 | % qnext | |
157 | All patches applied |
|
161 | All patches applied | |
158 | % pop, qnext, qprev, qapplied |
|
162 | % pop, qnext, qprev, qapplied | |
159 | Now at: test.patch |
|
163 | Now at: test.patch | |
160 | test2.patch |
|
164 | test2.patch | |
161 | Only one patch applied |
|
165 | Only one patch applied | |
162 | test.patch |
|
166 | test.patch | |
163 | % commit should fail |
|
167 | % commit should fail | |
164 | abort: cannot commit over an applied mq patch |
|
168 | abort: cannot commit over an applied mq patch | |
165 | % push should fail |
|
169 | % push should fail | |
166 | pushing to ../../k |
|
170 | pushing to ../../k | |
167 | abort: source has mq patches applied |
|
171 | abort: source has mq patches applied | |
168 | % qunapplied |
|
172 | % qunapplied | |
169 | test2.patch |
|
173 | test2.patch | |
170 | % qpush/qpop with index |
|
174 | % qpush/qpop with index | |
171 | applying test2.patch |
|
175 | applying test2.patch | |
172 | Now at: test2.patch |
|
176 | Now at: test2.patch | |
173 | Now at: test.patch |
|
177 | Now at: test.patch | |
174 | applying test1b.patch |
|
178 | applying test1b.patch | |
175 | Now at: test1b.patch |
|
179 | Now at: test1b.patch | |
176 | applying test2.patch |
|
180 | applying test2.patch | |
177 | Now at: test2.patch |
|
181 | Now at: test2.patch | |
178 | Now at: test1b.patch |
|
182 | Now at: test1b.patch | |
179 | Now at: test.patch |
|
183 | Now at: test.patch | |
180 | applying test1b.patch |
|
184 | applying test1b.patch | |
181 | applying test2.patch |
|
185 | applying test2.patch | |
182 | Now at: test2.patch |
|
186 | Now at: test2.patch | |
183 | % push should succeed |
|
187 | % push should succeed | |
184 | Patch queue now empty |
|
188 | Patch queue now empty | |
185 | pushing to ../../k |
|
189 | pushing to ../../k | |
186 | searching for changes |
|
190 | searching for changes | |
187 | adding changesets |
|
191 | adding changesets | |
188 | adding manifests |
|
192 | adding manifests | |
189 | adding file changes |
|
193 | adding file changes | |
190 | added 1 changesets with 1 changes to 1 files |
|
194 | added 1 changesets with 1 changes to 1 files | |
191 | % qpush/qpop error codes |
|
195 | % qpush/qpop error codes | |
192 | applying test.patch |
|
196 | applying test.patch | |
193 | applying test1b.patch |
|
197 | applying test1b.patch | |
194 | applying test2.patch |
|
198 | applying test2.patch | |
195 | Now at: test2.patch |
|
199 | Now at: test2.patch | |
196 | % pops all patches and succeeds |
|
200 | % pops all patches and succeeds | |
197 | Patch queue now empty |
|
201 | Patch queue now empty | |
198 | qpop -a succeeds |
|
202 | qpop -a succeeds | |
199 | % does nothing and succeeds |
|
203 | % does nothing and succeeds | |
200 | no patches applied |
|
204 | no patches applied | |
201 | qpop -a succeeds |
|
205 | qpop -a succeeds | |
202 | % fails - nothing else to pop |
|
206 | % fails - nothing else to pop | |
203 | no patches applied |
|
207 | no patches applied | |
204 | qpop fails |
|
208 | qpop fails | |
205 | % pushes a patch and succeeds |
|
209 | % pushes a patch and succeeds | |
206 | applying test.patch |
|
210 | applying test.patch | |
207 | Now at: test.patch |
|
211 | Now at: test.patch | |
208 | qpush succeeds |
|
212 | qpush succeeds | |
209 | % pops a patch and succeeds |
|
213 | % pops a patch and succeeds | |
210 | Patch queue now empty |
|
214 | Patch queue now empty | |
211 | qpop succeeds |
|
215 | qpop succeeds | |
212 | % pushes up to test1b.patch and succeeds |
|
216 | % pushes up to test1b.patch and succeeds | |
213 | applying test.patch |
|
217 | applying test.patch | |
214 | applying test1b.patch |
|
218 | applying test1b.patch | |
215 | Now at: test1b.patch |
|
219 | Now at: test1b.patch | |
216 | qpush test1b.patch succeeds |
|
220 | qpush test1b.patch succeeds | |
217 | % does nothing and succeeds |
|
221 | % does nothing and succeeds | |
218 | qpush: test1b.patch is already at the top |
|
222 | qpush: test1b.patch is already at the top | |
219 | qpush test1b.patch succeeds |
|
223 | qpush test1b.patch succeeds | |
220 | % does nothing and succeeds |
|
224 | % does nothing and succeeds | |
221 | qpop: test1b.patch is already at the top |
|
225 | qpop: test1b.patch is already at the top | |
222 | qpop test1b.patch succeeds |
|
226 | qpop test1b.patch succeeds | |
223 | % fails - can't push to this patch |
|
227 | % fails - can't push to this patch | |
224 | abort: cannot push to a previous patch: test.patch |
|
228 | abort: cannot push to a previous patch: test.patch | |
225 | qpush test.patch fails |
|
229 | qpush test.patch fails | |
226 | % fails - can't pop to this patch |
|
230 | % fails - can't pop to this patch | |
227 | abort: patch test2.patch is not applied |
|
231 | abort: patch test2.patch is not applied | |
228 | qpop test2.patch fails |
|
232 | qpop test2.patch fails | |
229 | % pops up to test.patch and succeeds |
|
233 | % pops up to test.patch and succeeds | |
230 | Now at: test.patch |
|
234 | Now at: test.patch | |
231 | qpop test.patch succeeds |
|
235 | qpop test.patch succeeds | |
232 | % pushes all patches and succeeds |
|
236 | % pushes all patches and succeeds | |
233 | applying test1b.patch |
|
237 | applying test1b.patch | |
234 | applying test2.patch |
|
238 | applying test2.patch | |
235 | Now at: test2.patch |
|
239 | Now at: test2.patch | |
236 | qpush -a succeeds |
|
240 | qpush -a succeeds | |
237 | % does nothing and succeeds |
|
241 | % does nothing and succeeds | |
238 | all patches are currently applied |
|
242 | all patches are currently applied | |
239 | qpush -a succeeds |
|
243 | qpush -a succeeds | |
240 | % fails - nothing else to push |
|
244 | % fails - nothing else to push | |
241 | patch series already fully applied |
|
245 | patch series already fully applied | |
242 | qpush fails |
|
246 | qpush fails | |
243 | % does nothing and succeeds |
|
247 | % does nothing and succeeds | |
244 | all patches are currently applied |
|
248 | all patches are currently applied | |
245 | qpush test2.patch succeeds |
|
249 | qpush test2.patch succeeds | |
246 | % strip |
|
250 | % strip | |
247 | adding x |
|
251 | adding x | |
248 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
252 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
249 | saving bundle to |
|
253 | saving bundle to | |
250 | adding changesets |
|
254 | adding changesets | |
251 | adding manifests |
|
255 | adding manifests | |
252 | adding file changes |
|
256 | adding file changes | |
253 | added 1 changesets with 1 changes to 1 files |
|
257 | added 1 changesets with 1 changes to 1 files | |
254 | (run 'hg update' to get a working copy) |
|
258 | (run 'hg update' to get a working copy) | |
255 | % strip with local changes, should complain |
|
259 | % strip with local changes, should complain | |
256 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
260 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
257 | abort: local changes found |
|
261 | abort: local changes found | |
258 | % --force strip with local changes |
|
262 | % --force strip with local changes | |
259 | 0 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
263 | 0 files updated, 0 files merged, 2 files removed, 0 files unresolved | |
260 | saving bundle to |
|
264 | saving bundle to | |
261 | % cd b; hg qrefresh |
|
265 | % cd b; hg qrefresh | |
262 | adding a |
|
266 | adding a | |
263 | foo |
|
267 | foo | |
264 |
|
268 | |||
265 | diff -r cb9a9f314b8b a |
|
269 | diff -r cb9a9f314b8b a | |
266 | --- a/a |
|
270 | --- a/a | |
267 | +++ b/a |
|
271 | +++ b/a | |
268 | @@ -1,1 +1,2 @@ |
|
272 | @@ -1,1 +1,2 @@ | |
269 | a |
|
273 | a | |
270 | +a |
|
274 | +a | |
271 | diff -r cb9a9f314b8b b/f |
|
275 | diff -r cb9a9f314b8b b/f | |
272 | --- /dev/null |
|
276 | --- /dev/null | |
273 | +++ b/b/f |
|
277 | +++ b/b/f | |
274 | @@ -0,0 +1,1 @@ |
|
278 | @@ -0,0 +1,1 @@ | |
275 | +f |
|
279 | +f | |
276 | % hg qrefresh . |
|
280 | % hg qrefresh . | |
277 | foo |
|
281 | foo | |
278 |
|
282 | |||
279 | diff -r cb9a9f314b8b b/f |
|
283 | diff -r cb9a9f314b8b b/f | |
280 | --- /dev/null |
|
284 | --- /dev/null | |
281 | +++ b/b/f |
|
285 | +++ b/b/f | |
282 | @@ -0,0 +1,1 @@ |
|
286 | @@ -0,0 +1,1 @@ | |
283 | +f |
|
287 | +f | |
284 | M a |
|
288 | M a | |
285 | % qpush failure |
|
289 | % qpush failure | |
286 | Patch queue now empty |
|
290 | Patch queue now empty | |
287 | applying foo |
|
291 | applying foo | |
288 | applying bar |
|
292 | applying bar | |
289 | file foo already exists |
|
293 | file foo already exists | |
290 | 1 out of 1 hunks FAILED -- saving rejects to file foo.rej |
|
294 | 1 out of 1 hunks FAILED -- saving rejects to file foo.rej | |
291 | patch failed, unable to continue (try -v) |
|
295 | patch failed, unable to continue (try -v) | |
292 | patch failed, rejects left in working dir |
|
296 | patch failed, rejects left in working dir | |
293 | Errors during apply, please fix and refresh bar |
|
297 | Errors during apply, please fix and refresh bar | |
294 | ? foo |
|
298 | ? foo | |
295 | ? foo.rej |
|
299 | ? foo.rej | |
296 | % mq tags |
|
300 | % mq tags | |
297 | 0 qparent |
|
301 | 0 qparent | |
298 | 1 qbase foo |
|
302 | 1 qbase foo | |
299 | 2 qtip bar tip |
|
303 | 2 qtip bar tip | |
300 | % bad node in status |
|
304 | % bad node in status | |
301 | Now at: foo |
|
305 | Now at: foo | |
302 | changeset: 0:cb9a9f314b8b |
|
306 | changeset: 0:cb9a9f314b8b | |
303 | mq status file refers to unknown node |
|
307 | mq status file refers to unknown node | |
304 | tag: tip |
|
308 | tag: tip | |
305 | user: test |
|
309 | user: test | |
306 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
310 | date: Thu Jan 01 00:00:00 1970 +0000 | |
307 | summary: a |
|
311 | summary: a | |
308 |
|
312 | |||
309 | mq status file refers to unknown node |
|
313 | mq status file refers to unknown node | |
310 | default 0:cb9a9f314b8b |
|
314 | default 0:cb9a9f314b8b | |
311 | abort: working directory revision is not qtip |
|
315 | abort: working directory revision is not qtip | |
312 | new file |
|
316 | new file | |
313 |
|
317 | |||
314 | diff --git a/new b/new |
|
318 | diff --git a/new b/new | |
315 | new file mode 100755 |
|
319 | new file mode 100755 | |
316 | --- /dev/null |
|
320 | --- /dev/null | |
317 | +++ b/new |
|
321 | +++ b/new | |
318 | @@ -0,0 +1,1 @@ |
|
322 | @@ -0,0 +1,1 @@ | |
319 | +foo |
|
323 | +foo | |
320 | copy file |
|
324 | copy file | |
321 |
|
325 | |||
322 | diff --git a/new b/copy |
|
326 | diff --git a/new b/copy | |
323 | copy from new |
|
327 | copy from new | |
324 | copy to copy |
|
328 | copy to copy | |
325 | Now at: new |
|
329 | Now at: new | |
326 | applying copy |
|
330 | applying copy | |
327 | Now at: copy |
|
331 | Now at: copy | |
328 | diff --git a/new b/copy |
|
332 | diff --git a/new b/copy | |
329 | copy from new |
|
333 | copy from new | |
330 | copy to copy |
|
334 | copy to copy | |
331 | diff --git a/new b/copy |
|
335 | diff --git a/new b/copy | |
332 | copy from new |
|
336 | copy from new | |
333 | copy to copy |
|
337 | copy to copy | |
334 | 1 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
338 | 1 files updated, 0 files merged, 2 files removed, 0 files unresolved | |
335 | created new head |
|
339 | created new head | |
336 | 2 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
340 | 2 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
337 | adding branch |
|
341 | adding branch | |
338 | adding changesets |
|
342 | adding changesets | |
339 | adding manifests |
|
343 | adding manifests | |
340 | adding file changes |
|
344 | adding file changes | |
341 | added 1 changesets with 1 changes to 1 files |
|
345 | added 1 changesets with 1 changes to 1 files | |
342 | Patch queue now empty |
|
346 | Patch queue now empty | |
343 | (working directory not at tip) |
|
347 | (working directory not at tip) | |
344 | applying bar |
|
348 | applying bar | |
345 | Now at: bar |
|
349 | Now at: bar | |
346 | diff --git a/bar b/bar |
|
350 | diff --git a/bar b/bar | |
347 | new file mode 100644 |
|
351 | new file mode 100644 | |
348 | --- /dev/null |
|
352 | --- /dev/null | |
349 | +++ b/bar |
|
353 | +++ b/bar | |
350 | @@ -0,0 +1,1 @@ |
|
354 | @@ -0,0 +1,1 @@ | |
351 | +bar |
|
355 | +bar | |
352 | diff --git a/foo b/baz |
|
356 | diff --git a/foo b/baz | |
353 | rename from foo |
|
357 | rename from foo | |
354 | rename to baz |
|
358 | rename to baz | |
355 | 2 baz (foo) |
|
359 | 2 baz (foo) | |
356 | diff --git a/bar b/bar |
|
360 | diff --git a/bar b/bar | |
357 | new file mode 100644 |
|
361 | new file mode 100644 | |
358 | --- /dev/null |
|
362 | --- /dev/null | |
359 | +++ b/bar |
|
363 | +++ b/bar | |
360 | @@ -0,0 +1,1 @@ |
|
364 | @@ -0,0 +1,1 @@ | |
361 | +bar |
|
365 | +bar | |
362 | diff --git a/foo b/baz |
|
366 | diff --git a/foo b/baz | |
363 | rename from foo |
|
367 | rename from foo | |
364 | rename to baz |
|
368 | rename to baz | |
365 | 2 baz (foo) |
|
369 | 2 baz (foo) | |
366 | diff --git a/bar b/bar |
|
370 | diff --git a/bar b/bar | |
367 | diff --git a/foo b/baz |
|
371 | diff --git a/foo b/baz | |
368 |
|
372 | |||
369 | 1 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
373 | 1 files updated, 0 files merged, 2 files removed, 0 files unresolved | |
370 | 2 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
374 | 2 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
371 | adding branch |
|
375 | adding branch | |
372 | adding changesets |
|
376 | adding changesets | |
373 | adding manifests |
|
377 | adding manifests | |
374 | adding file changes |
|
378 | adding file changes | |
375 | added 1 changesets with 1 changes to 1 files |
|
379 | added 1 changesets with 1 changes to 1 files | |
376 | Patch queue now empty |
|
380 | Patch queue now empty | |
377 | (working directory not at tip) |
|
381 | (working directory not at tip) | |
378 | applying bar |
|
382 | applying bar | |
379 | Now at: bar |
|
383 | Now at: bar | |
380 | diff --git a/foo b/bleh |
|
384 | diff --git a/foo b/bleh | |
381 | rename from foo |
|
385 | rename from foo | |
382 | rename to bleh |
|
386 | rename to bleh | |
383 | diff --git a/quux b/quux |
|
387 | diff --git a/quux b/quux | |
384 | new file mode 100644 |
|
388 | new file mode 100644 | |
385 | --- /dev/null |
|
389 | --- /dev/null | |
386 | +++ b/quux |
|
390 | +++ b/quux | |
387 | @@ -0,0 +1,1 @@ |
|
391 | @@ -0,0 +1,1 @@ | |
388 | +bar |
|
392 | +bar | |
389 | 3 bleh (foo) |
|
393 | 3 bleh (foo) | |
390 | diff --git a/foo b/barney |
|
394 | diff --git a/foo b/barney | |
391 | rename from foo |
|
395 | rename from foo | |
392 | rename to barney |
|
396 | rename to barney | |
393 | diff --git a/fred b/fred |
|
397 | diff --git a/fred b/fred | |
394 | new file mode 100644 |
|
398 | new file mode 100644 | |
395 | --- /dev/null |
|
399 | --- /dev/null | |
396 | +++ b/fred |
|
400 | +++ b/fred | |
397 | @@ -0,0 +1,1 @@ |
|
401 | @@ -0,0 +1,1 @@ | |
398 | +bar |
|
402 | +bar | |
399 | 3 barney (foo) |
|
403 | 3 barney (foo) | |
400 | % refresh omitting an added file |
|
404 | % refresh omitting an added file | |
401 | C newfile |
|
405 | C newfile | |
402 | A newfile |
|
406 | A newfile | |
403 | Now at: bar |
|
407 | Now at: bar | |
404 | % create a git patch |
|
408 | % create a git patch | |
405 | diff --git a/alexander b/alexander |
|
409 | diff --git a/alexander b/alexander | |
406 | % create a git binary patch |
|
410 | % create a git binary patch | |
407 | 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus |
|
411 | 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus | |
408 | diff --git a/bucephalus b/bucephalus |
|
412 | diff --git a/bucephalus b/bucephalus | |
409 | % check binary patches can be popped and pushed |
|
413 | % check binary patches can be popped and pushed | |
410 | Now at: addalexander |
|
414 | Now at: addalexander | |
411 | applying addbucephalus |
|
415 | applying addbucephalus | |
412 | Now at: addbucephalus |
|
416 | Now at: addbucephalus | |
413 | 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus |
|
417 | 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus | |
414 | % strip again |
|
418 | % strip again | |
415 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
419 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
416 | created new head |
|
420 | created new head | |
417 | merging foo |
|
421 | merging foo | |
418 | 0 files updated, 1 files merged, 0 files removed, 0 files unresolved |
|
422 | 0 files updated, 1 files merged, 0 files removed, 0 files unresolved | |
419 | (branch merge, don't forget to commit) |
|
423 | (branch merge, don't forget to commit) | |
420 | changeset: 3:99615015637b |
|
424 | changeset: 3:99615015637b | |
421 | tag: tip |
|
425 | tag: tip | |
422 | parent: 2:20cbbe65cff7 |
|
426 | parent: 2:20cbbe65cff7 | |
423 | parent: 1:d2871fc282d4 |
|
427 | parent: 1:d2871fc282d4 | |
424 | user: test |
|
428 | user: test | |
425 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
429 | date: Thu Jan 01 00:00:00 1970 +0000 | |
426 | summary: merge |
|
430 | summary: merge | |
427 |
|
431 | |||
428 | changeset: 2:20cbbe65cff7 |
|
432 | changeset: 2:20cbbe65cff7 | |
429 | parent: 0:53245c60e682 |
|
433 | parent: 0:53245c60e682 | |
430 | user: test |
|
434 | user: test | |
431 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
435 | date: Thu Jan 01 00:00:00 1970 +0000 | |
432 | summary: change foo 2 |
|
436 | summary: change foo 2 | |
433 |
|
437 | |||
434 | changeset: 1:d2871fc282d4 |
|
438 | changeset: 1:d2871fc282d4 | |
435 | user: test |
|
439 | user: test | |
436 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
440 | date: Thu Jan 01 00:00:00 1970 +0000 | |
437 | summary: change foo 1 |
|
441 | summary: change foo 1 | |
438 |
|
442 | |||
439 | changeset: 0:53245c60e682 |
|
443 | changeset: 0:53245c60e682 | |
440 | user: test |
|
444 | user: test | |
441 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
445 | date: Thu Jan 01 00:00:00 1970 +0000 | |
442 | summary: add foo |
|
446 | summary: add foo | |
443 |
|
447 | |||
444 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
448 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
445 | saving bundle to |
|
449 | saving bundle to | |
446 | saving bundle to |
|
450 | saving bundle to | |
447 | adding branch |
|
451 | adding branch | |
448 | adding changesets |
|
452 | adding changesets | |
449 | adding manifests |
|
453 | adding manifests | |
450 | adding file changes |
|
454 | adding file changes | |
451 | added 1 changesets with 1 changes to 1 files |
|
455 | added 1 changesets with 1 changes to 1 files | |
452 | changeset: 1:20cbbe65cff7 |
|
456 | changeset: 1:20cbbe65cff7 | |
453 | tag: tip |
|
457 | tag: tip | |
454 | user: test |
|
458 | user: test | |
455 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
459 | date: Thu Jan 01 00:00:00 1970 +0000 | |
456 | summary: change foo 2 |
|
460 | summary: change foo 2 | |
457 |
|
461 | |||
458 | changeset: 0:53245c60e682 |
|
462 | changeset: 0:53245c60e682 | |
459 | user: test |
|
463 | user: test | |
460 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
464 | date: Thu Jan 01 00:00:00 1970 +0000 | |
461 | summary: add foo |
|
465 | summary: add foo | |
462 |
|
466 | |||
463 | % qclone |
|
467 | % qclone | |
464 | abort: versioned patch repository not found (see qinit -c) |
|
468 | abort: versioned patch repository not found (see qinit -c) | |
465 | adding .hg/patches/patch1 |
|
469 | adding .hg/patches/patch1 | |
466 | main repo: |
|
470 | main repo: | |
467 | rev 1: change foo |
|
471 | rev 1: change foo | |
468 | rev 0: add foo |
|
472 | rev 0: add foo | |
469 | patch repo: |
|
473 | patch repo: | |
470 | rev 0: checkpoint |
|
474 | rev 0: checkpoint | |
471 | updating working directory |
|
475 | updating working directory | |
472 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
476 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
473 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
477 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
474 | main repo: |
|
478 | main repo: | |
475 | rev 0: add foo |
|
479 | rev 0: add foo | |
476 | patch repo: |
|
480 | patch repo: | |
477 | rev 0: checkpoint |
|
481 | rev 0: checkpoint | |
478 | Patch queue now empty |
|
482 | Patch queue now empty | |
479 | main repo: |
|
483 | main repo: | |
480 | rev 0: add foo |
|
484 | rev 0: add foo | |
481 | patch repo: |
|
485 | patch repo: | |
482 | rev 0: checkpoint |
|
486 | rev 0: checkpoint | |
483 | updating working directory |
|
487 | updating working directory | |
484 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
488 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
485 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
489 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
486 | main repo: |
|
490 | main repo: | |
487 | rev 0: add foo |
|
491 | rev 0: add foo | |
488 | patch repo: |
|
492 | patch repo: | |
489 | rev 0: checkpoint |
|
493 | rev 0: checkpoint | |
490 | % test applying on an empty file (issue 1033) |
|
494 | % test applying on an empty file (issue 1033) | |
491 | adding a |
|
495 | adding a | |
492 | Patch queue now empty |
|
496 | Patch queue now empty | |
493 | applying changea |
|
497 | applying changea | |
494 | Now at: changea |
|
498 | Now at: changea | |
495 | % test qpush with --force, issue1087 |
|
499 | % test qpush with --force, issue1087 | |
496 | adding bye.txt |
|
500 | adding bye.txt | |
497 | adding hello.txt |
|
501 | adding hello.txt | |
498 | Patch queue now empty |
|
502 | Patch queue now empty | |
499 | % qpush should fail, local changes |
|
503 | % qpush should fail, local changes | |
500 | abort: local changes found, refresh first |
|
504 | abort: local changes found, refresh first | |
501 | % apply force, should not discard changes with empty patch |
|
505 | % apply force, should not discard changes with empty patch | |
502 | applying empty |
|
506 | applying empty | |
503 | patch: **** Only garbage was found in the patch input. |
|
507 | patch: **** Only garbage was found in the patch input. | |
504 | patch failed, unable to continue (try -v) |
|
508 | patch failed, unable to continue (try -v) | |
505 | patch empty is empty |
|
509 | patch empty is empty | |
506 | Now at: empty |
|
510 | Now at: empty | |
507 | diff -r bf5fc3f07a0a hello.txt |
|
511 | diff -r bf5fc3f07a0a hello.txt | |
508 | --- a/hello.txt |
|
512 | --- a/hello.txt | |
509 | +++ b/hello.txt |
|
513 | +++ b/hello.txt | |
510 | @@ -1,1 +1,2 @@ |
|
514 | @@ -1,1 +1,2 @@ | |
511 | hello |
|
515 | hello | |
512 | +world |
|
516 | +world | |
513 | diff -r 9ecee4f634e3 hello.txt |
|
517 | diff -r 9ecee4f634e3 hello.txt | |
514 | --- a/hello.txt |
|
518 | --- a/hello.txt | |
515 | +++ b/hello.txt |
|
519 | +++ b/hello.txt | |
516 | @@ -1,1 +1,2 @@ |
|
520 | @@ -1,1 +1,2 @@ | |
517 | hello |
|
521 | hello | |
518 | +world |
|
522 | +world | |
519 | changeset: 1:bf5fc3f07a0a |
|
523 | changeset: 1:bf5fc3f07a0a | |
520 | tag: qtip |
|
524 | tag: qtip | |
521 | tag: tip |
|
525 | tag: tip | |
522 | tag: empty |
|
526 | tag: empty | |
523 | tag: qbase |
|
527 | tag: qbase | |
524 | user: test |
|
528 | user: test | |
525 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
529 | date: Thu Jan 01 00:00:00 1970 +0000 | |
526 | summary: imported patch empty |
|
530 | summary: imported patch empty | |
527 |
|
531 | |||
528 |
|
532 | |||
529 | Patch queue now empty |
|
533 | Patch queue now empty | |
530 | % qpush should fail, local changes |
|
534 | % qpush should fail, local changes | |
531 | abort: local changes found, refresh first |
|
535 | abort: local changes found, refresh first | |
532 | % apply force, should discard changes in hello, but not bye |
|
536 | % apply force, should discard changes in hello, but not bye | |
533 | applying empty |
|
537 | applying empty | |
534 | Now at: empty |
|
538 | Now at: empty | |
535 | M bye.txt |
|
539 | M bye.txt | |
536 | diff -r ba252371dbc1 bye.txt |
|
540 | diff -r ba252371dbc1 bye.txt | |
537 | --- a/bye.txt |
|
541 | --- a/bye.txt | |
538 | +++ b/bye.txt |
|
542 | +++ b/bye.txt | |
539 | @@ -1,1 +1,2 @@ |
|
543 | @@ -1,1 +1,2 @@ | |
540 | bye |
|
544 | bye | |
541 | +universe |
|
545 | +universe | |
542 | diff -r 9ecee4f634e3 bye.txt |
|
546 | diff -r 9ecee4f634e3 bye.txt | |
543 | --- a/bye.txt |
|
547 | --- a/bye.txt | |
544 | +++ b/bye.txt |
|
548 | +++ b/bye.txt | |
545 | @@ -1,1 +1,2 @@ |
|
549 | @@ -1,1 +1,2 @@ | |
546 | bye |
|
550 | bye | |
547 | +universe |
|
551 | +universe | |
548 | diff -r 9ecee4f634e3 hello.txt |
|
552 | diff -r 9ecee4f634e3 hello.txt | |
549 | --- a/hello.txt |
|
553 | --- a/hello.txt | |
550 | +++ b/hello.txt |
|
554 | +++ b/hello.txt | |
551 | @@ -1,1 +1,3 @@ |
|
555 | @@ -1,1 +1,3 @@ | |
552 | hello |
|
556 | hello | |
553 | +world |
|
557 | +world | |
554 | +universe |
|
558 | +universe |
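The two "--force" cases above come from a test script; the sketch below shows, with patch and file names assumed, the kind of command sequence that produces this output. It is an illustration only, not the test itself.

# minimal sketch (names assumed): qpush refuses to apply over local changes,
# --force pushes anyway
echo universe >> bye.txt        # local change to a file the patch does not touch
echo world >> hello.txt         # local change to a file the patch does touch
hg qpush                        # abort: local changes found, refresh first
hg qpush --force                # applies the patch; per the cases above, an empty
                                # patch leaves local changes alone, while a patch
                                # touching hello.txt discards its local change and
                                # keeps the one in bye.txt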
@@ -1,107 +1,164 | |||||
1 | notify extension - No help text available |
|
1 | notify extension - hook extension to email notifications on commits/pushes | |
|
2 | ||||
|
3 | Subscriptions can be managed through hgrc. Default mode is to print | |||
|
4 | messages to stdout, for testing and configuring. | |||
|
5 | ||||
|
6 | To use, configure notify extension and enable in hgrc like this: | |||
|
7 | ||||
|
8 | [extensions] | |||
|
9 | hgext.notify = | |||
|
10 | ||||
|
11 | [hooks] | |||
|
12 | # one email for each incoming changeset | |||
|
13 | incoming.notify = python:hgext.notify.hook | |||
|
14 | # batch emails when many changesets incoming at one time | |||
|
15 | changegroup.notify = python:hgext.notify.hook | |||
|
16 | ||||
|
17 | [notify] | |||
|
18 | # config items go in here | |||
|
19 | ||||
|
20 | config items: | |||
|
21 | ||||
|
22 | REQUIRED: | |||
|
23 | config = /path/to/file # file containing subscriptions | |||
|
24 | ||||
|
25 | OPTIONAL: | |||
|
26 | test = True # print messages to stdout for testing | |||
|
27 | strip = 3 # number of slashes to strip for url paths | |||
|
28 | domain = example.com # domain to use if committer missing domain | |||
|
29 | style = ... # style file to use when formatting email | |||
|
30 | template = ... # template to use when formatting email | |||
|
31 | incoming = ... # template to use when run as incoming hook | |||
|
32 | changegroup = ... # template when run as changegroup hook | |||
|
33 | maxdiff = 300 # max lines of diffs to include (0=none, -1=all) | |||
|
34 | maxsubject = 67 # truncate subject line longer than this | |||
|
35 | diffstat = True # add a diffstat before the diff content | |||
|
36 | sources = serve # notify if source of incoming changes in this list | |||
|
37 | # (serve == ssh or http, push, pull, bundle) | |||
|
38 | [email] | |||
|
39 | from = user@host.com # email address to send as if none given | |||
|
40 | [web] | |||
|
41 | baseurl = http://hgserver/... # root of hg web site for browsing commits | |||
|
42 | ||||
|
43 | notify config file has same format as regular hgrc. it has two | |||
|
44 | sections so you can express subscriptions in whatever way is handier | |||
|
45 | for you. | |||
|
46 | ||||
|
47 | [usersubs] | |||
|
48 | # key is subscriber email, value is ","-separated list of glob patterns | |||
|
49 | user@host = pattern | |||
|
50 | ||||
|
51 | [reposubs] | |||
|
52 | # key is glob pattern, value is ","-separated list of subscriber emails | |||
|
53 | pattern = user@host | |||
|
54 | ||||
|
55 | glob patterns are matched against path to repo root. | |||
|
56 | ||||
|
57 | if you like, you can put notify config file in repo that users can | |||
|
58 | push changes to, they can manage their own subscriptions. | |||
2 |
|
59 | |||
3 | no commands defined |
|
60 | no commands defined | |
4 | % commit |
|
61 | % commit | |
5 | adding a |
|
62 | adding a | |
6 | % clone |
|
63 | % clone | |
7 | updating working directory |
|
64 | updating working directory | |
8 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
65 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
9 | % commit |
|
66 | % commit | |
10 | % pull (minimal config) |
|
67 | % pull (minimal config) | |
11 | pulling from ../a |
|
68 | pulling from ../a | |
12 | searching for changes |
|
69 | searching for changes | |
13 | adding changesets |
|
70 | adding changesets | |
14 | adding manifests |
|
71 | adding manifests | |
15 | adding file changes |
|
72 | adding file changes | |
16 | added 1 changesets with 1 changes to 1 files |
|
73 | added 1 changesets with 1 changes to 1 files | |
17 | Content-Type: text/plain; charset="us-ascii" |
|
74 | Content-Type: text/plain; charset="us-ascii" | |
18 | MIME-Version: 1.0 |
|
75 | MIME-Version: 1.0 | |
19 | Content-Transfer-Encoding: 7bit |
|
76 | Content-Transfer-Encoding: 7bit | |
20 | Date: |
|
77 | Date: | |
21 | Subject: changeset in test-notify/b: b |
|
78 | Subject: changeset in test-notify/b: b | |
22 | From: test |
|
79 | From: test | |
23 | X-Hg-Notification: changeset 0647d048b600 |
|
80 | X-Hg-Notification: changeset 0647d048b600 | |
24 | Message-Id: |
|
81 | Message-Id: | |
25 | To: baz, foo@bar |
|
82 | To: baz, foo@bar | |
26 |
|
83 | |||
27 | changeset 0647d048b600 in test-notify/b |
|
84 | changeset 0647d048b600 in test-notify/b | |
28 | details: test-notify/b?cmd=changeset;node=0647d048b600 |
|
85 | details: test-notify/b?cmd=changeset;node=0647d048b600 | |
29 | description: b |
|
86 | description: b | |
30 |
|
87 | |||
31 | diffs (6 lines): |
|
88 | diffs (6 lines): | |
32 |
|
89 | |||
33 | diff -r cb9a9f314b8b -r 0647d048b600 a |
|
90 | diff -r cb9a9f314b8b -r 0647d048b600 a | |
34 | --- a/a Thu Jan 01 00:00:00 1970 +0000 |
|
91 | --- a/a Thu Jan 01 00:00:00 1970 +0000 | |
35 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 |
|
92 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 | |
36 | @@ -1,1 +1,2 @@ |
|
93 | @@ -1,1 +1,2 @@ | |
37 | a |
|
94 | a | |
38 | +a |
|
95 | +a | |
39 | (run 'hg update' to get a working copy) |
|
96 | (run 'hg update' to get a working copy) | |
40 | % fail for config file is missing |
|
97 | % fail for config file is missing | |
41 | rolling back last transaction |
|
98 | rolling back last transaction | |
42 | pull failed |
|
99 | pull failed | |
43 | % pull |
|
100 | % pull | |
44 | rolling back last transaction |
|
101 | rolling back last transaction | |
45 | pulling from ../a |
|
102 | pulling from ../a | |
46 | searching for changes |
|
103 | searching for changes | |
47 | adding changesets |
|
104 | adding changesets | |
48 | adding manifests |
|
105 | adding manifests | |
49 | adding file changes |
|
106 | adding file changes | |
50 | added 1 changesets with 1 changes to 1 files |
|
107 | added 1 changesets with 1 changes to 1 files | |
51 | Content-Type: text/plain; charset="us-ascii" |
|
108 | Content-Type: text/plain; charset="us-ascii" | |
52 | MIME-Version: 1.0 |
|
109 | MIME-Version: 1.0 | |
53 | Content-Transfer-Encoding: 7bit |
|
110 | Content-Transfer-Encoding: 7bit | |
54 | Date: |
|
111 | Date: | |
55 | Subject: b |
|
112 | Subject: b | |
56 | From: test@test.com |
|
113 | From: test@test.com | |
57 | X-Hg-Notification: changeset 0647d048b600 |
|
114 | X-Hg-Notification: changeset 0647d048b600 | |
58 | Message-Id: |
|
115 | Message-Id: | |
59 | To: baz@test.com, foo@bar |
|
116 | To: baz@test.com, foo@bar | |
60 |
|
117 | |||
61 | changeset 0647d048b600 |
|
118 | changeset 0647d048b600 | |
62 | description: |
|
119 | description: | |
63 | b |
|
120 | b | |
64 | diffs (6 lines): |
|
121 | diffs (6 lines): | |
65 |
|
122 | |||
66 | diff -r cb9a9f314b8b -r 0647d048b600 a |
|
123 | diff -r cb9a9f314b8b -r 0647d048b600 a | |
67 | --- a/a Thu Jan 01 00:00:00 1970 +0000 |
|
124 | --- a/a Thu Jan 01 00:00:00 1970 +0000 | |
68 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 |
|
125 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 | |
69 | @@ -1,1 +1,2 @@ |
|
126 | @@ -1,1 +1,2 @@ | |
70 | a |
|
127 | a | |
71 | +a |
|
128 | +a | |
72 | (run 'hg update' to get a working copy) |
|
129 | (run 'hg update' to get a working copy) | |
73 | % pull |
|
130 | % pull | |
74 | rolling back last transaction |
|
131 | rolling back last transaction | |
75 | pulling from ../a |
|
132 | pulling from ../a | |
76 | searching for changes |
|
133 | searching for changes | |
77 | adding changesets |
|
134 | adding changesets | |
78 | adding manifests |
|
135 | adding manifests | |
79 | adding file changes |
|
136 | adding file changes | |
80 | added 1 changesets with 1 changes to 1 files |
|
137 | added 1 changesets with 1 changes to 1 files | |
81 | Content-Type: text/plain; charset="us-ascii" |
|
138 | Content-Type: text/plain; charset="us-ascii" | |
82 | MIME-Version: 1.0 |
|
139 | MIME-Version: 1.0 | |
83 | Content-Transfer-Encoding: 7bit |
|
140 | Content-Transfer-Encoding: 7bit | |
84 | Date: |
|
141 | Date: | |
85 | Subject: b |
|
142 | Subject: b | |
86 | From: test@test.com |
|
143 | From: test@test.com | |
87 | X-Hg-Notification: changeset 0647d048b600 |
|
144 | X-Hg-Notification: changeset 0647d048b600 | |
88 | Message-Id: |
|
145 | Message-Id: | |
89 | To: baz@test.com, foo@bar |
|
146 | To: baz@test.com, foo@bar | |
90 |
|
147 | |||
91 | changeset 0647d048b600 |
|
148 | changeset 0647d048b600 | |
92 | description: |
|
149 | description: | |
93 | b |
|
150 | b | |
94 | diffstat: |
|
151 | diffstat: | |
95 |
|
152 | |||
96 | 1 file changed, 1 insertion(+) |
|
153 | 1 file changed, 1 insertion(+) | |
97 | a | 1 + |
|
154 | a | 1 + | |
98 |
|
155 | |||
99 | diffs (6 lines): |
|
156 | diffs (6 lines): | |
100 |
|
157 | |||
101 | diff -r cb9a9f314b8b -r 0647d048b600 a |
|
158 | diff -r cb9a9f314b8b -r 0647d048b600 a | |
102 | --- a/a Thu Jan 01 00:00:00 1970 +0000 |
|
159 | --- a/a Thu Jan 01 00:00:00 1970 +0000 | |
103 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 |
|
160 | +++ b/a Thu Jan 01 00:00:01 1970 +0000 | |
104 | @@ -1,1 +1,2 @@ |
|
161 | @@ -1,1 +1,2 @@ | |
105 | a |
|
162 | a | |
106 | +a |
|
163 | +a | |
107 | (run 'hg update' to get a working copy) |
|
164 | (run 'hg update' to get a working copy) |
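The notify docstring in this hunk describes the hook-driven setup; the sketch below shows one way such a configuration might look when exercised from a test, with the subscriptions path and repository names assumed. With test mode on, the hook prints the message to stdout instead of sending mail, which is what produces the email bodies shown above.

# minimal sketch (paths and repo names assumed); test = True makes the hook
# print the message to stdout rather than mailing it
cat >> $HGRCPATH <<EOF
[extensions]
hgext.notify =
[hooks]
incoming.notify = python:hgext.notify.hook
[notify]
test = True
sources = pull
config = /tmp/subs
EOF

cat > /tmp/subs <<EOF
[usersubs]
foo@bar = *
EOF

hg clone a b                    # 'a' is an existing repo with at least one changeset
cd b
hg pull ../a                    # each incoming changeset fires the notify hook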
@@ -1,207 +1,211 | |||||
1 | % help (no mq, so no qrecord) |
|
1 | % help (no mq, so no qrecord) | |
2 | hg: unknown command 'qrecord' |
|
2 | hg: unknown command 'qrecord' | |
3 | Mercurial Distributed SCM |
|
3 | Mercurial Distributed SCM | |
4 |
|
4 | |||
5 | basic commands: |
|
5 | basic commands: | |
6 |
|
6 | |||
7 | add add the specified files on the next commit |
|
7 | add add the specified files on the next commit | |
8 | annotate show changeset information per file line |
|
8 | annotate show changeset information per file line | |
9 | clone make a copy of an existing repository |
|
9 | clone make a copy of an existing repository | |
10 | commit commit the specified files or all outstanding changes |
|
10 | commit commit the specified files or all outstanding changes | |
11 | diff diff repository (or selected files) |
|
11 | diff diff repository (or selected files) | |
12 | export dump the header and diffs for one or more changesets |
|
12 | export dump the header and diffs for one or more changesets | |
13 | init create a new repository in the given directory |
|
13 | init create a new repository in the given directory | |
14 | log show revision history of entire repository or files |
|
14 | log show revision history of entire repository or files | |
15 | merge merge working directory with another revision |
|
15 | merge merge working directory with another revision | |
16 | parents show the parents of the working dir or revision |
|
16 | parents show the parents of the working dir or revision | |
17 | pull pull changes from the specified source |
|
17 | pull pull changes from the specified source | |
18 | push push changes to the specified destination |
|
18 | push push changes to the specified destination | |
19 | remove remove the specified files on the next commit |
|
19 | remove remove the specified files on the next commit | |
20 | serve export the repository via HTTP |
|
20 | serve export the repository via HTTP | |
21 | status show changed files in the working directory |
|
21 | status show changed files in the working directory | |
22 | update update working directory |
|
22 | update update working directory | |
23 |
|
23 | |||
|
24 | enabled extensions: | |||
|
25 | ||||
|
26 | record interactive change selection during commit or qrefresh | |||
|
27 | ||||
24 | use "hg help" for the full list of commands or "hg -v" for details |
|
28 | use "hg help" for the full list of commands or "hg -v" for details | |
25 | % help (mq present) |
|
29 | % help (mq present) | |
26 | hg qrecord [OPTION]... PATCH [FILE]... |
|
30 | hg qrecord [OPTION]... PATCH [FILE]... | |
27 |
|
31 | |||
28 | interactively record a new patch |
|
32 | interactively record a new patch | |
29 |
|
33 | |||
30 | see 'hg help qnew' & 'hg help record' for more information and usage |
|
34 | see 'hg help qnew' & 'hg help record' for more information and usage | |
31 |
|
35 | |||
32 | options: |
|
36 | options: | |
33 |
|
37 | |||
34 | -e --edit edit commit message |
|
38 | -e --edit edit commit message | |
35 | -g --git use git extended diff format |
|
39 | -g --git use git extended diff format | |
36 | -U --currentuser add "From: <current user>" to patch |
|
40 | -U --currentuser add "From: <current user>" to patch | |
37 | -u --user add "From: <given user>" to patch |
|
41 | -u --user add "From: <given user>" to patch | |
38 | -D --currentdate add "Date: <current date>" to patch |
|
42 | -D --currentdate add "Date: <current date>" to patch | |
39 | -d --date add "Date: <given date>" to patch |
|
43 | -d --date add "Date: <given date>" to patch | |
40 | -I --include include names matching the given patterns |
|
44 | -I --include include names matching the given patterns | |
41 | -X --exclude exclude names matching the given patterns |
|
45 | -X --exclude exclude names matching the given patterns | |
42 | -m --message use <text> as commit message |
|
46 | -m --message use <text> as commit message | |
43 | -l --logfile read commit message from <file> |
|
47 | -l --logfile read commit message from <file> | |
44 |
|
48 | |||
45 | use "hg -v help qrecord" to show global options |
|
49 | use "hg -v help qrecord" to show global options | |
46 | % base commit |
|
50 | % base commit | |
47 | % changing files |
|
51 | % changing files | |
48 | % whole diff |
|
52 | % whole diff | |
49 | diff -r 1057167b20ef 1.txt |
|
53 | diff -r 1057167b20ef 1.txt | |
50 | --- a/1.txt |
|
54 | --- a/1.txt | |
51 | +++ b/1.txt |
|
55 | +++ b/1.txt | |
52 | @@ -1,5 +1,5 @@ |
|
56 | @@ -1,5 +1,5 @@ | |
53 | 1 |
|
57 | 1 | |
54 | -2 |
|
58 | -2 | |
55 | +2 2 |
|
59 | +2 2 | |
56 | 3 |
|
60 | 3 | |
57 | -4 |
|
61 | -4 | |
58 | +4 4 |
|
62 | +4 4 | |
59 | 5 |
|
63 | 5 | |
60 | diff -r 1057167b20ef 2.txt |
|
64 | diff -r 1057167b20ef 2.txt | |
61 | --- a/2.txt |
|
65 | --- a/2.txt | |
62 | +++ b/2.txt |
|
66 | +++ b/2.txt | |
63 | @@ -1,5 +1,5 @@ |
|
67 | @@ -1,5 +1,5 @@ | |
64 | a |
|
68 | a | |
65 | -b |
|
69 | -b | |
66 | +b b |
|
70 | +b b | |
67 | c |
|
71 | c | |
68 | d |
|
72 | d | |
69 | e |
|
73 | e | |
70 | diff -r 1057167b20ef dir/a.txt |
|
74 | diff -r 1057167b20ef dir/a.txt | |
71 | --- a/dir/a.txt |
|
75 | --- a/dir/a.txt | |
72 | +++ b/dir/a.txt |
|
76 | +++ b/dir/a.txt | |
73 | @@ -1,4 +1,4 @@ |
|
77 | @@ -1,4 +1,4 @@ | |
74 | -hello world |
|
78 | -hello world | |
75 | +hello world! |
|
79 | +hello world! | |
76 |
|
80 | |||
77 | someone |
|
81 | someone | |
78 | up |
|
82 | up | |
79 | % qrecord a.patch |
|
83 | % qrecord a.patch | |
80 | diff --git a/1.txt b/1.txt |
|
84 | diff --git a/1.txt b/1.txt | |
81 | 2 hunks, 4 lines changed |
|
85 | 2 hunks, 4 lines changed | |
82 | examine changes to '1.txt'? [Ynsfdaq?] @@ -1,3 +1,3 @@ |
|
86 | examine changes to '1.txt'? [Ynsfdaq?] @@ -1,3 +1,3 @@ | |
83 | 1 |
|
87 | 1 | |
84 | -2 |
|
88 | -2 | |
85 | +2 2 |
|
89 | +2 2 | |
86 | 3 |
|
90 | 3 | |
87 | record this change to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@ |
|
91 | record this change to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@ | |
88 | 3 |
|
92 | 3 | |
89 | -4 |
|
93 | -4 | |
90 | +4 4 |
|
94 | +4 4 | |
91 | 5 |
|
95 | 5 | |
92 | record this change to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt |
|
96 | record this change to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt | |
93 | 1 hunks, 2 lines changed |
|
97 | 1 hunks, 2 lines changed | |
94 | examine changes to '2.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ |
|
98 | examine changes to '2.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ | |
95 | a |
|
99 | a | |
96 | -b |
|
100 | -b | |
97 | +b b |
|
101 | +b b | |
98 | c |
|
102 | c | |
99 | d |
|
103 | d | |
100 | e |
|
104 | e | |
101 | record this change to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt |
|
105 | record this change to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt | |
102 | 1 hunks, 2 lines changed |
|
106 | 1 hunks, 2 lines changed | |
103 | examine changes to 'dir/a.txt'? [Ynsfdaq?] |
|
107 | examine changes to 'dir/a.txt'? [Ynsfdaq?] | |
104 | % after qrecord a.patch 'tip' |
|
108 | % after qrecord a.patch 'tip' | |
105 | changeset: 1:5d1ca63427ee |
|
109 | changeset: 1:5d1ca63427ee | |
106 | tag: qtip |
|
110 | tag: qtip | |
107 | tag: tip |
|
111 | tag: tip | |
108 | tag: a.patch |
|
112 | tag: a.patch | |
109 | tag: qbase |
|
113 | tag: qbase | |
110 | user: test |
|
114 | user: test | |
111 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
115 | date: Thu Jan 01 00:00:00 1970 +0000 | |
112 | summary: aaa |
|
116 | summary: aaa | |
113 |
|
117 | |||
114 | diff -r 1057167b20ef -r 5d1ca63427ee 1.txt |
|
118 | diff -r 1057167b20ef -r 5d1ca63427ee 1.txt | |
115 | --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 |
|
119 | --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 | |
116 | +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 |
|
120 | +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 | |
117 | @@ -1,5 +1,5 @@ |
|
121 | @@ -1,5 +1,5 @@ | |
118 | 1 |
|
122 | 1 | |
119 | -2 |
|
123 | -2 | |
120 | +2 2 |
|
124 | +2 2 | |
121 | 3 |
|
125 | 3 | |
122 | 4 |
|
126 | 4 | |
123 | 5 |
|
127 | 5 | |
124 | diff -r 1057167b20ef -r 5d1ca63427ee 2.txt |
|
128 | diff -r 1057167b20ef -r 5d1ca63427ee 2.txt | |
125 | --- a/2.txt Thu Jan 01 00:00:00 1970 +0000 |
|
129 | --- a/2.txt Thu Jan 01 00:00:00 1970 +0000 | |
126 | +++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 |
|
130 | +++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 | |
127 | @@ -1,5 +1,5 @@ |
|
131 | @@ -1,5 +1,5 @@ | |
128 | a |
|
132 | a | |
129 | -b |
|
133 | -b | |
130 | +b b |
|
134 | +b b | |
131 | c |
|
135 | c | |
132 | d |
|
136 | d | |
133 | e |
|
137 | e | |
134 |
|
138 | |||
135 |
|
139 | |||
136 | % after qrecord a.patch 'diff' |
|
140 | % after qrecord a.patch 'diff' | |
137 | diff -r 5d1ca63427ee 1.txt |
|
141 | diff -r 5d1ca63427ee 1.txt | |
138 | --- a/1.txt |
|
142 | --- a/1.txt | |
139 | +++ b/1.txt |
|
143 | +++ b/1.txt | |
140 | @@ -1,5 +1,5 @@ |
|
144 | @@ -1,5 +1,5 @@ | |
141 | 1 |
|
145 | 1 | |
142 | 2 2 |
|
146 | 2 2 | |
143 | 3 |
|
147 | 3 | |
144 | -4 |
|
148 | -4 | |
145 | +4 4 |
|
149 | +4 4 | |
146 | 5 |
|
150 | 5 | |
147 | diff -r 5d1ca63427ee dir/a.txt |
|
151 | diff -r 5d1ca63427ee dir/a.txt | |
148 | --- a/dir/a.txt |
|
152 | --- a/dir/a.txt | |
149 | +++ b/dir/a.txt |
|
153 | +++ b/dir/a.txt | |
150 | @@ -1,4 +1,4 @@ |
|
154 | @@ -1,4 +1,4 @@ | |
151 | -hello world |
|
155 | -hello world | |
152 | +hello world! |
|
156 | +hello world! | |
153 |
|
157 | |||
154 | someone |
|
158 | someone | |
155 | up |
|
159 | up | |
156 | % qrecord b.patch |
|
160 | % qrecord b.patch | |
157 | diff --git a/1.txt b/1.txt |
|
161 | diff --git a/1.txt b/1.txt | |
158 | 1 hunks, 2 lines changed |
|
162 | 1 hunks, 2 lines changed | |
159 | examine changes to '1.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ |
|
163 | examine changes to '1.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ | |
160 | 1 |
|
164 | 1 | |
161 | 2 2 |
|
165 | 2 2 | |
162 | 3 |
|
166 | 3 | |
163 | -4 |
|
167 | -4 | |
164 | +4 4 |
|
168 | +4 4 | |
165 | 5 |
|
169 | 5 | |
166 | record this change to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt |
|
170 | record this change to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt | |
167 | 1 hunks, 2 lines changed |
|
171 | 1 hunks, 2 lines changed | |
168 | examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -1,4 +1,4 @@ |
|
172 | examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -1,4 +1,4 @@ | |
169 | -hello world |
|
173 | -hello world | |
170 | +hello world! |
|
174 | +hello world! | |
171 |
|
175 | |||
172 | someone |
|
176 | someone | |
173 | up |
|
177 | up | |
174 | record this change to 'dir/a.txt'? [Ynsfdaq?] |
|
178 | record this change to 'dir/a.txt'? [Ynsfdaq?] | |
175 | % after qrecord b.patch 'tip' |
|
179 | % after qrecord b.patch 'tip' | |
176 | changeset: 2:b056198bf878 |
|
180 | changeset: 2:b056198bf878 | |
177 | tag: qtip |
|
181 | tag: qtip | |
178 | tag: tip |
|
182 | tag: tip | |
179 | tag: b.patch |
|
183 | tag: b.patch | |
180 | user: test |
|
184 | user: test | |
181 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
185 | date: Thu Jan 01 00:00:00 1970 +0000 | |
182 | summary: bbb |
|
186 | summary: bbb | |
183 |
|
187 | |||
184 | diff -r 5d1ca63427ee -r b056198bf878 1.txt |
|
188 | diff -r 5d1ca63427ee -r b056198bf878 1.txt | |
185 | --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 |
|
189 | --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 | |
186 | +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 |
|
190 | +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 | |
187 | @@ -1,5 +1,5 @@ |
|
191 | @@ -1,5 +1,5 @@ | |
188 | 1 |
|
192 | 1 | |
189 | 2 2 |
|
193 | 2 2 | |
190 | 3 |
|
194 | 3 | |
191 | -4 |
|
195 | -4 | |
192 | +4 4 |
|
196 | +4 4 | |
193 | 5 |
|
197 | 5 | |
194 | diff -r 5d1ca63427ee -r b056198bf878 dir/a.txt |
|
198 | diff -r 5d1ca63427ee -r b056198bf878 dir/a.txt | |
195 | --- a/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 |
|
199 | --- a/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 | |
196 | +++ b/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 |
|
200 | +++ b/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 | |
197 | @@ -1,4 +1,4 @@ |
|
201 | @@ -1,4 +1,4 @@ | |
198 | -hello world |
|
202 | -hello world | |
199 | +hello world! |
|
203 | +hello world! | |
200 |
|
204 | |||
201 | someone |
|
205 | someone | |
202 | up |
|
206 | up | |
203 |
|
207 | |||
204 |
|
208 | |||
205 | % after qrecord b.patch 'diff' |
|
209 | % after qrecord b.patch 'diff' | |
206 |
|
210 | |||
207 | % --- end --- |
|
211 | % --- end --- |
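The qrecord prompts above ("[Ynsfdaq?]") are normally answered interactively; in a script the answers can be fed on stdin with interactivity forced on. A minimal sketch, assuming mq and record are already enabled and the working copy contains the modifications shown in the "% changing files" hunks:

# answers follow the prompt order shown above for a.patch:
# examine 1.txt? y, first hunk? y, second hunk? n,
# examine 2.txt? y, its hunk? y, examine dir/a.txt? n
hg qrecord --config ui.interactive=true -d '0 0' -m aaa a.patch <<EOF
y
y
n
y
y
n
EOF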
@@ -1,105 +1,108 | |||||
1 | #!/bin/sh |
|
1 | #!/bin/sh | |
2 |
|
2 | |||
3 | echo "[extensions]" >> $HGRCPATH |
|
3 | echo "[extensions]" >> $HGRCPATH | |
4 | echo "rebase=" >> $HGRCPATH |
|
4 | echo "rebase=" >> $HGRCPATH | |
5 |
|
5 | |||
6 | addcommit () { |
|
6 | addcommit () { | |
7 | echo $1 > $1 |
|
7 | echo $1 > $1 | |
8 | hg add $1 |
|
8 | hg add $1 | |
9 | hg commit -d "${2} 0" -u test -m $1 |
|
9 | hg commit -d "${2} 0" -u test -m $1 | |
10 | } |
|
10 | } | |
11 |
|
11 | |||
12 | commit () { |
|
12 | commit () { | |
13 | hg commit -d "${2} 0" -u test -m $1 |
|
13 | hg commit -d "${2} 0" -u test -m $1 | |
14 | } |
|
14 | } | |
15 |
|
15 | |||
16 | createrepo () { |
|
16 | createrepo () { | |
17 | hg init a |
|
17 | hg init a | |
18 | cd a |
|
18 | cd a | |
19 | addcommit "c1" 0 |
|
19 | addcommit "c1" 0 | |
20 | addcommit "c2" 1 |
|
20 | addcommit "c2" 1 | |
21 | addcommit "c3" 2 |
|
21 | addcommit "c3" 2 | |
22 |
|
22 | |||
23 | hg update -C 1 |
|
23 | hg update -C 1 | |
24 | addcommit "l1" 3 |
|
24 | addcommit "l1" 3 | |
25 | addcommit "l2" 4 |
|
25 | addcommit "l2" 4 | |
26 | addcommit "l3" 5 |
|
26 | addcommit "l3" 5 | |
27 |
|
27 | |||
28 | hg update -C 2 |
|
28 | hg update -C 2 | |
29 | addcommit "r1" 6 |
|
29 | addcommit "r1" 6 | |
30 | addcommit "r2" 7 |
|
30 | addcommit "r2" 7 | |
31 | } |
|
31 | } | |
32 |
|
32 | |||
33 | createrepo > /dev/null 2>&1 |
|
33 | createrepo > /dev/null 2>&1 | |
34 | echo "% These fail" |
|
34 | echo "% These fail" | |
35 | echo |
|
35 | echo | |
36 | echo "% Use continue and abort" |
|
36 | echo "% Use continue and abort" | |
37 | hg rebase --continue --abort |
|
37 | hg rebase --continue --abort | |
38 |
|
38 | |||
39 | echo |
|
39 | echo | |
40 | echo "% Use continue and collapse" |
|
40 | echo "% Use continue and collapse" | |
41 | hg rebase --continue --collapse |
|
41 | hg rebase --continue --collapse | |
42 |
|
42 | |||
43 | echo |
|
43 | echo | |
44 | echo "% Use continue/abort and dest/source" |
|
44 | echo "% Use continue/abort and dest/source" | |
45 | hg rebase --continue --dest 4 |
|
45 | hg rebase --continue --dest 4 | |
46 |
|
46 | |||
47 | echo |
|
47 | echo | |
48 | echo "% Use source and base" |
|
48 | echo "% Use source and base" | |
49 | hg rebase --base 5 --source 4 |
|
49 | hg rebase --base 5 --source 4 | |
50 |
|
50 | |||
51 | echo |
|
51 | echo | |
52 | echo "% Rebase with no arguments - from current" |
|
52 | echo "% Rebase with no arguments - from current" | |
53 | hg rebase |
|
53 | hg rebase | |
54 |
|
54 | |||
55 | echo |
|
55 | echo | |
56 | echo "% Rebase with no arguments - from the current branch" |
|
56 | echo "% Rebase with no arguments - from the current branch" | |
57 | hg update 6 |
|
57 | hg update 6 | |
58 | hg rebase |
|
58 | hg rebase | |
59 |
|
59 | |||
60 | echo "% ----------" |
|
60 | echo "% ----------" | |
61 | echo "% These work" |
|
61 | echo "% These work" | |
62 | echo |
|
62 | echo | |
63 | echo "% Rebase with no arguments (from 3 onto 7)" |
|
63 | echo "% Rebase with no arguments (from 3 onto 7)" | |
64 | hg update -C 5 |
|
64 | hg update -C 5 | |
65 | hg rebase 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
65 | hg rebase 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
66 |
|
66 | |||
|
67 | echo "% Try to rollback after a rebase (fail)" | |||
|
68 | hg rollback | |||
|
69 | ||||
67 | createrepo > /dev/null 2>&1 |
|
70 | createrepo > /dev/null 2>&1 | |
68 | echo |
|
71 | echo | |
69 | echo "% Rebase with base == '.' => same as no arguments (from 3 onto 7)" |
|
72 | echo "% Rebase with base == '.' => same as no arguments (from 3 onto 7)" | |
70 | hg update -C 5 |
|
73 | hg update -C 5 | |
71 | hg rebase --base . 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
74 | hg rebase --base . 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
72 |
|
75 | |||
73 | createrepo > /dev/null 2>&1 |
|
76 | createrepo > /dev/null 2>&1 | |
74 | echo |
|
77 | echo | |
75 | echo "% Rebase with dest == `hg branch` => same as no arguments (from 3 onto 7)" |
|
78 | echo "% Rebase with dest == `hg branch` => same as no arguments (from 3 onto 7)" | |
76 | hg update -C 5 |
|
79 | hg update -C 5 | |
77 | hg rebase --dest `hg branch` 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
80 | hg rebase --dest `hg branch` 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
78 |
|
81 | |||
79 | createrepo > /dev/null 2>&1 |
|
82 | createrepo > /dev/null 2>&1 | |
80 | echo |
|
83 | echo | |
81 | echo "% Specify only source (from 4 onto 7)" |
|
84 | echo "% Specify only source (from 4 onto 7)" | |
82 | hg rebase --source 4 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
85 | hg rebase --source 4 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
83 |
|
86 | |||
84 | createrepo > /dev/null 2>&1 |
|
87 | createrepo > /dev/null 2>&1 | |
85 | echo |
|
88 | echo | |
86 | echo "% Specify only dest (from 3 onto 6)" |
|
89 | echo "% Specify only dest (from 3 onto 6)" | |
87 | hg update -C 5 |
|
90 | hg update -C 5 | |
88 | hg rebase --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
91 | hg rebase --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
89 |
|
92 | |||
90 | createrepo > /dev/null 2>&1 |
|
93 | createrepo > /dev/null 2>&1 | |
91 | echo |
|
94 | echo | |
92 | echo "% Specify only base (from 3 onto 7)" |
|
95 | echo "% Specify only base (from 3 onto 7)" | |
93 | hg rebase --base 5 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
96 | hg rebase --base 5 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
94 |
|
97 | |||
95 | createrepo > /dev/null 2>&1 |
|
98 | createrepo > /dev/null 2>&1 | |
96 | echo |
|
99 | echo | |
97 | echo "% Specify source and dest (from 4 onto 6)" |
|
100 | echo "% Specify source and dest (from 4 onto 6)" | |
98 | hg rebase --source 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
101 | hg rebase --source 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
99 |
|
102 | |||
100 | createrepo > /dev/null 2>&1 |
|
103 | createrepo > /dev/null 2>&1 | |
101 | echo |
|
104 | echo | |
102 | echo "% Specify base and dest (from 3 onto 6)" |
|
105 | echo "% Specify base and dest (from 3 onto 6)" | |
103 | hg rebase --base 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' |
|
106 | hg rebase --base 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/' | |
104 |
|
107 | |||
105 | exit 0 |
|
108 | exit 0 |
@@ -1,194 +1,196 | |||||
1 | % These fail |
|
1 | % These fail | |
2 |
|
2 | |||
3 | % Use continue and abort |
|
3 | % Use continue and abort | |
4 | hg rebase: cannot use both abort and continue |
|
4 | hg rebase: cannot use both abort and continue | |
5 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] |
|
5 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] | |
6 |
|
6 | |||
7 | move changeset (and descendants) to a different branch |
|
7 | move changeset (and descendants) to a different branch | |
8 |
|
8 | |||
9 | Rebase uses repeated merging to graft changesets from one part of history |
|
9 | Rebase uses repeated merging to graft changesets from one part of history | |
10 | onto another. This can be useful for linearizing local changes relative to |
|
10 | onto another. This can be useful for linearizing local changes relative to | |
11 | a master development tree. |
|
11 | a master development tree. | |
12 |
|
12 | |||
13 | If a rebase is interrupted to manually resolve a merge, it can be continued |
|
13 | If a rebase is interrupted to manually resolve a merge, it can be continued | |
14 | with --continue or aborted with --abort. |
|
14 | with --continue or aborted with --abort. | |
15 |
|
15 | |||
16 | options: |
|
16 | options: | |
17 |
|
17 | |||
18 | --keep keep original revisions |
|
18 | --keep keep original revisions | |
19 | -s --source rebase from a given revision |
|
19 | -s --source rebase from a given revision | |
20 | -b --base rebase from the base of a given revision |
|
20 | -b --base rebase from the base of a given revision | |
21 | -d --dest rebase onto a given revision |
|
21 | -d --dest rebase onto a given revision | |
22 | --collapse collapse the rebased revisions |
|
22 | --collapse collapse the rebased revisions | |
23 | -c --continue continue an interrupted rebase |
|
23 | -c --continue continue an interrupted rebase | |
24 | -a --abort abort an interrupted rebase |
|
24 | -a --abort abort an interrupted rebase | |
25 | --style display using template map file |
|
25 | --style display using template map file | |
26 | --template display with template |
|
26 | --template display with template | |
27 |
|
27 | |||
28 | use "hg -v help rebase" to show global options |
|
28 | use "hg -v help rebase" to show global options | |
29 |
|
29 | |||
30 | % Use continue and collapse |
|
30 | % Use continue and collapse | |
31 | hg rebase: cannot use collapse with continue or abort |
|
31 | hg rebase: cannot use collapse with continue or abort | |
32 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] |
|
32 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] | |
33 |
|
33 | |||
34 | move changeset (and descendants) to a different branch |
|
34 | move changeset (and descendants) to a different branch | |
35 |
|
35 | |||
36 | Rebase uses repeated merging to graft changesets from one part of history |
|
36 | Rebase uses repeated merging to graft changesets from one part of history | |
37 | onto another. This can be useful for linearizing local changes relative to |
|
37 | onto another. This can be useful for linearizing local changes relative to | |
38 | a master development tree. |
|
38 | a master development tree. | |
39 |
|
39 | |||
40 | If a rebase is interrupted to manually resolve a merge, it can be continued |
|
40 | If a rebase is interrupted to manually resolve a merge, it can be continued | |
41 | with --continue or aborted with --abort. |
|
41 | with --continue or aborted with --abort. | |
42 |
|
42 | |||
43 | options: |
|
43 | options: | |
44 |
|
44 | |||
45 | --keep keep original revisions |
|
45 | --keep keep original revisions | |
46 | -s --source rebase from a given revision |
|
46 | -s --source rebase from a given revision | |
47 | -b --base rebase from the base of a given revision |
|
47 | -b --base rebase from the base of a given revision | |
48 | -d --dest rebase onto a given revision |
|
48 | -d --dest rebase onto a given revision | |
49 | --collapse collapse the rebased revisions |
|
49 | --collapse collapse the rebased revisions | |
50 | -c --continue continue an interrupted rebase |
|
50 | -c --continue continue an interrupted rebase | |
51 | -a --abort abort an interrupted rebase |
|
51 | -a --abort abort an interrupted rebase | |
52 | --style display using template map file |
|
52 | --style display using template map file | |
53 | --template display with template |
|
53 | --template display with template | |
54 |
|
54 | |||
55 | use "hg -v help rebase" to show global options |
|
55 | use "hg -v help rebase" to show global options | |
56 |
|
56 | |||
57 | % Use continue/abort and dest/source |
|
57 | % Use continue/abort and dest/source | |
58 | hg rebase: abort and continue do not allow specifying revisions |
|
58 | hg rebase: abort and continue do not allow specifying revisions | |
59 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] |
|
59 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] | |
60 |
|
60 | |||
61 | move changeset (and descendants) to a different branch |
|
61 | move changeset (and descendants) to a different branch | |
62 |
|
62 | |||
63 | Rebase uses repeated merging to graft changesets from one part of history |
|
63 | Rebase uses repeated merging to graft changesets from one part of history | |
64 | onto another. This can be useful for linearizing local changes relative to |
|
64 | onto another. This can be useful for linearizing local changes relative to | |
65 | a master development tree. |
|
65 | a master development tree. | |
66 |
|
66 | |||
67 | If a rebase is interrupted to manually resolve a merge, it can be continued |
|
67 | If a rebase is interrupted to manually resolve a merge, it can be continued | |
68 | with --continue or aborted with --abort. |
|
68 | with --continue or aborted with --abort. | |
69 |
|
69 | |||
70 | options: |
|
70 | options: | |
71 |
|
71 | |||
72 | --keep keep original revisions |
|
72 | --keep keep original revisions | |
73 | -s --source rebase from a given revision |
|
73 | -s --source rebase from a given revision | |
74 | -b --base rebase from the base of a given revision |
|
74 | -b --base rebase from the base of a given revision | |
75 | -d --dest rebase onto a given revision |
|
75 | -d --dest rebase onto a given revision | |
76 | --collapse collapse the rebased revisions |
|
76 | --collapse collapse the rebased revisions | |
77 | -c --continue continue an interrupted rebase |
|
77 | -c --continue continue an interrupted rebase | |
78 | -a --abort abort an interrupted rebase |
|
78 | -a --abort abort an interrupted rebase | |
79 | --style display using template map file |
|
79 | --style display using template map file | |
80 | --template display with template |
|
80 | --template display with template | |
81 |
|
81 | |||
82 | use "hg -v help rebase" to show global options |
|
82 | use "hg -v help rebase" to show global options | |
83 |
|
83 | |||
84 | % Use source and base |
|
84 | % Use source and base | |
85 | hg rebase: cannot specify both a revision and a base |
|
85 | hg rebase: cannot specify both a revision and a base | |
86 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] |
|
86 | hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep] | |
87 |
|
87 | |||
88 | move changeset (and descendants) to a different branch |
|
88 | move changeset (and descendants) to a different branch | |
89 |
|
89 | |||
90 | Rebase uses repeated merging to graft changesets from one part of history |
|
90 | Rebase uses repeated merging to graft changesets from one part of history | |
91 | onto another. This can be useful for linearizing local changes relative to |
|
91 | onto another. This can be useful for linearizing local changes relative to | |
92 | a master development tree. |
|
92 | a master development tree. | |
93 |
|
93 | |||
94 | If a rebase is interrupted to manually resolve a merge, it can be continued |
|
94 | If a rebase is interrupted to manually resolve a merge, it can be continued | |
95 | with --continue or aborted with --abort. |
|
95 | with --continue or aborted with --abort. | |
96 |
|
96 | |||
97 | options: |
|
97 | options: | |
98 |
|
98 | |||
99 | --keep keep original revisions |
|
99 | --keep keep original revisions | |
100 | -s --source rebase from a given revision |
|
100 | -s --source rebase from a given revision | |
101 | -b --base rebase from the base of a given revision |
|
101 | -b --base rebase from the base of a given revision | |
102 | -d --dest rebase onto a given revision |
|
102 | -d --dest rebase onto a given revision | |
103 | --collapse collapse the rebased revisions |
|
103 | --collapse collapse the rebased revisions | |
104 | -c --continue continue an interrupted rebase |
|
104 | -c --continue continue an interrupted rebase | |
105 | -a --abort abort an interrupted rebase |
|
105 | -a --abort abort an interrupted rebase | |
106 | --style display using template map file |
|
106 | --style display using template map file | |
107 | --template display with template |
|
107 | --template display with template | |
108 |
|
108 | |||
109 | use "hg -v help rebase" to show global options |
|
109 | use "hg -v help rebase" to show global options | |
110 |
|
110 | |||
111 | % Rebase with no arguments - from current |
|
111 | % Rebase with no arguments - from current | |
112 | nothing to rebase |
|
112 | nothing to rebase | |
113 |
|
113 | |||
114 | % Rebase with no arguments - from the current branch |
|
114 | % Rebase with no arguments - from the current branch | |
115 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
115 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
116 | nothing to rebase |
|
116 | nothing to rebase | |
117 | % ---------- |
|
117 | % ---------- | |
118 | % These work |
|
118 | % These work | |
119 |
|
119 | |||
120 | % Rebase with no arguments (from 3 onto 7) |
|
120 | % Rebase with no arguments (from 3 onto 7) | |
121 | 3 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
121 | 3 files updated, 0 files merged, 2 files removed, 0 files unresolved | |
122 | saving bundle to |
|
122 | saving bundle to | |
123 | adding branch |
|
123 | adding branch | |
124 | adding changesets |
|
124 | adding changesets | |
125 | adding manifests |
|
125 | adding manifests | |
126 | adding file changes |
|
126 | adding file changes | |
127 | added 5 changesets with 5 changes to 5 files |
|
127 | added 5 changesets with 5 changes to 5 files | |
128 | rebase completed |
|
128 | rebase completed | |
|
129 | % Try to rollback after a rebase (fail) | |||
|
130 | no rollback information available | |||
129 |
|
131 | |||
130 | % Rebase with base == '.' => same as no arguments (from 3 onto 7) |
|
132 | % Rebase with base == '.' => same as no arguments (from 3 onto 7) | |
131 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved |
|
133 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved | |
132 | saving bundle to |
|
134 | saving bundle to | |
133 | adding branch |
|
135 | adding branch | |
134 | adding changesets |
|
136 | adding changesets | |
135 | adding manifests |
|
137 | adding manifests | |
136 | adding file changes |
|
138 | adding file changes | |
137 | added 5 changesets with 5 changes to 5 files |
|
139 | added 5 changesets with 5 changes to 5 files | |
138 | rebase completed |
|
140 | rebase completed | |
139 |
|
141 | |||
140 | % Rebase with dest == default => same as no arguments (from 3 onto 7) |
|
142 | % Rebase with dest == default => same as no arguments (from 3 onto 7) | |
141 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved |
|
143 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved | |
142 | saving bundle to |
|
144 | saving bundle to | |
143 | adding branch |
|
145 | adding branch | |
144 | adding changesets |
|
146 | adding changesets | |
145 | adding manifests |
|
147 | adding manifests | |
146 | adding file changes |
|
148 | adding file changes | |
147 | added 5 changesets with 5 changes to 5 files |
|
149 | added 5 changesets with 5 changes to 5 files | |
148 | rebase completed |
|
150 | rebase completed | |
149 |
|
151 | |||
150 | % Specify only source (from 4 onto 7) |
|
152 | % Specify only source (from 4 onto 7) | |
151 | saving bundle to |
|
153 | saving bundle to | |
152 | adding branch |
|
154 | adding branch | |
153 | adding changesets |
|
155 | adding changesets | |
154 | adding manifests |
|
156 | adding manifests | |
155 | adding file changes |
|
157 | adding file changes | |
156 | added 4 changesets with 4 changes to 4 files (-1 heads) |
|
158 | added 4 changesets with 4 changes to 4 files (-1 heads) | |
157 | rebase completed |
|
159 | rebase completed | |
158 |
|
160 | |||
159 | % Specify only dest (from 3 onto 6) |
|
161 | % Specify only dest (from 3 onto 6) | |
160 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved |
|
162 | 3 files updated, 0 files merged, 3 files removed, 0 files unresolved | |
161 | saving bundle to |
|
163 | saving bundle to | |
162 | adding branch |
|
164 | adding branch | |
163 | adding changesets |
|
165 | adding changesets | |
164 | adding manifests |
|
166 | adding manifests | |
165 | adding file changes |
|
167 | adding file changes | |
166 | added 5 changesets with 5 changes to 5 files (+1 heads) |
|
168 | added 5 changesets with 5 changes to 5 files (+1 heads) | |
167 | rebase completed |
|
169 | rebase completed | |
168 |
|
170 | |||
169 | % Specify only base (from 3 onto 7) |
|
171 | % Specify only base (from 3 onto 7) | |
170 | saving bundle to |
|
172 | saving bundle to | |
171 | adding branch |
|
173 | adding branch | |
172 | adding changesets |
|
174 | adding changesets | |
173 | adding manifests |
|
175 | adding manifests | |
174 | adding file changes |
|
176 | adding file changes | |
175 | added 5 changesets with 5 changes to 5 files |
|
177 | added 5 changesets with 5 changes to 5 files | |
176 | rebase completed |
|
178 | rebase completed | |
177 |
|
179 | |||
178 | % Specify source and dest (from 4 onto 6) |
|
180 | % Specify source and dest (from 4 onto 6) | |
179 | saving bundle to |
|
181 | saving bundle to | |
180 | adding branch |
|
182 | adding branch | |
181 | adding changesets |
|
183 | adding changesets | |
182 | adding manifests |
|
184 | adding manifests | |
183 | adding file changes |
|
185 | adding file changes | |
184 | added 4 changesets with 4 changes to 4 files |
|
186 | added 4 changesets with 4 changes to 4 files | |
185 | rebase completed |
|
187 | rebase completed | |
186 |
|
188 | |||
187 | % Specify base and dest (from 3 onto 6) |
|
189 | % Specify base and dest (from 3 onto 6) | |
188 | saving bundle to |
|
190 | saving bundle to | |
189 | adding branch |
|
191 | adding branch | |
190 | adding changesets |
|
192 | adding changesets | |
191 | adding manifests |
|
193 | adding manifests | |
192 | adding file changes |
|
194 | adding file changes | |
193 | added 5 changesets with 5 changes to 5 files (+1 heads) |
|
195 | added 5 changesets with 5 changes to 5 files (+1 heads) | |
194 | rebase completed |
|
196 | rebase completed |
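The help text repeated in the failure cases above also describes the interrupted-rebase workflow; a short sketch of that sequence, with revision numbers assumed:

hg rebase --source 4 --dest 6   # stops if a merge needs manual resolution
# ... resolve the conflicts in the working copy, then either
hg rebase --continue            # resume the interrupted rebase
# or
hg rebase --abort               # give up and restore the original state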