@@ -1196,7 +1196,8 @@ def graph(web, req, tmpl):
                 canvaswidth=(cols + 1) * bg_height,
                 truecanvasheight=rows * bg_height,
                 canvasheight=canvasheight, bg_height=bg_height,
-                jsdata=lambda **x: graphdata(True, str),
+                # {jsdata} will be passed to |json, so it must be in utf-8
+                jsdata=lambda **x: graphdata(True, encoding.fromlocal),
                 nodes=lambda **x: graphdata(False, str),
                 node=ctx.hex(), changenav=changenav)
 
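In the hunk above, {jsdata} switches from the identity conversion (str) to encoding.fromlocal because the reworked |json filter now expects utf-8(-b) input rather than locally encoded bytes. A minimal sketch of that conversion step, using a stand-in helper instead of mercurial.encoding (the helper name, the sample text and the latin-1 assumption are illustrative only):

    # Illustrative stand-in for encoding.fromlocal: local-encoding bytes -> utf-8 bytes.
    def fromlocal_demo(s, local='latin-1'):
        return s.decode(local).encode('utf-8')

    desc = b'graphe \xe9tendu'       # hypothetical text in the local (latin-1) encoding
    print(fromlocal_demo(desc))      # b'graphe \xc3\xa9tendu' -- safe to hand to |json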
@@ -197,15 +197,8 @@ def json(obj):
         return {None: 'null', False: 'false', True: 'true'}[obj]
     elif isinstance(obj, int) or isinstance(obj, float):
         return str(obj)
-    elif isinstance(obj, encoding.localstr):
-        u = encoding.fromlocal(obj).decode('utf-8') # can round-trip
-        return '"%s"' % jsonescape(u)
     elif isinstance(obj, str):
-        # no encoding.fromlocal() because it may abort if obj can't be decoded
-        u = unicode(obj, encoding.encoding, 'replace')
-        return '"%s"' % jsonescape(u)
-    elif isinstance(obj, unicode):
-        return '"%s"' % jsonescape(obj)
+        return '"%s"' % encoding.jsonescape(obj, paranoid=True)
     elif util.safehasattr(obj, 'keys'):
         out = []
         for k, v in sorted(obj.iteritems()):
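With the localstr and unicode branches removed, json() treats any str as utf-8b and leaves escaping to encoding.jsonescape(obj, paranoid=True). The sketch below is not Mercurial's implementation, just a rough illustration of what the paranoid mode is for: non-ASCII characters and HTML-sensitive ones such as '<' and '>' come out as \uXXXX escapes, so the result can be embedded in hgweb pages (BMP input assumed):

    def jsonescape_sketch(u):
        # Escape non-printable/non-ASCII characters plus a few that are unsafe
        # to embed verbatim in HTML ('<', '>', '&') as \uXXXX sequences.
        unsafe = set('<>&"\\')
        return ''.join('\\u%04x' % ord(ch)
                       if ch in unsafe or not 0x20 <= ord(ch) < 0x7f
                       else ch
                       for ch in u)

    print('"%s"' % jsonescape_sketch('<foo@example.org>'))  # "\u003cfoo@example.org\u003e"
    print('"%s"' % jsonescape_sketch('caf\u00e9'))          # "caf\u00e9"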
@@ -8,26 +8,26 @@ shortlog = changelist.tmpl
 changelistentry = '\{
   "node": {node|json},
   "date": {date|json},
-  "desc": {desc|json},
+  "desc": {desc|utf8|json},
   "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
   "tags": [{join(tags%changelistentryname, ", ")}],
-  "user": {author|json}
+  "user": {author|utf8|json}
   }'
-changelistentryname = '{name|json}'
+changelistentryname = '{name|utf8|json}'
 changeset = '\{
   "node": {node|json},
   "date": {date|json},
-  "desc": {desc|json},
+  "desc": {desc|utf8|json},
   "branch": {if(branch, branch%changesetbranch, "default"|json)},
   "bookmarks": [{join(changesetbookmark, ", ")}],
   "tags": [{join(changesettag, ", ")}],
-  "user": {author|json},
+  "user": {author|utf8|json},
   "parents": [{join(parent%changesetparent, ", ")}],
   "phase": {phase|json}
   }'
-changesetbranch = '{name|json}'
-changesetbookmark = '{bookmark|json}'
-changesettag = '{tag|json}'
+changesetbranch = '{name|utf8|json}'
+changesetbookmark = '{bookmark|utf8|json}'
+changesettag = '{tag|utf8|json}'
 changesetparent = '{node|json}'
 manifest = '\{
   "node": {node|json},
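In this map, every field that can carry user-supplied text (desc, author, tag, bookmark and branch names, ...) now goes through |utf8 before |json, while fields such as {node} and {date} keep plain |json. A hypothetical end-to-end rendering of the '{author|utf8|json}' chain; utf8_f and json_f are illustrative stand-ins, not the real template filters:

    def utf8_f(s, local='latin-1'):
        # |utf8: local-encoding bytes -> utf-8 bytes
        return s.decode(local).encode('utf-8')

    def json_f(s):
        # |json: utf-8 bytes -> escaped JSON string literal (very rough sketch)
        return '"%s"' % ''.join(
            ch if 0x20 <= ord(ch) < 0x7f and ch not in '<>&"\\' else '\\u%04x' % ord(ch)
            for ch in s.decode('utf-8'))

    author = b'Andr\xe9 <andre@example.org>'   # hypothetical author in the local encoding
    print(json_f(utf8_f(author)))              # "Andr\u00e9 \u003candre@example.org\u003e"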
@@ -37,7 +37,7 @@ manifest = '\{
   "bookmarks": [{join(bookmarks%name, ", ")}],
   "tags": [{join(tags%name, ", ")}]
   }'
-name = '{name|json}'
+name = '{name|utf8|json}'
 direntry = '\{
   "abspath": {path|json},
   "basename": {basename|json},
@@ -55,7 +55,7 @@ tags = '\{
   "tags": [{join(entriesnotip%tagentry, ", ")}]
   }'
 tagentry = '\{
-  "tag": {tag|json},
+  "tag": {tag|utf8|json},
   "node": {node|json},
   "date": {date|json}
   }'
@@ -64,7 +64,7 @@ bookmarks = '\{
   "bookmarks": [{join(entries%bookmarkentry, ", ")}]
   }'
 bookmarkentry = '\{
-  "bookmark": {bookmark|json},
+  "bookmark": {bookmark|utf8|json},
   "node": {node|json},
   "date": {date|json}
   }'
@@ -72,7 +72,7 @@ branches = '\{
   "branches": [{join(entries%branchentry, ", ")}]
   }'
 branchentry = '\{
-  "branch": {branch|json},
+  "branch": {branch|utf8|json},
   "node": {node|json},
   "date": {date|json},
   "status": {status|json}
@@ -82,8 +82,8 @@ filediff = '\{
   "path": {file|json},
   "node": {node|json},
   "date": {date|json},
-  "desc": {desc|json},
-  "author": {author|json},
+  "desc": {desc|utf8|json},
+  "author": {author|utf8|json},
   "parents": [{join(parent%changesetparent, ", ")}],
   "children": [{join(child%changesetparent, ", ")}],
   "diff": [{join(diff%diffblock, ", ")}]
@@ -116,8 +116,8 @@ filecomparison = '\{
   "path": {file|json},
   "node": {node|json},
   "date": {date|json},
-  "desc": {desc|json},
-  "author": {author|json},
+  "desc": {desc|utf8|json},
+  "author": {author|utf8|json},
   "parents": [{join(parent%changesetparent, ", ")}],
   "children": [{join(child%changesetparent, ", ")}],
   "leftnode": {leftnode|json},
@@ -137,9 +137,9 @@ comparisonline = '\{
 fileannotate = '\{
   "abspath": {file|json},
   "node": {node|json},
-  "author": {author|json},
+  "author": {author|utf8|json},
   "date": {date|json},
-  "desc": {desc|json},
+  "desc": {desc|utf8|json},
   "parents": [{join(parent%changesetparent, ", ")}],
   "children": [{join(child%changesetparent, ", ")}],
   "permissions": {permissions|json},
@@ -147,8 +147,8 @@ fileannotate = '\{
   }'
 fileannotation = '\{
   "node": {node|json},
-  "author": {author|json},
-  "desc": {desc|json},
+  "author": {author|utf8|json},
+  "desc": {desc|utf8|json},
   "abspath": {file|json},
   "targetline": {targetline|json},
   "line": {line|json},
@@ -163,12 +163,12 @@ helptopics = '\{
   "othercommands": [{join(othercommands%helptopicentry, ", ")}]
   }'
 helptopicentry = '\{
-  "topic": {topic|json},
-  "summary": {summary|json}
+  "topic": {topic|utf8|json},
+  "summary": {summary|utf8|json}
   }'
 help = '\{
-  "topic": {topic|json},
-  "rawdoc": {doc|json}
+  "topic": {topic|utf8|json},
+  "rawdoc": {doc|utf8|json}
   }'
 filenodelink = ''
 filenolink = ''
@@ -3542,6 +3542,11 @@ Test broken string escapes:
   hg: parse error: invalid \x escape
   [255]
 
+json filter should escape HTML tags so that the output can be embedded in hgweb:
+
+  $ hg log -T "{'<foo@example.org>'|json}\n" -R a -l1
+  "\u003cfoo@example.org\u003e"
+
 Set up repository for non-ascii encoding tests:
 
   $ hg init nonascii
@@ -3558,11 +3563,12 @@ json filter should try round-trip conver
   $ HGENCODING=ascii hg log -T "{branch|json}\n" -r0
   "\u00e9"
 
-json filter should not abort if it can't decode bytes:
-(not sure the current behavior is right; we might want to use utf-8b encoding?)
+json filter takes input as utf-8b:
 
   $ HGENCODING=ascii hg log -T "{'`cat utf-8`'|json}\n" -l1
-  "\ufffd\ufffd"
+  "\u00e9"
+  $ HGENCODING=ascii hg log -T "{'`cat latin1`'|json}\n" -l1
+  "\udce9"
 
 utf8 filter:
 
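The updated expectations spell out the utf-8b contract: input that is already valid utf-8 (the é stored in the utf-8 file) is escaped as \u00e9, while an undecodable byte (the bare 0xe9 from the latin1 file) is preserved as the lone surrogate U+DCE9. Python 3's surrogateescape error handler gives the same mapping and can serve as a rough analogy (it is not how the Python 2 era Mercurial code implements utf-8b):

    # Valid utf-8 "é" decodes normally -> U+00E9.
    print('\\u%04x' % ord(b'\xc3\xa9'.decode('utf-8', 'surrogateescape')))   # \u00e9
    # A bare latin-1 0xE9 byte is not valid utf-8; surrogateescape keeps it
    # as the lone surrogate U+DCE9, matching the "\udce9" expected above.
    print('\\u%04x' % ord(b'\xe9'.decode('utf-8', 'surrogateescape')))       # \udce9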