@@ -1196,7 +1196,8 @@ def graph(web, req, tmpl):
 canvaswidth=(cols + 1) * bg_height,
 truecanvasheight=rows * bg_height,
 canvasheight=canvasheight, bg_height=bg_height,
-jsdata=lambda **x: graphdata(True, str),
+# {jsdata} will be passed to |json, so it must be in utf-8
+jsdata=lambda **x: graphdata(True, encoding.fromlocal),
 nodes=lambda **x: graphdata(False, str),
 node=ctx.hex(), changenav=changenav)
 
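Here graphdata() is given encoding.fromlocal as its mapping function instead of str, so the {jsdata} value is converted from the local encoding to UTF-8 before it reaches the |json filter. A minimal sketch of that difference, assuming a Latin-1 local encoding (fromlocal_sketch is an illustrative stand-in, not mercurial.encoding.fromlocal):

    # Sketch only: why the mapping function matters for |json input.
    def fromlocal_sketch(s, local_encoding='latin-1'):
        # stand-in for mercurial.encoding.fromlocal (simplified; the real
        # helper also handles round-tripping and undecodable input)
        return s.decode(local_encoding).encode('utf-8')

    author = b'\xe9'                    # "e acute" as stored in the local encoding
    print(fromlocal_sketch(author))     # b'\xc3\xa9' -- UTF-8 bytes for the |json filter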
@@ -197,15 +197,8 @@ def json(obj):
         return {None: 'null', False: 'false', True: 'true'}[obj]
     elif isinstance(obj, int) or isinstance(obj, float):
         return str(obj)
-    elif isinstance(obj, encoding.localstr):
-        u = encoding.fromlocal(obj).decode('utf-8') # can round-trip
-        return '"%s"' % jsonescape(u)
     elif isinstance(obj, str):
-        # no encoding.fromlocal() because it may abort if obj can't be decoded
-        u = unicode(obj, encoding.encoding, 'replace')
-        return '"%s"' % jsonescape(u)
-    elif isinstance(obj, unicode):
-        return '"%s"' % jsonescape(obj)
+        return '"%s"' % encoding.jsonescape(obj, paranoid=True)
     elif util.safehasattr(obj, 'keys'):
         out = []
         for k, v in sorted(obj.iteritems()):
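The single call to encoding.jsonescape(obj, paranoid=True) replaces the three removed string branches. In paranoid mode the escaper also emits \uXXXX escapes for characters that are unsafe to embed in an HTML page, which is what hgweb needs. A rough approximation of that behaviour (a sketch only, not mercurial.encoding.jsonescape; the exact set of escaped characters may differ):

    # Sketch: JSON string escaping with extra "paranoid" escapes for HTML
    # embedding.
    _special = {'\\': '\\\\', '"': '\\"', '\b': '\\b', '\f': '\\f',
                '\n': '\\n', '\r': '\\r', '\t': '\\t'}

    def jsonescape_sketch(s, paranoid=True):
        out = []
        for ch in s:
            if ch in _special:
                out.append(_special[ch])
            elif ord(ch) < 0x20 or (paranoid and ch in '<>&'):
                out.append('\\u%04x' % ord(ch))
            else:
                out.append(ch)
        return ''.join(out)

    print('"%s"' % jsonescape_sketch('<foo@example.org>'))
    # "\u003cfoo@example.org\u003e" -- same shape as the test added further down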
@@ -8,26 +8,26 @@ shortlog = changelist.tmpl
 changelistentry = '\{
 "node": {node|json},
 "date": {date|json},
-"desc": {desc|json},
+"desc": {desc|utf8|json},
 "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
 "tags": [{join(tags%changelistentryname, ", ")}],
-"user": {author|json}
+"user": {author|utf8|json}
 }'
-changelistentryname = '{name|json}'
+changelistentryname = '{name|utf8|json}'
 changeset = '\{
 "node": {node|json},
 "date": {date|json},
-"desc": {desc|json},
+"desc": {desc|utf8|json},
 "branch": {if(branch, branch%changesetbranch, "default"|json)},
 "bookmarks": [{join(changesetbookmark, ", ")}],
 "tags": [{join(changesettag, ", ")}],
-"user": {author|json},
+"user": {author|utf8|json},
 "parents": [{join(parent%changesetparent, ", ")}],
 "phase": {phase|json}
 }'
-changesetbranch = '{name|json}'
-changesetbookmark = '{bookmark|json}'
-changesettag = '{tag|json}'
+changesetbranch = '{name|utf8|json}'
+changesetbookmark = '{bookmark|utf8|json}'
+changesettag = '{tag|utf8|json}'
 changesetparent = '{node|json}'
 manifest = '\{
 "node": {node|json},
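This hunk and the remaining template-map hunks below pipe local-encoded fields such as desc, author, branch, tag and bookmark names through utf8 before json, so the json filter always receives UTF-8 input. Roughly, {desc|utf8|json} behaves like this sketch (simplified stand-ins, not the real template filters, assuming a Latin-1 local encoding):

    def utf8_sketch(s, local_encoding='latin-1'):
        # stand-in for the utf8 filter: local encoding -> UTF-8 bytes
        return s.decode(local_encoding).encode('utf-8')

    def json_sketch(s):
        # stand-in for the json filter: UTF-8 bytes -> quoted JSON string,
        # with non-ASCII emitted as \uXXXX escapes
        return '"%s"' % ''.join(
            c if ' ' <= c <= '~' and c not in '"\\' else '\\u%04x' % ord(c)
            for c in s.decode('utf-8'))

    desc = b'caf\xe9'                       # "cafe" with e acute, local (Latin-1) bytes
    print(json_sketch(utf8_sketch(desc)))   # "caf\u00e9"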
@@ -37,7 +37,7 @@ manifest = '\{
 "bookmarks": [{join(bookmarks%name, ", ")}],
 "tags": [{join(tags%name, ", ")}]
 }'
-name = '{name|json}'
+name = '{name|utf8|json}'
 direntry = '\{
 "abspath": {path|json},
 "basename": {basename|json},
@@ -55,7 +55,7 @@ tags = '\{
 "tags": [{join(entriesnotip%tagentry, ", ")}]
 }'
 tagentry = '\{
-"tag": {tag|json},
+"tag": {tag|utf8|json},
 "node": {node|json},
 "date": {date|json}
 }'
@@ -64,7 +64,7 @@ bookmarks = '\{
 "bookmarks": [{join(entries%bookmarkentry, ", ")}]
 }'
 bookmarkentry = '\{
-"bookmark": {bookmark|json},
+"bookmark": {bookmark|utf8|json},
 "node": {node|json},
 "date": {date|json}
 }'
@@ -72,7 +72,7 @@ branches = '\{
 "branches": [{join(entries%branchentry, ", ")}]
 }'
 branchentry = '\{
-"branch": {branch|json},
+"branch": {branch|utf8|json},
 "node": {node|json},
 "date": {date|json},
 "status": {status|json}
@@ -82,8 +82,8 @@ filediff = '\{
 "path": {file|json},
 "node": {node|json},
 "date": {date|json},
-"desc": {desc|json},
-"author": {author|json},
+"desc": {desc|utf8|json},
+"author": {author|utf8|json},
 "parents": [{join(parent%changesetparent, ", ")}],
 "children": [{join(child%changesetparent, ", ")}],
 "diff": [{join(diff%diffblock, ", ")}]
@@ -116,8 +116,8 @@ filecomparison = '\{
 "path": {file|json},
 "node": {node|json},
 "date": {date|json},
-"desc": {desc|json},
-"author": {author|json},
+"desc": {desc|utf8|json},
+"author": {author|utf8|json},
 "parents": [{join(parent%changesetparent, ", ")}],
 "children": [{join(child%changesetparent, ", ")}],
 "leftnode": {leftnode|json},
@@ -137,9 +137,9 @@ comparisonline = '\{
 fileannotate = '\{
 "abspath": {file|json},
 "node": {node|json},
-"author": {author|json},
+"author": {author|utf8|json},
 "date": {date|json},
-"desc": {desc|json},
+"desc": {desc|utf8|json},
 "parents": [{join(parent%changesetparent, ", ")}],
 "children": [{join(child%changesetparent, ", ")}],
 "permissions": {permissions|json},
@@ -147,8 +147,8 @@ fileannotate = '\{
 }'
 fileannotation = '\{
 "node": {node|json},
-"author": {author|json},
-"desc": {desc|json},
+"author": {author|utf8|json},
+"desc": {desc|utf8|json},
 "abspath": {file|json},
 "targetline": {targetline|json},
 "line": {line|json},
@@ -163,12 +163,12 @@ helptopics = '\{
 "othercommands": [{join(othercommands%helptopicentry, ", ")}]
 }'
 helptopicentry = '\{
-"topic": {topic|json},
-"summary": {summary|json}
+"topic": {topic|utf8|json},
+"summary": {summary|utf8|json}
 }'
 help = '\{
-"topic": {topic|json},
-"rawdoc": {doc|json}
+"topic": {topic|utf8|json},
+"rawdoc": {doc|utf8|json}
 }'
 filenodelink = ''
 filenolink = ''
@@ -3542,6 +3542,11 @@ Test broken string escapes:
   hg: parse error: invalid \x escape
   [255]
 
+json filter should escape HTML tags so that the output can be embedded in hgweb:
+
+  $ hg log -T "{'<foo@example.org>'|json}\n" -R a -l1
+  "\u003cfoo@example.org\u003e"
+
 Set up repository for non-ascii encoding tests:
 
   $ hg init nonascii
@@ -3558,11 +3563,12 @@ json filter should try round-trip conver
   $ HGENCODING=ascii hg log -T "{branch|json}\n" -r0
   "\u00e9"
 
-json filter should not abort if it can't decode bytes:
-(not sure the current behavior is right; we might want to use utf-8b encoding?)
+json filter takes input as utf-8b:
 
   $ HGENCODING=ascii hg log -T "{'`cat utf-8`'|json}\n" -l1
-  "\ufffd\ufffd"
+  "\u00e9"
+  $ HGENCODING=ascii hg log -T "{'`cat latin1`'|json}\n" -l1
+  "\udce9"
 
 utf8 filter:
 
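The rewritten test states the new contract: the json filter takes its input as utf-8b, so bytes that are not valid UTF-8 survive as lone surrogates (hence \udce9 for the raw Latin-1 0xe9 byte). Python 3's surrogateescape error handler follows the same idea and can illustrate it (an analogy only; Mercurial ships its own utf-8b handling):

    raw = b'\xe9'                                  # Latin-1 byte, not valid UTF-8
    text = raw.decode('utf-8', 'surrogateescape')  # '\udce9', as in the test output
    assert text.encode('utf-8', 'surrogateescape') == raw   # lossless round-trip
    print('"\\u%04x"' % ord(text))                 # prints "\udce9"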