formatter: rename {abspath}/{file} to {path}, and drop relative {path} (BC)...
Yuya Nishihara
r39405:34ba4711 default
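
The user-visible rule after this change: {path} always yields the repository-relative name, the old {abspath} (cat, files, annotate) and {file} (annotate, grep) keywords are replaced by {path}, and cwd-relative output must be requested explicitly via the relpath filter. A hypothetical transcript sketching the new spellings (file names illustrative):

  $ hg files -T '{path}\n'              # repo-relative; formerly {abspath}
  $ hg files -T '{path|relpath}\n'      # cwd-relative; the old {path}
  $ hg grep port -T '{path}:{rev}\n'    # formerly {file}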
@@ -576,8 +576,8 @@ def files(ui, repo, *pats, **opts):
         label = 'kwfiles.' + kwstate
         for f in filenames:
             fm.startitem()
-            fm.write('kwstatus path', fmt, char,
-                     repo.pathto(f, cwd), label=label)
+            fm.data(kwstatus=char, path=f)
+            fm.plain(fmt % (char, repo.pathto(f, cwd)), label=label)
     fm.end()
 
 @command('kwshrink',
@@ -2176,8 +2176,8 @@ def files(ui, ctx, m, fm, fmt, subrepos):
         if needsfctx:
             fc = ctx[f]
             fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
-        fm.data(abspath=f)
-        fm.write('path', fmt, m.rel(f))
+        fm.data(path=f)
+        fm.plain(fmt % m.rel(f))
         ret = 0
 
     for subpath in sorted(ctx.substate):
@@ -2323,7 +2323,7 @@ def _updatecatformatter(fm, ctx, matcher
     fm.startitem()
     fm.context(ctx=ctx)
     fm.write('data', '%s', data)
-    fm.data(abspath=path, path=matcher.rel(path))
+    fm.data(path=path)
 
 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
     err = 1
@@ -337,10 +337,10 @@ def annotate(ui, repo, *pats, **opts):
         ('rev', ' ', lambda x: x.fctx.rev(), formatrev),
         ('node', ' ', lambda x: hexfn(x.fctx.node()), formathex),
         ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
-        ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
+        ('path', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
         ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
     ]
-    opnamemap = {'rev': 'number', 'node': 'changeset'}
+    opnamemap = {'rev': 'number', 'node': 'changeset', 'path': 'file'}
 
     if (not opts.get('user') and not opts.get('changeset')
         and not opts.get('date') and not opts.get('file')):
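
The added opnamemap entry presumably keeps the existing -f/--file flag wired to the renamed 'path' field, so the command-line option is unchanged even though the template keyword is now {path}. A sketch (output illustrative):

  $ hg annotate -nf a
  0 a: a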
@@ -380,7 +380,7 @@ def annotate(ui, repo, *pats, **opts):
     for abs in ctx.walk(m):
         fctx = ctx[abs]
         rootfm.startitem()
-        rootfm.data(abspath=abs, path=m.rel(abs))
+        rootfm.data(path=abs)
         if not opts.get('text') and fctx.isbinary():
             rootfm.plain(_("%s: binary file\n")
                          % ((pats and m.rel(abs)) or abs))
@@ -2660,7 +2660,7 @@ def grep(ui, repo, pattern, *pats, **opts):
             except error.WdirUnsupported:
                 return ctx[fn].isbinary()
 
-        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
+        fieldnamemap = {'filename': 'path', 'linenumber': 'line_number'}
         if diff:
             iter = difflinestates(pstates, states)
         else:
@@ -5187,10 +5187,12 @@ def status(ui, repo, *pats, **opts):
         for f in files:
             fm.startitem()
             fm.context(ctx=ctx2)
+            fm.data(path=f)
             fm.condwrite(showchar, 'status', '%s ', char, label=label)
-            fm.write('path', fmt, repo.pathto(f, cwd), label=label)
+            fm.plain(fmt % repo.pathto(f, cwd), label=label)
             if f in copy:
-                fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
+                fm.data(copy=copy[f])
+                fm.plain((' %s' + end) % repo.pathto(copy[f], cwd),
                          label='status.copied')
 
     if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
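
Routing the copy source through fm.data() means template/JSON consumers of status now see repo-relative {path} and {copy} keywords, while the human-readable listing still goes through repo.pathto(). An illustrative check, assuming a repo where b was copied from a:

  $ hg status -C -Tjson
  [
   {
    "copy": "a",
    "path": "b",
    "status": "A"
   }
  ]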
@@ -74,7 +74,7 @@ description = '{if(desc|strip, "{label('
 {label('ui.note log.description',
        '{desc|strip}')}\n\n")}'
 
-status = '{status} {path}\n{if(copy, " {copy}\n")}'
+status = '{status} {path|relpath}\n{if(copy, " {copy|relpath}\n")}'
 
 # Obsfate templates, it would be removed once we introduce the obsfate
 # template fragment
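
Because the built-in status template now applies |relpath itself, the default output of plain `hg status` should be unchanged; only custom templates observe the new repo-relative values. A hedged comparison:

  $ hg status -T '{status} {path|relpath}\n'   # matches the built-in default
  $ hg status -T '{status} {path}\n'           # repo-relative under the new scheme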
@@ -56,7 +56,6 @@ annotate (JSON)
   $ hg annotate -Tjson a
   [
    {
-    "abspath": "a",
     "lines": [{"line": "a\n", "rev": 0}],
     "path": "a"
    }
@@ -65,8 +64,7 @@ annotate (JSON)
   $ hg annotate -Tjson -cdfnul a
   [
    {
-    "abspath": "a",
-    "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
+    "lines": [{"date": [1.0, 0], "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
     "path": "a"
    }
   ]
@@ -127,12 +125,10 @@ annotate multiple files (JSON)
   $ hg annotate -Tjson a b
   [
    {
-    "abspath": "a",
     "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
     "path": "a"
    },
    {
-    "abspath": "b",
     "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
     "path": "b"
    }
@@ -140,7 +136,7 @@ annotate multiple files (JSON)
 
 annotate multiple files (template)
 
-  $ hg annotate -T'== {abspath} ==\n{lines % "{rev}: {line}"}' a b
+  $ hg annotate -T'== {path} ==\n{lines % "{rev}: {line}"}' a b
   == a ==
   0: a
   1: a
@@ -568,7 +564,6 @@ annotate modified file
   $ hg annotate -ncr "wdir()" -Tjson foo
   [
    {
-    "abspath": "foo",
     "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
     "path": "foo"
    }
@@ -870,11 +865,9 @@ Test empty annotate output
   $ hg annotate -Tjson binary empty
   [
    {
-    "abspath": "binary",
     "path": "binary"
    },
    {
-    "abspath": "empty",
     "lines": [],
     "path": "empty"
    }
@@ -65,7 +65,7 @@ Test fileset
 
 Test template output
 
-  $ hg --cwd tmp cat ../b ../c -T '== {path} ({abspath}) r{rev} ==\n{data}'
+  $ hg --cwd tmp cat ../b ../c -T '== {path|relpath} ({path}) r{rev} ==\n{data}'
   == ../b (b) r2 ==
   1
   == ../c (c) r2 ==
@@ -74,12 +74,10 @@ Test template output
   $ hg cat b c -Tjson --output -
   [
    {
-    "abspath": "b",
     "data": "1\n",
     "path": "b"
    },
    {
-    "abspath": "c",
     "data": "3\n",
     "path": "c"
    }
@@ -89,7 +87,6 @@ Test template output
   $ cat tmp/b.json
   [
    {
-    "abspath": "b",
     "data": "1\n",
     "path": "b"
    }
@@ -97,7 +94,6 @@ Test template output
   $ cat tmp/c.json
   [
    {
-    "abspath": "c",
     "data": "3\n",
     "path": "c"
    }
@@ -69,7 +69,6 @@ annotate (JSON)
   $ hg annotate -Tjson a
   [
    {
-    "abspath": "a",
     "lines": [{"line": "a\n", "rev": 0}],
     "path": "a"
    }
@@ -78,8 +77,7 @@ annotate (JSON)
   $ hg annotate -Tjson -cdfnul a
   [
    {
-    "abspath": "a",
-    "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
+    "lines": [{"date": [1.0, 0], "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
     "path": "a"
    }
   ]
@@ -571,7 +569,6 @@ annotate modified file
   $ hg annotate -ncr "wdir()" -Tjson foo
   [
    {
-    "abspath": "foo",
     "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
     "path": "foo"
    }
@@ -43,17 +43,17 @@ simple with color
 simple templated
 
   $ hg grep port -r tip:0 \
-  > -T '{file}:{rev}:{node|short}:{texts % "{if(matched, text|upper, text)}"}\n'
+  > -T '{path}:{rev}:{node|short}:{texts % "{if(matched, text|upper, text)}"}\n'
   port:4:914fa752cdea:exPORT
   port:4:914fa752cdea:vaPORTight
   port:4:914fa752cdea:imPORT/exPORT
 
-  $ hg grep port -r tip:0 -T '{file}:{rev}:{texts}\n'
+  $ hg grep port -r tip:0 -T '{path}:{rev}:{texts}\n'
   port:4:export
   port:4:vaportight
   port:4:import/export
 
-  $ hg grep port -r tip:0 -T '{file}:{tags}:{texts}\n'
+  $ hg grep port -r tip:0 -T '{path}:{tags}:{texts}\n'
   port:tip:export
   port:tip:vaportight
   port:tip:import/export
@@ -64,27 +64,27 @@ simple JSON (no "change" field)
   [
    {
     "date": [4, 0],
-    "file": "port",
     "line_number": 1,
     "node": "914fa752cdea87777ac1a8d5c858b0c736218f6c",
+    "path": "port",
     "rev": 4,
     "texts": [{"matched": false, "text": "ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
    },
    {
     "date": [4, 0],
-    "file": "port",
     "line_number": 2,
     "node": "914fa752cdea87777ac1a8d5c858b0c736218f6c",
+    "path": "port",
     "rev": 4,
     "texts": [{"matched": false, "text": "va"}, {"matched": true, "text": "port"}, {"matched": false, "text": "ight"}],
     "user": "spam"
    },
    {
     "date": [4, 0],
-    "file": "port",
     "line_number": 3,
     "node": "914fa752cdea87777ac1a8d5c858b0c736218f6c",
+    "path": "port",
     "rev": 4,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}, {"matched": false, "text": "/ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -97,9 +97,9 @@ simple JSON without matching lines
   [
    {
     "date": [4, 0],
-    "file": "port",
     "line_number": 1,
     "node": "914fa752cdea87777ac1a8d5c858b0c736218f6c",
+    "path": "port",
     "rev": 4,
     "user": "spam"
    }
@@ -125,9 +125,9 @@ all JSON
    {
     "change": "-",
     "date": [4, 0],
-    "file": "port",
     "line_number": 4,
     "node": "914fa752cdea87777ac1a8d5c858b0c736218f6c",
+    "path": "port",
     "rev": 4,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}, {"matched": false, "text": "/ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -135,9 +135,9 @@ all JSON
    {
     "change": "+",
     "date": [3, 0],
-    "file": "port",
     "line_number": 4,
     "node": "95040cfd017d658c536071c6290230a613c4c2a6",
+    "path": "port",
     "rev": 3,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}, {"matched": false, "text": "/ex"}, {"matched": true, "text": "port"}],
     "user": "eggs"
@@ -145,9 +145,9 @@ all JSON
    {
     "change": "-",
     "date": [2, 0],
-    "file": "port",
     "line_number": 1,
     "node": "3b325e3481a1f07435d81dfdbfa434d9a0245b47",
+    "path": "port",
     "rev": 2,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -155,9 +155,9 @@ all JSON
    {
     "change": "-",
     "date": [2, 0],
-    "file": "port",
     "line_number": 2,
     "node": "3b325e3481a1f07435d81dfdbfa434d9a0245b47",
+    "path": "port",
     "rev": 2,
     "texts": [{"matched": false, "text": "ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -165,9 +165,9 @@ all JSON
    {
     "change": "+",
     "date": [2, 0],
-    "file": "port",
     "line_number": 1,
     "node": "3b325e3481a1f07435d81dfdbfa434d9a0245b47",
+    "path": "port",
     "rev": 2,
     "texts": [{"matched": false, "text": "ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -175,9 +175,9 @@ all JSON
    {
     "change": "+",
     "date": [2, 0],
-    "file": "port",
     "line_number": 2,
     "node": "3b325e3481a1f07435d81dfdbfa434d9a0245b47",
+    "path": "port",
     "rev": 2,
     "texts": [{"matched": false, "text": "va"}, {"matched": true, "text": "port"}, {"matched": false, "text": "ight"}],
     "user": "spam"
@@ -185,9 +185,9 @@ all JSON
    {
     "change": "+",
     "date": [2, 0],
-    "file": "port",
     "line_number": 3,
     "node": "3b325e3481a1f07435d81dfdbfa434d9a0245b47",
+    "path": "port",
     "rev": 2,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}, {"matched": false, "text": "/ex"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -195,9 +195,9 @@ all JSON
    {
     "change": "+",
     "date": [1, 0],
-    "file": "port",
     "line_number": 2,
     "node": "8b20f75c158513ff5ac80bd0e5219bfb6f0eb587",
+    "path": "port",
     "rev": 1,
     "texts": [{"matched": false, "text": "ex"}, {"matched": true, "text": "port"}],
     "user": "eggs"
@@ -205,9 +205,9 @@ all JSON
    {
     "change": "+",
     "date": [0, 0],
-    "file": "port",
     "line_number": 1,
     "node": "f31323c9217050ba245ee8b537c713ec2e8ab226",
+    "path": "port",
     "rev": 0,
     "texts": [{"matched": false, "text": "im"}, {"matched": true, "text": "port"}],
     "user": "spam"
@@ -481,9 +481,9 @@ but it should be available in template d
   [
    {
     "date": [0, 0],
-    "file": "file2",
     "line_number": 1,
     "node": "ffffffffffffffffffffffffffffffffffffffff",
+    "path": "file2",
     "rev": 2147483647,
     "texts": [{"matched": true, "text": "some"}, {"matched": false, "text": " text"}],
     "user": "test"
@@ -158,7 +158,7 @@ Issue294: hg remove --after dir fails wh
 
 Convert native path separator to slash (issue5572)
 
-  $ hg files -T '{path|slashpath}\n'
+  $ hg files -T '{path|relpath|slashpath}\n'
   ../b
   ../dir.h/foo
   ../t.h
@@ -1066,19 +1066,18 @@ Prepare a repo with subrepo
   $ hg cat sub/repo/foo -Tjson | sed 's|\\|/|g'
   [
    {
-    "abspath": "foo",
     "data": "test\ntest\n",
-    "path": "sub/repo/foo"
+    "path": "foo"
    }
   ]
 
 non-exact match:
 
-  $ hg cat -T '{path}\n' 'glob:**'
+  $ hg cat -T '{path|relpath}\n' 'glob:**'
   .hgsub
   .hgsubstate
   sub/repo/foo
-  $ hg cat -T '{path}\n' 're:^sub'
+  $ hg cat -T '{path|relpath}\n' 're:^sub'
   sub/repo/foo
 
 missing subrepos in working directory: