Show More
@@ -1,449 +1,451 b'' | |||||
1 | # store.py - repository store handling for Mercurial |
|
1 | # store.py - repository store handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2008 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from i18n import _ |
|
8 | from i18n import _ | |
9 | import osutil, scmutil, util |
|
9 | import osutil, scmutil, util | |
10 | import os, stat, errno |
|
10 | import os, stat, errno | |
11 |
|
11 | |||
12 | _sha = util.sha1 |
|
12 | _sha = util.sha1 | |
13 |
|
13 | |||
14 | # This avoids a collision between a file named foo and a dir named |
|
14 | # This avoids a collision between a file named foo and a dir named | |
15 | # foo.i or foo.d |
|
15 | # foo.i or foo.d | |
def encodedir(path):
    '''Append '.hg' to any directory name ending in '.hg', '.i' or '.d'.

    This avoids a collision between a file named foo and a directory
    named foo.i or foo.d.

    >>> encodedir('data/foo.i')
    'data/foo.i'
    >>> encodedir('data/foo.i/bla.i')
    'data/foo.i.hg/bla.i'
    >>> encodedir('data/foo.i.hg/bla.i')
    'data/foo.i.hg.hg/bla.i'
    '''
    # order matters: '.hg/' first, so already-suffixed names stay stable
    result = path.replace(".hg/", ".hg.hg/")
    result = result.replace(".i/", ".i.hg/")
    return result.replace(".d/", ".d.hg/")
29 |
|
29 | |||
def decodedir(path):
    '''Reverse encodedir(): strip the '.hg' suffixes from directory names.

    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    if ".hg/" not in path:
        # fast path: nothing was encoded in this name
        return path
    # undo the replacements in the reverse nesting order of encodedir
    result = path.replace(".d.hg/", ".d/")
    result = result.replace(".i.hg/", ".i/")
    return result.replace(".hg.hg/", ".hg/")
45 |
|
45 | |||
def _buildencodefun():
    '''Build the (encode, decode) pair for reversible filename encoding.

    Uppercase ASCII letters and '_' are encoded as '_' plus the lowercase
    letter; control bytes, bytes >= 126 and characters reserved on Windows
    become two-digit '~xx' hex escapes. Everything else passes through.

    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    winreserved = set(ord(c) for c in '\\:*?"<>|')
    cmap = {}
    for x in range(256):
        c = chr(x)
        if x < 32 or x > 125 or x in winreserved:
            # hex-escape anything unsafe in a filesystem name
            cmap[c] = "~%02x" % x
        else:
            cmap[c] = c
    for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + e:
        # case-fold reversibly: 'X' -> '_x', and '_' itself -> '__'
        cmap[c] = e + c.lower()
    # invert the encoding table for decoding
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k

    def decode(s):
        # greedily consume 1- to 3-character tokens from the front
        pos, end = 0, len(s)
        while pos < end:
            for width in (1, 2, 3):
                token = s[pos:pos + width]
                if token in dmap:
                    yield dmap[token]
                    pos += width
                    break
            else:
                # no token matched: the input is not a valid encoding
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
            lambda s: decodedir("".join(list(decode(s)))))
94 |
|
94 | |||
# module-level reversible codec pair, built once at import time
encodefilename, decodefilename = _buildencodefun()
96 |
|
96 | |||
97 | def _buildlowerencodefun(): |
|
97 | def _buildlowerencodefun(): | |
98 | ''' |
|
98 | ''' | |
99 | >>> f = _buildlowerencodefun() |
|
99 | >>> f = _buildlowerencodefun() | |
100 | >>> f('nothing/special.txt') |
|
100 | >>> f('nothing/special.txt') | |
101 | 'nothing/special.txt' |
|
101 | 'nothing/special.txt' | |
102 | >>> f('HELLO') |
|
102 | >>> f('HELLO') | |
103 | 'hello' |
|
103 | 'hello' | |
104 | >>> f('hello:world?') |
|
104 | >>> f('hello:world?') | |
105 | 'hello~3aworld~3f' |
|
105 | 'hello~3aworld~3f' | |
106 | >>> f('the\x07quick\xADshot') |
|
106 | >>> f('the\x07quick\xADshot') | |
107 | 'the~07quick~adshot' |
|
107 | 'the~07quick~adshot' | |
108 | ''' |
|
108 | ''' | |
109 | winreserved = [ord(x) for x in '\\:*?"<>|'] |
|
109 | winreserved = [ord(x) for x in '\\:*?"<>|'] | |
110 | cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) |
|
110 | cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) | |
111 | for x in (range(32) + range(126, 256) + winreserved): |
|
111 | for x in (range(32) + range(126, 256) + winreserved): | |
112 | cmap[chr(x)] = "~%02x" % x |
|
112 | cmap[chr(x)] = "~%02x" % x | |
113 | for x in range(ord("A"), ord("Z")+1): |
|
113 | for x in range(ord("A"), ord("Z")+1): | |
114 | cmap[chr(x)] = chr(x).lower() |
|
114 | cmap[chr(x)] = chr(x).lower() | |
115 | return lambda s: "".join([cmap[c] for c in s]) |
|
115 | return lambda s: "".join([cmap[c] for c in s]) | |
116 |
|
116 | |||
117 | lowerencode = _buildlowerencodefun() |
|
117 | lowerencode = _buildlowerencodefun() | |
118 |
|
118 | |||
119 | # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9 |
|
119 | # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9 | |
120 | _winres3 = ('aux', 'con', 'prn', 'nul') # length 3 |
|
120 | _winres3 = ('aux', 'con', 'prn', 'nul') # length 3 | |
121 | _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9) |
|
121 | _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9) | |
122 | def _auxencode(path, dotencode): |
|
122 | def _auxencode(path, dotencode): | |
123 | ''' |
|
123 | ''' | |
124 | Encodes filenames containing names reserved by Windows or which end in |
|
124 | Encodes filenames containing names reserved by Windows or which end in | |
125 | period or space. Does not touch other single reserved characters c. |
|
125 | period or space. Does not touch other single reserved characters c. | |
126 | Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here. |
|
126 | Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here. | |
127 | Additionally encodes space or period at the beginning, if dotencode is |
|
127 | Additionally encodes space or period at the beginning, if dotencode is | |
128 | True. Parameter path is assumed to be all lowercase. |
|
128 | True. Parameter path is assumed to be all lowercase. | |
129 | A segment only needs encoding if a reserved name appears as a |
|
129 | A segment only needs encoding if a reserved name appears as a | |
130 | basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux" |
|
130 | basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux" | |
131 | doesn't need encoding. |
|
131 | doesn't need encoding. | |
132 |
|
132 | |||
133 | >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.' |
|
133 | >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.' | |
134 | >>> _auxencode(s.split('/'), True) |
|
134 | >>> _auxencode(s.split('/'), True) | |
135 | ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e'] |
|
135 | ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e'] | |
136 | >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.' |
|
136 | >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.' | |
137 | >>> _auxencode(s.split('/'), False) |
|
137 | >>> _auxencode(s.split('/'), False) | |
138 | ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e'] |
|
138 | ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e'] | |
139 | >>> _auxencode(['foo. '], True) |
|
139 | >>> _auxencode(['foo. '], True) | |
140 | ['foo.~20'] |
|
140 | ['foo.~20'] | |
141 | >>> _auxencode([' .foo'], True) |
|
141 | >>> _auxencode([' .foo'], True) | |
142 | ['~20.foo'] |
|
142 | ['~20.foo'] | |
143 | ''' |
|
143 | ''' | |
144 | for i, n in enumerate(path): |
|
144 | for i, n in enumerate(path): | |
145 | if not n: |
|
145 | if not n: | |
146 | continue |
|
146 | continue | |
147 | if dotencode and n[0] in '. ': |
|
147 | if dotencode and n[0] in '. ': | |
148 | n = "~%02x" % ord(n[0]) + n[1:] |
|
148 | n = "~%02x" % ord(n[0]) + n[1:] | |
149 | path[i] = n |
|
149 | path[i] = n | |
150 | else: |
|
150 | else: | |
151 | l = n.find('.') |
|
151 | l = n.find('.') | |
152 | if l == -1: |
|
152 | if l == -1: | |
153 | l = len(n) |
|
153 | l = len(n) | |
154 | if ((l == 3 and n[:3] in _winres3) or |
|
154 | if ((l == 3 and n[:3] in _winres3) or | |
155 | (l == 4 and n[3] <= '9' and n[3] >= '1' |
|
155 | (l == 4 and n[3] <= '9' and n[3] >= '1' | |
156 | and n[:3] in _winres4)): |
|
156 | and n[:3] in _winres4)): | |
157 | # encode third letter ('aux' -> 'au~78') |
|
157 | # encode third letter ('aux' -> 'au~78') | |
158 | ec = "~%02x" % ord(n[2]) |
|
158 | ec = "~%02x" % ord(n[2]) | |
159 | n = n[0:2] + ec + n[3:] |
|
159 | n = n[0:2] + ec + n[3:] | |
160 | path[i] = n |
|
160 | path[i] = n | |
161 | if n[-1] in '. ': |
|
161 | if n[-1] in '. ': | |
162 | # encode last period or space ('foo...' -> 'foo..~2e') |
|
162 | # encode last period or space ('foo...' -> 'foo..~2e') | |
163 | path[i] = n[:-1] + "~%02x" % ord(n[-1]) |
|
163 | path[i] = n[:-1] + "~%02x" % ord(n[-1]) | |
164 | return path |
|
164 | return path | |
165 |
|
165 | |||
166 | _maxstorepathlen = 120 |
|
166 | _maxstorepathlen = 120 | |
167 | _dirprefixlen = 8 |
|
167 | _dirprefixlen = 8 | |
168 | _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4 |
|
168 | _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4 | |
169 |
def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    segments = encodefilename(path).split('/')
    res = '/'.join(_auxencode(segments, dotencode))
    if len(res) > _maxstorepathlen:
        # too long: fall back to the non-reversible hashed encoding
        path = encodedir(path)
        digest = _sha(path).hexdigest()
        lowered = lowerencode(path).split('/')[1:]  # drop the 'data' prefix
        parts = _auxencode(lowered, dotencode)
        basename = parts[-1]
        _root, ext = os.path.splitext(basename)
        # keep a short prefix of each directory level, as many levels as
        # fit into _maxshortdirslen
        shortdirs = []
        shortlen = 0
        for part in parts[:-1]:
            prefix = part[:_dirprefixlen]
            if prefix[-1] in '. ':
                # Windows can't access dirs ending in period or space
                prefix = prefix[:-1] + '_'
            if shortlen:
                newlen = shortlen + 1 + len(prefix)
            else:
                newlen = len(prefix)
            if newlen > _maxshortdirslen:
                break
            shortdirs.append(prefix)
            shortlen = newlen
        dirs = '/'.join(shortdirs)
        if dirs:
            dirs += '/'
        res = 'dh/' + dirs + digest + ext
        # pad with as much of the basename as still fits in the limit
        spaceleft = _maxstorepathlen - len(res)
        if spaceleft > 0:
            filler = basename[:spaceleft]
            res = 'dh/' + dirs + filler + digest + ext
    return res
231 |
|
233 | |||
232 | def _calcmode(path): |
|
234 | def _calcmode(path): | |
233 | try: |
|
235 | try: | |
234 | # files in .hg/ will be created using this mode |
|
236 | # files in .hg/ will be created using this mode | |
235 | mode = os.stat(path).st_mode |
|
237 | mode = os.stat(path).st_mode | |
236 | # avoid some useless chmods |
|
238 | # avoid some useless chmods | |
237 | if (0777 & ~util.umask) == (0777 & mode): |
|
239 | if (0777 & ~util.umask) == (0777 & mode): | |
238 | mode = None |
|
240 | mode = None | |
239 | except OSError: |
|
241 | except OSError: | |
240 | mode = None |
|
242 | mode = None | |
241 | return mode |
|
243 | return mode | |
242 |
|
244 | |||
243 | _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i' |
|
245 | _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i' | |
244 | ' phaseroots obsstore') |
|
246 | ' phaseroots obsstore') | |
245 |
|
247 | |||
class basicstore(object):
    '''base class for local repository stores'''

    def __init__(self, path, openertype):
        self.path = path
        self.createmode = _calcmode(path)
        baseop = openertype(self.path)
        baseop.createmode = self.createmode
        # transparently protect dir names against foo.i/foo.d clashes
        self.opener = scmutil.filteropener(baseop, encodedir)

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        root = self.path
        if relpath:
            root += '/' + relpath
        striplen = len(self.path) + 1
        found = []
        if os.path.isdir(root):
            # iterative depth-first walk over revlog files (.i/.d)
            pending = [root]
            while pending:
                current = pending.pop()
                for name, kind, st in osutil.listdir(current, stat=True):
                    full = current + '/' + name
                    if kind == stat.S_IFREG and name[-2:] in ('.d', '.i'):
                        rel = util.pconvert(full[striplen:])
                        found.append((decodedir(rel), rel, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        pending.append(full)
        found.sort()
        return found

    def datafiles(self):
        return self._walk('data', True)

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for entry in self.datafiles():
            yield entry
        # yield manifest before changelog
        for entry in reversed(self._walk('', False)):
            yield entry

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self):
        # nothing cached, nothing to flush
        pass
296 |
|
298 | |||
class encodedstore(basicstore):
    '''store variant that fully encodes filenames on disk'''

    def __init__(self, path, openertype):
        self.path = path + '/store'
        self.createmode = _calcmode(self.path)
        baseop = openertype(self.path)
        baseop.createmode = self.createmode
        # encode/decode names transparently on every open
        self.opener = scmutil.filteropener(baseop, encodefilename)

    def datafiles(self):
        for unenc, enc, size in self._walk('data', True):
            try:
                unenc = decodefilename(unenc)
            except KeyError:
                # name on disk is not a valid encoding: expose only
                # the encoded form
                unenc = None
            yield unenc, enc, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])
319 |
|
321 | |||
class fncache(object):
    '''lazily-loaded cache of data filenames, backed by the fncache file'''
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, opener):
        self.opener = opener
        # None until _load(); then a set of unencoded paths
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.opener('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(map(decodedir, fp.read().splitlines()))
        if '' in self.entries:
            # an empty entry signals corruption; rescan to report the line
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %s') % (n + 1)
                    raise util.Abort(t)
        fp.close()

    def _write(self, files, atomictemp):
        fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
        if files:
            fp.write('\n'.join(map(encodedir, files)) + '\n')
        fp.close()
        self._dirty = False

    def rewrite(self, files):
        # non-atomic full rewrite, used to drop stale entries
        self._write(files, False)
        self.entries = set(files)

    def write(self):
        if self._dirty:
            self._write(self.entries, True)

    def _ensureloaded(self):
        # load from disk on first use
        if self.entries is None:
            self._load()

    def add(self, fn):
        self._ensureloaded()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def __contains__(self, fn):
        self._ensureloaded()
        return fn in self.entries

    def __iter__(self):
        self._ensureloaded()
        return iter(self.entries)
377 |
|
379 | |||
class _fncacheopener(scmutil.abstractopener):
    '''opener wrapper that records written data files in the fncache'''

    def __init__(self, op, fnc, encode):
        self.opener = op
        self.fncache = fnc
        self.encode = encode

    def _getmustaudit(self):
        return self.opener.mustaudit

    def _setmustaudit(self, enabled):
        self.opener.mustaudit = enabled

    # delegate auditing control to the wrapped opener
    mustaudit = property(_getmustaudit, _setmustaudit)

    def __call__(self, path, mode='r', *args, **kw):
        # opening a data file for writing registers it in the fncache
        if mode not in ('r', 'rb') and path.startswith('data/'):
            self.fncache.add(path)
        return self.opener(self.encode(path), mode, *args, **kw)
396 |
|
398 | |||
397 | class fncachestore(basicstore): |
|
399 | class fncachestore(basicstore): | |
398 | def __init__(self, path, openertype, encode): |
|
400 | def __init__(self, path, openertype, encode): | |
399 | self.encode = encode |
|
401 | self.encode = encode | |
400 | self.path = path + '/store' |
|
402 | self.path = path + '/store' | |
401 | self.pathsep = self.path + '/' |
|
403 | self.pathsep = self.path + '/' | |
402 | self.createmode = _calcmode(self.path) |
|
404 | self.createmode = _calcmode(self.path) | |
403 | op = openertype(self.path) |
|
405 | op = openertype(self.path) | |
404 | op.createmode = self.createmode |
|
406 | op.createmode = self.createmode | |
405 | fnc = fncache(op) |
|
407 | fnc = fncache(op) | |
406 | self.fncache = fnc |
|
408 | self.fncache = fnc | |
407 | self.opener = _fncacheopener(op, fnc, encode) |
|
409 | self.opener = _fncacheopener(op, fnc, encode) | |
408 |
|
410 | |||
409 | def join(self, f): |
|
411 | def join(self, f): | |
410 | return self.pathsep + self.encode(f) |
|
412 | return self.pathsep + self.encode(f) | |
411 |
|
413 | |||
412 | def getsize(self, path): |
|
414 | def getsize(self, path): | |
413 | return os.stat(self.pathsep + path).st_size |
|
415 | return os.stat(self.pathsep + path).st_size | |
414 |
|
416 | |||
415 | def datafiles(self): |
|
417 | def datafiles(self): | |
416 | rewrite = False |
|
418 | rewrite = False | |
417 | existing = [] |
|
419 | existing = [] | |
418 | for f in sorted(self.fncache): |
|
420 | for f in sorted(self.fncache): | |
419 | ef = self.encode(f) |
|
421 | ef = self.encode(f) | |
420 | try: |
|
422 | try: | |
421 | yield f, ef, self.getsize(ef) |
|
423 | yield f, ef, self.getsize(ef) | |
422 | existing.append(f) |
|
424 | existing.append(f) | |
423 | except OSError, err: |
|
425 | except OSError, err: | |
424 | if err.errno != errno.ENOENT: |
|
426 | if err.errno != errno.ENOENT: | |
425 | raise |
|
427 | raise | |
426 | # nonexistent entry |
|
428 | # nonexistent entry | |
427 | rewrite = True |
|
429 | rewrite = True | |
428 | if rewrite: |
|
430 | if rewrite: | |
429 | # rewrite fncache to remove nonexistent entries |
|
431 | # rewrite fncache to remove nonexistent entries | |
430 | # (may be caused by rollback / strip) |
|
432 | # (may be caused by rollback / strip) | |
431 | self.fncache.rewrite(existing) |
|
433 | self.fncache.rewrite(existing) | |
432 |
|
434 | |||
433 | def copylist(self): |
|
435 | def copylist(self): | |
434 | d = ('data dh fncache phaseroots obsstore' |
|
436 | d = ('data dh fncache phaseroots obsstore' | |
435 | ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') |
|
437 | ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') | |
436 | return (['requires', '00changelog.i'] + |
|
438 | return (['requires', '00changelog.i'] + | |
437 | ['store/' + f for f in d.split()]) |
|
439 | ['store/' + f for f in d.split()]) | |
438 |
|
440 | |||
439 | def write(self): |
|
441 | def write(self): | |
440 | self.fncache.write() |
|
442 | self.fncache.write() | |
441 |
|
443 | |||
def store(requirements, path, openertype):
    '''Instantiate the store class matching the repository requirements.'''
    if 'store' not in requirements:
        return basicstore(path, openertype)
    if 'fncache' in requirements:
        dotencode = 'dotencode' in requirements
        encode = lambda f: _hybridencode(f, dotencode)
        return fncachestore(path, openertype, encode)
    return encodedstore(path, openertype)
@@ -1,454 +1,453 b'' | |||||
1 | from mercurial import store |
|
1 | from mercurial import store | |
2 |
|
2 | |||
3 |
|
|
3 | hybridencode = lambda f: store._hybridencode(f, True) | |
4 | hybridencode = lambda f: store._hybridencode(f, auxencode) |
|
|||
5 |
|
4 | |||
6 | enc = hybridencode # used for 'dotencode' repo format |
|
5 | enc = hybridencode # used for 'dotencode' repo format | |
7 |
|
6 | |||
8 | def show(s): |
|
7 | def show(s): | |
9 | print "A = '%s'" % s.encode("string_escape") |
|
8 | print "A = '%s'" % s.encode("string_escape") | |
10 | print "B = '%s'" % enc(s).encode("string_escape") |
|
9 | print "B = '%s'" % enc(s).encode("string_escape") | |
11 |
|
10 | |||
12 |
|
11 | |||
13 | show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}") |
|
12 | show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}") | |
14 |
|
13 | |||
15 | print "uppercase char X is encoded as _x" |
|
14 | print "uppercase char X is encoded as _x" | |
16 | show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ") |
|
15 | show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ") | |
17 |
|
16 | |||
18 | print "underbar is doubled" |
|
17 | print "underbar is doubled" | |
19 | show("data/_") |
|
18 | show("data/_") | |
20 |
|
19 | |||
21 | print "tilde is character-encoded" |
|
20 | print "tilde is character-encoded" | |
22 | show("data/~") |
|
21 | show("data/~") | |
23 |
|
22 | |||
24 | print "characters in ASCII code range 1..31" |
|
23 | print "characters in ASCII code range 1..31" | |
25 | show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' |
|
24 | show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' | |
26 | '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f') |
|
25 | '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f') | |
27 |
|
26 | |||
28 | print "characters in ASCII code range 126..255" |
|
27 | print "characters in ASCII code range 126..255" | |
29 | show('data/\x7e\x7f' |
|
28 | show('data/\x7e\x7f' | |
30 | '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' |
|
29 | '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' | |
31 | '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f') |
|
30 | '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f') | |
32 | show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf' |
|
31 | show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf' | |
33 | '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf') |
|
32 | '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf') | |
34 | show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf' |
|
33 | show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf' | |
35 | '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf') |
|
34 | '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf') | |
36 | show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef' |
|
35 | show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef' | |
37 | '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') |
|
36 | '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') | |
38 |
|
37 | |||
39 | print "Windows reserved characters" |
|
38 | print "Windows reserved characters" | |
40 | show('data/less <, greater >, colon :, double-quote ", backslash \\' |
|
39 | show('data/less <, greater >, colon :, double-quote ", backslash \\' | |
41 | ', pipe |, question-mark ?, asterisk *') |
|
40 | ', pipe |, question-mark ?, asterisk *') | |
42 |
|
41 | |||
43 | print "encoding directories ending in .hg, .i or .d with '.hg' suffix" |
|
42 | print "encoding directories ending in .hg, .i or .d with '.hg' suffix" | |
44 | show('data/x.hg/x.i/x.d/foo') |
|
43 | show('data/x.hg/x.i/x.d/foo') | |
45 | show('data/a.hg/a.i/a.d/foo') |
|
44 | show('data/a.hg/a.i/a.d/foo') | |
46 | show('data/au.hg/au.i/au.d/foo') |
|
45 | show('data/au.hg/au.i/au.d/foo') | |
47 | show('data/aux.hg/aux.i/aux.d/foo') |
|
46 | show('data/aux.hg/aux.i/aux.d/foo') | |
48 | show('data/auxy.hg/auxy.i/auxy.d/foo') |
|
47 | show('data/auxy.hg/auxy.i/auxy.d/foo') | |
49 |
|
48 | |||
50 | print "but these are not encoded on *filenames*" |
|
49 | print "but these are not encoded on *filenames*" | |
51 | show('data/foo/x.hg') |
|
50 | show('data/foo/x.hg') | |
52 | show('data/foo/x.i') |
|
51 | show('data/foo/x.i') | |
53 | show('data/foo/x.d') |
|
52 | show('data/foo/x.d') | |
54 | show('data/foo/a.hg') |
|
53 | show('data/foo/a.hg') | |
55 | show('data/foo/a.i') |
|
54 | show('data/foo/a.i') | |
56 | show('data/foo/a.d') |
|
55 | show('data/foo/a.d') | |
57 | show('data/foo/au.hg') |
|
56 | show('data/foo/au.hg') | |
58 | show('data/foo/au.i') |
|
57 | show('data/foo/au.i') | |
59 | show('data/foo/au.d') |
|
58 | show('data/foo/au.d') | |
60 | show('data/foo/aux.hg') |
|
59 | show('data/foo/aux.hg') | |
61 | show('data/foo/aux.i') |
|
60 | show('data/foo/aux.i') | |
62 | show('data/foo/aux.d') |
|
61 | show('data/foo/aux.d') | |
63 | show('data/foo/auxy.hg') |
|
62 | show('data/foo/auxy.hg') | |
64 | show('data/foo/auxy.i') |
|
63 | show('data/foo/auxy.i') | |
65 | show('data/foo/auxy.d') |
|
64 | show('data/foo/auxy.d') | |
66 |
|
65 | |||
67 | print "plain .hg, .i and .d directories have the leading dot encoded" |
|
66 | print "plain .hg, .i and .d directories have the leading dot encoded" | |
68 | show('data/.hg/.i/.d/foo') |
|
67 | show('data/.hg/.i/.d/foo') | |
69 |
|
68 | |||
70 | show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i') |
|
69 | show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i') | |
71 |
|
70 | |||
72 | show('data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/' |
|
71 | show('data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/' | |
73 | 'TENTH/ELEVENTH/LOREMIPSUM.TXT.i') |
|
72 | 'TENTH/ELEVENTH/LOREMIPSUM.TXT.i') | |
74 | show('data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/' |
|
73 | show('data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/' | |
75 | 'wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules' |
|
74 | 'wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules' | |
76 | '.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i') |
|
75 | '.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i') | |
77 | show('data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-' |
|
76 | show('data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-' | |
78 | 'BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i') |
|
77 | 'BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i') | |
79 | show('data/Project Planning/Resources/AnotherLongDirectoryName/' |
|
78 | show('data/Project Planning/Resources/AnotherLongDirectoryName/' | |
80 | 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') |
|
79 | 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') | |
81 | show('data/Project.Planning/Resources/AnotherLongDirectoryName/' |
|
80 | show('data/Project.Planning/Resources/AnotherLongDirectoryName/' | |
82 | 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') |
|
81 | 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') | |
83 | show('data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i') |
|
82 | show('data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i') | |
84 |
|
83 | |||
85 | show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9') |
|
84 | show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9') | |
86 | show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9') |
|
85 | show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9') | |
87 | show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x' |
|
86 | show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x' | |
88 | '/com6.x/com7.x/com8.x/com9.x') |
|
87 | '/com6.x/com7.x/com8.x/com9.x') | |
89 | show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5' |
|
88 | show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5' | |
90 | '/x.com6/x.com7/x.com8/x.com9') |
|
89 | '/x.com6/x.com7/x.com8/x.com9') | |
91 | show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x' |
|
90 | show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x' | |
92 | '/com6x/com7x/com8x/com9x') |
|
91 | '/com6x/com7x/com8x/com9x') | |
93 | show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5' |
|
92 | show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5' | |
94 | '/xcom6/xcom7/xcom8/xcom9') |
|
93 | '/xcom6/xcom7/xcom8/xcom9') | |
95 |
|
94 | |||
96 | show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9') |
|
95 | show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9') | |
97 | show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9') |
|
96 | show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9') | |
98 | show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x' |
|
97 | show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x' | |
99 | '/lpt6.x/lpt7.x/lpt8.x/lpt9.x') |
|
98 | '/lpt6.x/lpt7.x/lpt8.x/lpt9.x') | |
100 | show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5' |
|
99 | show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5' | |
101 | '/x.lpt6/x.lpt7/x.lpt8/x.lpt9') |
|
100 | '/x.lpt6/x.lpt7/x.lpt8/x.lpt9') | |
102 | show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x' |
|
101 | show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x' | |
103 | '/lpt6x/lpt7x/lpt8x/lpt9x') |
|
102 | '/lpt6x/lpt7x/lpt8x/lpt9x') | |
104 | show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5' |
|
103 | show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5' | |
105 | '/xlpt6/xlpt7/xlpt8/xlpt9') |
|
104 | '/xlpt6/xlpt7/xlpt8/xlpt9') | |
106 |
|
105 | |||
107 | show('data/con/p/pr/prn/a/au/aux/n/nu/nul') |
|
106 | show('data/con/p/pr/prn/a/au/aux/n/nu/nul') | |
108 | show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL') |
|
107 | show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL') | |
109 | show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x') |
|
108 | show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x') | |
110 | show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul') |
|
109 | show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul') | |
111 | show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx') |
|
110 | show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx') | |
112 | show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul') |
|
111 | show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul') | |
113 |
|
112 | |||
114 | show('data/a./au./aux./auxy./aux.') |
|
113 | show('data/a./au./aux./auxy./aux.') | |
115 | show('data/c./co./con./cony./con.') |
|
114 | show('data/c./co./con./cony./con.') | |
116 | show('data/p./pr./prn./prny./prn.') |
|
115 | show('data/p./pr./prn./prny./prn.') | |
117 | show('data/n./nu./nul./nuly./nul.') |
|
116 | show('data/n./nu./nul./nuly./nul.') | |
118 | show('data/l./lp./lpt./lpt1./lpt1y./lpt1.') |
|
117 | show('data/l./lp./lpt./lpt1./lpt1y./lpt1.') | |
119 | show('data/lpt9./lpt9y./lpt9.') |
|
118 | show('data/lpt9./lpt9y./lpt9.') | |
120 | show('data/com./com1./com1y./com1.') |
|
119 | show('data/com./com1./com1y./com1.') | |
121 | show('data/com9./com9y./com9.') |
|
120 | show('data/com9./com9y./com9.') | |
122 |
|
121 | |||
123 | show('data/a /au /aux /auxy /aux ') |
|
122 | show('data/a /au /aux /auxy /aux ') | |
124 |
|
123 | |||
125 | print "largest unhashed path" |
|
124 | print "largest unhashed path" | |
126 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
125 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
127 | 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
126 | 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
128 | '123456789-12345') |
|
127 | '123456789-12345') | |
129 |
|
128 | |||
130 | print "shortest hashed path" |
|
129 | print "shortest hashed path" | |
131 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
130 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
132 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
131 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
133 | '123456789-123456') |
|
132 | '123456789-123456') | |
134 |
|
133 | |||
135 | print "changing one char in part that's hashed away produces a different hash" |
|
134 | print "changing one char in part that's hashed away produces a different hash" | |
136 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
135 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
137 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-' |
|
136 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-' | |
138 | '123456789-123456') |
|
137 | '123456789-123456') | |
139 |
|
138 | |||
140 | print "uppercase hitting length limit due to encoding" |
|
139 | print "uppercase hitting length limit due to encoding" | |
141 | show('data/A23456789-123456789-123456789-123456789-123456789-' |
|
140 | show('data/A23456789-123456789-123456789-123456789-123456789-' | |
142 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
141 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
143 | '123456789-12345') |
|
142 | '123456789-12345') | |
144 | show('data/Z23456789-123456789-123456789-123456789-123456789-' |
|
143 | show('data/Z23456789-123456789-123456789-123456789-123456789-' | |
145 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
144 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
146 | '123456789-12345') |
|
145 | '123456789-12345') | |
147 |
|
146 | |||
148 | print "compare with lowercase not hitting limit" |
|
147 | print "compare with lowercase not hitting limit" | |
149 | show('data/a23456789-123456789-123456789-123456789-123456789-' |
|
148 | show('data/a23456789-123456789-123456789-123456789-123456789-' | |
150 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
149 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
151 | '123456789-12345') |
|
150 | '123456789-12345') | |
152 | show('data/z23456789-123456789-123456789-123456789-123456789-' |
|
151 | show('data/z23456789-123456789-123456789-123456789-123456789-' | |
153 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
152 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
154 | '123456789-12345') |
|
153 | '123456789-12345') | |
155 |
|
154 | |||
156 | print "not hitting limit with any of these" |
|
155 | print "not hitting limit with any of these" | |
157 | show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=" |
|
156 | show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=" | |
158 | "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-" |
|
157 | "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-" | |
159 | "123456789-12345") |
|
158 | "123456789-12345") | |
160 |
|
159 | |||
161 | print "underbar hitting length limit due to encoding" |
|
160 | print "underbar hitting length limit due to encoding" | |
162 | show('data/_23456789-123456789-123456789-123456789-123456789-' |
|
161 | show('data/_23456789-123456789-123456789-123456789-123456789-' | |
163 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
162 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
164 | '123456789-12345') |
|
163 | '123456789-12345') | |
165 |
|
164 | |||
166 | print "tilde hitting length limit due to encoding" |
|
165 | print "tilde hitting length limit due to encoding" | |
167 | show('data/~23456789-123456789-123456789-123456789-123456789-' |
|
166 | show('data/~23456789-123456789-123456789-123456789-123456789-' | |
168 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
167 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
169 | '123456789-12345') |
|
168 | '123456789-12345') | |
170 |
|
169 | |||
171 | print "Windows reserved characters hitting length limit" |
|
170 | print "Windows reserved characters hitting length limit" | |
172 | show('data/<23456789-123456789-123456789-123456789-123456789-' |
|
171 | show('data/<23456789-123456789-123456789-123456789-123456789-' | |
173 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
172 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
174 | '123456789-12345') |
|
173 | '123456789-12345') | |
175 | show('data/>23456789-123456789-123456789-123456789-123456789-' |
|
174 | show('data/>23456789-123456789-123456789-123456789-123456789-' | |
176 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
175 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
177 | '123456789-12345') |
|
176 | '123456789-12345') | |
178 | show('data/:23456789-123456789-123456789-123456789-123456789-' |
|
177 | show('data/:23456789-123456789-123456789-123456789-123456789-' | |
179 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
178 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
180 | '123456789-12345') |
|
179 | '123456789-12345') | |
181 | show('data/"23456789-123456789-123456789-123456789-123456789-' |
|
180 | show('data/"23456789-123456789-123456789-123456789-123456789-' | |
182 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
181 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
183 | '123456789-12345') |
|
182 | '123456789-12345') | |
184 | show('data/\\23456789-123456789-123456789-123456789-123456789-' |
|
183 | show('data/\\23456789-123456789-123456789-123456789-123456789-' | |
185 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
184 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
186 | '123456789-12345') |
|
185 | '123456789-12345') | |
187 | show('data/|23456789-123456789-123456789-123456789-123456789-' |
|
186 | show('data/|23456789-123456789-123456789-123456789-123456789-' | |
188 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
187 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
189 | '123456789-12345') |
|
188 | '123456789-12345') | |
190 | show('data/?23456789-123456789-123456789-123456789-123456789-' |
|
189 | show('data/?23456789-123456789-123456789-123456789-123456789-' | |
191 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
190 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
192 | '123456789-12345') |
|
191 | '123456789-12345') | |
193 | show('data/*23456789-123456789-123456789-123456789-123456789-' |
|
192 | show('data/*23456789-123456789-123456789-123456789-123456789-' | |
194 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
193 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
195 | '123456789-12345') |
|
194 | '123456789-12345') | |
196 |
|
195 | |||
197 | print "initial space hitting length limit" |
|
196 | print "initial space hitting length limit" | |
198 | show('data/ 23456789-123456789-123456789-123456789-123456789-' |
|
197 | show('data/ 23456789-123456789-123456789-123456789-123456789-' | |
199 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
198 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
200 | '123456789-12345') |
|
199 | '123456789-12345') | |
201 |
|
200 | |||
202 | print "initial dot hitting length limit" |
|
201 | print "initial dot hitting length limit" | |
203 | show('data/.23456789-123456789-123456789-123456789-123456789-' |
|
202 | show('data/.23456789-123456789-123456789-123456789-123456789-' | |
204 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
203 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
205 | '123456789-12345') |
|
204 | '123456789-12345') | |
206 |
|
205 | |||
207 | print "trailing space in filename hitting length limit" |
|
206 | print "trailing space in filename hitting length limit" | |
208 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
207 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
209 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
208 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
210 | '123456789-1234 ') |
|
209 | '123456789-1234 ') | |
211 |
|
210 | |||
212 | print "trailing dot in filename hitting length limit" |
|
211 | print "trailing dot in filename hitting length limit" | |
213 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
212 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
214 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
213 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
215 | '123456789-1234.') |
|
214 | '123456789-1234.') | |
216 |
|
215 | |||
217 | print "initial space in directory hitting length limit" |
|
216 | print "initial space in directory hitting length limit" | |
218 | show('data/ x/456789-123456789-123456789-123456789-123456789-' |
|
217 | show('data/ x/456789-123456789-123456789-123456789-123456789-' | |
219 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
218 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
220 | '123456789-12345') |
|
219 | '123456789-12345') | |
221 |
|
220 | |||
222 | print "initial dot in directory hitting length limit" |
|
221 | print "initial dot in directory hitting length limit" | |
223 | show('data/.x/456789-123456789-123456789-123456789-123456789-' |
|
222 | show('data/.x/456789-123456789-123456789-123456789-123456789-' | |
224 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
223 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
225 | '123456789-12345') |
|
224 | '123456789-12345') | |
226 |
|
225 | |||
227 | print "trailing space in directory hitting length limit" |
|
226 | print "trailing space in directory hitting length limit" | |
228 | show('data/x /456789-123456789-123456789-123456789-123456789-' |
|
227 | show('data/x /456789-123456789-123456789-123456789-123456789-' | |
229 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
228 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
230 | '123456789-12345') |
|
229 | '123456789-12345') | |
231 |
|
230 | |||
232 | print "trailing dot in directory hitting length limit" |
|
231 | print "trailing dot in directory hitting length limit" | |
233 | show('data/x./456789-123456789-123456789-123456789-123456789-' |
|
232 | show('data/x./456789-123456789-123456789-123456789-123456789-' | |
234 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
233 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
235 | '123456789-12345') |
|
234 | '123456789-12345') | |
236 |
|
235 | |||
237 | print "with directories that need direncoding, hitting length limit" |
|
236 | print "with directories that need direncoding, hitting length limit" | |
238 | show('data/x.i/56789-123456789-123456789-123456789-123456789-' |
|
237 | show('data/x.i/56789-123456789-123456789-123456789-123456789-' | |
239 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
238 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
240 | '123456789-12345') |
|
239 | '123456789-12345') | |
241 | show('data/x.d/56789-123456789-123456789-123456789-123456789-' |
|
240 | show('data/x.d/56789-123456789-123456789-123456789-123456789-' | |
242 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
241 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
243 | '123456789-12345') |
|
242 | '123456789-12345') | |
244 | show('data/x.hg/5789-123456789-123456789-123456789-123456789-' |
|
243 | show('data/x.hg/5789-123456789-123456789-123456789-123456789-' | |
245 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
244 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
246 | '123456789-12345') |
|
245 | '123456789-12345') | |
247 |
|
246 | |||
248 | print "Windows reserved filenames, hitting length limit" |
|
247 | print "Windows reserved filenames, hitting length limit" | |
249 | show('data/con/56789-123456789-123456789-123456789-123456789-' |
|
248 | show('data/con/56789-123456789-123456789-123456789-123456789-' | |
250 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
249 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
251 | '123456789-12345') |
|
250 | '123456789-12345') | |
252 | show('data/prn/56789-123456789-123456789-123456789-123456789-' |
|
251 | show('data/prn/56789-123456789-123456789-123456789-123456789-' | |
253 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
252 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
254 | '123456789-12345') |
|
253 | '123456789-12345') | |
255 | show('data/aux/56789-123456789-123456789-123456789-123456789-' |
|
254 | show('data/aux/56789-123456789-123456789-123456789-123456789-' | |
256 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
255 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
257 | '123456789-12345') |
|
256 | '123456789-12345') | |
258 | show('data/nul/56789-123456789-123456789-123456789-123456789-' |
|
257 | show('data/nul/56789-123456789-123456789-123456789-123456789-' | |
259 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
258 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
260 | '123456789-12345') |
|
259 | '123456789-12345') | |
261 | show('data/com1/6789-123456789-123456789-123456789-123456789-' |
|
260 | show('data/com1/6789-123456789-123456789-123456789-123456789-' | |
262 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
261 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
263 | '123456789-12345') |
|
262 | '123456789-12345') | |
264 | show('data/com9/6789-123456789-123456789-123456789-123456789-' |
|
263 | show('data/com9/6789-123456789-123456789-123456789-123456789-' | |
265 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
264 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
266 | '123456789-12345') |
|
265 | '123456789-12345') | |
267 | show('data/lpt1/6789-123456789-123456789-123456789-123456789-' |
|
266 | show('data/lpt1/6789-123456789-123456789-123456789-123456789-' | |
268 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
267 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
269 | '123456789-12345') |
|
268 | '123456789-12345') | |
270 | show('data/lpt9/6789-123456789-123456789-123456789-123456789-' |
|
269 | show('data/lpt9/6789-123456789-123456789-123456789-123456789-' | |
271 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
270 | 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
272 | '123456789-12345') |
|
271 | '123456789-12345') | |
273 |
|
272 | |||
274 | print "non-reserved names, just not hitting limit" |
|
273 | print "non-reserved names, just not hitting limit" | |
275 | show('data/123456789-123456789-123456789-123456789-123456789-' |
|
274 | show('data/123456789-123456789-123456789-123456789-123456789-' | |
276 | '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
275 | '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
277 | '123456789-12345') |
|
276 | '123456789-12345') | |
278 |
|
277 | |||
279 | print "hashed path with largest untruncated 1st dir" |
|
278 | print "hashed path with largest untruncated 1st dir" | |
280 | show('data/12345678/-123456789-123456789-123456789-123456789-' |
|
279 | show('data/12345678/-123456789-123456789-123456789-123456789-' | |
281 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
280 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
282 | '123456789-123456') |
|
281 | '123456789-123456') | |
283 |
|
282 | |||
284 | print "hashed path with smallest truncated 1st dir" |
|
283 | print "hashed path with smallest truncated 1st dir" | |
285 | show('data/123456789/123456789-123456789-123456789-123456789-' |
|
284 | show('data/123456789/123456789-123456789-123456789-123456789-' | |
286 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
285 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
287 | '123456789-123456') |
|
286 | '123456789-123456') | |
288 |
|
287 | |||
289 | print "hashed path with largest untruncated two dirs" |
|
288 | print "hashed path with largest untruncated two dirs" | |
290 | show('data/12345678/12345678/9-123456789-123456789-123456789-' |
|
289 | show('data/12345678/12345678/9-123456789-123456789-123456789-' | |
291 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
290 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
292 | '123456789-123456') |
|
291 | '123456789-123456') | |
293 |
|
292 | |||
294 | print "hashed path with smallest truncated two dirs" |
|
293 | print "hashed path with smallest truncated two dirs" | |
295 | show('data/123456789/123456789/123456789-123456789-123456789-' |
|
294 | show('data/123456789/123456789/123456789-123456789-123456789-' | |
296 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
295 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
297 | '123456789-123456') |
|
296 | '123456789-123456') | |
298 |
|
297 | |||
299 | print "hashed path with largest untruncated three dirs" |
|
298 | print "hashed path with largest untruncated three dirs" | |
300 | show('data/12345678/12345678/12345678/89-123456789-123456789-' |
|
299 | show('data/12345678/12345678/12345678/89-123456789-123456789-' | |
301 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
300 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
302 | '123456789-123456') |
|
301 | '123456789-123456') | |
303 |
|
302 | |||
304 | print "hashed path with smallest truncated three dirs" |
|
303 | print "hashed path with smallest truncated three dirs" | |
305 | show('data/123456789/123456789/123456789/123456789-123456789-' |
|
304 | show('data/123456789/123456789/123456789/123456789-123456789-' | |
306 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
305 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
307 | '123456789-123456') |
|
306 | '123456789-123456') | |
308 |
|
307 | |||
309 | print "hashed path with largest untruncated four dirs" |
|
308 | print "hashed path with largest untruncated four dirs" | |
310 | show('data/12345678/12345678/12345678/12345678/789-123456789-' |
|
309 | show('data/12345678/12345678/12345678/12345678/789-123456789-' | |
311 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
310 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
312 | '123456789-123456') |
|
311 | '123456789-123456') | |
313 |
|
312 | |||
314 | print "hashed path with smallest truncated four dirs" |
|
313 | print "hashed path with smallest truncated four dirs" | |
315 | show('data/123456789/123456789/123456789/123456789/123456789-' |
|
314 | show('data/123456789/123456789/123456789/123456789/123456789-' | |
316 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
315 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
317 | '123456789-123456') |
|
316 | '123456789-123456') | |
318 |
|
317 | |||
319 | print "hashed path with largest untruncated five dirs" |
|
318 | print "hashed path with largest untruncated five dirs" | |
320 | show('data/12345678/12345678/12345678/12345678/12345678/6789-' |
|
319 | show('data/12345678/12345678/12345678/12345678/12345678/6789-' | |
321 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
320 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
322 | '123456789-123456') |
|
321 | '123456789-123456') | |
323 |
|
322 | |||
324 | print "hashed path with smallest truncated five dirs" |
|
323 | print "hashed path with smallest truncated five dirs" | |
325 | show('data/123456789/123456789/123456789/123456789/123456789/' |
|
324 | show('data/123456789/123456789/123456789/123456789/123456789/' | |
326 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
325 | 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
327 | '123456789-123456') |
|
326 | '123456789-123456') | |
328 |
|
327 | |||
329 | print "hashed path with largest untruncated six dirs" |
|
328 | print "hashed path with largest untruncated six dirs" | |
330 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
329 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
331 | '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
330 | '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
332 | '123456789-123456') |
|
331 | '123456789-123456') | |
333 |
|
332 | |||
334 | print "hashed path with smallest truncated six dirs" |
|
333 | print "hashed path with smallest truncated six dirs" | |
335 | show('data/123456789/123456789/123456789/123456789/123456789/' |
|
334 | show('data/123456789/123456789/123456789/123456789/123456789/' | |
336 | '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
335 | '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
337 | '123456789-123456') |
|
336 | '123456789-123456') | |
338 |
|
337 | |||
339 | print "hashed path with largest untruncated seven dirs" |
|
338 | print "hashed path with largest untruncated seven dirs" | |
340 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
339 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
341 | '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
340 | '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
342 | '123456789-123456') |
|
341 | '123456789-123456') | |
343 |
|
342 | |||
344 | print "hashed path with smallest truncated seven dirs" |
|
343 | print "hashed path with smallest truncated seven dirs" | |
345 | show('data/123456789/123456789/123456789/123456789/123456789/' |
|
344 | show('data/123456789/123456789/123456789/123456789/123456789/' | |
346 | '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
345 | '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
347 | '123456789-123456') |
|
346 | '123456789-123456') | |
348 |
|
347 | |||
349 | print "hashed path with largest untruncated eight dirs" |
|
348 | print "hashed path with largest untruncated eight dirs" | |
350 | print "(directory 8 is dropped because it hits _maxshortdirslen)" |
|
349 | print "(directory 8 is dropped because it hits _maxshortdirslen)" | |
351 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
350 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
352 | '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
351 | '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
353 | '123456789-123456') |
|
352 | '123456789-123456') | |
354 |
|
353 | |||
355 | print "hashed path with smallest truncated eight dirs" |
|
354 | print "hashed path with smallest truncated eight dirs" | |
356 | print "(directory 8 is dropped because it hits _maxshortdirslen)" |
|
355 | print "(directory 8 is dropped because it hits _maxshortdirslen)" | |
357 | show('data/123456789/123456789/123456789/123456789/123456789/' |
|
356 | show('data/123456789/123456789/123456789/123456789/123456789/' | |
358 | '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-' |
|
357 | '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-' | |
359 | '123456789-123456') |
|
358 | '123456789-123456') | |
360 |
|
359 | |||
361 | print "hashed path with largest non-dropped directory 8" |
|
360 | print "hashed path with largest non-dropped directory 8" | |
362 | print "(just not hitting the _maxshortdirslen boundary)" |
|
361 | print "(just not hitting the _maxshortdirslen boundary)" | |
363 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
362 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
364 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
363 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
365 | '123456789-123456') |
|
364 | '123456789-123456') | |
366 |
|
365 | |||
367 | print "...adding one truncated char to dir 1..7 won't drop dir 8" |
|
366 | print "...adding one truncated char to dir 1..7 won't drop dir 8" | |
368 | show('data/12345678x/12345678/12345678/12345678/12345678/12345' |
|
367 | show('data/12345678x/12345678/12345678/12345678/12345678/12345' | |
369 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
368 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
370 | '123456789-123456') |
|
369 | '123456789-123456') | |
371 | show('data/12345678/12345678x/12345678/12345678/12345678/12345' |
|
370 | show('data/12345678/12345678x/12345678/12345678/12345678/12345' | |
372 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
371 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
373 | '123456789-123456') |
|
372 | '123456789-123456') | |
374 | show('data/12345678/12345678/12345678x/12345678/12345678/12345' |
|
373 | show('data/12345678/12345678/12345678x/12345678/12345678/12345' | |
375 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
374 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
376 | '123456789-123456') |
|
375 | '123456789-123456') | |
377 | show('data/12345678/12345678/12345678/12345678x/12345678/12345' |
|
376 | show('data/12345678/12345678/12345678/12345678x/12345678/12345' | |
378 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
377 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
379 | '123456789-123456') |
|
378 | '123456789-123456') | |
380 | show('data/12345678/12345678/12345678/12345678/12345678x/12345' |
|
379 | show('data/12345678/12345678/12345678/12345678/12345678x/12345' | |
381 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
380 | '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
382 | '123456789-123456') |
|
381 | '123456789-123456') | |
383 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
382 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
384 | '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
383 | '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
385 | '123456789-123456') |
|
384 | '123456789-123456') | |
386 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
385 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
387 | '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
386 | '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
388 | '123456789-123456') |
|
387 | '123456789-123456') | |
389 |
|
388 | |||
390 | print "hashed path with shortest dropped directory 8" |
|
389 | print "hashed path with shortest dropped directory 8" | |
391 | print "(just hitting the _maxshortdirslen boundary)" |
|
390 | print "(just hitting the _maxshortdirslen boundary)" | |
392 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
391 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
393 | '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
392 | '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
394 | '123456789-123456') |
|
393 | '123456789-123456') | |
395 |
|
394 | |||
396 | print "hashed path that drops dir 8 due to dot or space at end is" |
|
395 | print "hashed path that drops dir 8 due to dot or space at end is" | |
397 | print "encoded, and thus causing to hit _maxshortdirslen" |
|
396 | print "encoded, and thus causing to hit _maxshortdirslen" | |
398 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
397 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
399 | '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
398 | '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
400 | '123456789-123456') |
|
399 | '123456789-123456') | |
401 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
400 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
402 | '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
401 | '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
403 | '123456789-123456') |
|
402 | '123456789-123456') | |
404 |
|
403 | |||
405 | print "... with dir 8 short enough for encoding" |
|
404 | print "... with dir 8 short enough for encoding" | |
406 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
405 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
407 | '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
406 | '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
408 | '123456789-123456') |
|
407 | '123456789-123456') | |
409 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
408 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
410 | '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
409 | '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
411 | '123456789-123456') |
|
410 | '123456789-123456') | |
412 |
|
411 | |||
413 | print "extensions are replicated on hashed paths (unbounded!)" |
|
412 | print "extensions are replicated on hashed paths (unbounded!)" | |
414 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
413 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
415 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
414 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
416 | '123456789-12.345') |
|
415 | '123456789-12.345') | |
417 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
416 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
418 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
417 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
419 | '123456789-12.3456') |
|
418 | '123456789-12.3456') | |
420 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
419 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
421 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
420 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
422 | '123456789-12.34567') |
|
421 | '123456789-12.34567') | |
423 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
422 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
424 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
423 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
425 | '123456789-12.345678') |
|
424 | '123456789-12.345678') | |
426 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
425 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
427 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
426 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
428 | '123456789-12.3456789') |
|
427 | '123456789-12.3456789') | |
429 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
428 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
430 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
429 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
431 | '123456789-12.3456789-') |
|
430 | '123456789-12.3456789-') | |
432 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
431 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
433 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
432 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
434 | '123456789-12.3456789-1') |
|
433 | '123456789-12.3456789-1') | |
435 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
434 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
436 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
435 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
437 | '123456789-12.3456789-12') |
|
436 | '123456789-12.3456789-12') | |
438 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
437 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
439 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
438 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
440 | '123456789-12.3456789-123') |
|
439 | '123456789-12.3456789-123') | |
441 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
440 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
442 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
441 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
443 | '123456789-12.3456789-1234') |
|
442 | '123456789-12.3456789-1234') | |
444 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
443 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
445 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
444 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
446 | '123456789-12.3456789-12345') |
|
445 | '123456789-12.3456789-12345') | |
447 | show('data/12345678/12345678/12345678/12345678/12345678/12345' |
|
446 | show('data/12345678/12345678/12345678/12345678/12345678/12345' | |
448 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' |
|
447 | '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' | |
449 | '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX' |
|
448 | '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX' | |
450 | 'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU' |
|
449 | 'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU' | |
451 | 'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx' |
|
450 | 'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx' | |
452 | 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww' |
|
451 | 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww' | |
453 | 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww') |
|
452 | 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww') | |
454 |
|
453 |
General Comments 0
You need to be logged in to leave comments.
Login now