##// END OF EJS Templates
py3: replace os.name with pycompat.osname (part 2 of 2)
Pulkit Goyal -
r30640:7a3e67bf default
parent child Browse files
Show More
@@ -1,719 +1,718 b''
1 # color.py color output for Mercurial commands
1 # color.py color output for Mercurial commands
2 #
2 #
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''colorize output from some commands
8 '''colorize output from some commands
9
9
10 The color extension colorizes output from several Mercurial commands.
10 The color extension colorizes output from several Mercurial commands.
11 For example, the diff command shows additions in green and deletions
11 For example, the diff command shows additions in green and deletions
12 in red, while the status command shows modified files in magenta. Many
12 in red, while the status command shows modified files in magenta. Many
13 other commands have analogous colors. It is possible to customize
13 other commands have analogous colors. It is possible to customize
14 these colors.
14 these colors.
15
15
16 Effects
16 Effects
17 -------
17 -------
18
18
19 Other effects in addition to color, like bold and underlined text, are
19 Other effects in addition to color, like bold and underlined text, are
20 also available. By default, the terminfo database is used to find the
20 also available. By default, the terminfo database is used to find the
21 terminal codes used to change color and effect. If terminfo is not
21 terminal codes used to change color and effect. If terminfo is not
22 available, then effects are rendered with the ECMA-48 SGR control
22 available, then effects are rendered with the ECMA-48 SGR control
23 function (aka ANSI escape codes).
23 function (aka ANSI escape codes).
24
24
25 The available effects in terminfo mode are 'blink', 'bold', 'dim',
25 The available effects in terminfo mode are 'blink', 'bold', 'dim',
26 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
26 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
27 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
27 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
28 'underline'. How each is rendered depends on the terminal emulator.
28 'underline'. How each is rendered depends on the terminal emulator.
29 Some may not be available for a given terminal type, and will be
29 Some may not be available for a given terminal type, and will be
30 silently ignored.
30 silently ignored.
31
31
32 If the terminfo entry for your terminal is missing codes for an effect
32 If the terminfo entry for your terminal is missing codes for an effect
33 or has the wrong codes, you can add or override those codes in your
33 or has the wrong codes, you can add or override those codes in your
34 configuration::
34 configuration::
35
35
36 [color]
36 [color]
37 terminfo.dim = \E[2m
37 terminfo.dim = \E[2m
38
38
39 where '\E' is substituted with an escape character.
39 where '\E' is substituted with an escape character.
40
40
41 Labels
41 Labels
42 ------
42 ------
43
43
44 Text receives color effects depending on the labels that it has. Many
44 Text receives color effects depending on the labels that it has. Many
45 default Mercurial commands emit labelled text. You can also define
45 default Mercurial commands emit labelled text. You can also define
46 your own labels in templates using the label function, see :hg:`help
46 your own labels in templates using the label function, see :hg:`help
47 templates`. A single portion of text may have more than one label. In
47 templates`. A single portion of text may have more than one label. In
48 that case, effects given to the last label will override any other
48 that case, effects given to the last label will override any other
49 effects. This includes the special "none" effect, which nullifies
49 effects. This includes the special "none" effect, which nullifies
50 other effects.
50 other effects.
51
51
52 Labels are normally invisible. In order to see these labels and their
52 Labels are normally invisible. In order to see these labels and their
53 position in the text, use the global --color=debug option. The same
53 position in the text, use the global --color=debug option. The same
54 anchor text may be associated to multiple labels, e.g.
54 anchor text may be associated to multiple labels, e.g.
55
55
56 [log.changeset changeset.secret|changeset: 22611:6f0a53c8f587]
56 [log.changeset changeset.secret|changeset: 22611:6f0a53c8f587]
57
57
58 The following are the default effects for some default labels. Default
58 The following are the default effects for some default labels. Default
59 effects may be overridden from your configuration file::
59 effects may be overridden from your configuration file::
60
60
61 [color]
61 [color]
62 status.modified = blue bold underline red_background
62 status.modified = blue bold underline red_background
63 status.added = green bold
63 status.added = green bold
64 status.removed = red bold blue_background
64 status.removed = red bold blue_background
65 status.deleted = cyan bold underline
65 status.deleted = cyan bold underline
66 status.unknown = magenta bold underline
66 status.unknown = magenta bold underline
67 status.ignored = black bold
67 status.ignored = black bold
68
68
69 # 'none' turns off all effects
69 # 'none' turns off all effects
70 status.clean = none
70 status.clean = none
71 status.copied = none
71 status.copied = none
72
72
73 qseries.applied = blue bold underline
73 qseries.applied = blue bold underline
74 qseries.unapplied = black bold
74 qseries.unapplied = black bold
75 qseries.missing = red bold
75 qseries.missing = red bold
76
76
77 diff.diffline = bold
77 diff.diffline = bold
78 diff.extended = cyan bold
78 diff.extended = cyan bold
79 diff.file_a = red bold
79 diff.file_a = red bold
80 diff.file_b = green bold
80 diff.file_b = green bold
81 diff.hunk = magenta
81 diff.hunk = magenta
82 diff.deleted = red
82 diff.deleted = red
83 diff.inserted = green
83 diff.inserted = green
84 diff.changed = white
84 diff.changed = white
85 diff.tab =
85 diff.tab =
86 diff.trailingwhitespace = bold red_background
86 diff.trailingwhitespace = bold red_background
87
87
88 # Blank so it inherits the style of the surrounding label
88 # Blank so it inherits the style of the surrounding label
89 changeset.public =
89 changeset.public =
90 changeset.draft =
90 changeset.draft =
91 changeset.secret =
91 changeset.secret =
92
92
93 resolve.unresolved = red bold
93 resolve.unresolved = red bold
94 resolve.resolved = green bold
94 resolve.resolved = green bold
95
95
96 bookmarks.active = green
96 bookmarks.active = green
97
97
98 branches.active = none
98 branches.active = none
99 branches.closed = black bold
99 branches.closed = black bold
100 branches.current = green
100 branches.current = green
101 branches.inactive = none
101 branches.inactive = none
102
102
103 tags.normal = green
103 tags.normal = green
104 tags.local = black bold
104 tags.local = black bold
105
105
106 rebase.rebased = blue
106 rebase.rebased = blue
107 rebase.remaining = red bold
107 rebase.remaining = red bold
108
108
109 shelve.age = cyan
109 shelve.age = cyan
110 shelve.newest = green bold
110 shelve.newest = green bold
111 shelve.name = blue bold
111 shelve.name = blue bold
112
112
113 histedit.remaining = red bold
113 histedit.remaining = red bold
114
114
115 Custom colors
115 Custom colors
116 -------------
116 -------------
117
117
118 Because there are only eight standard colors, this module allows you
118 Because there are only eight standard colors, this module allows you
119 to define color names for other color slots which might be available
119 to define color names for other color slots which might be available
120 for your terminal type, assuming terminfo mode. For instance::
120 for your terminal type, assuming terminfo mode. For instance::
121
121
122 color.brightblue = 12
122 color.brightblue = 12
123 color.pink = 207
123 color.pink = 207
124 color.orange = 202
124 color.orange = 202
125
125
126 to set 'brightblue' to color slot 12 (useful for 16 color terminals
126 to set 'brightblue' to color slot 12 (useful for 16 color terminals
127 that have brighter colors defined in the upper eight) and, 'pink' and
127 that have brighter colors defined in the upper eight) and, 'pink' and
128 'orange' to colors in 256-color xterm's default color cube. These
128 'orange' to colors in 256-color xterm's default color cube. These
129 defined colors may then be used as any of the pre-defined eight,
129 defined colors may then be used as any of the pre-defined eight,
130 including appending '_background' to set the background to that color.
130 including appending '_background' to set the background to that color.
131
131
132 Modes
132 Modes
133 -----
133 -----
134
134
135 By default, the color extension will use ANSI mode (or win32 mode on
135 By default, the color extension will use ANSI mode (or win32 mode on
136 Windows) if it detects a terminal. To override auto mode (to enable
136 Windows) if it detects a terminal. To override auto mode (to enable
137 terminfo mode, for example), set the following configuration option::
137 terminfo mode, for example), set the following configuration option::
138
138
139 [color]
139 [color]
140 mode = terminfo
140 mode = terminfo
141
141
142 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
142 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
143 disable color.
143 disable color.
144
144
145 Note that on some systems, terminfo mode may cause problems when using
145 Note that on some systems, terminfo mode may cause problems when using
146 color with the pager extension and less -R. less with the -R option
146 color with the pager extension and less -R. less with the -R option
147 will only display ECMA-48 color codes, and terminfo mode may sometimes
147 will only display ECMA-48 color codes, and terminfo mode may sometimes
148 emit codes that less doesn't understand. You can work around this by
148 emit codes that less doesn't understand. You can work around this by
149 either using ansi mode (or auto mode), or by using less -r (which will
149 either using ansi mode (or auto mode), or by using less -r (which will
150 pass through all terminal control codes, not just color control
150 pass through all terminal control codes, not just color control
151 codes).
151 codes).
152
152
153 On some systems (such as MSYS in Windows), the terminal may support
153 On some systems (such as MSYS in Windows), the terminal may support
154 a different color mode than the pager (activated via the "pager"
154 a different color mode than the pager (activated via the "pager"
155 extension). It is possible to define separate modes depending on whether
155 extension). It is possible to define separate modes depending on whether
156 the pager is active::
156 the pager is active::
157
157
158 [color]
158 [color]
159 mode = auto
159 mode = auto
160 pagermode = ansi
160 pagermode = ansi
161
161
162 If ``pagermode`` is not defined, the ``mode`` will be used.
162 If ``pagermode`` is not defined, the ``mode`` will be used.
163 '''
163 '''
164
164
165 from __future__ import absolute_import
165 from __future__ import absolute_import
166
166
167 import os
168
169 from mercurial.i18n import _
167 from mercurial.i18n import _
170 from mercurial import (
168 from mercurial import (
171 cmdutil,
169 cmdutil,
172 commands,
170 commands,
173 dispatch,
171 dispatch,
174 encoding,
172 encoding,
175 extensions,
173 extensions,
174 pycompat,
176 subrepo,
175 subrepo,
177 ui as uimod,
176 ui as uimod,
178 util,
177 util,
179 )
178 )
180
179
181 cmdtable = {}
180 cmdtable = {}
182 command = cmdutil.command(cmdtable)
181 command = cmdutil.command(cmdtable)
183 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
182 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
184 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
183 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
185 # be specifying the version(s) of Mercurial they are tested with, or
184 # be specifying the version(s) of Mercurial they are tested with, or
186 # leave the attribute unspecified.
185 # leave the attribute unspecified.
187 testedwith = 'ships-with-hg-core'
186 testedwith = 'ships-with-hg-core'
188
187
189 # start and stop parameters for effects
188 # start and stop parameters for effects
190 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
189 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
191 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
190 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
192 'italic': 3, 'underline': 4, 'inverse': 7, 'dim': 2,
191 'italic': 3, 'underline': 4, 'inverse': 7, 'dim': 2,
193 'black_background': 40, 'red_background': 41,
192 'black_background': 40, 'red_background': 41,
194 'green_background': 42, 'yellow_background': 43,
193 'green_background': 42, 'yellow_background': 43,
195 'blue_background': 44, 'purple_background': 45,
194 'blue_background': 44, 'purple_background': 45,
196 'cyan_background': 46, 'white_background': 47}
195 'cyan_background': 46, 'white_background': 47}
197
196
198 def _terminfosetup(ui, mode):
197 def _terminfosetup(ui, mode):
199 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
198 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
200
199
201 global _terminfo_params
200 global _terminfo_params
202 # If we failed to load curses, we go ahead and return.
201 # If we failed to load curses, we go ahead and return.
203 if not _terminfo_params:
202 if not _terminfo_params:
204 return
203 return
205 # Otherwise, see what the config file says.
204 # Otherwise, see what the config file says.
206 if mode not in ('auto', 'terminfo'):
205 if mode not in ('auto', 'terminfo'):
207 return
206 return
208
207
209 _terminfo_params.update((key[6:], (False, int(val), ''))
208 _terminfo_params.update((key[6:], (False, int(val), ''))
210 for key, val in ui.configitems('color')
209 for key, val in ui.configitems('color')
211 if key.startswith('color.'))
210 if key.startswith('color.'))
212 _terminfo_params.update((key[9:], (True, '', val.replace('\\E', '\x1b')))
211 _terminfo_params.update((key[9:], (True, '', val.replace('\\E', '\x1b')))
213 for key, val in ui.configitems('color')
212 for key, val in ui.configitems('color')
214 if key.startswith('terminfo.'))
213 if key.startswith('terminfo.'))
215
214
216 try:
215 try:
217 curses.setupterm()
216 curses.setupterm()
218 except curses.error as e:
217 except curses.error as e:
219 _terminfo_params = {}
218 _terminfo_params = {}
220 return
219 return
221
220
222 for key, (b, e, c) in _terminfo_params.items():
221 for key, (b, e, c) in _terminfo_params.items():
223 if not b:
222 if not b:
224 continue
223 continue
225 if not c and not curses.tigetstr(e):
224 if not c and not curses.tigetstr(e):
226 # Most terminals don't support dim, invis, etc, so don't be
225 # Most terminals don't support dim, invis, etc, so don't be
227 # noisy and use ui.debug().
226 # noisy and use ui.debug().
228 ui.debug("no terminfo entry for %s\n" % e)
227 ui.debug("no terminfo entry for %s\n" % e)
229 del _terminfo_params[key]
228 del _terminfo_params[key]
230 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
229 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
231 # Only warn about missing terminfo entries if we explicitly asked for
230 # Only warn about missing terminfo entries if we explicitly asked for
232 # terminfo mode.
231 # terminfo mode.
233 if mode == "terminfo":
232 if mode == "terminfo":
234 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
233 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
235 "ECMA-48 color\n"))
234 "ECMA-48 color\n"))
236 _terminfo_params = {}
235 _terminfo_params = {}
237
236
238 def _modesetup(ui, coloropt):
237 def _modesetup(ui, coloropt):
239 global _terminfo_params
238 global _terminfo_params
240
239
241 if coloropt == 'debug':
240 if coloropt == 'debug':
242 return 'debug'
241 return 'debug'
243
242
244 auto = (coloropt == 'auto')
243 auto = (coloropt == 'auto')
245 always = not auto and util.parsebool(coloropt)
244 always = not auto and util.parsebool(coloropt)
246 if not always and not auto:
245 if not always and not auto:
247 return None
246 return None
248
247
249 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
248 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
250 and ui.formatted()))
249 and ui.formatted()))
251
250
252 mode = ui.config('color', 'mode', 'auto')
251 mode = ui.config('color', 'mode', 'auto')
253
252
254 # If pager is active, color.pagermode overrides color.mode.
253 # If pager is active, color.pagermode overrides color.mode.
255 if getattr(ui, 'pageractive', False):
254 if getattr(ui, 'pageractive', False):
256 mode = ui.config('color', 'pagermode', mode)
255 mode = ui.config('color', 'pagermode', mode)
257
256
258 realmode = mode
257 realmode = mode
259 if mode == 'auto':
258 if mode == 'auto':
260 if os.name == 'nt':
259 if pycompat.osname == 'nt':
261 term = encoding.environ.get('TERM')
260 term = encoding.environ.get('TERM')
262 # TERM won't be defined in a vanilla cmd.exe environment.
261 # TERM won't be defined in a vanilla cmd.exe environment.
263
262
264 # UNIX-like environments on Windows such as Cygwin and MSYS will
263 # UNIX-like environments on Windows such as Cygwin and MSYS will
265 # set TERM. They appear to make a best effort attempt at setting it
264 # set TERM. They appear to make a best effort attempt at setting it
266 # to something appropriate. However, not all environments with TERM
265 # to something appropriate. However, not all environments with TERM
267 # defined support ANSI. Since "ansi" could result in terminal
266 # defined support ANSI. Since "ansi" could result in terminal
268 # gibberish, we error on the side of selecting "win32". However, if
267 # gibberish, we error on the side of selecting "win32". However, if
269 # w32effects is not defined, we almost certainly don't support
268 # w32effects is not defined, we almost certainly don't support
270 # "win32", so don't even try.
269 # "win32", so don't even try.
271 if (term and 'xterm' in term) or not w32effects:
270 if (term and 'xterm' in term) or not w32effects:
272 realmode = 'ansi'
271 realmode = 'ansi'
273 else:
272 else:
274 realmode = 'win32'
273 realmode = 'win32'
275 else:
274 else:
276 realmode = 'ansi'
275 realmode = 'ansi'
277
276
278 def modewarn():
277 def modewarn():
279 # only warn if color.mode was explicitly set and we're in
278 # only warn if color.mode was explicitly set and we're in
280 # a formatted terminal
279 # a formatted terminal
281 if mode == realmode and ui.formatted():
280 if mode == realmode and ui.formatted():
282 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
281 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
283
282
284 if realmode == 'win32':
283 if realmode == 'win32':
285 _terminfo_params = {}
284 _terminfo_params = {}
286 if not w32effects:
285 if not w32effects:
287 modewarn()
286 modewarn()
288 return None
287 return None
289 _effects.update(w32effects)
288 _effects.update(w32effects)
290 elif realmode == 'ansi':
289 elif realmode == 'ansi':
291 _terminfo_params = {}
290 _terminfo_params = {}
292 elif realmode == 'terminfo':
291 elif realmode == 'terminfo':
293 _terminfosetup(ui, mode)
292 _terminfosetup(ui, mode)
294 if not _terminfo_params:
293 if not _terminfo_params:
295 ## FIXME Shouldn't we return None in this case too?
294 ## FIXME Shouldn't we return None in this case too?
296 modewarn()
295 modewarn()
297 realmode = 'ansi'
296 realmode = 'ansi'
298 else:
297 else:
299 return None
298 return None
300
299
301 if always or (auto and formatted):
300 if always or (auto and formatted):
302 return realmode
301 return realmode
303 return None
302 return None
304
303
305 try:
304 try:
306 import curses
305 import curses
307 # Mapping from effect name to terminfo attribute name (or raw code) or
306 # Mapping from effect name to terminfo attribute name (or raw code) or
308 # color number. This will also force-load the curses module.
307 # color number. This will also force-load the curses module.
309 _terminfo_params = {'none': (True, 'sgr0', ''),
308 _terminfo_params = {'none': (True, 'sgr0', ''),
310 'standout': (True, 'smso', ''),
309 'standout': (True, 'smso', ''),
311 'underline': (True, 'smul', ''),
310 'underline': (True, 'smul', ''),
312 'reverse': (True, 'rev', ''),
311 'reverse': (True, 'rev', ''),
313 'inverse': (True, 'rev', ''),
312 'inverse': (True, 'rev', ''),
314 'blink': (True, 'blink', ''),
313 'blink': (True, 'blink', ''),
315 'dim': (True, 'dim', ''),
314 'dim': (True, 'dim', ''),
316 'bold': (True, 'bold', ''),
315 'bold': (True, 'bold', ''),
317 'invisible': (True, 'invis', ''),
316 'invisible': (True, 'invis', ''),
318 'italic': (True, 'sitm', ''),
317 'italic': (True, 'sitm', ''),
319 'black': (False, curses.COLOR_BLACK, ''),
318 'black': (False, curses.COLOR_BLACK, ''),
320 'red': (False, curses.COLOR_RED, ''),
319 'red': (False, curses.COLOR_RED, ''),
321 'green': (False, curses.COLOR_GREEN, ''),
320 'green': (False, curses.COLOR_GREEN, ''),
322 'yellow': (False, curses.COLOR_YELLOW, ''),
321 'yellow': (False, curses.COLOR_YELLOW, ''),
323 'blue': (False, curses.COLOR_BLUE, ''),
322 'blue': (False, curses.COLOR_BLUE, ''),
324 'magenta': (False, curses.COLOR_MAGENTA, ''),
323 'magenta': (False, curses.COLOR_MAGENTA, ''),
325 'cyan': (False, curses.COLOR_CYAN, ''),
324 'cyan': (False, curses.COLOR_CYAN, ''),
326 'white': (False, curses.COLOR_WHITE, '')}
325 'white': (False, curses.COLOR_WHITE, '')}
327 except ImportError:
326 except ImportError:
328 _terminfo_params = {}
327 _terminfo_params = {}
329
328
330 _styles = {'grep.match': 'red bold',
329 _styles = {'grep.match': 'red bold',
331 'grep.linenumber': 'green',
330 'grep.linenumber': 'green',
332 'grep.rev': 'green',
331 'grep.rev': 'green',
333 'grep.change': 'green',
332 'grep.change': 'green',
334 'grep.sep': 'cyan',
333 'grep.sep': 'cyan',
335 'grep.filename': 'magenta',
334 'grep.filename': 'magenta',
336 'grep.user': 'magenta',
335 'grep.user': 'magenta',
337 'grep.date': 'magenta',
336 'grep.date': 'magenta',
338 'bookmarks.active': 'green',
337 'bookmarks.active': 'green',
339 'branches.active': 'none',
338 'branches.active': 'none',
340 'branches.closed': 'black bold',
339 'branches.closed': 'black bold',
341 'branches.current': 'green',
340 'branches.current': 'green',
342 'branches.inactive': 'none',
341 'branches.inactive': 'none',
343 'diff.changed': 'white',
342 'diff.changed': 'white',
344 'diff.deleted': 'red',
343 'diff.deleted': 'red',
345 'diff.diffline': 'bold',
344 'diff.diffline': 'bold',
346 'diff.extended': 'cyan bold',
345 'diff.extended': 'cyan bold',
347 'diff.file_a': 'red bold',
346 'diff.file_a': 'red bold',
348 'diff.file_b': 'green bold',
347 'diff.file_b': 'green bold',
349 'diff.hunk': 'magenta',
348 'diff.hunk': 'magenta',
350 'diff.inserted': 'green',
349 'diff.inserted': 'green',
351 'diff.tab': '',
350 'diff.tab': '',
352 'diff.trailingwhitespace': 'bold red_background',
351 'diff.trailingwhitespace': 'bold red_background',
353 'changeset.public' : '',
352 'changeset.public' : '',
354 'changeset.draft' : '',
353 'changeset.draft' : '',
355 'changeset.secret' : '',
354 'changeset.secret' : '',
356 'diffstat.deleted': 'red',
355 'diffstat.deleted': 'red',
357 'diffstat.inserted': 'green',
356 'diffstat.inserted': 'green',
358 'histedit.remaining': 'red bold',
357 'histedit.remaining': 'red bold',
359 'ui.prompt': 'yellow',
358 'ui.prompt': 'yellow',
360 'log.changeset': 'yellow',
359 'log.changeset': 'yellow',
361 'patchbomb.finalsummary': '',
360 'patchbomb.finalsummary': '',
362 'patchbomb.from': 'magenta',
361 'patchbomb.from': 'magenta',
363 'patchbomb.to': 'cyan',
362 'patchbomb.to': 'cyan',
364 'patchbomb.subject': 'green',
363 'patchbomb.subject': 'green',
365 'patchbomb.diffstats': '',
364 'patchbomb.diffstats': '',
366 'rebase.rebased': 'blue',
365 'rebase.rebased': 'blue',
367 'rebase.remaining': 'red bold',
366 'rebase.remaining': 'red bold',
368 'resolve.resolved': 'green bold',
367 'resolve.resolved': 'green bold',
369 'resolve.unresolved': 'red bold',
368 'resolve.unresolved': 'red bold',
370 'shelve.age': 'cyan',
369 'shelve.age': 'cyan',
371 'shelve.newest': 'green bold',
370 'shelve.newest': 'green bold',
372 'shelve.name': 'blue bold',
371 'shelve.name': 'blue bold',
373 'status.added': 'green bold',
372 'status.added': 'green bold',
374 'status.clean': 'none',
373 'status.clean': 'none',
375 'status.copied': 'none',
374 'status.copied': 'none',
376 'status.deleted': 'cyan bold underline',
375 'status.deleted': 'cyan bold underline',
377 'status.ignored': 'black bold',
376 'status.ignored': 'black bold',
378 'status.modified': 'blue bold',
377 'status.modified': 'blue bold',
379 'status.removed': 'red bold',
378 'status.removed': 'red bold',
380 'status.unknown': 'magenta bold underline',
379 'status.unknown': 'magenta bold underline',
381 'tags.normal': 'green',
380 'tags.normal': 'green',
382 'tags.local': 'black bold'}
381 'tags.local': 'black bold'}
383
382
384
383
385 def _effect_str(effect):
384 def _effect_str(effect):
386 '''Helper function for render_effects().'''
385 '''Helper function for render_effects().'''
387
386
388 bg = False
387 bg = False
389 if effect.endswith('_background'):
388 if effect.endswith('_background'):
390 bg = True
389 bg = True
391 effect = effect[:-11]
390 effect = effect[:-11]
392 try:
391 try:
393 attr, val, termcode = _terminfo_params[effect]
392 attr, val, termcode = _terminfo_params[effect]
394 except KeyError:
393 except KeyError:
395 return ''
394 return ''
396 if attr:
395 if attr:
397 if termcode:
396 if termcode:
398 return termcode
397 return termcode
399 else:
398 else:
400 return curses.tigetstr(val)
399 return curses.tigetstr(val)
401 elif bg:
400 elif bg:
402 return curses.tparm(curses.tigetstr('setab'), val)
401 return curses.tparm(curses.tigetstr('setab'), val)
403 else:
402 else:
404 return curses.tparm(curses.tigetstr('setaf'), val)
403 return curses.tparm(curses.tigetstr('setaf'), val)
405
404
406 def render_effects(text, effects):
405 def render_effects(text, effects):
407 'Wrap text in commands to turn on each effect.'
406 'Wrap text in commands to turn on each effect.'
408 if not text:
407 if not text:
409 return text
408 return text
410 if not _terminfo_params:
409 if not _terminfo_params:
411 start = [str(_effects[e]) for e in ['none'] + effects.split()]
410 start = [str(_effects[e]) for e in ['none'] + effects.split()]
412 start = '\033[' + ';'.join(start) + 'm'
411 start = '\033[' + ';'.join(start) + 'm'
413 stop = '\033[' + str(_effects['none']) + 'm'
412 stop = '\033[' + str(_effects['none']) + 'm'
414 else:
413 else:
415 start = ''.join(_effect_str(effect)
414 start = ''.join(_effect_str(effect)
416 for effect in ['none'] + effects.split())
415 for effect in ['none'] + effects.split())
417 stop = _effect_str('none')
416 stop = _effect_str('none')
418 return ''.join([start, text, stop])
417 return ''.join([start, text, stop])
419
418
420 def extstyles():
419 def extstyles():
421 for name, ext in extensions.extensions():
420 for name, ext in extensions.extensions():
422 _styles.update(getattr(ext, 'colortable', {}))
421 _styles.update(getattr(ext, 'colortable', {}))
423
422
424 def valideffect(effect):
423 def valideffect(effect):
425 'Determine if the effect is valid or not.'
424 'Determine if the effect is valid or not.'
426 good = False
425 good = False
427 if not _terminfo_params and effect in _effects:
426 if not _terminfo_params and effect in _effects:
428 good = True
427 good = True
429 elif effect in _terminfo_params or effect[:-11] in _terminfo_params:
428 elif effect in _terminfo_params or effect[:-11] in _terminfo_params:
430 good = True
429 good = True
431 return good
430 return good
432
431
433 def configstyles(ui):
432 def configstyles(ui):
434 for status, cfgeffects in ui.configitems('color'):
433 for status, cfgeffects in ui.configitems('color'):
435 if '.' not in status or status.startswith(('color.', 'terminfo.')):
434 if '.' not in status or status.startswith(('color.', 'terminfo.')):
436 continue
435 continue
437 cfgeffects = ui.configlist('color', status)
436 cfgeffects = ui.configlist('color', status)
438 if cfgeffects:
437 if cfgeffects:
439 good = []
438 good = []
440 for e in cfgeffects:
439 for e in cfgeffects:
441 if valideffect(e):
440 if valideffect(e):
442 good.append(e)
441 good.append(e)
443 else:
442 else:
444 ui.warn(_("ignoring unknown color/effect %r "
443 ui.warn(_("ignoring unknown color/effect %r "
445 "(configured in color.%s)\n")
444 "(configured in color.%s)\n")
446 % (e, status))
445 % (e, status))
447 _styles[status] = ' '.join(good)
446 _styles[status] = ' '.join(good)
448
447
449 class colorui(uimod.ui):
448 class colorui(uimod.ui):
450 _colormode = 'ansi'
449 _colormode = 'ansi'
451 def write(self, *args, **opts):
450 def write(self, *args, **opts):
452 if self._colormode is None:
451 if self._colormode is None:
453 return super(colorui, self).write(*args, **opts)
452 return super(colorui, self).write(*args, **opts)
454
453
455 label = opts.get('label', '')
454 label = opts.get('label', '')
456 if self._buffers and not opts.get('prompt', False):
455 if self._buffers and not opts.get('prompt', False):
457 if self._bufferapplylabels:
456 if self._bufferapplylabels:
458 self._buffers[-1].extend(self.label(a, label) for a in args)
457 self._buffers[-1].extend(self.label(a, label) for a in args)
459 else:
458 else:
460 self._buffers[-1].extend(args)
459 self._buffers[-1].extend(args)
461 elif self._colormode == 'win32':
460 elif self._colormode == 'win32':
462 for a in args:
461 for a in args:
463 win32print(a, super(colorui, self).write, **opts)
462 win32print(a, super(colorui, self).write, **opts)
464 else:
463 else:
465 return super(colorui, self).write(
464 return super(colorui, self).write(
466 *[self.label(a, label) for a in args], **opts)
465 *[self.label(a, label) for a in args], **opts)
467
466
468 def write_err(self, *args, **opts):
467 def write_err(self, *args, **opts):
469 if self._colormode is None:
468 if self._colormode is None:
470 return super(colorui, self).write_err(*args, **opts)
469 return super(colorui, self).write_err(*args, **opts)
471
470
472 label = opts.get('label', '')
471 label = opts.get('label', '')
473 if self._bufferstates and self._bufferstates[-1][0]:
472 if self._bufferstates and self._bufferstates[-1][0]:
474 return self.write(*args, **opts)
473 return self.write(*args, **opts)
475 if self._colormode == 'win32':
474 if self._colormode == 'win32':
476 for a in args:
475 for a in args:
477 win32print(a, super(colorui, self).write_err, **opts)
476 win32print(a, super(colorui, self).write_err, **opts)
478 else:
477 else:
479 return super(colorui, self).write_err(
478 return super(colorui, self).write_err(
480 *[self.label(a, label) for a in args], **opts)
479 *[self.label(a, label) for a in args], **opts)
481
480
482 def showlabel(self, msg, label):
481 def showlabel(self, msg, label):
483 if label and msg:
482 if label and msg:
484 if msg[-1] == '\n':
483 if msg[-1] == '\n':
485 return "[%s|%s]\n" % (label, msg[:-1])
484 return "[%s|%s]\n" % (label, msg[:-1])
486 else:
485 else:
487 return "[%s|%s]" % (label, msg)
486 return "[%s|%s]" % (label, msg)
488 else:
487 else:
489 return msg
488 return msg
490
489
491 def label(self, msg, label):
490 def label(self, msg, label):
492 if self._colormode is None:
491 if self._colormode is None:
493 return super(colorui, self).label(msg, label)
492 return super(colorui, self).label(msg, label)
494
493
495 if self._colormode == 'debug':
494 if self._colormode == 'debug':
496 return self.showlabel(msg, label)
495 return self.showlabel(msg, label)
497
496
498 effects = []
497 effects = []
499 for l in label.split():
498 for l in label.split():
500 s = _styles.get(l, '')
499 s = _styles.get(l, '')
501 if s:
500 if s:
502 effects.append(s)
501 effects.append(s)
503 elif valideffect(l):
502 elif valideffect(l):
504 effects.append(l)
503 effects.append(l)
505 effects = ' '.join(effects)
504 effects = ' '.join(effects)
506 if effects:
505 if effects:
507 return '\n'.join([render_effects(line, effects)
506 return '\n'.join([render_effects(line, effects)
508 for line in msg.split('\n')])
507 for line in msg.split('\n')])
509 return msg
508 return msg
510
509
511 def uisetup(ui):
510 def uisetup(ui):
512 if ui.plain():
511 if ui.plain():
513 return
512 return
514 if not isinstance(ui, colorui):
513 if not isinstance(ui, colorui):
515 colorui.__bases__ = (ui.__class__,)
514 colorui.__bases__ = (ui.__class__,)
516 ui.__class__ = colorui
515 ui.__class__ = colorui
517 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
516 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
518 mode = _modesetup(ui_, opts['color'])
517 mode = _modesetup(ui_, opts['color'])
519 colorui._colormode = mode
518 colorui._colormode = mode
520 if mode and mode != 'debug':
519 if mode and mode != 'debug':
521 extstyles()
520 extstyles()
522 configstyles(ui_)
521 configstyles(ui_)
523 return orig(ui_, opts, cmd, cmdfunc)
522 return orig(ui_, opts, cmd, cmdfunc)
524 def colorgit(orig, gitsub, commands, env=None, stream=False, cwd=None):
523 def colorgit(orig, gitsub, commands, env=None, stream=False, cwd=None):
525 if gitsub.ui._colormode and len(commands) and commands[0] == "diff":
524 if gitsub.ui._colormode and len(commands) and commands[0] == "diff":
526 # insert the argument in the front,
525 # insert the argument in the front,
527 # the end of git diff arguments is used for paths
526 # the end of git diff arguments is used for paths
528 commands.insert(1, '--color')
527 commands.insert(1, '--color')
529 return orig(gitsub, commands, env, stream, cwd)
528 return orig(gitsub, commands, env, stream, cwd)
530 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
529 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
531 extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit)
530 extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit)
532
531
533 def extsetup(ui):
532 def extsetup(ui):
534 commands.globalopts.append(
533 commands.globalopts.append(
535 ('', 'color', 'auto',
534 ('', 'color', 'auto',
536 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
535 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
537 # and should not be translated
536 # and should not be translated
538 _("when to colorize (boolean, always, auto, never, or debug)"),
537 _("when to colorize (boolean, always, auto, never, or debug)"),
539 _('TYPE')))
538 _('TYPE')))
540
539
541 @command('debugcolor',
540 @command('debugcolor',
542 [('', 'style', None, _('show all configured styles'))],
541 [('', 'style', None, _('show all configured styles'))],
543 'hg debugcolor')
542 'hg debugcolor')
544 def debugcolor(ui, repo, **opts):
543 def debugcolor(ui, repo, **opts):
545 """show available color, effects or style"""
544 """show available color, effects or style"""
546 ui.write(('color mode: %s\n') % ui._colormode)
545 ui.write(('color mode: %s\n') % ui._colormode)
547 if opts.get('style'):
546 if opts.get('style'):
548 return _debugdisplaystyle(ui)
547 return _debugdisplaystyle(ui)
549 else:
548 else:
550 return _debugdisplaycolor(ui)
549 return _debugdisplaycolor(ui)
551
550
552 def _debugdisplaycolor(ui):
551 def _debugdisplaycolor(ui):
553 global _styles
552 global _styles
554 oldstyle = _styles
553 oldstyle = _styles
555 try:
554 try:
556 _styles = {}
555 _styles = {}
557 for effect in _effects.keys():
556 for effect in _effects.keys():
558 _styles[effect] = effect
557 _styles[effect] = effect
559 if _terminfo_params:
558 if _terminfo_params:
560 for k, v in ui.configitems('color'):
559 for k, v in ui.configitems('color'):
561 if k.startswith('color.'):
560 if k.startswith('color.'):
562 _styles[k] = k[6:]
561 _styles[k] = k[6:]
563 elif k.startswith('terminfo.'):
562 elif k.startswith('terminfo.'):
564 _styles[k] = k[9:]
563 _styles[k] = k[9:]
565 ui.write(_('available colors:\n'))
564 ui.write(_('available colors:\n'))
566 # sort label with a '_' after the other to group '_background' entry.
565 # sort label with a '_' after the other to group '_background' entry.
567 items = sorted(_styles.items(),
566 items = sorted(_styles.items(),
568 key=lambda i: ('_' in i[0], i[0], i[1]))
567 key=lambda i: ('_' in i[0], i[0], i[1]))
569 for colorname, label in items:
568 for colorname, label in items:
570 ui.write(('%s\n') % colorname, label=label)
569 ui.write(('%s\n') % colorname, label=label)
571 finally:
570 finally:
572 _styles = oldstyle
571 _styles = oldstyle
573
572
574 def _debugdisplaystyle(ui):
573 def _debugdisplaystyle(ui):
575 ui.write(_('available style:\n'))
574 ui.write(_('available style:\n'))
576 width = max(len(s) for s in _styles)
575 width = max(len(s) for s in _styles)
577 for label, effects in sorted(_styles.items()):
576 for label, effects in sorted(_styles.items()):
578 ui.write('%s' % label, label=label)
577 ui.write('%s' % label, label=label)
579 if effects:
578 if effects:
580 # 50
579 # 50
581 ui.write(': ')
580 ui.write(': ')
582 ui.write(' ' * (max(0, width - len(label))))
581 ui.write(' ' * (max(0, width - len(label))))
583 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
582 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
584 ui.write('\n')
583 ui.write('\n')
585
584
586 if os.name != 'nt':
585 if pycompat.osname != 'nt':
587 w32effects = None
586 w32effects = None
588 else:
587 else:
589 import ctypes
588 import ctypes
590 import re
589 import re
591
590
592 _kernel32 = ctypes.windll.kernel32
591 _kernel32 = ctypes.windll.kernel32
593
592
594 _WORD = ctypes.c_ushort
593 _WORD = ctypes.c_ushort
595
594
596 _INVALID_HANDLE_VALUE = -1
595 _INVALID_HANDLE_VALUE = -1
597
596
598 class _COORD(ctypes.Structure):
597 class _COORD(ctypes.Structure):
599 _fields_ = [('X', ctypes.c_short),
598 _fields_ = [('X', ctypes.c_short),
600 ('Y', ctypes.c_short)]
599 ('Y', ctypes.c_short)]
601
600
602 class _SMALL_RECT(ctypes.Structure):
601 class _SMALL_RECT(ctypes.Structure):
603 _fields_ = [('Left', ctypes.c_short),
602 _fields_ = [('Left', ctypes.c_short),
604 ('Top', ctypes.c_short),
603 ('Top', ctypes.c_short),
605 ('Right', ctypes.c_short),
604 ('Right', ctypes.c_short),
606 ('Bottom', ctypes.c_short)]
605 ('Bottom', ctypes.c_short)]
607
606
608 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
607 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
609 _fields_ = [('dwSize', _COORD),
608 _fields_ = [('dwSize', _COORD),
610 ('dwCursorPosition', _COORD),
609 ('dwCursorPosition', _COORD),
611 ('wAttributes', _WORD),
610 ('wAttributes', _WORD),
612 ('srWindow', _SMALL_RECT),
611 ('srWindow', _SMALL_RECT),
613 ('dwMaximumWindowSize', _COORD)]
612 ('dwMaximumWindowSize', _COORD)]
614
613
615 _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
614 _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
616 _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12
615 _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12
617
616
618 _FOREGROUND_BLUE = 0x0001
617 _FOREGROUND_BLUE = 0x0001
619 _FOREGROUND_GREEN = 0x0002
618 _FOREGROUND_GREEN = 0x0002
620 _FOREGROUND_RED = 0x0004
619 _FOREGROUND_RED = 0x0004
621 _FOREGROUND_INTENSITY = 0x0008
620 _FOREGROUND_INTENSITY = 0x0008
622
621
623 _BACKGROUND_BLUE = 0x0010
622 _BACKGROUND_BLUE = 0x0010
624 _BACKGROUND_GREEN = 0x0020
623 _BACKGROUND_GREEN = 0x0020
625 _BACKGROUND_RED = 0x0040
624 _BACKGROUND_RED = 0x0040
626 _BACKGROUND_INTENSITY = 0x0080
625 _BACKGROUND_INTENSITY = 0x0080
627
626
628 _COMMON_LVB_REVERSE_VIDEO = 0x4000
627 _COMMON_LVB_REVERSE_VIDEO = 0x4000
629 _COMMON_LVB_UNDERSCORE = 0x8000
628 _COMMON_LVB_UNDERSCORE = 0x8000
630
629
631 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
630 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
632 w32effects = {
631 w32effects = {
633 'none': -1,
632 'none': -1,
634 'black': 0,
633 'black': 0,
635 'red': _FOREGROUND_RED,
634 'red': _FOREGROUND_RED,
636 'green': _FOREGROUND_GREEN,
635 'green': _FOREGROUND_GREEN,
637 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
636 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
638 'blue': _FOREGROUND_BLUE,
637 'blue': _FOREGROUND_BLUE,
639 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
638 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
640 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
639 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
641 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
640 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
642 'bold': _FOREGROUND_INTENSITY,
641 'bold': _FOREGROUND_INTENSITY,
643 'black_background': 0x100, # unused value > 0x0f
642 'black_background': 0x100, # unused value > 0x0f
644 'red_background': _BACKGROUND_RED,
643 'red_background': _BACKGROUND_RED,
645 'green_background': _BACKGROUND_GREEN,
644 'green_background': _BACKGROUND_GREEN,
646 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
645 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
647 'blue_background': _BACKGROUND_BLUE,
646 'blue_background': _BACKGROUND_BLUE,
648 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
647 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
649 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
648 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
650 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
649 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
651 _BACKGROUND_BLUE),
650 _BACKGROUND_BLUE),
652 'bold_background': _BACKGROUND_INTENSITY,
651 'bold_background': _BACKGROUND_INTENSITY,
653 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
652 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
654 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
653 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
655 }
654 }
656
655
657 passthrough = set([_FOREGROUND_INTENSITY,
656 passthrough = set([_FOREGROUND_INTENSITY,
658 _BACKGROUND_INTENSITY,
657 _BACKGROUND_INTENSITY,
659 _COMMON_LVB_UNDERSCORE,
658 _COMMON_LVB_UNDERSCORE,
660 _COMMON_LVB_REVERSE_VIDEO])
659 _COMMON_LVB_REVERSE_VIDEO])
661
660
662 stdout = _kernel32.GetStdHandle(
661 stdout = _kernel32.GetStdHandle(
663 _STD_OUTPUT_HANDLE) # don't close the handle returned
662 _STD_OUTPUT_HANDLE) # don't close the handle returned
664 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
663 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
665 w32effects = None
664 w32effects = None
666 else:
665 else:
667 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
666 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
668 if not _kernel32.GetConsoleScreenBufferInfo(
667 if not _kernel32.GetConsoleScreenBufferInfo(
669 stdout, ctypes.byref(csbi)):
668 stdout, ctypes.byref(csbi)):
670 # stdout may not support GetConsoleScreenBufferInfo()
669 # stdout may not support GetConsoleScreenBufferInfo()
671 # when called from subprocess or redirected
670 # when called from subprocess or redirected
672 w32effects = None
671 w32effects = None
673 else:
672 else:
674 origattr = csbi.wAttributes
673 origattr = csbi.wAttributes
675 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
674 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
676 re.MULTILINE | re.DOTALL)
675 re.MULTILINE | re.DOTALL)
677
676
678 def win32print(text, orig, **opts):
677 def win32print(text, orig, **opts):
679 label = opts.get('label', '')
678 label = opts.get('label', '')
680 attr = origattr
679 attr = origattr
681
680
682 def mapcolor(val, attr):
681 def mapcolor(val, attr):
683 if val == -1:
682 if val == -1:
684 return origattr
683 return origattr
685 elif val in passthrough:
684 elif val in passthrough:
686 return attr | val
685 return attr | val
687 elif val > 0x0f:
686 elif val > 0x0f:
688 return (val & 0x70) | (attr & 0x8f)
687 return (val & 0x70) | (attr & 0x8f)
689 else:
688 else:
690 return (val & 0x07) | (attr & 0xf8)
689 return (val & 0x07) | (attr & 0xf8)
691
690
692 # determine console attributes based on labels
691 # determine console attributes based on labels
693 for l in label.split():
692 for l in label.split():
694 style = _styles.get(l, '')
693 style = _styles.get(l, '')
695 for effect in style.split():
694 for effect in style.split():
696 try:
695 try:
697 attr = mapcolor(w32effects[effect], attr)
696 attr = mapcolor(w32effects[effect], attr)
698 except KeyError:
697 except KeyError:
699 # w32effects could not have certain attributes so we skip
698 # w32effects could not have certain attributes so we skip
700 # them if not found
699 # them if not found
701 pass
700 pass
702 # hack to ensure regexp finds data
701 # hack to ensure regexp finds data
703 if not text.startswith('\033['):
702 if not text.startswith('\033['):
704 text = '\033[m' + text
703 text = '\033[m' + text
705
704
706 # Look for ANSI-like codes embedded in text
705 # Look for ANSI-like codes embedded in text
707 m = re.match(ansire, text)
706 m = re.match(ansire, text)
708
707
709 try:
708 try:
710 while m:
709 while m:
711 for sattr in m.group(1).split(';'):
710 for sattr in m.group(1).split(';'):
712 if sattr:
711 if sattr:
713 attr = mapcolor(int(sattr), attr)
712 attr = mapcolor(int(sattr), attr)
714 _kernel32.SetConsoleTextAttribute(stdout, attr)
713 _kernel32.SetConsoleTextAttribute(stdout, attr)
715 orig(m.group(2), **opts)
714 orig(m.group(2), **opts)
716 m = re.match(ansire, m.group(3))
715 m = re.match(ansire, m.group(3))
717 finally:
716 finally:
718 # Explicitly reset original attributes
717 # Explicitly reset original attributes
719 _kernel32.SetConsoleTextAttribute(stdout, origattr)
718 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,1353 +1,1353 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5
5
6 import os
6 import os
7 import re
7 import re
8 import tempfile
8 import tempfile
9 import xml.dom.minidom
9 import xml.dom.minidom
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import (
12 from mercurial import (
13 encoding,
13 encoding,
14 error,
14 error,
15 pycompat,
15 pycompat,
16 scmutil,
16 scmutil,
17 util,
17 util,
18 )
18 )
19
19
20 from . import common
20 from . import common
21
21
22 pickle = util.pickle
22 pickle = util.pickle
23 stringio = util.stringio
23 stringio = util.stringio
24 propertycache = util.propertycache
24 propertycache = util.propertycache
25 urlerr = util.urlerr
25 urlerr = util.urlerr
26 urlreq = util.urlreq
26 urlreq = util.urlreq
27
27
28 commandline = common.commandline
28 commandline = common.commandline
29 commit = common.commit
29 commit = common.commit
30 converter_sink = common.converter_sink
30 converter_sink = common.converter_sink
31 converter_source = common.converter_source
31 converter_source = common.converter_source
32 decodeargs = common.decodeargs
32 decodeargs = common.decodeargs
33 encodeargs = common.encodeargs
33 encodeargs = common.encodeargs
34 makedatetimestamp = common.makedatetimestamp
34 makedatetimestamp = common.makedatetimestamp
35 mapfile = common.mapfile
35 mapfile = common.mapfile
36 MissingTool = common.MissingTool
36 MissingTool = common.MissingTool
37 NoRepo = common.NoRepo
37 NoRepo = common.NoRepo
38
38
39 # Subversion stuff. Works best with very recent Python SVN bindings
39 # Subversion stuff. Works best with very recent Python SVN bindings
40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
41 # these bindings.
41 # these bindings.
42
42
43 try:
43 try:
44 import svn
44 import svn
45 import svn.client
45 import svn.client
46 import svn.core
46 import svn.core
47 import svn.ra
47 import svn.ra
48 import svn.delta
48 import svn.delta
49 from . import transport
49 from . import transport
50 import warnings
50 import warnings
51 warnings.filterwarnings('ignore',
51 warnings.filterwarnings('ignore',
52 module='svn.core',
52 module='svn.core',
53 category=DeprecationWarning)
53 category=DeprecationWarning)
54 svn.core.SubversionException # trigger import to catch error
54 svn.core.SubversionException # trigger import to catch error
55
55
56 except ImportError:
56 except ImportError:
57 svn = None
57 svn = None
58
58
59 class SvnPathNotFound(Exception):
59 class SvnPathNotFound(Exception):
60 pass
60 pass
61
61
62 def revsplit(rev):
62 def revsplit(rev):
63 """Parse a revision string and return (uuid, path, revnum).
63 """Parse a revision string and return (uuid, path, revnum).
64 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
64 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
65 ... '/proj%20B/mytrunk/mytrunk@1')
65 ... '/proj%20B/mytrunk/mytrunk@1')
66 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
66 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
67 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
67 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
68 ('', '', 1)
68 ('', '', 1)
69 >>> revsplit('@7')
69 >>> revsplit('@7')
70 ('', '', 7)
70 ('', '', 7)
71 >>> revsplit('7')
71 >>> revsplit('7')
72 ('', '', 0)
72 ('', '', 0)
73 >>> revsplit('bad')
73 >>> revsplit('bad')
74 ('', '', 0)
74 ('', '', 0)
75 """
75 """
76 parts = rev.rsplit('@', 1)
76 parts = rev.rsplit('@', 1)
77 revnum = 0
77 revnum = 0
78 if len(parts) > 1:
78 if len(parts) > 1:
79 revnum = int(parts[1])
79 revnum = int(parts[1])
80 parts = parts[0].split('/', 1)
80 parts = parts[0].split('/', 1)
81 uuid = ''
81 uuid = ''
82 mod = ''
82 mod = ''
83 if len(parts) > 1 and parts[0].startswith('svn:'):
83 if len(parts) > 1 and parts[0].startswith('svn:'):
84 uuid = parts[0][4:]
84 uuid = parts[0][4:]
85 mod = '/' + parts[1]
85 mod = '/' + parts[1]
86 return uuid, mod, revnum
86 return uuid, mod, revnum
87
87
88 def quote(s):
88 def quote(s):
89 # As of svn 1.7, many svn calls expect "canonical" paths. In
89 # As of svn 1.7, many svn calls expect "canonical" paths. In
90 # theory, we should call svn.core.*canonicalize() on all paths
90 # theory, we should call svn.core.*canonicalize() on all paths
91 # before passing them to the API. Instead, we assume the base url
91 # before passing them to the API. Instead, we assume the base url
92 # is canonical and copy the behaviour of svn URL encoding function
92 # is canonical and copy the behaviour of svn URL encoding function
93 # so we can extend it safely with new components. The "safe"
93 # so we can extend it safely with new components. The "safe"
94 # characters were taken from the "svn_uri__char_validity" table in
94 # characters were taken from the "svn_uri__char_validity" table in
95 # libsvn_subr/path.c.
95 # libsvn_subr/path.c.
96 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
96 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
97
97
98 def geturl(path):
98 def geturl(path):
99 try:
99 try:
100 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
100 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
101 except svn.core.SubversionException:
101 except svn.core.SubversionException:
102 # svn.client.url_from_path() fails with local repositories
102 # svn.client.url_from_path() fails with local repositories
103 pass
103 pass
104 if os.path.isdir(path):
104 if os.path.isdir(path):
105 path = os.path.normpath(os.path.abspath(path))
105 path = os.path.normpath(os.path.abspath(path))
106 if os.name == 'nt':
106 if pycompat.osname == 'nt':
107 path = '/' + util.normpath(path)
107 path = '/' + util.normpath(path)
108 # Module URL is later compared with the repository URL returned
108 # Module URL is later compared with the repository URL returned
109 # by svn API, which is UTF-8.
109 # by svn API, which is UTF-8.
110 path = encoding.tolocal(path)
110 path = encoding.tolocal(path)
111 path = 'file://%s' % quote(path)
111 path = 'file://%s' % quote(path)
112 return svn.core.svn_path_canonicalize(path)
112 return svn.core.svn_path_canonicalize(path)
113
113
114 def optrev(number):
114 def optrev(number):
115 optrev = svn.core.svn_opt_revision_t()
115 optrev = svn.core.svn_opt_revision_t()
116 optrev.kind = svn.core.svn_opt_revision_number
116 optrev.kind = svn.core.svn_opt_revision_number
117 optrev.value.number = number
117 optrev.value.number = number
118 return optrev
118 return optrev
119
119
120 class changedpath(object):
120 class changedpath(object):
121 def __init__(self, p):
121 def __init__(self, p):
122 self.copyfrom_path = p.copyfrom_path
122 self.copyfrom_path = p.copyfrom_path
123 self.copyfrom_rev = p.copyfrom_rev
123 self.copyfrom_rev = p.copyfrom_rev
124 self.action = p.action
124 self.action = p.action
125
125
126 def get_log_child(fp, url, paths, start, end, limit=0,
126 def get_log_child(fp, url, paths, start, end, limit=0,
127 discover_changed_paths=True, strict_node_history=False):
127 discover_changed_paths=True, strict_node_history=False):
128 protocol = -1
128 protocol = -1
129 def receiver(orig_paths, revnum, author, date, message, pool):
129 def receiver(orig_paths, revnum, author, date, message, pool):
130 paths = {}
130 paths = {}
131 if orig_paths is not None:
131 if orig_paths is not None:
132 for k, v in orig_paths.iteritems():
132 for k, v in orig_paths.iteritems():
133 paths[k] = changedpath(v)
133 paths[k] = changedpath(v)
134 pickle.dump((paths, revnum, author, date, message),
134 pickle.dump((paths, revnum, author, date, message),
135 fp, protocol)
135 fp, protocol)
136
136
137 try:
137 try:
138 # Use an ra of our own so that our parent can consume
138 # Use an ra of our own so that our parent can consume
139 # our results without confusing the server.
139 # our results without confusing the server.
140 t = transport.SvnRaTransport(url=url)
140 t = transport.SvnRaTransport(url=url)
141 svn.ra.get_log(t.ra, paths, start, end, limit,
141 svn.ra.get_log(t.ra, paths, start, end, limit,
142 discover_changed_paths,
142 discover_changed_paths,
143 strict_node_history,
143 strict_node_history,
144 receiver)
144 receiver)
145 except IOError:
145 except IOError:
146 # Caller may interrupt the iteration
146 # Caller may interrupt the iteration
147 pickle.dump(None, fp, protocol)
147 pickle.dump(None, fp, protocol)
148 except Exception as inst:
148 except Exception as inst:
149 pickle.dump(str(inst), fp, protocol)
149 pickle.dump(str(inst), fp, protocol)
150 else:
150 else:
151 pickle.dump(None, fp, protocol)
151 pickle.dump(None, fp, protocol)
152 fp.close()
152 fp.close()
153 # With large history, cleanup process goes crazy and suddenly
153 # With large history, cleanup process goes crazy and suddenly
154 # consumes *huge* amount of memory. The output file being closed,
154 # consumes *huge* amount of memory. The output file being closed,
155 # there is no need for clean termination.
155 # there is no need for clean termination.
156 os._exit(0)
156 os._exit(0)
157
157
158 def debugsvnlog(ui, **opts):
158 def debugsvnlog(ui, **opts):
159 """Fetch SVN log in a subprocess and channel them back to parent to
159 """Fetch SVN log in a subprocess and channel them back to parent to
160 avoid memory collection issues.
160 avoid memory collection issues.
161 """
161 """
162 if svn is None:
162 if svn is None:
163 raise error.Abort(_('debugsvnlog could not load Subversion python '
163 raise error.Abort(_('debugsvnlog could not load Subversion python '
164 'bindings'))
164 'bindings'))
165
165
166 args = decodeargs(ui.fin.read())
166 args = decodeargs(ui.fin.read())
167 get_log_child(ui.fout, *args)
167 get_log_child(ui.fout, *args)
168
168
169 class logstream(object):
169 class logstream(object):
170 """Interruptible revision log iterator."""
170 """Interruptible revision log iterator."""
171 def __init__(self, stdout):
171 def __init__(self, stdout):
172 self._stdout = stdout
172 self._stdout = stdout
173
173
174 def __iter__(self):
174 def __iter__(self):
175 while True:
175 while True:
176 try:
176 try:
177 entry = pickle.load(self._stdout)
177 entry = pickle.load(self._stdout)
178 except EOFError:
178 except EOFError:
179 raise error.Abort(_('Mercurial failed to run itself, check'
179 raise error.Abort(_('Mercurial failed to run itself, check'
180 ' hg executable is in PATH'))
180 ' hg executable is in PATH'))
181 try:
181 try:
182 orig_paths, revnum, author, date, message = entry
182 orig_paths, revnum, author, date, message = entry
183 except (TypeError, ValueError):
183 except (TypeError, ValueError):
184 if entry is None:
184 if entry is None:
185 break
185 break
186 raise error.Abort(_("log stream exception '%s'") % entry)
186 raise error.Abort(_("log stream exception '%s'") % entry)
187 yield entry
187 yield entry
188
188
189 def close(self):
189 def close(self):
190 if self._stdout:
190 if self._stdout:
191 self._stdout.close()
191 self._stdout.close()
192 self._stdout = None
192 self._stdout = None
193
193
194 class directlogstream(list):
194 class directlogstream(list):
195 """Direct revision log iterator.
195 """Direct revision log iterator.
196 This can be used for debugging and development but it will probably leak
196 This can be used for debugging and development but it will probably leak
197 memory and is not suitable for real conversions."""
197 memory and is not suitable for real conversions."""
198 def __init__(self, url, paths, start, end, limit=0,
198 def __init__(self, url, paths, start, end, limit=0,
199 discover_changed_paths=True, strict_node_history=False):
199 discover_changed_paths=True, strict_node_history=False):
200
200
201 def receiver(orig_paths, revnum, author, date, message, pool):
201 def receiver(orig_paths, revnum, author, date, message, pool):
202 paths = {}
202 paths = {}
203 if orig_paths is not None:
203 if orig_paths is not None:
204 for k, v in orig_paths.iteritems():
204 for k, v in orig_paths.iteritems():
205 paths[k] = changedpath(v)
205 paths[k] = changedpath(v)
206 self.append((paths, revnum, author, date, message))
206 self.append((paths, revnum, author, date, message))
207
207
208 # Use an ra of our own so that our parent can consume
208 # Use an ra of our own so that our parent can consume
209 # our results without confusing the server.
209 # our results without confusing the server.
210 t = transport.SvnRaTransport(url=url)
210 t = transport.SvnRaTransport(url=url)
211 svn.ra.get_log(t.ra, paths, start, end, limit,
211 svn.ra.get_log(t.ra, paths, start, end, limit,
212 discover_changed_paths,
212 discover_changed_paths,
213 strict_node_history,
213 strict_node_history,
214 receiver)
214 receiver)
215
215
216 def close(self):
216 def close(self):
217 pass
217 pass
218
218
219 # Check to see if the given path is a local Subversion repo. Verify this by
219 # Check to see if the given path is a local Subversion repo. Verify this by
220 # looking for several svn-specific files and directories in the given
220 # looking for several svn-specific files and directories in the given
221 # directory.
221 # directory.
222 def filecheck(ui, path, proto):
222 def filecheck(ui, path, proto):
223 for x in ('locks', 'hooks', 'format', 'db'):
223 for x in ('locks', 'hooks', 'format', 'db'):
224 if not os.path.exists(os.path.join(path, x)):
224 if not os.path.exists(os.path.join(path, x)):
225 return False
225 return False
226 return True
226 return True
227
227
228 # Check to see if a given path is the root of an svn repo over http. We verify
228 # Check to see if a given path is the root of an svn repo over http. We verify
229 # this by requesting a version-controlled URL we know can't exist and looking
229 # this by requesting a version-controlled URL we know can't exist and looking
230 # for the svn-specific "not found" XML.
230 # for the svn-specific "not found" XML.
231 def httpcheck(ui, path, proto):
231 def httpcheck(ui, path, proto):
232 try:
232 try:
233 opener = urlreq.buildopener()
233 opener = urlreq.buildopener()
234 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
234 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
235 data = rsp.read()
235 data = rsp.read()
236 except urlerr.httperror as inst:
236 except urlerr.httperror as inst:
237 if inst.code != 404:
237 if inst.code != 404:
238 # Except for 404 we cannot know for sure this is not an svn repo
238 # Except for 404 we cannot know for sure this is not an svn repo
239 ui.warn(_('svn: cannot probe remote repository, assume it could '
239 ui.warn(_('svn: cannot probe remote repository, assume it could '
240 'be a subversion repository. Use --source-type if you '
240 'be a subversion repository. Use --source-type if you '
241 'know better.\n'))
241 'know better.\n'))
242 return True
242 return True
243 data = inst.fp.read()
243 data = inst.fp.read()
244 except Exception:
244 except Exception:
245 # Could be urlerr.urlerror if the URL is invalid or anything else.
245 # Could be urlerr.urlerror if the URL is invalid or anything else.
246 return False
246 return False
247 return '<m:human-readable errcode="160013">' in data
247 return '<m:human-readable errcode="160013">' in data
248
248
249 protomap = {'http': httpcheck,
249 protomap = {'http': httpcheck,
250 'https': httpcheck,
250 'https': httpcheck,
251 'file': filecheck,
251 'file': filecheck,
252 }
252 }
253 def issvnurl(ui, url):
253 def issvnurl(ui, url):
254 try:
254 try:
255 proto, path = url.split('://', 1)
255 proto, path = url.split('://', 1)
256 if proto == 'file':
256 if proto == 'file':
257 if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha()
257 if (pycompat.osname == 'nt' and path[:1] == '/'
258 and path[2:6].lower() == '%3a/'):
258 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
259 path = path[:2] + ':/' + path[6:]
259 path = path[:2] + ':/' + path[6:]
260 path = urlreq.url2pathname(path)
260 path = urlreq.url2pathname(path)
261 except ValueError:
261 except ValueError:
262 proto = 'file'
262 proto = 'file'
263 path = os.path.abspath(url)
263 path = os.path.abspath(url)
264 if proto == 'file':
264 if proto == 'file':
265 path = util.pconvert(path)
265 path = util.pconvert(path)
266 check = protomap.get(proto, lambda *args: False)
266 check = protomap.get(proto, lambda *args: False)
267 while '/' in path:
267 while '/' in path:
268 if check(ui, path, proto):
268 if check(ui, path, proto):
269 return True
269 return True
270 path = path.rsplit('/', 1)[0]
270 path = path.rsplit('/', 1)[0]
271 return False
271 return False
272
272
273 # SVN conversion code stolen from bzr-svn and tailor
273 # SVN conversion code stolen from bzr-svn and tailor
274 #
274 #
275 # Subversion looks like a versioned filesystem, branches structures
275 # Subversion looks like a versioned filesystem, branches structures
276 # are defined by conventions and not enforced by the tool. First,
276 # are defined by conventions and not enforced by the tool. First,
277 # we define the potential branches (modules) as "trunk" and "branches"
277 # we define the potential branches (modules) as "trunk" and "branches"
278 # children directories. Revisions are then identified by their
278 # children directories. Revisions are then identified by their
279 # module and revision number (and a repository identifier).
279 # module and revision number (and a repository identifier).
280 #
280 #
281 # The revision graph is really a tree (or a forest). By default, a
281 # The revision graph is really a tree (or a forest). By default, a
282 # revision parent is the previous revision in the same module. If the
282 # revision parent is the previous revision in the same module. If the
283 # module directory is copied/moved from another module then the
283 # module directory is copied/moved from another module then the
284 # revision is the module root and its parent the source revision in
284 # revision is the module root and its parent the source revision in
285 # the parent module. A revision has at most one parent.
285 # the parent module. A revision has at most one parent.
286 #
286 #
287 class svn_source(converter_source):
287 class svn_source(converter_source):
    def __init__(self, ui, url, revs=None):
        """Probe *url* and set up a Subversion remote-access session.

        Raises NoRepo when the URL does not look like a Subversion
        repository, and MissingTool when the Subversion python bindings
        are unavailable or older than 1.4.  *revs* may hold at most one
        entry: the latest revision to convert.
        """
        super(svn_source, self).__init__(ui, url, revs=revs)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Bindings so old they lack the version constants entirely.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urlreq.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except svn.core.SubversionException:
            ui.traceback()
            svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
                                       svn.core.SVN_VER_MINOR,
                                       svn.core.SVN_VER_MICRO)
            raise NoRepo(_("%s does not look like a Subversion repository "
                           "to libsvn version %s")
                         % (self.url, svnversion))

        if revs:
            if len(revs) > 1:
                raise error.Abort(_('subversion source does not support '
                                    'specifying multiple revisions'))
            try:
                # An explicit --rev overrides any @rev URL suffix parsed above.
                latest = int(revs[0])
            except ValueError:
                raise error.Abort(_('svn: revision %s is not an integer') %
                                  revs[0])

        self.trunkname = self.ui.config('convert', 'svn.trunk',
                                        'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise error.Abort(_('svn: start revision %s is not an integer')
                              % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise error.Abort(_('no revision found in module %s')
                              % self.module)
        self.last_changed = self.revnum(self.head)

        # (rev, (files, copies)) cache shared by getchangedfiles/getchanges.
        self._changescache = (None, None)

        if os.path.exists(os.path.join(url, '.svn/entries')):
            # Converting from a working copy; remember it so converted()
            # can record the sha map alongside it.
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
382
382
383 def setrevmap(self, revmap):
383 def setrevmap(self, revmap):
384 lastrevs = {}
384 lastrevs = {}
385 for revid in revmap.iterkeys():
385 for revid in revmap.iterkeys():
386 uuid, module, revnum = revsplit(revid)
386 uuid, module, revnum = revsplit(revid)
387 lastrevnum = lastrevs.setdefault(module, revnum)
387 lastrevnum = lastrevs.setdefault(module, revnum)
388 if revnum > lastrevnum:
388 if revnum > lastrevnum:
389 lastrevs[module] = revnum
389 lastrevs[module] = revnum
390 self.lastrevs = lastrevs
390 self.lastrevs = lastrevs
391
391
392 def exists(self, path, optrev):
392 def exists(self, path, optrev):
393 try:
393 try:
394 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
394 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
395 optrev, False, self.ctx)
395 optrev, False, self.ctx)
396 return True
396 return True
397 except svn.core.SubversionException:
397 except svn.core.SubversionException:
398 return False
398 return False
399
399
    def getheads(self):
        """Discover the heads to convert and return them as revids.

        The module head comes first; additional heads are appended for
        every non-empty branch found under the configured branches
        directory.  Also resolves the trunk/tags/branches layout from
        the ``convert.svn.*`` configuration and updates ``self.module``,
        ``self.head``, ``self.tags`` and ``self.heads`` accordingly.
        """

        def isdir(path, revnum):
            # True when path is a directory at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve convert.svn.<name>; an explicitly empty setting
            # disables the convention, a missing path aborts only when
            # it was configured explicitly.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise error.Abort(_('expected %s to be at %r, but not found'
                                      ) % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise error.Abort(_('no revision found in module %s')
                                  % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + quote(branches),
                                        rev, False, self.ctx)
            for branch in sorted(branchnames):
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            # A start revision only makes sense for a single head.
            if len(self.heads) > 1:
                raise error.Abort(_('svn: start revision is not supported '
                                    'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise error.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
471
471
472 def _getchanges(self, rev, full):
472 def _getchanges(self, rev, full):
473 (paths, parents) = self.paths[rev]
473 (paths, parents) = self.paths[rev]
474 copies = {}
474 copies = {}
475 if parents:
475 if parents:
476 files, self.removed, copies = self.expandpaths(rev, paths, parents)
476 files, self.removed, copies = self.expandpaths(rev, paths, parents)
477 if full or not parents:
477 if full or not parents:
478 # Perform a full checkout on roots
478 # Perform a full checkout on roots
479 uuid, module, revnum = revsplit(rev)
479 uuid, module, revnum = revsplit(rev)
480 entries = svn.client.ls(self.baseurl + quote(module),
480 entries = svn.client.ls(self.baseurl + quote(module),
481 optrev(revnum), True, self.ctx)
481 optrev(revnum), True, self.ctx)
482 files = [n for n, e in entries.iteritems()
482 files = [n for n, e in entries.iteritems()
483 if e.kind == svn.core.svn_node_file]
483 if e.kind == svn.core.svn_node_file]
484 self.removed = set()
484 self.removed = set()
485
485
486 files.sort()
486 files.sort()
487 files = zip(files, [rev] * len(files))
487 files = zip(files, [rev] * len(files))
488 return (files, copies)
488 return (files, copies)
489
489
490 def getchanges(self, rev, full):
490 def getchanges(self, rev, full):
491 # reuse cache from getchangedfiles
491 # reuse cache from getchangedfiles
492 if self._changescache[0] == rev and not full:
492 if self._changescache[0] == rev and not full:
493 (files, copies) = self._changescache[1]
493 (files, copies) = self._changescache[1]
494 else:
494 else:
495 (files, copies) = self._getchanges(rev, full)
495 (files, copies) = self._getchanges(rev, full)
496 # caller caches the result, so free it here to release memory
496 # caller caches the result, so free it here to release memory
497 del self.paths[rev]
497 del self.paths[rev]
498 return (files, copies, set())
498 return (files, copies, set())
499
499
500 def getchangedfiles(self, rev, i):
500 def getchangedfiles(self, rev, i):
501 # called from filemap - cache computed values for reuse in getchanges
501 # called from filemap - cache computed values for reuse in getchanges
502 (files, copies) = self._getchanges(rev, False)
502 (files, copies) = self._getchanges(rev, False)
503 self._changescache = (rev, (files, copies))
503 self._changescache = (rev, (files, copies))
504 return [f[0] for f in files]
504 return [f[0] for f in files]
505
505
    def getcommit(self, rev):
        """Return the commit object for *rev*, fetching and caching a
        range of revisions from the server when it is not yet known.
        """
        if rev not in self.commits:
            uuid, module, revnum = revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            #   revision graph backward traversal. Cache all of them
            #   down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            #   isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
            if rev not in self.commits:
                raise error.Abort(_('svn: revision %s not found') % revnum)
        revcommit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return revcommit
527
527
528 def checkrevformat(self, revstr, mapname='splicemap'):
528 def checkrevformat(self, revstr, mapname='splicemap'):
529 """ fails if revision format does not match the correct format"""
529 """ fails if revision format does not match the correct format"""
530 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
530 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
531 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
531 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
532 r'{12,12}(.*)\@[0-9]+$',revstr):
532 r'{12,12}(.*)\@[0-9]+$',revstr):
533 raise error.Abort(_('%s entry %s is not a valid revision'
533 raise error.Abort(_('%s entry %s is not a valid revision'
534 ' identifier') % (mapname, revstr))
534 ' identifier') % (mapname, revstr))
535
535
536 def numcommits(self):
536 def numcommits(self):
537 return int(self.head.rsplit('@', 1)[1]) - self.startrev
537 return int(self.head.rsplit('@', 1)[1]) - self.startrev
538
538
    def gettags(self):
        """Return a {tagname: revid} map built from the tags directory,
        or an empty map when no tags directory is configured."""
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                if not origpaths:
                    origpaths = []
                # (source, source-rev, destination) triples for every
                # copy recorded in this log entry.
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            # A pending tag was copied again: follow it
                            # further back through this copy.
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
634
634
635 def converted(self, rev, destrev):
635 def converted(self, rev, destrev):
636 if not self.wc:
636 if not self.wc:
637 return
637 return
638 if self.convertfp is None:
638 if self.convertfp is None:
639 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
639 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
640 'a')
640 'a')
641 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
641 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
642 self.convertfp.flush()
642 self.convertfp.flush()
643
643
644 def revid(self, revnum, module=None):
644 def revid(self, revnum, module=None):
645 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
645 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
646
646
647 def revnum(self, rev):
647 def revnum(self, rev):
648 return int(rev.split('@')[-1])
648 return int(rev.split('@')[-1])
649
649
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """
        def findchanges(path, start, stop=None):
            # Walk the log of path backward from start, following
            # copy-from renames; return (revnum, possibly-renamed path).
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    if revnum <= stop:
                        break

                    for p in paths:
                        if (not path.startswith(p) or
                            not paths[p].copyfrom_path):
                            continue
                        newpath = paths[p].copyfrom_path + path[len(p):]
                        self.ui.debug("branch renamed from %s to %s at %d\n" %
                                      (path, newpath, revnum))
                        path = newpath
                        break
                if not paths:
                    # No changed paths in the last examined entry: signal
                    # the caller that no usable revision was found.
                    revnum = None
                return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except svn.core.SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revision, when
            # synchronizing only a subtree for instance. These empty
            # revisions created_rev still have their original values
            # despite all changes having disappeared and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug('ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)
723
723
724 def reparent(self, module):
724 def reparent(self, module):
725 """Reparent the svn transport and return the previous parent."""
725 """Reparent the svn transport and return the previous parent."""
726 if self.prevmodule == module:
726 if self.prevmodule == module:
727 return module
727 return module
728 svnurl = self.baseurl + quote(module)
728 svnurl = self.baseurl + quote(module)
729 prevmodule = self.prevmodule
729 prevmodule = self.prevmodule
730 if prevmodule is None:
730 if prevmodule is None:
731 prevmodule = ''
731 prevmodule = ''
732 self.ui.debug("reparent to %s\n" % svnurl)
732 self.ui.debug("reparent to %s\n" % svnurl)
733 svn.ra.reparent(self.ra, svnurl)
733 svn.ra.reparent(self.ra, svnurl)
734 self.prevmodule = module
734 self.prevmodule = module
735 return prevmodule
735 return prevmodule
736
736
    def expandpaths(self, rev, paths, parents):
        """Expand the changed-path entries of an svn log record into
        Mercurial-level change information.

        Returns a tuple (changed, removed, copies) where changed is a
        list of touched file paths (including the removed ones),
        removed is a set of deleted paths and copies maps destination
        paths to their copy source.
        """
        changed, removed = set(), set()
        copies = {}

        # Make sure the svn transport points at the module this
        # revision belongs to before issuing path checks.
        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths), unit=_('paths'))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                # Determine what the path was in the parent revision so
                # we know whether a file or a whole tree disappeared.
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A directory was deleted: every file it contained
                    # in the parent revision is gone.
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # Every file below the added/changed directory is touched.
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                # Record a per-file copy for each file carried over by
                # the directory copy.
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)
837
837
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Walk the svn log between the two revision numbers and fill
        self.commits and self.paths with the converted metadata.

        Raises error.Abort if the branch has no such revision.
        """
        # Iterate from the newest to the oldest revision.
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        # Only link to the parent if it is within the
                        # requested conversion range.
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
            if self.ui.configbool('convert', 'localtimezone'):
                date = makedatetimestamp(date[0])

            if message:
                log = self.recode(message)
            else:
                log = ''

            if author:
                author = self.recode(author)
            else:
                author = ''

            try:
                # The last path component names the branch; the trunk
                # maps to the default (None) branch.
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            # Link the previously parsed (newer) changeset to this one.
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except svn.core.SubversionException as xxx_todo_changeme:
            (inst, num) = xxx_todo_changeme.args
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise error.Abort(_('svn: branch has no revision %s')
                                  % to_revnum)
            raise
974
974
    def getfile(self, file, rev):
        """Return (data, mode) for file at rev.

        mode is 'x' for executables, 'l' for symlinks, '' otherwise.
        Returns (None, None) if the file is removed or does not exist
        at that revision.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            return None, None
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = stringio()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_file() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            # Derive the Mercurial mode flag from svn properties; the
            # svn:special check must come second so links win.
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except svn.core.SubversionException as e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                return None, None
            raise
        if mode == 'l':
            # svn stores symlink targets as "link TARGET"; strip the marker.
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
1006
1006
1007 def _iterfiles(self, path, revnum):
1007 def _iterfiles(self, path, revnum):
1008 """Enumerate all files in path at revnum, recursively."""
1008 """Enumerate all files in path at revnum, recursively."""
1009 path = path.strip('/')
1009 path = path.strip('/')
1010 pool = svn.core.Pool()
1010 pool = svn.core.Pool()
1011 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1011 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1012 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1012 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1013 if path:
1013 if path:
1014 path += '/'
1014 path += '/'
1015 return ((path + p) for p, e in entries.iteritems()
1015 return ((path + p) for p, e in entries.iteritems()
1016 if e.kind == svn.core.svn_node_file)
1016 if e.kind == svn.core.svn_node_file)
1017
1017
1018 def getrelpath(self, path, module=None):
1018 def getrelpath(self, path, module=None):
1019 if module is None:
1019 if module is None:
1020 module = self.module
1020 module = self.module
1021 # Given the repository url of this wc, say
1021 # Given the repository url of this wc, say
1022 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1022 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1023 # extract the "entry" portion (a relative path) from what
1023 # extract the "entry" portion (a relative path) from what
1024 # svn log --xml says, i.e.
1024 # svn log --xml says, i.e.
1025 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1025 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1026 # that is to say "tests/PloneTestCase.py"
1026 # that is to say "tests/PloneTestCase.py"
1027 if path.startswith(module):
1027 if path.startswith(module):
1028 relative = path.rstrip('/')[len(module):]
1028 relative = path.rstrip('/')[len(module):]
1029 if relative.startswith('/'):
1029 if relative.startswith('/'):
1030 return relative[1:]
1030 return relative[1:]
1031 elif relative == '':
1031 elif relative == '':
1032 return relative
1032 return relative
1033
1033
1034 # The path is outside our tracked tree...
1034 # The path is outside our tracked tree...
1035 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1035 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1036 return None
1036 return None
1037
1037
1038 def _checkpath(self, path, revnum, module=None):
1038 def _checkpath(self, path, revnum, module=None):
1039 if module is not None:
1039 if module is not None:
1040 prevmodule = self.reparent('')
1040 prevmodule = self.reparent('')
1041 path = module + '/' + path
1041 path = module + '/' + path
1042 try:
1042 try:
1043 # ra.check_path does not like leading slashes very much, it leads
1043 # ra.check_path does not like leading slashes very much, it leads
1044 # to PROPFIND subversion errors
1044 # to PROPFIND subversion errors
1045 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1045 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1046 finally:
1046 finally:
1047 if module is not None:
1047 if module is not None:
1048 self.reparent(prevmodule)
1048 self.reparent(prevmodule)
1049
1049
    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
                strict_node_history=False):
        """Return a stream of svn log entries for paths between the
        start and end revisions."""
        # Normalize path names, svn >= 1.5 only wants paths relative to
        # supplied URL
        relpaths = []
        for p in paths:
            if not p.startswith('/'):
                p = self.module + '/' + p
            relpaths.append(p.strip('/'))
        args = [self.baseurl, relpaths, start, end, limit,
                discover_changed_paths, strict_node_history]
        # developer config: convert.svn.debugsvnlog
        if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
            return directlogstream(*args)
        # Default path: run the log fetch in a child 'hg debugsvnlog'
        # process, passing the encoded arguments on its stdin —
        # presumably to isolate the svn bindings from this process
        # (NOTE(review): confirm rationale against debugsvnlog).
        arg = encodeargs(args)
        hgexe = util.hgexecutable()
        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
        stdin, stdout = util.popen2(util.quotecommand(cmd))
        stdin.write(arg)
        try:
            stdin.close()
        except IOError:
            raise error.Abort(_('Mercurial failed to run itself, check'
                               ' hg executable is in PATH'))
        return logstream(stdout)
1075
1075
# pre-revprop-change hook script installed into repositories created by
# svn_sink: it permits changing only svn:log and the hg:convert-*
# provenance properties and rejects every other revprop modification.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
1091
1091
class svn_sink(converter_sink, commandline):
    """Conversion sink writing changesets into a Subversion working
    copy by driving the 'svn' command line client."""
    # Matches "Committed revision NNN." in 'svn commit' output.
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    # Matches the repository UUID line in 'svn info' output.
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1095
1095
    def prerun(self):
        # svn commands must run from inside the working copy.
        if self.wc:
            os.chdir(self.wc)
1099
1099
    def postrun(self):
        # Restore the directory prerun() switched away from.
        if self.wc:
            os.chdir(self.cwd)
1103
1103
    def join(self, name):
        # Path of name inside the working copy's .svn administrative area.
        return os.path.join(self.wc, '.svn', name)
1106
1106
    def revmapfile(self):
        # File recording the source-to-svn revision mapping.
        return self.join('hg-shamap')
1109
1109
    def authorfile(self):
        # File recording the author mapping used for this conversion.
        return self.join('hg-authormap')
1112
1112
    def __init__(self, ui, path):
        """Open or create the svn repository/working copy at path."""
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        # Pending per-commit state: deletions, exec-bit changes, copies.
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        # Remember where we started so postrun() can chdir back.
        self.cwd = pycompat.getcwd()

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy: just bring it up to date.
            self.wc = os.path.realpath(path)
            self.run0('update')
        else:
            if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
                # Local path without a URL scheme: create the repository
                # on demand and build a file:// URL for it.
                path = os.path.realpath(path)
                if os.path.isdir(os.path.dirname(path)):
                    if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                        ui.status(_('initializing svn repository %r\n') %
                                  os.path.basename(path))
                        commandline(ui, 'svnadmin').run0('create', path)
                        created = path
                    path = util.normpath(path)
                    if not path.startswith('/'):
                        path = '/' + path
                    path = 'file://' + path

            # Check out a fresh working copy next to the current directory.
            wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
                                '-wc')
            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # Only track exec bits if the filesystem supports them.
        if util.checkexec(self.wc):
            self.is_exec = util.isexec
        else:
            self.is_exec = None

        if created:
            # Install the hook allowing the conversion to set revprops.
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()
1166
1166
    def wjoin(self, *names):
        # Join path components onto the working copy root.
        return os.path.join(self.wc, *names)
1169
1169
    @propertycache
    def manifest(self):
        """Set of paths currently tracked by the svn working copy."""
        # As of svn 1.7, the "add" command fails when receiving
        # already tracked entries, so we have to track and filter them
        # ourselves.
        m = set()
        output = self.run0('ls', recursive=True, xml=True)
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            for n in e.childNodes:
                if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
                    continue
                name = ''.join(c.data for c in n.childNodes
                               if c.nodeType == c.TEXT_NODE)
                # Entries are compared with names coming from
                # mercurial, so bytes with undefined encoding. Our
                # best bet is to assume they are in local
                # encoding. They will be passed to command line calls
                # later anyway, so they better be.
                m.add(encoding.tolocal(name.encode('utf-8')))
                break
        return m
1192
1192
    def putfile(self, filename, flags, data):
        """Write data to filename in the working copy, applying the
        'l' (symlink) and 'x' (executable) flags."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # Remove a stale symlink before writing regular content
                # over it.
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener.write(filename, data)

            if self.is_exec:
                # Queue svn propset/propdel for exec-bit transitions so
                # the commit step can apply them.
                if self.is_exec(self.wjoin(filename)):
                    if 'x' not in flags:
                        self.delexec.append(filename)
                else:
                    if 'x' in flags:
                        self.setexec.append(filename)
                util.setflags(self.wjoin(filename), False, 'x' in flags)
1212
1212
    def _copyfile(self, source, dest):
        """Record in svn that dest was copied from source."""
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            # Move the already-written destination aside so 'svn copy'
            # can create it, then restore the real content afterwards.
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            # Track dest even if the copy failed, so later adds skip it.
            self.manifest.add(dest)
            if exists:
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)
1235
1235
1236 def dirs_of(self, files):
1236 def dirs_of(self, files):
1237 dirs = set()
1237 dirs = set()
1238 for f in files:
1238 for f in files:
1239 if os.path.isdir(self.wjoin(f)):
1239 if os.path.isdir(self.wjoin(f)):
1240 dirs.add(f)
1240 dirs.add(f)
1241 i = len(f)
1241 i = len(f)
1242 for i in iter(lambda: f.rfind('/', 0, i), -1):
1242 for i in iter(lambda: f.rfind('/', 0, i), -1):
1243 dirs.add(f[:i])
1243 dirs.add(f[:i])
1244 return dirs
1244 return dirs
1245
1245
1246 def add_dirs(self, files):
1246 def add_dirs(self, files):
1247 add_dirs = [d for d in sorted(self.dirs_of(files))
1247 add_dirs = [d for d in sorted(self.dirs_of(files))
1248 if d not in self.manifest]
1248 if d not in self.manifest]
1249 if add_dirs:
1249 if add_dirs:
1250 self.manifest.update(add_dirs)
1250 self.manifest.update(add_dirs)
1251 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1251 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1252 return add_dirs
1252 return add_dirs
1253
1253
1254 def add_files(self, files):
1254 def add_files(self, files):
1255 files = [f for f in files if f not in self.manifest]
1255 files = [f for f in files if f not in self.manifest]
1256 if files:
1256 if files:
1257 self.manifest.update(files)
1257 self.manifest.update(files)
1258 self.xargs(files, 'add', quiet=True)
1258 self.xargs(files, 'add', quiet=True)
1259 return files
1259 return files
1260
1260
1261 def addchild(self, parent, child):
1261 def addchild(self, parent, child):
1262 self.childmap[parent] = child
1262 self.childmap[parent] = child
1263
1263
1264 def revid(self, rev):
1264 def revid(self, rev):
1265 return u"svn:%s@%s" % (self.uuid, rev)
1265 return u"svn:%s@%s" % (self.uuid, rev)
1266
1266
1267 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1267 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1268 cleanp2):
1268 cleanp2):
1269 for parent in parents:
1269 for parent in parents:
1270 try:
1270 try:
1271 return self.revid(self.childmap[parent])
1271 return self.revid(self.childmap[parent])
1272 except KeyError:
1272 except KeyError:
1273 pass
1273 pass
1274
1274
1275 # Apply changes to working copy
1275 # Apply changes to working copy
1276 for f, v in files:
1276 for f, v in files:
1277 data, mode = source.getfile(f, v)
1277 data, mode = source.getfile(f, v)
1278 if data is None:
1278 if data is None:
1279 self.delete.append(f)
1279 self.delete.append(f)
1280 else:
1280 else:
1281 self.putfile(f, mode, data)
1281 self.putfile(f, mode, data)
1282 if f in copies:
1282 if f in copies:
1283 self.copies.append([copies[f], f])
1283 self.copies.append([copies[f], f])
1284 if full:
1284 if full:
1285 self.delete.extend(sorted(self.manifest.difference(files)))
1285 self.delete.extend(sorted(self.manifest.difference(files)))
1286 files = [f[0] for f in files]
1286 files = [f[0] for f in files]
1287
1287
1288 entries = set(self.delete)
1288 entries = set(self.delete)
1289 files = frozenset(files)
1289 files = frozenset(files)
1290 entries.update(self.add_dirs(files.difference(entries)))
1290 entries.update(self.add_dirs(files.difference(entries)))
1291 if self.copies:
1291 if self.copies:
1292 for s, d in self.copies:
1292 for s, d in self.copies:
1293 self._copyfile(s, d)
1293 self._copyfile(s, d)
1294 self.copies = []
1294 self.copies = []
1295 if self.delete:
1295 if self.delete:
1296 self.xargs(self.delete, 'delete')
1296 self.xargs(self.delete, 'delete')
1297 for f in self.delete:
1297 for f in self.delete:
1298 self.manifest.remove(f)
1298 self.manifest.remove(f)
1299 self.delete = []
1299 self.delete = []
1300 entries.update(self.add_files(files.difference(entries)))
1300 entries.update(self.add_files(files.difference(entries)))
1301 if self.delexec:
1301 if self.delexec:
1302 self.xargs(self.delexec, 'propdel', 'svn:executable')
1302 self.xargs(self.delexec, 'propdel', 'svn:executable')
1303 self.delexec = []
1303 self.delexec = []
1304 if self.setexec:
1304 if self.setexec:
1305 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1305 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1306 self.setexec = []
1306 self.setexec = []
1307
1307
1308 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1308 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1309 fp = os.fdopen(fd, 'w')
1309 fp = os.fdopen(fd, 'w')
1310 fp.write(commit.desc)
1310 fp.write(commit.desc)
1311 fp.close()
1311 fp.close()
1312 try:
1312 try:
1313 output = self.run0('commit',
1313 output = self.run0('commit',
1314 username=util.shortuser(commit.author),
1314 username=util.shortuser(commit.author),
1315 file=messagefile,
1315 file=messagefile,
1316 encoding='utf-8')
1316 encoding='utf-8')
1317 try:
1317 try:
1318 rev = self.commit_re.search(output).group(1)
1318 rev = self.commit_re.search(output).group(1)
1319 except AttributeError:
1319 except AttributeError:
1320 if parents and not files:
1320 if parents and not files:
1321 return parents[0]
1321 return parents[0]
1322 self.ui.warn(_('unexpected svn output:\n'))
1322 self.ui.warn(_('unexpected svn output:\n'))
1323 self.ui.warn(output)
1323 self.ui.warn(output)
1324 raise error.Abort(_('unable to cope with svn output'))
1324 raise error.Abort(_('unable to cope with svn output'))
1325 if commit.rev:
1325 if commit.rev:
1326 self.run('propset', 'hg:convert-rev', commit.rev,
1326 self.run('propset', 'hg:convert-rev', commit.rev,
1327 revprop=True, revision=rev)
1327 revprop=True, revision=rev)
1328 if commit.branch and commit.branch != 'default':
1328 if commit.branch and commit.branch != 'default':
1329 self.run('propset', 'hg:convert-branch', commit.branch,
1329 self.run('propset', 'hg:convert-branch', commit.branch,
1330 revprop=True, revision=rev)
1330 revprop=True, revision=rev)
1331 for parent in parents:
1331 for parent in parents:
1332 self.addchild(parent, rev)
1332 self.addchild(parent, rev)
1333 return self.revid(rev)
1333 return self.revid(rev)
1334 finally:
1334 finally:
1335 os.unlink(messagefile)
1335 os.unlink(messagefile)
1336
1336
1337 def puttags(self, tags):
1337 def puttags(self, tags):
1338 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1338 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1339 return None, None
1339 return None, None
1340
1340
1341 def hascommitfrommap(self, rev):
1341 def hascommitfrommap(self, rev):
1342 # We trust that revisions referenced in a map still is present
1342 # We trust that revisions referenced in a map still is present
1343 # TODO: implement something better if necessary and feasible
1343 # TODO: implement something better if necessary and feasible
1344 return True
1344 return True
1345
1345
1346 def hascommitforsplicemap(self, rev):
1346 def hascommitforsplicemap(self, rev):
1347 # This is not correct as one can convert to an existing subversion
1347 # This is not correct as one can convert to an existing subversion
1348 # repository and childmap would not list all revisions. Too bad.
1348 # repository and childmap would not list all revisions. Too bad.
1349 if rev in self.childmap:
1349 if rev in self.childmap:
1350 return True
1350 return True
1351 raise error.Abort(_('splice map revision %s not found in subversion '
1351 raise error.Abort(_('splice map revision %s not found in subversion '
1352 'child map (revision lookups are not implemented)')
1352 'child map (revision lookups are not implemented)')
1353 % rev)
1353 % rev)
@@ -1,662 +1,664 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import copy
12 import copy
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import platform
15 import platform
16 import stat
16 import stat
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19
19
20 from mercurial import (
20 from mercurial import (
21 dirstate,
21 dirstate,
22 error,
22 error,
23 httpconnection,
23 httpconnection,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pycompat,
26 scmutil,
27 scmutil,
27 util,
28 util,
28 )
29 )
29
30
30 shortname = '.hglf'
31 shortname = '.hglf'
31 shortnameslash = shortname + '/'
32 shortnameslash = shortname + '/'
32 longname = 'largefiles'
33 longname = 'largefiles'
33
34
34 # -- Private worker functions ------------------------------------------
35 # -- Private worker functions ------------------------------------------
35
36
36 def getminsize(ui, assumelfiles, opt, default=10):
37 def getminsize(ui, assumelfiles, opt, default=10):
37 lfsize = opt
38 lfsize = opt
38 if not lfsize and assumelfiles:
39 if not lfsize and assumelfiles:
39 lfsize = ui.config(longname, 'minsize', default=default)
40 lfsize = ui.config(longname, 'minsize', default=default)
40 if lfsize:
41 if lfsize:
41 try:
42 try:
42 lfsize = float(lfsize)
43 lfsize = float(lfsize)
43 except ValueError:
44 except ValueError:
44 raise error.Abort(_('largefiles: size must be number (not %s)\n')
45 raise error.Abort(_('largefiles: size must be number (not %s)\n')
45 % lfsize)
46 % lfsize)
46 if lfsize is None:
47 if lfsize is None:
47 raise error.Abort(_('minimum size for largefiles must be specified'))
48 raise error.Abort(_('minimum size for largefiles must be specified'))
48 return lfsize
49 return lfsize
49
50
50 def link(src, dest):
51 def link(src, dest):
51 """Try to create hardlink - if that fails, efficiently make a copy."""
52 """Try to create hardlink - if that fails, efficiently make a copy."""
52 util.makedirs(os.path.dirname(dest))
53 util.makedirs(os.path.dirname(dest))
53 try:
54 try:
54 util.oslink(src, dest)
55 util.oslink(src, dest)
55 except OSError:
56 except OSError:
56 # if hardlinks fail, fallback on atomic copy
57 # if hardlinks fail, fallback on atomic copy
57 with open(src, 'rb') as srcf:
58 with open(src, 'rb') as srcf:
58 with util.atomictempfile(dest) as dstf:
59 with util.atomictempfile(dest) as dstf:
59 for chunk in util.filechunkiter(srcf):
60 for chunk in util.filechunkiter(srcf):
60 dstf.write(chunk)
61 dstf.write(chunk)
61 os.chmod(dest, os.stat(src).st_mode)
62 os.chmod(dest, os.stat(src).st_mode)
62
63
63 def usercachepath(ui, hash):
64 def usercachepath(ui, hash):
64 '''Return the correct location in the "global" largefiles cache for a file
65 '''Return the correct location in the "global" largefiles cache for a file
65 with the given hash.
66 with the given hash.
66 This cache is used for sharing of largefiles across repositories - both
67 This cache is used for sharing of largefiles across repositories - both
67 to preserve download bandwidth and storage space.'''
68 to preserve download bandwidth and storage space.'''
68 return os.path.join(_usercachedir(ui), hash)
69 return os.path.join(_usercachedir(ui), hash)
69
70
70 def _usercachedir(ui):
71 def _usercachedir(ui):
71 '''Return the location of the "global" largefiles cache.'''
72 '''Return the location of the "global" largefiles cache.'''
72 path = ui.configpath(longname, 'usercache', None)
73 path = ui.configpath(longname, 'usercache', None)
73 if path:
74 if path:
74 return path
75 return path
75 if os.name == 'nt':
76 if pycompat.osname == 'nt':
76 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
77 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
77 if appdata:
78 if appdata:
78 return os.path.join(appdata, longname)
79 return os.path.join(appdata, longname)
79 elif platform.system() == 'Darwin':
80 elif platform.system() == 'Darwin':
80 home = os.getenv('HOME')
81 home = os.getenv('HOME')
81 if home:
82 if home:
82 return os.path.join(home, 'Library', 'Caches', longname)
83 return os.path.join(home, 'Library', 'Caches', longname)
83 elif os.name == 'posix':
84 elif pycompat.osname == 'posix':
84 path = os.getenv('XDG_CACHE_HOME')
85 path = os.getenv('XDG_CACHE_HOME')
85 if path:
86 if path:
86 return os.path.join(path, longname)
87 return os.path.join(path, longname)
87 home = os.getenv('HOME')
88 home = os.getenv('HOME')
88 if home:
89 if home:
89 return os.path.join(home, '.cache', longname)
90 return os.path.join(home, '.cache', longname)
90 else:
91 else:
91 raise error.Abort(_('unknown operating system: %s\n') % os.name)
92 raise error.Abort(_('unknown operating system: %s\n')
93 % pycompat.osname)
92 raise error.Abort(_('unknown %s usercache location') % longname)
94 raise error.Abort(_('unknown %s usercache location') % longname)
93
95
94 def inusercache(ui, hash):
96 def inusercache(ui, hash):
95 path = usercachepath(ui, hash)
97 path = usercachepath(ui, hash)
96 return os.path.exists(path)
98 return os.path.exists(path)
97
99
98 def findfile(repo, hash):
100 def findfile(repo, hash):
99 '''Return store path of the largefile with the specified hash.
101 '''Return store path of the largefile with the specified hash.
100 As a side effect, the file might be linked from user cache.
102 As a side effect, the file might be linked from user cache.
101 Return None if the file can't be found locally.'''
103 Return None if the file can't be found locally.'''
102 path, exists = findstorepath(repo, hash)
104 path, exists = findstorepath(repo, hash)
103 if exists:
105 if exists:
104 repo.ui.note(_('found %s in store\n') % hash)
106 repo.ui.note(_('found %s in store\n') % hash)
105 return path
107 return path
106 elif inusercache(repo.ui, hash):
108 elif inusercache(repo.ui, hash):
107 repo.ui.note(_('found %s in system cache\n') % hash)
109 repo.ui.note(_('found %s in system cache\n') % hash)
108 path = storepath(repo, hash)
110 path = storepath(repo, hash)
109 link(usercachepath(repo.ui, hash), path)
111 link(usercachepath(repo.ui, hash), path)
110 return path
112 return path
111 return None
113 return None
112
114
113 class largefilesdirstate(dirstate.dirstate):
115 class largefilesdirstate(dirstate.dirstate):
114 def __getitem__(self, key):
116 def __getitem__(self, key):
115 return super(largefilesdirstate, self).__getitem__(unixpath(key))
117 return super(largefilesdirstate, self).__getitem__(unixpath(key))
116 def normal(self, f):
118 def normal(self, f):
117 return super(largefilesdirstate, self).normal(unixpath(f))
119 return super(largefilesdirstate, self).normal(unixpath(f))
118 def remove(self, f):
120 def remove(self, f):
119 return super(largefilesdirstate, self).remove(unixpath(f))
121 return super(largefilesdirstate, self).remove(unixpath(f))
120 def add(self, f):
122 def add(self, f):
121 return super(largefilesdirstate, self).add(unixpath(f))
123 return super(largefilesdirstate, self).add(unixpath(f))
122 def drop(self, f):
124 def drop(self, f):
123 return super(largefilesdirstate, self).drop(unixpath(f))
125 return super(largefilesdirstate, self).drop(unixpath(f))
124 def forget(self, f):
126 def forget(self, f):
125 return super(largefilesdirstate, self).forget(unixpath(f))
127 return super(largefilesdirstate, self).forget(unixpath(f))
126 def normallookup(self, f):
128 def normallookup(self, f):
127 return super(largefilesdirstate, self).normallookup(unixpath(f))
129 return super(largefilesdirstate, self).normallookup(unixpath(f))
128 def _ignore(self, f):
130 def _ignore(self, f):
129 return False
131 return False
130 def write(self, tr=False):
132 def write(self, tr=False):
131 # (1) disable PENDING mode always
133 # (1) disable PENDING mode always
132 # (lfdirstate isn't yet managed as a part of the transaction)
134 # (lfdirstate isn't yet managed as a part of the transaction)
133 # (2) avoid develwarn 'use dirstate.write with ....'
135 # (2) avoid develwarn 'use dirstate.write with ....'
134 super(largefilesdirstate, self).write(None)
136 super(largefilesdirstate, self).write(None)
135
137
136 def openlfdirstate(ui, repo, create=True):
138 def openlfdirstate(ui, repo, create=True):
137 '''
139 '''
138 Return a dirstate object that tracks largefiles: i.e. its root is
140 Return a dirstate object that tracks largefiles: i.e. its root is
139 the repo root, but it is saved in .hg/largefiles/dirstate.
141 the repo root, but it is saved in .hg/largefiles/dirstate.
140 '''
142 '''
141 vfs = repo.vfs
143 vfs = repo.vfs
142 lfstoredir = longname
144 lfstoredir = longname
143 opener = scmutil.opener(vfs.join(lfstoredir))
145 opener = scmutil.opener(vfs.join(lfstoredir))
144 lfdirstate = largefilesdirstate(opener, ui, repo.root,
146 lfdirstate = largefilesdirstate(opener, ui, repo.root,
145 repo.dirstate._validate)
147 repo.dirstate._validate)
146
148
147 # If the largefiles dirstate does not exist, populate and create
149 # If the largefiles dirstate does not exist, populate and create
148 # it. This ensures that we create it on the first meaningful
150 # it. This ensures that we create it on the first meaningful
149 # largefiles operation in a new clone.
151 # largefiles operation in a new clone.
150 if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
152 if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
151 matcher = getstandinmatcher(repo)
153 matcher = getstandinmatcher(repo)
152 standins = repo.dirstate.walk(matcher, [], False, False)
154 standins = repo.dirstate.walk(matcher, [], False, False)
153
155
154 if len(standins) > 0:
156 if len(standins) > 0:
155 vfs.makedirs(lfstoredir)
157 vfs.makedirs(lfstoredir)
156
158
157 for standin in standins:
159 for standin in standins:
158 lfile = splitstandin(standin)
160 lfile = splitstandin(standin)
159 lfdirstate.normallookup(lfile)
161 lfdirstate.normallookup(lfile)
160 return lfdirstate
162 return lfdirstate
161
163
162 def lfdirstatestatus(lfdirstate, repo):
164 def lfdirstatestatus(lfdirstate, repo):
163 wctx = repo['.']
165 wctx = repo['.']
164 match = matchmod.always(repo.root, repo.getcwd())
166 match = matchmod.always(repo.root, repo.getcwd())
165 unsure, s = lfdirstate.status(match, [], False, False, False)
167 unsure, s = lfdirstate.status(match, [], False, False, False)
166 modified, clean = s.modified, s.clean
168 modified, clean = s.modified, s.clean
167 for lfile in unsure:
169 for lfile in unsure:
168 try:
170 try:
169 fctx = wctx[standin(lfile)]
171 fctx = wctx[standin(lfile)]
170 except LookupError:
172 except LookupError:
171 fctx = None
173 fctx = None
172 if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
174 if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
173 modified.append(lfile)
175 modified.append(lfile)
174 else:
176 else:
175 clean.append(lfile)
177 clean.append(lfile)
176 lfdirstate.normal(lfile)
178 lfdirstate.normal(lfile)
177 return s
179 return s
178
180
179 def listlfiles(repo, rev=None, matcher=None):
181 def listlfiles(repo, rev=None, matcher=None):
180 '''return a list of largefiles in the working copy or the
182 '''return a list of largefiles in the working copy or the
181 specified changeset'''
183 specified changeset'''
182
184
183 if matcher is None:
185 if matcher is None:
184 matcher = getstandinmatcher(repo)
186 matcher = getstandinmatcher(repo)
185
187
186 # ignore unknown files in working directory
188 # ignore unknown files in working directory
187 return [splitstandin(f)
189 return [splitstandin(f)
188 for f in repo[rev].walk(matcher)
190 for f in repo[rev].walk(matcher)
189 if rev is not None or repo.dirstate[f] != '?']
191 if rev is not None or repo.dirstate[f] != '?']
190
192
191 def instore(repo, hash, forcelocal=False):
193 def instore(repo, hash, forcelocal=False):
192 '''Return true if a largefile with the given hash exists in the store'''
194 '''Return true if a largefile with the given hash exists in the store'''
193 return os.path.exists(storepath(repo, hash, forcelocal))
195 return os.path.exists(storepath(repo, hash, forcelocal))
194
196
195 def storepath(repo, hash, forcelocal=False):
197 def storepath(repo, hash, forcelocal=False):
196 '''Return the correct location in the repository largefiles store for a
198 '''Return the correct location in the repository largefiles store for a
197 file with the given hash.'''
199 file with the given hash.'''
198 if not forcelocal and repo.shared():
200 if not forcelocal and repo.shared():
199 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
201 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
200 return repo.join(longname, hash)
202 return repo.join(longname, hash)
201
203
202 def findstorepath(repo, hash):
204 def findstorepath(repo, hash):
203 '''Search through the local store path(s) to find the file for the given
205 '''Search through the local store path(s) to find the file for the given
204 hash. If the file is not found, its path in the primary store is returned.
206 hash. If the file is not found, its path in the primary store is returned.
205 The return value is a tuple of (path, exists(path)).
207 The return value is a tuple of (path, exists(path)).
206 '''
208 '''
207 # For shared repos, the primary store is in the share source. But for
209 # For shared repos, the primary store is in the share source. But for
208 # backward compatibility, force a lookup in the local store if it wasn't
210 # backward compatibility, force a lookup in the local store if it wasn't
209 # found in the share source.
211 # found in the share source.
210 path = storepath(repo, hash, False)
212 path = storepath(repo, hash, False)
211
213
212 if instore(repo, hash):
214 if instore(repo, hash):
213 return (path, True)
215 return (path, True)
214 elif repo.shared() and instore(repo, hash, True):
216 elif repo.shared() and instore(repo, hash, True):
215 return storepath(repo, hash, True), True
217 return storepath(repo, hash, True), True
216
218
217 return (path, False)
219 return (path, False)
218
220
219 def copyfromcache(repo, hash, filename):
221 def copyfromcache(repo, hash, filename):
220 '''Copy the specified largefile from the repo or system cache to
222 '''Copy the specified largefile from the repo or system cache to
221 filename in the repository. Return true on success or false if the
223 filename in the repository. Return true on success or false if the
222 file was not found in either cache (which should not happened:
224 file was not found in either cache (which should not happened:
223 this is meant to be called only after ensuring that the needed
225 this is meant to be called only after ensuring that the needed
224 largefile exists in the cache).'''
226 largefile exists in the cache).'''
225 wvfs = repo.wvfs
227 wvfs = repo.wvfs
226 path = findfile(repo, hash)
228 path = findfile(repo, hash)
227 if path is None:
229 if path is None:
228 return False
230 return False
229 wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
231 wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
230 # The write may fail before the file is fully written, but we
232 # The write may fail before the file is fully written, but we
231 # don't use atomic writes in the working copy.
233 # don't use atomic writes in the working copy.
232 with open(path, 'rb') as srcfd:
234 with open(path, 'rb') as srcfd:
233 with wvfs(filename, 'wb') as destfd:
235 with wvfs(filename, 'wb') as destfd:
234 gothash = copyandhash(
236 gothash = copyandhash(
235 util.filechunkiter(srcfd), destfd)
237 util.filechunkiter(srcfd), destfd)
236 if gothash != hash:
238 if gothash != hash:
237 repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
239 repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
238 % (filename, path, gothash))
240 % (filename, path, gothash))
239 wvfs.unlink(filename)
241 wvfs.unlink(filename)
240 return False
242 return False
241 return True
243 return True
242
244
243 def copytostore(repo, rev, file, uploaded=False):
245 def copytostore(repo, rev, file, uploaded=False):
244 wvfs = repo.wvfs
246 wvfs = repo.wvfs
245 hash = readstandin(repo, file, rev)
247 hash = readstandin(repo, file, rev)
246 if instore(repo, hash):
248 if instore(repo, hash):
247 return
249 return
248 if wvfs.exists(file):
250 if wvfs.exists(file):
249 copytostoreabsolute(repo, wvfs.join(file), hash)
251 copytostoreabsolute(repo, wvfs.join(file), hash)
250 else:
252 else:
251 repo.ui.warn(_("%s: largefile %s not available from local store\n") %
253 repo.ui.warn(_("%s: largefile %s not available from local store\n") %
252 (file, hash))
254 (file, hash))
253
255
254 def copyalltostore(repo, node):
256 def copyalltostore(repo, node):
255 '''Copy all largefiles in a given revision to the store'''
257 '''Copy all largefiles in a given revision to the store'''
256
258
257 ctx = repo[node]
259 ctx = repo[node]
258 for filename in ctx.files():
260 for filename in ctx.files():
259 if isstandin(filename) and filename in ctx.manifest():
261 if isstandin(filename) and filename in ctx.manifest():
260 realfile = splitstandin(filename)
262 realfile = splitstandin(filename)
261 copytostore(repo, ctx.node(), realfile)
263 copytostore(repo, ctx.node(), realfile)
262
264
263 def copytostoreabsolute(repo, file, hash):
265 def copytostoreabsolute(repo, file, hash):
264 if inusercache(repo.ui, hash):
266 if inusercache(repo.ui, hash):
265 link(usercachepath(repo.ui, hash), storepath(repo, hash))
267 link(usercachepath(repo.ui, hash), storepath(repo, hash))
266 else:
268 else:
267 util.makedirs(os.path.dirname(storepath(repo, hash)))
269 util.makedirs(os.path.dirname(storepath(repo, hash)))
268 with open(file, 'rb') as srcf:
270 with open(file, 'rb') as srcf:
269 with util.atomictempfile(storepath(repo, hash),
271 with util.atomictempfile(storepath(repo, hash),
270 createmode=repo.store.createmode) as dstf:
272 createmode=repo.store.createmode) as dstf:
271 for chunk in util.filechunkiter(srcf):
273 for chunk in util.filechunkiter(srcf):
272 dstf.write(chunk)
274 dstf.write(chunk)
273 linktousercache(repo, hash)
275 linktousercache(repo, hash)
274
276
275 def linktousercache(repo, hash):
277 def linktousercache(repo, hash):
276 '''Link / copy the largefile with the specified hash from the store
278 '''Link / copy the largefile with the specified hash from the store
277 to the cache.'''
279 to the cache.'''
278 path = usercachepath(repo.ui, hash)
280 path = usercachepath(repo.ui, hash)
279 link(storepath(repo, hash), path)
281 link(storepath(repo, hash), path)
280
282
281 def getstandinmatcher(repo, rmatcher=None):
283 def getstandinmatcher(repo, rmatcher=None):
282 '''Return a match object that applies rmatcher to the standin directory'''
284 '''Return a match object that applies rmatcher to the standin directory'''
283 wvfs = repo.wvfs
285 wvfs = repo.wvfs
284 standindir = shortname
286 standindir = shortname
285
287
286 # no warnings about missing files or directories
288 # no warnings about missing files or directories
287 badfn = lambda f, msg: None
289 badfn = lambda f, msg: None
288
290
289 if rmatcher and not rmatcher.always():
291 if rmatcher and not rmatcher.always():
290 pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
292 pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
291 if not pats:
293 if not pats:
292 pats = [wvfs.join(standindir)]
294 pats = [wvfs.join(standindir)]
293 match = scmutil.match(repo[None], pats, badfn=badfn)
295 match = scmutil.match(repo[None], pats, badfn=badfn)
294 # if pats is empty, it would incorrectly always match, so clear _always
296 # if pats is empty, it would incorrectly always match, so clear _always
295 match._always = False
297 match._always = False
296 else:
298 else:
297 # no patterns: relative to repo root
299 # no patterns: relative to repo root
298 match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
300 match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
299 return match
301 return match
300
302
301 def composestandinmatcher(repo, rmatcher):
303 def composestandinmatcher(repo, rmatcher):
302 '''Return a matcher that accepts standins corresponding to the
304 '''Return a matcher that accepts standins corresponding to the
303 files accepted by rmatcher. Pass the list of files in the matcher
305 files accepted by rmatcher. Pass the list of files in the matcher
304 as the paths specified by the user.'''
306 as the paths specified by the user.'''
305 smatcher = getstandinmatcher(repo, rmatcher)
307 smatcher = getstandinmatcher(repo, rmatcher)
306 isstandin = smatcher.matchfn
308 isstandin = smatcher.matchfn
307 def composedmatchfn(f):
309 def composedmatchfn(f):
308 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
310 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
309 smatcher.matchfn = composedmatchfn
311 smatcher.matchfn = composedmatchfn
310
312
311 return smatcher
313 return smatcher
312
314
313 def standin(filename):
315 def standin(filename):
314 '''Return the repo-relative path to the standin for the specified big
316 '''Return the repo-relative path to the standin for the specified big
315 file.'''
317 file.'''
316 # Notes:
318 # Notes:
317 # 1) Some callers want an absolute path, but for instance addlargefiles
319 # 1) Some callers want an absolute path, but for instance addlargefiles
318 # needs it repo-relative so it can be passed to repo[None].add(). So
320 # needs it repo-relative so it can be passed to repo[None].add(). So
319 # leave it up to the caller to use repo.wjoin() to get an absolute path.
321 # leave it up to the caller to use repo.wjoin() to get an absolute path.
320 # 2) Join with '/' because that's what dirstate always uses, even on
322 # 2) Join with '/' because that's what dirstate always uses, even on
321 # Windows. Change existing separator to '/' first in case we are
323 # Windows. Change existing separator to '/' first in case we are
322 # passed filenames from an external source (like the command line).
324 # passed filenames from an external source (like the command line).
323 return shortnameslash + util.pconvert(filename)
325 return shortnameslash + util.pconvert(filename)
324
326
325 def isstandin(filename):
327 def isstandin(filename):
326 '''Return true if filename is a big file standin. filename must be
328 '''Return true if filename is a big file standin. filename must be
327 in Mercurial's internal form (slash-separated).'''
329 in Mercurial's internal form (slash-separated).'''
328 return filename.startswith(shortnameslash)
330 return filename.startswith(shortnameslash)
329
331
330 def splitstandin(filename):
332 def splitstandin(filename):
331 # Split on / because that's what dirstate always uses, even on Windows.
333 # Split on / because that's what dirstate always uses, even on Windows.
332 # Change local separator to / first just in case we are passed filenames
334 # Change local separator to / first just in case we are passed filenames
333 # from an external source (like the command line).
335 # from an external source (like the command line).
334 bits = util.pconvert(filename).split('/', 1)
336 bits = util.pconvert(filename).split('/', 1)
335 if len(bits) == 2 and bits[0] == shortname:
337 if len(bits) == 2 and bits[0] == shortname:
336 return bits[1]
338 return bits[1]
337 else:
339 else:
338 return None
340 return None
339
341
340 def updatestandin(repo, standin):
342 def updatestandin(repo, standin):
341 file = repo.wjoin(splitstandin(standin))
343 file = repo.wjoin(splitstandin(standin))
342 if repo.wvfs.exists(splitstandin(standin)):
344 if repo.wvfs.exists(splitstandin(standin)):
343 hash = hashfile(file)
345 hash = hashfile(file)
344 executable = getexecutable(file)
346 executable = getexecutable(file)
345 writestandin(repo, standin, hash, executable)
347 writestandin(repo, standin, hash, executable)
346 else:
348 else:
347 raise error.Abort(_('%s: file not found!') % splitstandin(standin))
349 raise error.Abort(_('%s: file not found!') % splitstandin(standin))
348
350
349 def readstandin(repo, filename, node=None):
351 def readstandin(repo, filename, node=None):
350 '''read hex hash from standin for filename at given node, or working
352 '''read hex hash from standin for filename at given node, or working
351 directory if no node is given'''
353 directory if no node is given'''
352 return repo[node][standin(filename)].data().strip()
354 return repo[node][standin(filename)].data().strip()
353
355
354 def writestandin(repo, standin, hash, executable):
356 def writestandin(repo, standin, hash, executable):
355 '''write hash to <repo.root>/<standin>'''
357 '''write hash to <repo.root>/<standin>'''
356 repo.wwrite(standin, hash + '\n', executable and 'x' or '')
358 repo.wwrite(standin, hash + '\n', executable and 'x' or '')
357
359
358 def copyandhash(instream, outfile):
360 def copyandhash(instream, outfile):
359 '''Read bytes from instream (iterable) and write them to outfile,
361 '''Read bytes from instream (iterable) and write them to outfile,
360 computing the SHA-1 hash of the data along the way. Return the hash.'''
362 computing the SHA-1 hash of the data along the way. Return the hash.'''
361 hasher = hashlib.sha1('')
363 hasher = hashlib.sha1('')
362 for data in instream:
364 for data in instream:
363 hasher.update(data)
365 hasher.update(data)
364 outfile.write(data)
366 outfile.write(data)
365 return hasher.hexdigest()
367 return hasher.hexdigest()
366
368
367 def hashrepofile(repo, file):
369 def hashrepofile(repo, file):
368 return hashfile(repo.wjoin(file))
370 return hashfile(repo.wjoin(file))
369
371
370 def hashfile(file):
372 def hashfile(file):
371 if not os.path.exists(file):
373 if not os.path.exists(file):
372 return ''
374 return ''
373 hasher = hashlib.sha1('')
375 hasher = hashlib.sha1('')
374 with open(file, 'rb') as fd:
376 with open(file, 'rb') as fd:
375 for data in util.filechunkiter(fd):
377 for data in util.filechunkiter(fd):
376 hasher.update(data)
378 hasher.update(data)
377 return hasher.hexdigest()
379 return hasher.hexdigest()
378
380
379 def getexecutable(filename):
381 def getexecutable(filename):
380 mode = os.stat(filename).st_mode
382 mode = os.stat(filename).st_mode
381 return ((mode & stat.S_IXUSR) and
383 return ((mode & stat.S_IXUSR) and
382 (mode & stat.S_IXGRP) and
384 (mode & stat.S_IXGRP) and
383 (mode & stat.S_IXOTH))
385 (mode & stat.S_IXOTH))
384
386
385 def urljoin(first, second, *arg):
387 def urljoin(first, second, *arg):
386 def join(left, right):
388 def join(left, right):
387 if not left.endswith('/'):
389 if not left.endswith('/'):
388 left += '/'
390 left += '/'
389 if right.startswith('/'):
391 if right.startswith('/'):
390 right = right[1:]
392 right = right[1:]
391 return left + right
393 return left + right
392
394
393 url = join(first, second)
395 url = join(first, second)
394 for a in arg:
396 for a in arg:
395 url = join(url, a)
397 url = join(url, a)
396 return url
398 return url
397
399
398 def hexsha1(data):
400 def hexsha1(data):
399 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
401 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
400 object data"""
402 object data"""
401 h = hashlib.sha1()
403 h = hashlib.sha1()
402 for chunk in util.filechunkiter(data):
404 for chunk in util.filechunkiter(data):
403 h.update(chunk)
405 h.update(chunk)
404 return h.hexdigest()
406 return h.hexdigest()
405
407
406 def httpsendfile(ui, filename):
408 def httpsendfile(ui, filename):
407 return httpconnection.httpsendfile(ui, filename, 'rb')
409 return httpconnection.httpsendfile(ui, filename, 'rb')
408
410
409 def unixpath(path):
411 def unixpath(path):
410 '''Return a version of path normalized for use with the lfdirstate.'''
412 '''Return a version of path normalized for use with the lfdirstate.'''
411 return util.pconvert(os.path.normpath(path))
413 return util.pconvert(os.path.normpath(path))
412
414
413 def islfilesrepo(repo):
415 def islfilesrepo(repo):
414 '''Return true if the repo is a largefile repo.'''
416 '''Return true if the repo is a largefile repo.'''
415 if ('largefiles' in repo.requirements and
417 if ('largefiles' in repo.requirements and
416 any(shortnameslash in f[0] for f in repo.store.datafiles())):
418 any(shortnameslash in f[0] for f in repo.store.datafiles())):
417 return True
419 return True
418
420
419 return any(openlfdirstate(repo.ui, repo, False))
421 return any(openlfdirstate(repo.ui, repo, False))
420
422
421 class storeprotonotcapable(Exception):
423 class storeprotonotcapable(Exception):
422 def __init__(self, storetypes):
424 def __init__(self, storetypes):
423 self.storetypes = storetypes
425 self.storetypes = storetypes
424
426
425 def getstandinsstate(repo):
427 def getstandinsstate(repo):
426 standins = []
428 standins = []
427 matcher = getstandinmatcher(repo)
429 matcher = getstandinmatcher(repo)
428 for standin in repo.dirstate.walk(matcher, [], False, False):
430 for standin in repo.dirstate.walk(matcher, [], False, False):
429 lfile = splitstandin(standin)
431 lfile = splitstandin(standin)
430 try:
432 try:
431 hash = readstandin(repo, lfile)
433 hash = readstandin(repo, lfile)
432 except IOError:
434 except IOError:
433 hash = None
435 hash = None
434 standins.append((lfile, hash))
436 standins.append((lfile, hash))
435 return standins
437 return standins
436
438
437 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
439 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
438 lfstandin = standin(lfile)
440 lfstandin = standin(lfile)
439 if lfstandin in repo.dirstate:
441 if lfstandin in repo.dirstate:
440 stat = repo.dirstate._map[lfstandin]
442 stat = repo.dirstate._map[lfstandin]
441 state, mtime = stat[0], stat[3]
443 state, mtime = stat[0], stat[3]
442 else:
444 else:
443 state, mtime = '?', -1
445 state, mtime = '?', -1
444 if state == 'n':
446 if state == 'n':
445 if (normallookup or mtime < 0 or
447 if (normallookup or mtime < 0 or
446 not repo.wvfs.exists(lfile)):
448 not repo.wvfs.exists(lfile)):
447 # state 'n' doesn't ensure 'clean' in this case
449 # state 'n' doesn't ensure 'clean' in this case
448 lfdirstate.normallookup(lfile)
450 lfdirstate.normallookup(lfile)
449 else:
451 else:
450 lfdirstate.normal(lfile)
452 lfdirstate.normal(lfile)
451 elif state == 'm':
453 elif state == 'm':
452 lfdirstate.normallookup(lfile)
454 lfdirstate.normallookup(lfile)
453 elif state == 'r':
455 elif state == 'r':
454 lfdirstate.remove(lfile)
456 lfdirstate.remove(lfile)
455 elif state == 'a':
457 elif state == 'a':
456 lfdirstate.add(lfile)
458 lfdirstate.add(lfile)
457 elif state == '?':
459 elif state == '?':
458 lfdirstate.drop(lfile)
460 lfdirstate.drop(lfile)
459
461
460 def markcommitted(orig, ctx, node):
462 def markcommitted(orig, ctx, node):
461 repo = ctx.repo()
463 repo = ctx.repo()
462
464
463 orig(node)
465 orig(node)
464
466
465 # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
467 # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
466 # because files coming from the 2nd parent are omitted in the latter.
468 # because files coming from the 2nd parent are omitted in the latter.
467 #
469 #
468 # The former should be used to get targets of "synclfdirstate",
470 # The former should be used to get targets of "synclfdirstate",
469 # because such files:
471 # because such files:
470 # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
472 # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
471 # - have to be marked as "n" after commit, but
473 # - have to be marked as "n" after commit, but
472 # - aren't listed in "repo[node].files()"
474 # - aren't listed in "repo[node].files()"
473
475
474 lfdirstate = openlfdirstate(repo.ui, repo)
476 lfdirstate = openlfdirstate(repo.ui, repo)
475 for f in ctx.files():
477 for f in ctx.files():
476 if isstandin(f):
478 if isstandin(f):
477 lfile = splitstandin(f)
479 lfile = splitstandin(f)
478 synclfdirstate(repo, lfdirstate, lfile, False)
480 synclfdirstate(repo, lfdirstate, lfile, False)
479 lfdirstate.write()
481 lfdirstate.write()
480
482
481 # As part of committing, copy all of the largefiles into the cache.
483 # As part of committing, copy all of the largefiles into the cache.
482 copyalltostore(repo, node)
484 copyalltostore(repo, node)
483
485
484 def getlfilestoupdate(oldstandins, newstandins):
486 def getlfilestoupdate(oldstandins, newstandins):
485 changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
487 changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
486 filelist = []
488 filelist = []
487 for f in changedstandins:
489 for f in changedstandins:
488 if f[0] not in filelist:
490 if f[0] not in filelist:
489 filelist.append(f[0])
491 filelist.append(f[0])
490 return filelist
492 return filelist
491
493
492 def getlfilestoupload(repo, missing, addfunc):
494 def getlfilestoupload(repo, missing, addfunc):
493 for i, n in enumerate(missing):
495 for i, n in enumerate(missing):
494 repo.ui.progress(_('finding outgoing largefiles'), i,
496 repo.ui.progress(_('finding outgoing largefiles'), i,
495 unit=_('revisions'), total=len(missing))
497 unit=_('revisions'), total=len(missing))
496 parents = [p for p in repo[n].parents() if p != node.nullid]
498 parents = [p for p in repo[n].parents() if p != node.nullid]
497
499
498 oldlfstatus = repo.lfstatus
500 oldlfstatus = repo.lfstatus
499 repo.lfstatus = False
501 repo.lfstatus = False
500 try:
502 try:
501 ctx = repo[n]
503 ctx = repo[n]
502 finally:
504 finally:
503 repo.lfstatus = oldlfstatus
505 repo.lfstatus = oldlfstatus
504
506
505 files = set(ctx.files())
507 files = set(ctx.files())
506 if len(parents) == 2:
508 if len(parents) == 2:
507 mc = ctx.manifest()
509 mc = ctx.manifest()
508 mp1 = ctx.parents()[0].manifest()
510 mp1 = ctx.parents()[0].manifest()
509 mp2 = ctx.parents()[1].manifest()
511 mp2 = ctx.parents()[1].manifest()
510 for f in mp1:
512 for f in mp1:
511 if f not in mc:
513 if f not in mc:
512 files.add(f)
514 files.add(f)
513 for f in mp2:
515 for f in mp2:
514 if f not in mc:
516 if f not in mc:
515 files.add(f)
517 files.add(f)
516 for f in mc:
518 for f in mc:
517 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
519 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
518 files.add(f)
520 files.add(f)
519 for fn in files:
521 for fn in files:
520 if isstandin(fn) and fn in ctx:
522 if isstandin(fn) and fn in ctx:
521 addfunc(fn, ctx[fn].data().strip())
523 addfunc(fn, ctx[fn].data().strip())
522 repo.ui.progress(_('finding outgoing largefiles'), None)
524 repo.ui.progress(_('finding outgoing largefiles'), None)
523
525
524 def updatestandinsbymatch(repo, match):
526 def updatestandinsbymatch(repo, match):
525 '''Update standins in the working directory according to specified match
527 '''Update standins in the working directory according to specified match
526
528
527 This returns (possibly modified) ``match`` object to be used for
529 This returns (possibly modified) ``match`` object to be used for
528 subsequent commit process.
530 subsequent commit process.
529 '''
531 '''
530
532
531 ui = repo.ui
533 ui = repo.ui
532
534
533 # Case 1: user calls commit with no specific files or
535 # Case 1: user calls commit with no specific files or
534 # include/exclude patterns: refresh and commit all files that
536 # include/exclude patterns: refresh and commit all files that
535 # are "dirty".
537 # are "dirty".
536 if match is None or match.always():
538 if match is None or match.always():
537 # Spend a bit of time here to get a list of files we know
539 # Spend a bit of time here to get a list of files we know
538 # are modified so we can compare only against those.
540 # are modified so we can compare only against those.
539 # It can cost a lot of time (several seconds)
541 # It can cost a lot of time (several seconds)
540 # otherwise to update all standins if the largefiles are
542 # otherwise to update all standins if the largefiles are
541 # large.
543 # large.
542 lfdirstate = openlfdirstate(ui, repo)
544 lfdirstate = openlfdirstate(ui, repo)
543 dirtymatch = matchmod.always(repo.root, repo.getcwd())
545 dirtymatch = matchmod.always(repo.root, repo.getcwd())
544 unsure, s = lfdirstate.status(dirtymatch, [], False, False,
546 unsure, s = lfdirstate.status(dirtymatch, [], False, False,
545 False)
547 False)
546 modifiedfiles = unsure + s.modified + s.added + s.removed
548 modifiedfiles = unsure + s.modified + s.added + s.removed
547 lfiles = listlfiles(repo)
549 lfiles = listlfiles(repo)
548 # this only loops through largefiles that exist (not
550 # this only loops through largefiles that exist (not
549 # removed/renamed)
551 # removed/renamed)
550 for lfile in lfiles:
552 for lfile in lfiles:
551 if lfile in modifiedfiles:
553 if lfile in modifiedfiles:
552 if repo.wvfs.exists(standin(lfile)):
554 if repo.wvfs.exists(standin(lfile)):
553 # this handles the case where a rebase is being
555 # this handles the case where a rebase is being
554 # performed and the working copy is not updated
556 # performed and the working copy is not updated
555 # yet.
557 # yet.
556 if repo.wvfs.exists(lfile):
558 if repo.wvfs.exists(lfile):
557 updatestandin(repo,
559 updatestandin(repo,
558 standin(lfile))
560 standin(lfile))
559
561
560 return match
562 return match
561
563
562 lfiles = listlfiles(repo)
564 lfiles = listlfiles(repo)
563 match._files = repo._subdirlfs(match.files(), lfiles)
565 match._files = repo._subdirlfs(match.files(), lfiles)
564
566
565 # Case 2: user calls commit with specified patterns: refresh
567 # Case 2: user calls commit with specified patterns: refresh
566 # any matching big files.
568 # any matching big files.
567 smatcher = composestandinmatcher(repo, match)
569 smatcher = composestandinmatcher(repo, match)
568 standins = repo.dirstate.walk(smatcher, [], False, False)
570 standins = repo.dirstate.walk(smatcher, [], False, False)
569
571
570 # No matching big files: get out of the way and pass control to
572 # No matching big files: get out of the way and pass control to
571 # the usual commit() method.
573 # the usual commit() method.
572 if not standins:
574 if not standins:
573 return match
575 return match
574
576
575 # Refresh all matching big files. It's possible that the
577 # Refresh all matching big files. It's possible that the
576 # commit will end up failing, in which case the big files will
578 # commit will end up failing, in which case the big files will
577 # stay refreshed. No harm done: the user modified them and
579 # stay refreshed. No harm done: the user modified them and
578 # asked to commit them, so sooner or later we're going to
580 # asked to commit them, so sooner or later we're going to
579 # refresh the standins. Might as well leave them refreshed.
581 # refresh the standins. Might as well leave them refreshed.
580 lfdirstate = openlfdirstate(ui, repo)
582 lfdirstate = openlfdirstate(ui, repo)
581 for fstandin in standins:
583 for fstandin in standins:
582 lfile = splitstandin(fstandin)
584 lfile = splitstandin(fstandin)
583 if lfdirstate[lfile] != 'r':
585 if lfdirstate[lfile] != 'r':
584 updatestandin(repo, fstandin)
586 updatestandin(repo, fstandin)
585
587
586 # Cook up a new matcher that only matches regular files or
588 # Cook up a new matcher that only matches regular files or
587 # standins corresponding to the big files requested by the
589 # standins corresponding to the big files requested by the
588 # user. Have to modify _files to prevent commit() from
590 # user. Have to modify _files to prevent commit() from
589 # complaining "not tracked" for big files.
591 # complaining "not tracked" for big files.
590 match = copy.copy(match)
592 match = copy.copy(match)
591 origmatchfn = match.matchfn
593 origmatchfn = match.matchfn
592
594
593 # Check both the list of largefiles and the list of
595 # Check both the list of largefiles and the list of
594 # standins because if a largefile was removed, it
596 # standins because if a largefile was removed, it
595 # won't be in the list of largefiles at this point
597 # won't be in the list of largefiles at this point
596 match._files += sorted(standins)
598 match._files += sorted(standins)
597
599
598 actualfiles = []
600 actualfiles = []
599 for f in match._files:
601 for f in match._files:
600 fstandin = standin(f)
602 fstandin = standin(f)
601
603
602 # For largefiles, only one of the normal and standin should be
604 # For largefiles, only one of the normal and standin should be
603 # committed (except if one of them is a remove). In the case of a
605 # committed (except if one of them is a remove). In the case of a
604 # standin removal, drop the normal file if it is unknown to dirstate.
606 # standin removal, drop the normal file if it is unknown to dirstate.
605 # Thus, skip plain largefile names but keep the standin.
607 # Thus, skip plain largefile names but keep the standin.
606 if f in lfiles or fstandin in standins:
608 if f in lfiles or fstandin in standins:
607 if repo.dirstate[fstandin] != 'r':
609 if repo.dirstate[fstandin] != 'r':
608 if repo.dirstate[f] != 'r':
610 if repo.dirstate[f] != 'r':
609 continue
611 continue
610 elif repo.dirstate[f] == '?':
612 elif repo.dirstate[f] == '?':
611 continue
613 continue
612
614
613 actualfiles.append(f)
615 actualfiles.append(f)
614 match._files = actualfiles
616 match._files = actualfiles
615
617
616 def matchfn(f):
618 def matchfn(f):
617 if origmatchfn(f):
619 if origmatchfn(f):
618 return f not in lfiles
620 return f not in lfiles
619 else:
621 else:
620 return f in standins
622 return f in standins
621
623
622 match.matchfn = matchfn
624 match.matchfn = matchfn
623
625
624 return match
626 return match
625
627
626 class automatedcommithook(object):
628 class automatedcommithook(object):
627 '''Stateful hook to update standins at the 1st commit of resuming
629 '''Stateful hook to update standins at the 1st commit of resuming
628
630
629 For efficiency, updating standins in the working directory should
631 For efficiency, updating standins in the working directory should
630 be avoided while automated committing (like rebase, transplant and
632 be avoided while automated committing (like rebase, transplant and
631 so on), because they should be updated before committing.
633 so on), because they should be updated before committing.
632
634
633 But the 1st commit of resuming automated committing (e.g. ``rebase
635 But the 1st commit of resuming automated committing (e.g. ``rebase
634 --continue``) should update them, because largefiles may be
636 --continue``) should update them, because largefiles may be
635 modified manually.
637 modified manually.
636 '''
638 '''
637 def __init__(self, resuming):
639 def __init__(self, resuming):
638 self.resuming = resuming
640 self.resuming = resuming
639
641
640 def __call__(self, repo, match):
642 def __call__(self, repo, match):
641 if self.resuming:
643 if self.resuming:
642 self.resuming = False # avoids updating at subsequent commits
644 self.resuming = False # avoids updating at subsequent commits
643 return updatestandinsbymatch(repo, match)
645 return updatestandinsbymatch(repo, match)
644 else:
646 else:
645 return match
647 return match
646
648
647 def getstatuswriter(ui, repo, forcibly=None):
649 def getstatuswriter(ui, repo, forcibly=None):
648 '''Return the function to write largefiles specific status out
650 '''Return the function to write largefiles specific status out
649
651
650 If ``forcibly`` is ``None``, this returns the last element of
652 If ``forcibly`` is ``None``, this returns the last element of
651 ``repo._lfstatuswriters`` as "default" writer function.
653 ``repo._lfstatuswriters`` as "default" writer function.
652
654
653 Otherwise, this returns the function to always write out (or
655 Otherwise, this returns the function to always write out (or
654 ignore if ``not forcibly``) status.
656 ignore if ``not forcibly``) status.
655 '''
657 '''
656 if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
658 if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
657 return repo._lfstatuswriters[-1]
659 return repo._lfstatuswriters[-1]
658 else:
660 else:
659 if forcibly:
661 if forcibly:
660 return ui.status # forcibly WRITE OUT
662 return ui.status # forcibly WRITE OUT
661 else:
663 else:
662 return lambda *msg, **opts: None # forcibly IGNORE
664 return lambda *msg, **opts: None # forcibly IGNORE
@@ -1,132 +1,133 b''
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """extend schemes with shortcuts to repository swarms
6 """extend schemes with shortcuts to repository swarms
7
7
8 This extension allows you to specify shortcuts for parent URLs with a
8 This extension allows you to specify shortcuts for parent URLs with a
9 lot of repositories to act like a scheme, for example::
9 lot of repositories to act like a scheme, for example::
10
10
11 [schemes]
11 [schemes]
12 py = http://code.python.org/hg/
12 py = http://code.python.org/hg/
13
13
14 After that you can use it like::
14 After that you can use it like::
15
15
16 hg clone py://trunk/
16 hg clone py://trunk/
17
17
18 Additionally there is support for some more complex schemas, for
18 Additionally there is support for some more complex schemas, for
19 example used by Google Code::
19 example used by Google Code::
20
20
21 [schemes]
21 [schemes]
22 gcode = http://{1}.googlecode.com/hg/
22 gcode = http://{1}.googlecode.com/hg/
23
23
24 The syntax is taken from Mercurial templates, and you have unlimited
24 The syntax is taken from Mercurial templates, and you have unlimited
25 number of variables, starting with ``{1}`` and continuing with
25 number of variables, starting with ``{1}`` and continuing with
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
28 just appended to an URL.
28 just appended to an URL.
29
29
30 For convenience, the extension adds these schemes by default::
30 For convenience, the extension adds these schemes by default::
31
31
32 [schemes]
32 [schemes]
33 py = http://hg.python.org/
33 py = http://hg.python.org/
34 bb = https://bitbucket.org/
34 bb = https://bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
36 gcode = https://{1}.googlecode.com/hg/
36 gcode = https://{1}.googlecode.com/hg/
37 kiln = https://{1}.kilnhg.com/Repo/
37 kiln = https://{1}.kilnhg.com/Repo/
38
38
39 You can override a predefined scheme by defining a new scheme with the
39 You can override a predefined scheme by defining a new scheme with the
40 same name.
40 same name.
41 """
41 """
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import os
44 import os
45 import re
45 import re
46
46
47 from mercurial.i18n import _
47 from mercurial.i18n import _
48 from mercurial import (
48 from mercurial import (
49 cmdutil,
49 cmdutil,
50 error,
50 error,
51 extensions,
51 extensions,
52 hg,
52 hg,
53 pycompat,
53 templater,
54 templater,
54 util,
55 util,
55 )
56 )
56
57
57 cmdtable = {}
58 cmdtable = {}
58 command = cmdutil.command(cmdtable)
59 command = cmdutil.command(cmdtable)
59 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # be specifying the version(s) of Mercurial they are tested with, or
62 # be specifying the version(s) of Mercurial they are tested with, or
62 # leave the attribute unspecified.
63 # leave the attribute unspecified.
63 testedwith = 'ships-with-hg-core'
64 testedwith = 'ships-with-hg-core'
64
65
65
66
66 class ShortRepository(object):
67 class ShortRepository(object):
67 def __init__(self, url, scheme, templater):
68 def __init__(self, url, scheme, templater):
68 self.scheme = scheme
69 self.scheme = scheme
69 self.templater = templater
70 self.templater = templater
70 self.url = url
71 self.url = url
71 try:
72 try:
72 self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url)))
73 self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url)))
73 except ValueError:
74 except ValueError:
74 self.parts = 0
75 self.parts = 0
75
76
76 def __repr__(self):
77 def __repr__(self):
77 return '<ShortRepository: %s>' % self.scheme
78 return '<ShortRepository: %s>' % self.scheme
78
79
79 def instance(self, ui, url, create):
80 def instance(self, ui, url, create):
80 url = self.resolve(url)
81 url = self.resolve(url)
81 return hg._peerlookup(url).instance(ui, url, create)
82 return hg._peerlookup(url).instance(ui, url, create)
82
83
83 def resolve(self, url):
84 def resolve(self, url):
84 # Should this use the util.url class, or is manual parsing better?
85 # Should this use the util.url class, or is manual parsing better?
85 try:
86 try:
86 url = url.split('://', 1)[1]
87 url = url.split('://', 1)[1]
87 except IndexError:
88 except IndexError:
88 raise error.Abort(_("no '://' in scheme url '%s'") % url)
89 raise error.Abort(_("no '://' in scheme url '%s'") % url)
89 parts = url.split('/', self.parts)
90 parts = url.split('/', self.parts)
90 if len(parts) > self.parts:
91 if len(parts) > self.parts:
91 tail = parts[-1]
92 tail = parts[-1]
92 parts = parts[:-1]
93 parts = parts[:-1]
93 else:
94 else:
94 tail = ''
95 tail = ''
95 context = dict((str(i + 1), v) for i, v in enumerate(parts))
96 context = dict((str(i + 1), v) for i, v in enumerate(parts))
96 return ''.join(self.templater.process(self.url, context)) + tail
97 return ''.join(self.templater.process(self.url, context)) + tail
97
98
98 def hasdriveletter(orig, path):
99 def hasdriveletter(orig, path):
99 if path:
100 if path:
100 for scheme in schemes:
101 for scheme in schemes:
101 if path.startswith(scheme + ':'):
102 if path.startswith(scheme + ':'):
102 return False
103 return False
103 return orig(path)
104 return orig(path)
104
105
105 schemes = {
106 schemes = {
106 'py': 'http://hg.python.org/',
107 'py': 'http://hg.python.org/',
107 'bb': 'https://bitbucket.org/',
108 'bb': 'https://bitbucket.org/',
108 'bb+ssh': 'ssh://hg@bitbucket.org/',
109 'bb+ssh': 'ssh://hg@bitbucket.org/',
109 'gcode': 'https://{1}.googlecode.com/hg/',
110 'gcode': 'https://{1}.googlecode.com/hg/',
110 'kiln': 'https://{1}.kilnhg.com/Repo/'
111 'kiln': 'https://{1}.kilnhg.com/Repo/'
111 }
112 }
112
113
113 def extsetup(ui):
114 def extsetup(ui):
114 schemes.update(dict(ui.configitems('schemes')))
115 schemes.update(dict(ui.configitems('schemes')))
115 t = templater.engine(lambda x: x)
116 t = templater.engine(lambda x: x)
116 for scheme, url in schemes.items():
117 for scheme, url in schemes.items():
117 if (os.name == 'nt' and len(scheme) == 1 and scheme.isalpha()
118 if (pycompat.osname == 'nt' and len(scheme) == 1 and scheme.isalpha()
118 and os.path.exists('%s:\\' % scheme)):
119 and os.path.exists('%s:\\' % scheme)):
119 raise error.Abort(_('custom scheme %s:// conflicts with drive '
120 raise error.Abort(_('custom scheme %s:// conflicts with drive '
120 'letter %s:\\\n') % (scheme, scheme.upper()))
121 'letter %s:\\\n') % (scheme, scheme.upper()))
121 hg.schemes[scheme] = ShortRepository(url, scheme, t)
122 hg.schemes[scheme] = ShortRepository(url, scheme, t)
122
123
123 extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
124 extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
124
125
125 @command('debugexpandscheme', norepo=True)
126 @command('debugexpandscheme', norepo=True)
126 def expandscheme(ui, url, **opts):
127 def expandscheme(ui, url, **opts):
127 """given a repo path, provide the scheme-expanded path
128 """given a repo path, provide the scheme-expanded path
128 """
129 """
129 repo = hg._peerlookup(url)
130 repo = hg._peerlookup(url)
130 if isinstance(repo, ShortRepository):
131 if isinstance(repo, ShortRepository):
131 url = repo.resolve(url)
132 url = repo.resolve(url)
132 ui.write(url + '\n')
133 ui.write(url + '\n')
@@ -1,194 +1,194 b''
1 # win32mbcs.py -- MBCS filename support for Mercurial
1 # win32mbcs.py -- MBCS filename support for Mercurial
2 #
2 #
3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
4 #
4 #
5 # Version: 0.3
5 # Version: 0.3
6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2 or any later version.
9 # GNU General Public License version 2 or any later version.
10 #
10 #
11
11
12 '''allow the use of MBCS paths with problematic encodings
12 '''allow the use of MBCS paths with problematic encodings
13
13
14 Some MBCS encodings are not good for some path operations (i.e.
14 Some MBCS encodings are not good for some path operations (i.e.
15 splitting path, case conversion, etc.) with its encoded bytes. We call
15 splitting path, case conversion, etc.) with its encoded bytes. We call
16 such an encoding (e.g. shift_jis or big5) a "problematic encoding".
16 such an encoding (e.g. shift_jis or big5) a "problematic encoding".
17 This extension can be used to fix the issue with those encodings by
17 This extension can be used to fix the issue with those encodings by
18 wrapping some functions to convert to Unicode string before path
18 wrapping some functions to convert to Unicode string before path
19 operation.
19 operation.
20
20
21 This extension is useful for:
21 This extension is useful for:
22
22
23 - Japanese Windows users using shift_jis encoding.
23 - Japanese Windows users using shift_jis encoding.
24 - Chinese Windows users using big5 encoding.
24 - Chinese Windows users using big5 encoding.
25 - All users who use a repository with one of problematic encodings on
25 - All users who use a repository with one of problematic encodings on
26 case-insensitive file system.
26 case-insensitive file system.
27
27
28 This extension is not needed for:
28 This extension is not needed for:
29
29
30 - Any user who use only ASCII chars in path.
30 - Any user who use only ASCII chars in path.
31 - Any user who do not use any of problematic encodings.
31 - Any user who do not use any of problematic encodings.
32
32
33 Note that there are some limitations on using this extension:
33 Note that there are some limitations on using this extension:
34
34
35 - You should use single encoding in one repository.
35 - You should use single encoding in one repository.
36 - If the repository path ends with 0x5c, .hg/hgrc cannot be read.
36 - If the repository path ends with 0x5c, .hg/hgrc cannot be read.
37 - win32mbcs is not compatible with fixutf8 extension.
37 - win32mbcs is not compatible with fixutf8 extension.
38
38
39 By default, win32mbcs uses encoding.encoding decided by Mercurial.
39 By default, win32mbcs uses encoding.encoding decided by Mercurial.
40 You can specify the encoding by config option::
40 You can specify the encoding by config option::
41
41
42 [win32mbcs]
42 [win32mbcs]
43 encoding = sjis
43 encoding = sjis
44
44
45 It is useful for the users who want to commit with UTF-8 log message.
45 It is useful for the users who want to commit with UTF-8 log message.
46 '''
46 '''
47 from __future__ import absolute_import
47 from __future__ import absolute_import
48
48
49 import os
49 import os
50 import sys
50 import sys
51
51
52 from mercurial.i18n import _
52 from mercurial.i18n import _
53 from mercurial import (
53 from mercurial import (
54 encoding,
54 encoding,
55 error,
55 error,
56 pycompat,
56 pycompat,
57 )
57 )
58
58
59 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
59 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
60 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # be specifying the version(s) of Mercurial they are tested with, or
61 # be specifying the version(s) of Mercurial they are tested with, or
62 # leave the attribute unspecified.
62 # leave the attribute unspecified.
63 testedwith = 'ships-with-hg-core'
63 testedwith = 'ships-with-hg-core'
64
64
65 _encoding = None # see extsetup
65 _encoding = None # see extsetup
66
66
67 def decode(arg):
67 def decode(arg):
68 if isinstance(arg, str):
68 if isinstance(arg, str):
69 uarg = arg.decode(_encoding)
69 uarg = arg.decode(_encoding)
70 if arg == uarg.encode(_encoding):
70 if arg == uarg.encode(_encoding):
71 return uarg
71 return uarg
72 raise UnicodeError("Not local encoding")
72 raise UnicodeError("Not local encoding")
73 elif isinstance(arg, tuple):
73 elif isinstance(arg, tuple):
74 return tuple(map(decode, arg))
74 return tuple(map(decode, arg))
75 elif isinstance(arg, list):
75 elif isinstance(arg, list):
76 return map(decode, arg)
76 return map(decode, arg)
77 elif isinstance(arg, dict):
77 elif isinstance(arg, dict):
78 for k, v in arg.items():
78 for k, v in arg.items():
79 arg[k] = decode(v)
79 arg[k] = decode(v)
80 return arg
80 return arg
81
81
82 def encode(arg):
82 def encode(arg):
83 if isinstance(arg, unicode):
83 if isinstance(arg, unicode):
84 return arg.encode(_encoding)
84 return arg.encode(_encoding)
85 elif isinstance(arg, tuple):
85 elif isinstance(arg, tuple):
86 return tuple(map(encode, arg))
86 return tuple(map(encode, arg))
87 elif isinstance(arg, list):
87 elif isinstance(arg, list):
88 return map(encode, arg)
88 return map(encode, arg)
89 elif isinstance(arg, dict):
89 elif isinstance(arg, dict):
90 for k, v in arg.items():
90 for k, v in arg.items():
91 arg[k] = encode(v)
91 arg[k] = encode(v)
92 return arg
92 return arg
93
93
94 def appendsep(s):
94 def appendsep(s):
95 # ensure the path ends with os.sep, appending it if necessary.
95 # ensure the path ends with os.sep, appending it if necessary.
96 try:
96 try:
97 us = decode(s)
97 us = decode(s)
98 except UnicodeError:
98 except UnicodeError:
99 us = s
99 us = s
100 if us and us[-1] not in ':/\\':
100 if us and us[-1] not in ':/\\':
101 s += pycompat.ossep
101 s += pycompat.ossep
102 return s
102 return s
103
103
104
104
105 def basewrapper(func, argtype, enc, dec, args, kwds):
105 def basewrapper(func, argtype, enc, dec, args, kwds):
106 # check whether the arguments are already converted, then call original
106 # check whether the arguments are already converted, then call original
107 for arg in args:
107 for arg in args:
108 if isinstance(arg, argtype):
108 if isinstance(arg, argtype):
109 return func(*args, **kwds)
109 return func(*args, **kwds)
110
110
111 try:
111 try:
112 # convert string arguments, call func, then convert back the
112 # convert string arguments, call func, then convert back the
113 # return value.
113 # return value.
114 return enc(func(*dec(args), **dec(kwds)))
114 return enc(func(*dec(args), **dec(kwds)))
115 except UnicodeError:
115 except UnicodeError:
116 raise error.Abort(_("[win32mbcs] filename conversion failed with"
116 raise error.Abort(_("[win32mbcs] filename conversion failed with"
117 " %s encoding\n") % (_encoding))
117 " %s encoding\n") % (_encoding))
118
118
119 def wrapper(func, args, kwds):
119 def wrapper(func, args, kwds):
120 return basewrapper(func, unicode, encode, decode, args, kwds)
120 return basewrapper(func, unicode, encode, decode, args, kwds)
121
121
122
122
123 def reversewrapper(func, args, kwds):
123 def reversewrapper(func, args, kwds):
124 return basewrapper(func, str, decode, encode, args, kwds)
124 return basewrapper(func, str, decode, encode, args, kwds)
125
125
126 def wrapperforlistdir(func, args, kwds):
126 def wrapperforlistdir(func, args, kwds):
127 # Ensure 'path' argument ends with os.sep to avoid
127 # Ensure 'path' argument ends with os.sep to avoid
128 # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
128 # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
129 if args:
129 if args:
130 args = list(args)
130 args = list(args)
131 args[0] = appendsep(args[0])
131 args[0] = appendsep(args[0])
132 if 'path' in kwds:
132 if 'path' in kwds:
133 kwds['path'] = appendsep(kwds['path'])
133 kwds['path'] = appendsep(kwds['path'])
134 return func(*args, **kwds)
134 return func(*args, **kwds)
135
135
136 def wrapname(name, wrapper):
136 def wrapname(name, wrapper):
137 module, name = name.rsplit('.', 1)
137 module, name = name.rsplit('.', 1)
138 module = sys.modules[module]
138 module = sys.modules[module]
139 func = getattr(module, name)
139 func = getattr(module, name)
140 def f(*args, **kwds):
140 def f(*args, **kwds):
141 return wrapper(func, args, kwds)
141 return wrapper(func, args, kwds)
142 f.__name__ = func.__name__
142 f.__name__ = func.__name__
143 setattr(module, name, f)
143 setattr(module, name, f)
144
144
145 # List of functions to be wrapped.
145 # List of functions to be wrapped.
146 # NOTE: os.path.dirname() and os.path.basename() are safe because
146 # NOTE: os.path.dirname() and os.path.basename() are safe because
147 # they use result of os.path.split()
147 # they use result of os.path.split()
148 funcs = '''os.path.join os.path.split os.path.splitext
148 funcs = '''os.path.join os.path.split os.path.splitext
149 os.path.normpath os.makedirs mercurial.util.endswithsep
149 os.path.normpath os.makedirs mercurial.util.endswithsep
150 mercurial.util.splitpath mercurial.util.fscasesensitive
150 mercurial.util.splitpath mercurial.util.fscasesensitive
151 mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
151 mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
152 mercurial.util.checkwinfilename mercurial.util.checkosfilename
152 mercurial.util.checkwinfilename mercurial.util.checkosfilename
153 mercurial.util.split'''
153 mercurial.util.split'''
154
154
155 # These functions are required to be called with local encoded string
155 # These functions are required to be called with local encoded string
156 # because they expect the argument to be a locally encoded string and
156 # because they expect the argument to be a locally encoded string and
157 # they misbehave with unicode strings.
157 # they misbehave with unicode strings.
158 rfuncs = '''mercurial.encoding.upper mercurial.encoding.lower'''
158 rfuncs = '''mercurial.encoding.upper mercurial.encoding.lower'''
159
159
160 # List of Windows specific functions to be wrapped.
160 # List of Windows specific functions to be wrapped.
161 winfuncs = '''os.path.splitunc'''
161 winfuncs = '''os.path.splitunc'''
162
162
163 # codec and alias names of sjis and big5 to be faked.
163 # codec and alias names of sjis and big5 to be faked.
164 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
164 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
165 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
165 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
166 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
166 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
167 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
167 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
168
168
169 def extsetup(ui):
169 def extsetup(ui):
170 # TODO: decide use of config section for this extension
170 # TODO: decide use of config section for this extension
171 if ((not os.path.supports_unicode_filenames) and
171 if ((not os.path.supports_unicode_filenames) and
172 (sys.platform != 'cygwin')):
172 (sys.platform != 'cygwin')):
173 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
173 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
174 return
174 return
175 # determine encoding for filename
175 # determine encoding for filename
176 global _encoding
176 global _encoding
177 _encoding = ui.config('win32mbcs', 'encoding', encoding.encoding)
177 _encoding = ui.config('win32mbcs', 'encoding', encoding.encoding)
178 # fake is only for relevant environment.
178 # fake is only for relevant environment.
179 if _encoding.lower() in problematic_encodings.split():
179 if _encoding.lower() in problematic_encodings.split():
180 for f in funcs.split():
180 for f in funcs.split():
181 wrapname(f, wrapper)
181 wrapname(f, wrapper)
182 if os.name == 'nt':
182 if pycompat.osname == 'nt':
183 for f in winfuncs.split():
183 for f in winfuncs.split():
184 wrapname(f, wrapper)
184 wrapname(f, wrapper)
185 wrapname("mercurial.osutil.listdir", wrapperforlistdir)
185 wrapname("mercurial.osutil.listdir", wrapperforlistdir)
186 # wrap functions to be called with local byte string arguments
186 # wrap functions to be called with local byte string arguments
187 for f in rfuncs.split():
187 for f in rfuncs.split():
188 wrapname(f, reversewrapper)
188 wrapname(f, reversewrapper)
189 # Check sys.args manually instead of using ui.debug() because
189 # Check sys.args manually instead of using ui.debug() because
190 # command line options is not yet applied when
190 # command line options is not yet applied when
191 # extensions.loadall() is called.
191 # extensions.loadall() is called.
192 if '--debug' in sys.argv:
192 if '--debug' in sys.argv:
193 ui.write(("[win32mbcs] activated with encoding: %s\n")
193 ui.write(("[win32mbcs] activated with encoding: %s\n")
194 % _encoding)
194 % _encoding)
General Comments 0
You need to be logged in to leave comments. Login now