Merge with crew.
Bryan O'Sullivan
r5360:b98c377b merge default
@@ -0,0 +1,137 @@
1 # darcs support for the convert extension
2
3 from common import NoRepo, commit, converter_source
4 from mercurial.i18n import _
5 from mercurial import util
6 import os, shutil, tempfile
7
8 # The naming drift of ElementTree is fun!
9
10 try: from xml.etree.cElementTree import ElementTree
11 except ImportError:
12 try: from xml.etree.ElementTree import ElementTree
13 except ImportError:
14 try: from elementtree.cElementTree import ElementTree
15 except ImportError:
16 try: from elementtree.ElementTree import ElementTree
17 except ImportError: ElementTree = None
18
19
20 class darcs_source(converter_source):
21 def __init__(self, ui, path, rev=None):
22 super(darcs_source, self).__init__(ui, path, rev=rev)
23
24 if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
25 raise NoRepo("couldn't open darcs repo %s" % path)
26
27 if ElementTree is None:
28 raise util.Abort(_("Python ElementTree module is not available"))
29
30 self.path = os.path.realpath(path)
31
32 self.lastrev = None
33 self.changes = {}
34 self.parents = {}
35 self.tags = {}
36
37 def before(self):
38 self.tmppath = tempfile.mkdtemp(
39 prefix='convert-' + os.path.basename(self.path) + '-')
40 output, status = self.run('init', repodir=self.tmppath)
41 self.checkexit(status)
42
43 tree = self.xml('changes', '--xml-output', '--summary')
44 tagname = None
45 child = None
46 for elt in tree.findall('patch'):
47 node = elt.get('hash')
48 name = elt.findtext('name', '')
49 if name.startswith('TAG '):
50 tagname = name[4:].strip()
51 elif tagname is not None:
52 self.tags[tagname] = node
53 tagname = None
54 self.changes[node] = elt
55 self.parents[child] = [node]
56 child = node
57 self.parents[child] = []
58
59 def after(self):
60 self.ui.debug('cleaning up %s\n' % self.tmppath)
61 shutil.rmtree(self.tmppath, ignore_errors=True)
62
63 def _run(self, cmd, *args, **kwargs):
64 cmdline = 'darcs %s --repodir=%r %s </dev/null' % (
65 cmd, kwargs.get('repodir', self.path), ' '.join(args))
66 self.ui.debug(cmdline, '\n')
67 return os.popen(cmdline, 'r')
68
69 def run(self, cmd, *args, **kwargs):
70 fp = self._run(cmd, *args, **kwargs)
71 output = fp.read()
72 return output, fp.close()
73
74 def checkexit(self, status, output=''):
75 if status:
76 if output:
77 self.ui.warn(_('darcs error:\n'))
78 self.ui.warn(output)
79 msg = util.explain_exit(status)[0]
80 raise util.Abort(_('darcs %s') % msg)
81
82 def xml(self, cmd, *opts):
83 etree = ElementTree()
84 fp = self._run(cmd, *opts)
85 etree.parse(fp)
86 self.checkexit(fp.close())
87 return etree.getroot()
88
89 def getheads(self):
90 return self.parents[None]
91
92 def getcommit(self, rev):
93 elt = self.changes[rev]
94 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
95 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
96 return commit(author=elt.get('author'), date=util.datestr(date),
97 desc=desc.strip(), parents=self.parents[rev])
98
99 def pull(self, rev):
100 output, status = self.run('pull %r --all --match="hash %s"' %
101 (self.path, rev),
102 '--no-test', '--no-posthook',
103 '--external-merge=/bin/false',
104 repodir=self.tmppath)
105 if status:
106 if output.find('We have conflicts in') == -1:
107 self.checkexit(status, output)
108 output, status = self.run('revert --all', repodir=self.tmppath)
109 self.checkexit(status, output)
110
111 def getchanges(self, rev):
112 self.pull(rev)
113 copies = {}
114 changes = []
115 for elt in self.changes[rev].find('summary').getchildren():
116 if elt.tag in ('add_directory', 'remove_directory'):
117 continue
118 if elt.tag == 'move':
119 changes.append((elt.get('from'), rev))
120 copies[elt.get('from')] = elt.get('to')
121 else:
122 changes.append((elt.text.strip(), rev))
123 changes.sort()
124 self.lastrev = rev
125 return changes, copies
126
127 def getfile(self, name, rev):
128 if rev != self.lastrev:
129 raise util.Abort(_('internal calling inconsistency'))
130 return open(os.path.join(self.tmppath, name), 'rb').read()
131
132 def getmode(self, name, rev):
133 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
134 return (mode & 0111) and 'x' or ''
135
136 def gettags(self):
137 return self.tags
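The new darcs source reads `darcs changes --xml-output --summary` once in before() and chains the patches, which darcs lists newest first, into self.parents, so getheads() simply returns the most recent patch; conversions are then driven through the usual `hg convert SOURCE [DEST [MAPFILE]]` synopsis registered at the bottom of this changeset. Below is a minimal sketch of that parent-chaining step only, not part of the patch, using invented patch hashes:

    # Sketch: how before() turns a newest-first patch list into the parents
    # map consumed by getheads()/getcommit(). All hashes are made up.
    patches = ['hash-c-newest', 'hash-b', 'hash-a-oldest']

    parents = {}
    child = None
    for node in patches:
        parents[child] = [node]    # the previously seen (newer) patch gets this older one as its parent
        child = node
    parents[child] = []            # the oldest patch has no parent

    assert parents[None] == ['hash-c-newest']    # what getheads() returns
    assert parents['hash-b'] == ['hash-a-oldest']
    assert parents['hash-a-oldest'] == []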
@@ -1,251 +1,255 @@
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # Encoding: iso-8859-1
2 # Encoding: iso-8859-1
3 # vim: tw=80 ts=4 sw=4 noet
3 # vim: tw=80 ts=4 sw=4 noet
4 # -----------------------------------------------------------------------------
4 # -----------------------------------------------------------------------------
5 # Project : Basic Darcs to Mercurial conversion script
5 # Project : Basic Darcs to Mercurial conversion script
6 #
7 # *** DEPRECATED. Use the convert extension instead. This script will
8 # *** be removed soon.
9 #
6 # -----------------------------------------------------------------------------
10 # -----------------------------------------------------------------------------
7 # Authors : Sebastien Pierre <sebastien@xprima.com>
11 # Authors : Sebastien Pierre <sebastien@xprima.com>
8 # TK Soh <teekaysoh@gmail.com>
12 # TK Soh <teekaysoh@gmail.com>
9 # -----------------------------------------------------------------------------
13 # -----------------------------------------------------------------------------
10 # Creation : 24-May-2006
14 # Creation : 24-May-2006
11 # Last mod : 05-Jun-2006
12 # -----------------------------------------------------------------------------
15 # -----------------------------------------------------------------------------
13
16
14 import os, sys
17 import os, sys
15 import tempfile
18 import tempfile
16 import xml.dom.minidom as xml_dom
19 import xml.dom.minidom as xml_dom
17 from time import strptime, mktime
20 from time import strptime, mktime
18 import re
21 import re
19
22
20 DARCS_REPO = None
23 DARCS_REPO = None
21 HG_REPO = None
24 HG_REPO = None
22
25
23 USAGE = """\
26 USAGE = """\
24 %s DARCSREPO HGREPO [SKIP]
27 %s DARCSREPO HGREPO [SKIP]
25
28
26 Converts the given Darcs repository to a new Mercurial repository. The given
29 Converts the given Darcs repository to a new Mercurial repository. The given
27 HGREPO must not exist, as it will be created and filled up (this will avoid
30 HGREPO must not exist, as it will be created and filled up (this will avoid
28 overwriting valuable data).
31 overwriting valuable data).
29
32
30 In case an error occurs within the process, you can resume the process by
33 In case an error occurs within the process, you can resume the process by
31 giving the last successfully applied change number.
34 giving the last successfully applied change number.
32 """ % (os.path.basename(sys.argv[0]))
35 """ % (os.path.basename(sys.argv[0]))
33
36
34 # ------------------------------------------------------------------------------
37 # ------------------------------------------------------------------------------
35 #
38 #
36 # Utilities
39 # Utilities
37 #
40 #
38 # ------------------------------------------------------------------------------
41 # ------------------------------------------------------------------------------
39
42
40 def cmd(text, path=None, silent=False):
43 def cmd(text, path=None, silent=False):
41 """Executes a command, in the given directory (if any), and returns the
44 """Executes a command, in the given directory (if any), and returns the
42 command result as a string."""
45 command result as a string."""
43 cwd = None
46 cwd = None
44 if path:
47 if path:
45 path = os.path.abspath(path)
48 path = os.path.abspath(path)
46 cwd = os.getcwd()
49 cwd = os.getcwd()
47 os.chdir(path)
50 os.chdir(path)
48 if not silent: print "> ", text
51 if not silent: print "> ", text
49 res = os.popen(text).read()
52 res = os.popen(text).read()
50 if path:
53 if path:
51 os.chdir(cwd)
54 os.chdir(cwd)
52 return res
55 return res
53
56
54 def writefile(path, data):
57 def writefile(path, data):
55 """Writes the given data into the given file."""
58 """Writes the given data into the given file."""
56 f = file(path, "w") ; f.write(data) ; f.close()
59 f = file(path, "w") ; f.write(data) ; f.close()
57
60
58 def error( *args ):
61 def error( *args ):
59 sys.stderr.write("ERROR: ")
62 sys.stderr.write("ERROR: ")
60 for a in args: sys.stderr.write(str(a))
63 for a in args: sys.stderr.write(str(a))
61 sys.stderr.write("\n")
64 sys.stderr.write("\n")
62 sys.stderr.write("You can make manual fixes if necessary and then resume by"
65 sys.stderr.write("You can make manual fixes if necessary and then resume by"
63 " giving the last changeset number")
66 " giving the last changeset number")
64 sys.exit(-1)
67 sys.exit(-1)
65
68
66 # ------------------------------------------------------------------------------
69 # ------------------------------------------------------------------------------
67 #
70 #
68 # Darcs interface
71 # Darcs interface
69 #
72 #
70 # ------------------------------------------------------------------------------
73 # ------------------------------------------------------------------------------
71
74
72 def darcs_changes(darcsRepo):
75 def darcs_changes(darcsRepo):
73 """Gets the changes list from the given darcs repository. This returns the
76 """Gets the changes list from the given darcs repository. This returns the
74 chronological list of changes as (author, date, name, hash, comment)."""
77 chronological list of changes as (author, date, name, hash, comment)."""
75 changes = cmd("darcs changes --reverse --xml-output", darcsRepo)
78 changes = cmd("darcs changes --reverse --xml-output", darcsRepo)
76 doc = xml_dom.parseString(changes)
79 doc = xml_dom.parseString(changes)
77 for patch_node in doc.childNodes[0].childNodes:
80 for patch_node in doc.childNodes[0].childNodes:
78 name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
81 name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
79 comm = filter(lambda n: n.nodeName == "comment", patch_node.childNodes)
82 comm = filter(lambda n: n.nodeName == "comment", patch_node.childNodes)
80 if not name:continue
83 if not name:continue
81 else: name = name[0].childNodes[0].data
84 else: name = name[0].childNodes[0].data
82 if not comm: comm = ""
85 if not comm: comm = ""
83 else: comm = comm[0].childNodes[0].data
86 else: comm = comm[0].childNodes[0].data
84 author = patch_node.getAttribute("author")
87 author = patch_node.getAttribute("author")
85 date = patch_node.getAttribute("date")
88 date = patch_node.getAttribute("date")
86 chash = os.path.splitext(patch_node.getAttribute("hash"))[0]
89 chash = os.path.splitext(patch_node.getAttribute("hash"))[0]
87 yield author, date, name, chash, comm
90 yield author, date, name, chash, comm
88
91
89 def darcs_tip(darcs_repo):
92 def darcs_tip(darcs_repo):
90 changes = cmd("darcs changes",darcs_repo,silent=True)
93 changes = cmd("darcs changes",darcs_repo,silent=True)
91 changes = filter(lambda l: l.strip().startswith("* "), changes.split("\n"))
94 changes = filter(lambda l: l.strip().startswith("* "), changes.split("\n"))
92 return len(changes)
95 return len(changes)
93
96
94 def darcs_pull(hg_repo, darcs_repo, chash):
97 def darcs_pull(hg_repo, darcs_repo, chash):
95 old_tip = darcs_tip(darcs_repo)
98 old_tip = darcs_tip(darcs_repo)
96 res = cmd("darcs pull \"%s\" --all --match=\"hash %s\"" % (darcs_repo, chash), hg_repo)
99 res = cmd("darcs pull \"%s\" --all --match=\"hash %s\"" % (darcs_repo, chash), hg_repo)
97 if re.search('^We have conflicts in the following files:$', res, re.MULTILINE):
100 if re.search('^We have conflicts in the following files:$', res, re.MULTILINE):
98 print "Trying to revert files to work around conflict..."
101 print "Trying to revert files to work around conflict..."
99 rev_res = cmd ("darcs revert --all", hg_repo)
102 rev_res = cmd ("darcs revert --all", hg_repo)
100 print rev_res
103 print rev_res
101 print res
104 print res
102 new_tip = darcs_tip(darcs_repo)
105 new_tip = darcs_tip(darcs_repo)
103 if new_tip != old_tip + 1:
106 if new_tip != old_tip + 1:
104 error("Darcs pull did not work as expected: " + res)
107 error("Darcs pull did not work as expected: " + res)
105
108
106 def darcs_changes_summary(darcs_repo, chash):
109 def darcs_changes_summary(darcs_repo, chash):
107 """Gets the changes from the darcs summary. This returns the chronological
110 """Gets the changes from the darcs summary. This returns the chronological
108 list of changes as (change_type, args). Eg. ('add_file', 'foo.txt') or
111 list of changes as (change_type, args). Eg. ('add_file', 'foo.txt') or
109 ('move', ['foo.txt','bar.txt'])."""
112 ('move', ['foo.txt','bar.txt'])."""
110 change = cmd("darcs changes --summary --xml-output --match=\"hash %s\"" % (chash), darcs_repo)
113 change = cmd("darcs changes --summary --xml-output --match=\"hash %s\"" % (chash), darcs_repo)
111 doc = xml_dom.parseString(change)
114 doc = xml_dom.parseString(change)
112 for patch_node in doc.childNodes[0].childNodes:
115 for patch_node in doc.childNodes[0].childNodes:
113 summary_nodes = filter(lambda n: n.nodeName == "summary" and n.nodeType == n.ELEMENT_NODE, patch_node.childNodes)
116 summary_nodes = filter(lambda n: n.nodeName == "summary" and n.nodeType == n.ELEMENT_NODE, patch_node.childNodes)
114 for summary_node in summary_nodes:
117 for summary_node in summary_nodes:
115 change_nodes = filter(lambda n: n.nodeType == n.ELEMENT_NODE, summary_node.childNodes)
118 change_nodes = filter(lambda n: n.nodeType == n.ELEMENT_NODE, summary_node.childNodes)
116 if len(change_nodes) == 0:
119 if len(change_nodes) == 0:
117 name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
120 name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
118 if not name:
121 if not name:
119 error("Darcs patch has an empty summary node and no name: " + patch_node.toxml())
122 error("Darcs patch has an empty summary node and no name: " + patch_node.toxml())
120 name = name[0].childNodes[0].data.strip()
123 name = name[0].childNodes[0].data.strip()
121 (tag, sub_count) = re.subn('^TAG ', '', name, 1)
124 (tag, sub_count) = re.subn('^TAG ', '', name, 1)
122 if sub_count != 1:
125 if sub_count != 1:
123 error("Darcs patch has an empty summary node but doesn't look like a tag: " + patch_node.toxml());
126 error("Darcs patch has an empty summary node but doesn't look like a tag: " + patch_node.toxml());
124 for change_node in change_nodes:
127 for change_node in change_nodes:
125 change = change_node.nodeName
128 change = change_node.nodeName
126 if change == 'modify_file':
129 if change == 'modify_file':
127 yield change, change_node.childNodes[0].data.strip()
130 yield change, change_node.childNodes[0].data.strip()
128 elif change == 'add_file':
131 elif change == 'add_file':
129 yield change, change_node.childNodes[0].data.strip()
132 yield change, change_node.childNodes[0].data.strip()
130 elif change == 'remove_file':
133 elif change == 'remove_file':
131 yield change, change_node.childNodes[0].data.strip()
134 yield change, change_node.childNodes[0].data.strip()
132 elif change == 'add_directory':
135 elif change == 'add_directory':
133 yield change, change_node.childNodes[0].data.strip()
136 yield change, change_node.childNodes[0].data.strip()
134 elif change == 'remove_directory':
137 elif change == 'remove_directory':
135 yield change, change_node.childNodes[0].data.strip()
138 yield change, change_node.childNodes[0].data.strip()
136 elif change == 'move':
139 elif change == 'move':
137 yield change, (change_node.getAttribute('from'), change_node.getAttribute('to'))
140 yield change, (change_node.getAttribute('from'), change_node.getAttribute('to'))
138 else:
141 else:
139 error('Problem parsing summary xml: Unexpected element: ' + change_node.toxml())
142 error('Problem parsing summary xml: Unexpected element: ' + change_node.toxml())
140
143
141 # ------------------------------------------------------------------------------
144 # ------------------------------------------------------------------------------
142 #
145 #
143 # Mercurial interface
146 # Mercurial interface
144 #
147 #
145 # ------------------------------------------------------------------------------
148 # ------------------------------------------------------------------------------
146
149
147 def hg_commit( hg_repo, text, author, date ):
150 def hg_commit( hg_repo, text, author, date ):
148 fd, tmpfile = tempfile.mkstemp(prefix="darcs2hg_")
151 fd, tmpfile = tempfile.mkstemp(prefix="darcs2hg_")
149 writefile(tmpfile, text)
152 writefile(tmpfile, text)
150 old_tip = hg_tip(hg_repo)
153 old_tip = hg_tip(hg_repo)
151 cmd("hg add -X _darcs", hg_repo)
154 cmd("hg add -X _darcs", hg_repo)
152 cmd("hg remove -X _darcs --after", hg_repo)
155 cmd("hg remove -X _darcs --after", hg_repo)
153 res = cmd("hg commit -l %s -u \"%s\" -d \"%s 0\"" % (tmpfile, author, date), hg_repo)
156 res = cmd("hg commit -l %s -u \"%s\" -d \"%s 0\"" % (tmpfile, author, date), hg_repo)
154 os.close(fd)
157 os.close(fd)
155 os.unlink(tmpfile)
158 os.unlink(tmpfile)
156 new_tip = hg_tip(hg_repo)
159 new_tip = hg_tip(hg_repo)
157 if not new_tip == old_tip + 1:
160 if not new_tip == old_tip + 1:
158 # Sometimes we may have empty commits; we simply skip them
161 # Sometimes we may have empty commits; we simply skip them
159 if res.strip().lower().find("nothing changed") != -1:
162 if res.strip().lower().find("nothing changed") != -1:
160 pass
163 pass
161 else:
164 else:
162 error("Mercurial commit did not work as expected: " + res)
165 error("Mercurial commit did not work as expected: " + res)
163
166
164 def hg_tip( hg_repo ):
167 def hg_tip( hg_repo ):
165 """Returns the latest local revision number in the given repository."""
168 """Returns the latest local revision number in the given repository."""
166 tip = cmd("hg tip", hg_repo, silent=True)
169 tip = cmd("hg tip", hg_repo, silent=True)
167 tip = tip.split("\n")[0].split(":")[1].strip()
170 tip = tip.split("\n")[0].split(":")[1].strip()
168 return int(tip)
171 return int(tip)
169
172
170 def hg_rename( hg_repo, from_file, to_file ):
173 def hg_rename( hg_repo, from_file, to_file ):
171 cmd("hg rename --after \"%s\" \"%s\"" % (from_file, to_file), hg_repo);
174 cmd("hg rename --after \"%s\" \"%s\"" % (from_file, to_file), hg_repo);
172
175
173 def hg_tag ( hg_repo, text, author, date ):
176 def hg_tag ( hg_repo, text, author, date ):
174 old_tip = hg_tip(hg_repo)
177 old_tip = hg_tip(hg_repo)
175 res = cmd("hg tag -u \"%s\" -d \"%s 0\" \"%s\"" % (author, date, text), hg_repo)
178 res = cmd("hg tag -u \"%s\" -d \"%s 0\" \"%s\"" % (author, date, text), hg_repo)
176 new_tip = hg_tip(hg_repo)
179 new_tip = hg_tip(hg_repo)
177 if not new_tip == old_tip + 1:
180 if not new_tip == old_tip + 1:
178 error("Mercurial tag did not work as expected: " + res)
181 error("Mercurial tag did not work as expected: " + res)
179
182
180 def hg_handle_change( hg_repo, author, date, change, arg ):
183 def hg_handle_change( hg_repo, author, date, change, arg ):
181 """Processes a change event as output by darcs_changes_summary. These
184 """Processes a change event as output by darcs_changes_summary. These
182 consist of file move/rename/add/delete commands."""
185 consist of file move/rename/add/delete commands."""
183 if change == 'modify_file':
186 if change == 'modify_file':
184 pass
187 pass
185 elif change == 'add_file':
188 elif change == 'add_file':
186 pass
189 pass
187 elif change =='remove_file':
190 elif change =='remove_file':
188 pass
191 pass
189 elif change == 'add_directory':
192 elif change == 'add_directory':
190 pass
193 pass
191 elif change == 'remove_directory':
194 elif change == 'remove_directory':
192 pass
195 pass
193 elif change == 'move':
196 elif change == 'move':
194 hg_rename(hg_repo, arg[0], arg[1])
197 hg_rename(hg_repo, arg[0], arg[1])
195 elif change == 'tag':
198 elif change == 'tag':
196 hg_tag(hg_repo, arg, author, date)
199 hg_tag(hg_repo, arg, author, date)
197 else:
200 else:
198 error('Unknown change type ' + change + ': ' + arg)
201 error('Unknown change type ' + change + ': ' + arg)
199
202
200 # ------------------------------------------------------------------------------
203 # ------------------------------------------------------------------------------
201 #
204 #
202 # Main
205 # Main
203 #
206 #
204 # ------------------------------------------------------------------------------
207 # ------------------------------------------------------------------------------
205
208
206 if __name__ == "__main__":
209 if __name__ == "__main__":
207 args = sys.argv[1:]
210 args = sys.argv[1:]
208 # We parse the arguments
211 # We parse the arguments
209 if len(args) == 2:
212 if len(args) == 2:
210 darcs_repo = os.path.abspath(args[0])
213 darcs_repo = os.path.abspath(args[0])
211 hg_repo = os.path.abspath(args[1])
214 hg_repo = os.path.abspath(args[1])
212 skip = None
215 skip = None
213 elif len(args) == 3:
216 elif len(args) == 3:
214 darcs_repo = os.path.abspath(args[0])
217 darcs_repo = os.path.abspath(args[0])
215 hg_repo = os.path.abspath(args[1])
218 hg_repo = os.path.abspath(args[1])
216 skip = int(args[2])
219 skip = int(args[2])
217 else:
220 else:
218 print USAGE
221 print USAGE
219 sys.exit(-1)
222 sys.exit(-1)
223 print 'This command is deprecated. Use the convert extension instead.'
220 # Initializes the target repo
224 # Initializes the target repo
221 if not os.path.isdir(darcs_repo + "/_darcs"):
225 if not os.path.isdir(darcs_repo + "/_darcs"):
222 print "No darcs directory found at: " + darcs_repo
226 print "No darcs directory found at: " + darcs_repo
223 sys.exit(-1)
227 sys.exit(-1)
224 if not os.path.isdir(hg_repo):
228 if not os.path.isdir(hg_repo):
225 os.mkdir(hg_repo)
229 os.mkdir(hg_repo)
226 elif skip == None:
230 elif skip == None:
227 print "Given HG repository must not exist when no SKIP is specified."
231 print "Given HG repository must not exist when no SKIP is specified."
228 sys.exit(-1)
232 sys.exit(-1)
229 if skip == None:
233 if skip == None:
230 cmd("hg init \"%s\"" % (hg_repo))
234 cmd("hg init \"%s\"" % (hg_repo))
231 cmd("darcs initialize", hg_repo)
235 cmd("darcs initialize", hg_repo)
232 # Get the changes from the Darcs repository
236 # Get the changes from the Darcs repository
233 change_number = 0
237 change_number = 0
234 for author, date, summary, chash, description in darcs_changes(darcs_repo):
238 for author, date, summary, chash, description in darcs_changes(darcs_repo):
235 print "== changeset", change_number,
239 print "== changeset", change_number,
236 if skip != None and change_number <= skip:
240 if skip != None and change_number <= skip:
237 print "(skipping)"
241 print "(skipping)"
238 else:
242 else:
239 text = summary + "\n" + description
243 text = summary + "\n" + description
240 # The commit hash has a date like 20021020201112
244 # The commit hash has a date like 20021020201112
241 # --------------------------------YYYYMMDDHHMMSS
245 # --------------------------------YYYYMMDDHHMMSS
242 date = chash.split("-")[0]
246 date = chash.split("-")[0]
243 epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
247 epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
244 darcs_pull(hg_repo, darcs_repo, chash)
248 darcs_pull(hg_repo, darcs_repo, chash)
245 for change, arg in darcs_changes_summary(darcs_repo, chash):
249 for change, arg in darcs_changes_summary(darcs_repo, chash):
246 hg_handle_change(hg_repo, author, epoch, change, arg)
250 hg_handle_change(hg_repo, author, epoch, change, arg)
247 hg_commit(hg_repo, text, author, epoch)
251 hg_commit(hg_repo, text, author, epoch)
248 change_number += 1
252 change_number += 1
249 print "Darcs repository (_darcs) was not deleted. You can keep or remove it."
253 print "Darcs repository (_darcs) was not deleted. You can keep or remove it."
250
254
251 # EOF
255 # EOF
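The deprecated darcs2hg.py script recovers each changeset's date from the leading YYYYMMDDHHMMSS field of the darcs patch hash (see the comment in the main loop) before handing it to hg commit -d. A small illustration of just that step, using an invented hash of the same shape:

    # Illustration only: the hash is made up but follows the
    # YYYYMMDDHHMMSS-... layout the script relies on.
    from time import strptime, mktime

    chash = '20021020201112-53a90-9f1c2b3d4e5f.gz'
    date = chash.split('-')[0]                       # '20021020201112'
    epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
    print(epoch)    # local-time epoch seconds, passed on to hg commit -d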
@@ -1,483 +1,489 @@
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from common import NoRepo, converter_source, converter_sink
8 from common import NoRepo, converter_source, converter_sink
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from darcs import darcs_source
10 from git import convert_git
11 from git import convert_git
11 from hg import mercurial_source, mercurial_sink
12 from hg import mercurial_source, mercurial_sink
12 from subversion import convert_svn, debugsvnlog
13 from subversion import convert_svn, debugsvnlog
13
14
14 import os, shlex, shutil
15 import os, shlex, shutil
15 from mercurial import hg, ui, util, commands
16 from mercurial import hg, ui, util, commands
16 from mercurial.i18n import _
17 from mercurial.i18n import _
17
18
18 commands.norepo += " convert debugsvnlog"
19 commands.norepo += " convert debugsvnlog"
19
20
20 converters = [convert_cvs, convert_git, convert_svn, mercurial_source,
21 converters = [convert_cvs, convert_git, convert_svn, mercurial_source,
21 mercurial_sink]
22 mercurial_sink, darcs_source]
22
23
23 def convertsource(ui, path, **opts):
24 def convertsource(ui, path, **opts):
24 for c in converters:
25 for c in converters:
25 try:
26 try:
26 return c.getcommit and c(ui, path, **opts)
27 return c.getcommit and c(ui, path, **opts)
27 except (AttributeError, NoRepo):
28 except (AttributeError, NoRepo):
28 pass
29 pass
29 raise util.Abort('%s: unknown repository type' % path)
30 raise util.Abort('%s: unknown repository type' % path)
30
31
31 def convertsink(ui, path):
32 def convertsink(ui, path):
32 if not os.path.isdir(path):
33 if not os.path.isdir(path):
33 raise util.Abort("%s: not a directory" % path)
34 raise util.Abort("%s: not a directory" % path)
34 for c in converters:
35 for c in converters:
35 try:
36 try:
36 return c.putcommit and c(ui, path)
37 return c.putcommit and c(ui, path)
37 except (AttributeError, NoRepo):
38 except (AttributeError, NoRepo):
38 pass
39 pass
39 raise util.Abort('%s: unknown repository type' % path)
40 raise util.Abort('%s: unknown repository type' % path)
40
41
41 class converter(object):
42 class converter(object):
42 def __init__(self, ui, source, dest, revmapfile, filemapper, opts):
43 def __init__(self, ui, source, dest, revmapfile, filemapper, opts):
43
44
44 self.source = source
45 self.source = source
45 self.dest = dest
46 self.dest = dest
46 self.ui = ui
47 self.ui = ui
47 self.opts = opts
48 self.opts = opts
48 self.commitcache = {}
49 self.commitcache = {}
49 self.revmapfile = revmapfile
50 self.revmapfile = revmapfile
50 self.revmapfilefd = None
51 self.revmapfilefd = None
51 self.authors = {}
52 self.authors = {}
52 self.authorfile = None
53 self.authorfile = None
53 self.mapfile = filemapper
54 self.mapfile = filemapper
54
55
55 self.map = {}
56 self.map = {}
56 try:
57 try:
57 origrevmapfile = open(self.revmapfile, 'r')
58 origrevmapfile = open(self.revmapfile, 'r')
58 for l in origrevmapfile:
59 for l in origrevmapfile:
59 sv, dv = l[:-1].split()
60 sv, dv = l[:-1].split()
60 self.map[sv] = dv
61 self.map[sv] = dv
61 origrevmapfile.close()
62 origrevmapfile.close()
62 except IOError:
63 except IOError:
63 pass
64 pass
64
65
65 # Read first the dst author map if any
66 # Read first the dst author map if any
66 authorfile = self.dest.authorfile()
67 authorfile = self.dest.authorfile()
67 if authorfile and os.path.exists(authorfile):
68 if authorfile and os.path.exists(authorfile):
68 self.readauthormap(authorfile)
69 self.readauthormap(authorfile)
69 # Extend/Override with new author map if necessary
70 # Extend/Override with new author map if necessary
70 if opts.get('authors'):
71 if opts.get('authors'):
71 self.readauthormap(opts.get('authors'))
72 self.readauthormap(opts.get('authors'))
72 self.authorfile = self.dest.authorfile()
73 self.authorfile = self.dest.authorfile()
73
74
74 def walktree(self, heads):
75 def walktree(self, heads):
75 '''Return a mapping that identifies the uncommitted parents of every
76 '''Return a mapping that identifies the uncommitted parents of every
76 uncommitted changeset.'''
77 uncommitted changeset.'''
77 visit = heads
78 visit = heads
78 known = {}
79 known = {}
79 parents = {}
80 parents = {}
80 while visit:
81 while visit:
81 n = visit.pop(0)
82 n = visit.pop(0)
82 if n in known or n in self.map: continue
83 if n in known or n in self.map: continue
83 known[n] = 1
84 known[n] = 1
84 commit = self.cachecommit(n)
85 commit = self.cachecommit(n)
85 parents[n] = []
86 parents[n] = []
86 for p in commit.parents:
87 for p in commit.parents:
87 parents[n].append(p)
88 parents[n].append(p)
88 visit.append(p)
89 visit.append(p)
89
90
90 return parents
91 return parents
91
92
92 def toposort(self, parents):
93 def toposort(self, parents):
93 '''Return an ordering such that every uncommitted changeset is
94 '''Return an ordering such that every uncommitted changeset is
94 preceded by all its uncommitted ancestors.'''
95 preceded by all its uncommitted ancestors.'''
95 visit = parents.keys()
96 visit = parents.keys()
96 seen = {}
97 seen = {}
97 children = {}
98 children = {}
98
99
99 while visit:
100 while visit:
100 n = visit.pop(0)
101 n = visit.pop(0)
101 if n in seen: continue
102 if n in seen: continue
102 seen[n] = 1
103 seen[n] = 1
103 # Ensure that nodes without parents are present in the 'children'
104 # Ensure that nodes without parents are present in the 'children'
104 # mapping.
105 # mapping.
105 children.setdefault(n, [])
106 children.setdefault(n, [])
106 for p in parents[n]:
107 for p in parents[n]:
107 if not p in self.map:
108 if not p in self.map:
108 visit.append(p)
109 visit.append(p)
109 children.setdefault(p, []).append(n)
110 children.setdefault(p, []).append(n)
110
111
111 s = []
112 s = []
112 removed = {}
113 removed = {}
113 visit = children.keys()
114 visit = children.keys()
114 while visit:
115 while visit:
115 n = visit.pop(0)
116 n = visit.pop(0)
116 if n in removed: continue
117 if n in removed: continue
117 dep = 0
118 dep = 0
118 if n in parents:
119 if n in parents:
119 for p in parents[n]:
120 for p in parents[n]:
120 if p in self.map: continue
121 if p in self.map: continue
121 if p not in removed:
122 if p not in removed:
122 # we're still dependent
123 # we're still dependent
123 visit.append(n)
124 visit.append(n)
124 dep = 1
125 dep = 1
125 break
126 break
126
127
127 if not dep:
128 if not dep:
128 # all n's parents are in the list
129 # all n's parents are in the list
129 removed[n] = 1
130 removed[n] = 1
130 if n not in self.map:
131 if n not in self.map:
131 s.append(n)
132 s.append(n)
132 if n in children:
133 if n in children:
133 for c in children[n]:
134 for c in children[n]:
134 visit.insert(0, c)
135 visit.insert(0, c)
135
136
136 if self.opts.get('datesort'):
137 if self.opts.get('datesort'):
137 depth = {}
138 depth = {}
138 for n in s:
139 for n in s:
139 depth[n] = 0
140 depth[n] = 0
140 pl = [p for p in self.commitcache[n].parents
141 pl = [p for p in self.commitcache[n].parents
141 if p not in self.map]
142 if p not in self.map]
142 if pl:
143 if pl:
143 depth[n] = max([depth[p] for p in pl]) + 1
144 depth[n] = max([depth[p] for p in pl]) + 1
144
145
145 s = [(depth[n], self.commitcache[n].date, n) for n in s]
146 s = [(depth[n], self.commitcache[n].date, n) for n in s]
146 s.sort()
147 s.sort()
147 s = [e[2] for e in s]
148 s = [e[2] for e in s]
148
149
149 return s
150 return s
150
151
151 def mapentry(self, src, dst):
152 def mapentry(self, src, dst):
152 if self.revmapfilefd is None:
153 if self.revmapfilefd is None:
153 try:
154 try:
154 self.revmapfilefd = open(self.revmapfile, "a")
155 self.revmapfilefd = open(self.revmapfile, "a")
155 except IOError, (errno, strerror):
156 except IOError, (errno, strerror):
156 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror))
157 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror))
157 self.map[src] = dst
158 self.map[src] = dst
158 self.revmapfilefd.write("%s %s\n" % (src, dst))
159 self.revmapfilefd.write("%s %s\n" % (src, dst))
159 self.revmapfilefd.flush()
160 self.revmapfilefd.flush()
160
161
161 def writeauthormap(self):
162 def writeauthormap(self):
162 authorfile = self.authorfile
163 authorfile = self.authorfile
163 if authorfile:
164 if authorfile:
164 self.ui.status('Writing author map file %s\n' % authorfile)
165 self.ui.status('Writing author map file %s\n' % authorfile)
165 ofile = open(authorfile, 'w+')
166 ofile = open(authorfile, 'w+')
166 for author in self.authors:
167 for author in self.authors:
167 ofile.write("%s=%s\n" % (author, self.authors[author]))
168 ofile.write("%s=%s\n" % (author, self.authors[author]))
168 ofile.close()
169 ofile.close()
169
170
170 def readauthormap(self, authorfile):
171 def readauthormap(self, authorfile):
171 afile = open(authorfile, 'r')
172 afile = open(authorfile, 'r')
172 for line in afile:
173 for line in afile:
173 try:
174 try:
174 srcauthor = line.split('=')[0].strip()
175 srcauthor = line.split('=')[0].strip()
175 dstauthor = line.split('=')[1].strip()
176 dstauthor = line.split('=')[1].strip()
176 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
177 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
177 self.ui.status(
178 self.ui.status(
178 'Overriding mapping for author %s, was %s, will be %s\n'
179 'Overriding mapping for author %s, was %s, will be %s\n'
179 % (srcauthor, self.authors[srcauthor], dstauthor))
180 % (srcauthor, self.authors[srcauthor], dstauthor))
180 else:
181 else:
181 self.ui.debug('Mapping author %s to %s\n'
182 self.ui.debug('Mapping author %s to %s\n'
182 % (srcauthor, dstauthor))
183 % (srcauthor, dstauthor))
183 self.authors[srcauthor] = dstauthor
184 self.authors[srcauthor] = dstauthor
184 except IndexError:
185 except IndexError:
185 self.ui.warn(
186 self.ui.warn(
186 'Ignoring bad line in author file map %s: %s\n'
187 'Ignoring bad line in author file map %s: %s\n'
187 % (authorfile, line))
188 % (authorfile, line))
188 afile.close()
189 afile.close()
189
190
190 def cachecommit(self, rev):
191 def cachecommit(self, rev):
191 commit = self.source.getcommit(rev)
192 commit = self.source.getcommit(rev)
192 commit.author = self.authors.get(commit.author, commit.author)
193 commit.author = self.authors.get(commit.author, commit.author)
193 self.commitcache[rev] = commit
194 self.commitcache[rev] = commit
194 return commit
195 return commit
195
196
196 def copy(self, rev):
197 def copy(self, rev):
197 commit = self.commitcache[rev]
198 commit = self.commitcache[rev]
198 do_copies = hasattr(self.dest, 'copyfile')
199 do_copies = hasattr(self.dest, 'copyfile')
199 filenames = []
200 filenames = []
200
201
201 files, copies = self.source.getchanges(rev)
202 files, copies = self.source.getchanges(rev)
202 parents = [self.map[r] for r in commit.parents]
203 parents = [self.map[r] for r in commit.parents]
203 if commit.parents:
204 if commit.parents:
204 prev = commit.parents[0]
205 prev = commit.parents[0]
205 if prev not in self.commitcache:
206 if prev not in self.commitcache:
206 self.cachecommit(prev)
207 self.cachecommit(prev)
207 pbranch = self.commitcache[prev].branch
208 pbranch = self.commitcache[prev].branch
208 else:
209 else:
209 pbranch = None
210 pbranch = None
210 self.dest.setbranch(commit.branch, pbranch, parents)
211 self.dest.setbranch(commit.branch, pbranch, parents)
211 for f, v in files:
212 for f, v in files:
212 newf = self.mapfile(f)
213 newf = self.mapfile(f)
213 if not newf:
214 if not newf:
214 continue
215 continue
215 filenames.append(newf)
216 filenames.append(newf)
216 try:
217 try:
217 data = self.source.getfile(f, v)
218 data = self.source.getfile(f, v)
218 except IOError, inst:
219 except IOError, inst:
219 self.dest.delfile(newf)
220 self.dest.delfile(newf)
220 else:
221 else:
221 e = self.source.getmode(f, v)
222 e = self.source.getmode(f, v)
222 self.dest.putfile(newf, e, data)
223 self.dest.putfile(newf, e, data)
223 if do_copies:
224 if do_copies:
224 if f in copies:
225 if f in copies:
225 copyf = self.mapfile(copies[f])
226 copyf = self.mapfile(copies[f])
226 if copyf:
227 if copyf:
227 # Merely marks that a copy happened.
228 # Merely marks that a copy happened.
228 self.dest.copyfile(copyf, newf)
229 self.dest.copyfile(copyf, newf)
229
230
230 if not filenames and self.mapfile.active():
231 if not filenames and self.mapfile.active():
231 newnode = parents[0]
232 newnode = parents[0]
232 else:
233 else:
233 newnode = self.dest.putcommit(filenames, parents, commit)
234 newnode = self.dest.putcommit(filenames, parents, commit)
234 self.mapentry(rev, newnode)
235 self.mapentry(rev, newnode)
235
236
236 def convert(self):
237 def convert(self):
237 try:
238 try:
239 self.source.before()
238 self.dest.before()
240 self.dest.before()
239 self.source.setrevmap(self.map)
241 self.source.setrevmap(self.map)
240 self.ui.status("scanning source...\n")
242 self.ui.status("scanning source...\n")
241 heads = self.source.getheads()
243 heads = self.source.getheads()
242 parents = self.walktree(heads)
244 parents = self.walktree(heads)
243 self.ui.status("sorting...\n")
245 self.ui.status("sorting...\n")
244 t = self.toposort(parents)
246 t = self.toposort(parents)
245 num = len(t)
247 num = len(t)
246 c = None
248 c = None
247
249
248 self.ui.status("converting...\n")
250 self.ui.status("converting...\n")
249 for c in t:
251 for c in t:
250 num -= 1
252 num -= 1
251 desc = self.commitcache[c].desc
253 desc = self.commitcache[c].desc
252 if "\n" in desc:
254 if "\n" in desc:
253 desc = desc.splitlines()[0]
255 desc = desc.splitlines()[0]
254 self.ui.status("%d %s\n" % (num, desc))
256 self.ui.status("%d %s\n" % (num, desc))
255 self.copy(c)
257 self.copy(c)
256
258
257 tags = self.source.gettags()
259 tags = self.source.gettags()
258 ctags = {}
260 ctags = {}
259 for k in tags:
261 for k in tags:
260 v = tags[k]
262 v = tags[k]
261 if v in self.map:
263 if v in self.map:
262 ctags[k] = self.map[v]
264 ctags[k] = self.map[v]
263
265
264 if c and ctags:
266 if c and ctags:
265 nrev = self.dest.puttags(ctags)
267 nrev = self.dest.puttags(ctags)
266 # write another hash correspondence to override the previous
268 # write another hash correspondence to override the previous
267 # one so we don't end up with extra tag heads
269 # one so we don't end up with extra tag heads
268 if nrev:
270 if nrev:
269 self.mapentry(c, nrev)
271 self.mapentry(c, nrev)
270
272
271 self.writeauthormap()
273 self.writeauthormap()
272 finally:
274 finally:
273 self.cleanup()
275 self.cleanup()
274
276
275 def cleanup(self):
277 def cleanup(self):
276 self.dest.after()
278 try:
279 self.dest.after()
280 finally:
281 self.source.after()
277 if self.revmapfilefd:
282 if self.revmapfilefd:
278 self.revmapfilefd.close()
283 self.revmapfilefd.close()
279
284
280 def rpairs(name):
285 def rpairs(name):
281 e = len(name)
286 e = len(name)
282 while e != -1:
287 while e != -1:
283 yield name[:e], name[e+1:]
288 yield name[:e], name[e+1:]
284 e = name.rfind('/', 0, e)
289 e = name.rfind('/', 0, e)
285
290
286 class filemapper(object):
291 class filemapper(object):
287 '''Map and filter filenames when importing.
292 '''Map and filter filenames when importing.
288 A name can be mapped to itself, a new name, or None (omit from new
293 A name can be mapped to itself, a new name, or None (omit from new
289 repository).'''
294 repository).'''
290
295
291 def __init__(self, ui, path=None):
296 def __init__(self, ui, path=None):
292 self.ui = ui
297 self.ui = ui
293 self.include = {}
298 self.include = {}
294 self.exclude = {}
299 self.exclude = {}
295 self.rename = {}
300 self.rename = {}
296 if path:
301 if path:
297 if self.parse(path):
302 if self.parse(path):
298 raise util.Abort(_('errors in filemap'))
303 raise util.Abort(_('errors in filemap'))
299
304
300 def parse(self, path):
305 def parse(self, path):
301 errs = 0
306 errs = 0
302 def check(name, mapping, listname):
307 def check(name, mapping, listname):
303 if name in mapping:
308 if name in mapping:
304 self.ui.warn(_('%s:%d: %r already in %s list\n') %
309 self.ui.warn(_('%s:%d: %r already in %s list\n') %
305 (lex.infile, lex.lineno, name, listname))
310 (lex.infile, lex.lineno, name, listname))
306 return 1
311 return 1
307 return 0
312 return 0
308 lex = shlex.shlex(open(path), path, True)
313 lex = shlex.shlex(open(path), path, True)
309 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
314 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
310 cmd = lex.get_token()
315 cmd = lex.get_token()
311 while cmd:
316 while cmd:
312 if cmd == 'include':
317 if cmd == 'include':
313 name = lex.get_token()
318 name = lex.get_token()
314 errs += check(name, self.exclude, 'exclude')
319 errs += check(name, self.exclude, 'exclude')
315 self.include[name] = name
320 self.include[name] = name
316 elif cmd == 'exclude':
321 elif cmd == 'exclude':
317 name = lex.get_token()
322 name = lex.get_token()
318 errs += check(name, self.include, 'include')
323 errs += check(name, self.include, 'include')
319 errs += check(name, self.rename, 'rename')
324 errs += check(name, self.rename, 'rename')
320 self.exclude[name] = name
325 self.exclude[name] = name
321 elif cmd == 'rename':
326 elif cmd == 'rename':
322 src = lex.get_token()
327 src = lex.get_token()
323 dest = lex.get_token()
328 dest = lex.get_token()
324 errs += check(src, self.exclude, 'exclude')
329 errs += check(src, self.exclude, 'exclude')
325 self.rename[src] = dest
330 self.rename[src] = dest
326 elif cmd == 'source':
331 elif cmd == 'source':
327 errs += self.parse(lex.get_token())
332 errs += self.parse(lex.get_token())
328 else:
333 else:
329 self.ui.warn(_('%s:%d: unknown directive %r\n') %
334 self.ui.warn(_('%s:%d: unknown directive %r\n') %
330 (lex.infile, lex.lineno, cmd))
335 (lex.infile, lex.lineno, cmd))
331 errs += 1
336 errs += 1
332 cmd = lex.get_token()
337 cmd = lex.get_token()
333 return errs
338 return errs
334
339
335 def lookup(self, name, mapping):
340 def lookup(self, name, mapping):
336 for pre, suf in rpairs(name):
341 for pre, suf in rpairs(name):
337 try:
342 try:
338 return mapping[pre], pre, suf
343 return mapping[pre], pre, suf
339 except KeyError, err:
344 except KeyError, err:
340 pass
345 pass
341 return '', name, ''
346 return '', name, ''
342
347
343 def __call__(self, name):
348 def __call__(self, name):
344 if self.include:
349 if self.include:
345 inc = self.lookup(name, self.include)[0]
350 inc = self.lookup(name, self.include)[0]
346 else:
351 else:
347 inc = name
352 inc = name
348 if self.exclude:
353 if self.exclude:
349 exc = self.lookup(name, self.exclude)[0]
354 exc = self.lookup(name, self.exclude)[0]
350 else:
355 else:
351 exc = ''
356 exc = ''
352 if not inc or exc:
357 if not inc or exc:
353 return None
358 return None
354 newpre, pre, suf = self.lookup(name, self.rename)
359 newpre, pre, suf = self.lookup(name, self.rename)
355 if newpre:
360 if newpre:
356 if newpre == '.':
361 if newpre == '.':
357 return suf
362 return suf
358 if suf:
363 if suf:
359 return newpre + '/' + suf
364 return newpre + '/' + suf
360 return newpre
365 return newpre
361 return name
366 return name
362
367
363 def active(self):
368 def active(self):
364 return bool(self.include or self.exclude or self.rename)
369 return bool(self.include or self.exclude or self.rename)
365
370
366 def convert(ui, src, dest=None, revmapfile=None, **opts):
371 def convert(ui, src, dest=None, revmapfile=None, **opts):
367 """Convert a foreign SCM repository to a Mercurial one.
372 """Convert a foreign SCM repository to a Mercurial one.
368
373
369 Accepted source formats:
374 Accepted source formats:
370 - GIT
371 - CVS
375 - CVS
372 - SVN
376 - Darcs
377 - git
378 - Subversion
373
379
374 Accepted destination formats:
380 Accepted destination formats:
375 - Mercurial
381 - Mercurial
376
382
377 If no revision is given, all revisions will be converted. Otherwise,
383 If no revision is given, all revisions will be converted. Otherwise,
378 convert will only import up to the named revision (given in a format
384 convert will only import up to the named revision (given in a format
379 understood by the source).
385 understood by the source).
380
386
381 If no destination directory name is specified, it defaults to the
387 If no destination directory name is specified, it defaults to the
382 basename of the source with '-hg' appended. If the destination
388 basename of the source with '-hg' appended. If the destination
383 repository doesn't exist, it will be created.
389 repository doesn't exist, it will be created.
384
390
385 If <revmapfile> isn't given, it will be put in a default location
391 If <revmapfile> isn't given, it will be put in a default location
386 (<dest>/.hg/shamap by default). The <revmapfile> is a simple text
392 (<dest>/.hg/shamap by default). The <revmapfile> is a simple text
387 file that maps each source commit ID to the destination ID for
393 file that maps each source commit ID to the destination ID for
388 that revision, like so:
394 that revision, like so:
389 <source ID> <destination ID>
395 <source ID> <destination ID>
390
396
391 If the file doesn't exist, it's automatically created. It's updated
397 If the file doesn't exist, it's automatically created. It's updated
392 on each commit copied, so convert-repo can be interrupted and can
398 on each commit copied, so convert-repo can be interrupted and can
393 be run repeatedly to copy new commits.
399 be run repeatedly to copy new commits.
394
400
395 The [username mapping] file is a simple text file that maps each source
401 The [username mapping] file is a simple text file that maps each source
396 commit author to a destination commit author. It is handy for source SCMs
402 commit author to a destination commit author. It is handy for source SCMs
397 that use unix logins to identify authors (eg: CVS). One line per author
403 that use unix logins to identify authors (eg: CVS). One line per author
398 mapping and the line format is:
404 mapping and the line format is:
399 srcauthor=whatever string you want
405 srcauthor=whatever string you want
400
406
401 The filemap is a file that allows filtering and remapping of files
407 The filemap is a file that allows filtering and remapping of files
402 and directories. Comment lines start with '#'. Each line can
408 and directories. Comment lines start with '#'. Each line can
403 contain one of the following directives:
409 contain one of the following directives:
404
410
405 include path/to/file
411 include path/to/file
406
412
407 exclude path/to/file
413 exclude path/to/file
408
414
409 rename from/file to/file
415 rename from/file to/file
410
416
411 The 'include' directive causes a file, or all files under a
417 The 'include' directive causes a file, or all files under a
412 directory, to be included in the destination repository. The
418 directory, to be included in the destination repository. The
413 'exclude' directive causes files or directories to be omitted.
419 'exclude' directive causes files or directories to be omitted.
414 The 'rename' directive renames a file or directory. To rename
420 The 'rename' directive renames a file or directory. To rename
415 from a subdirectory into the root of the repository, use '.' as
421 from a subdirectory into the root of the repository, use '.' as
416 the path to rename to.
422 the path to rename to.
417 """
423 """
418
424
419 util._encoding = 'UTF-8'
425 util._encoding = 'UTF-8'
420
426
421 if not dest:
427 if not dest:
422 dest = hg.defaultdest(src) + "-hg"
428 dest = hg.defaultdest(src) + "-hg"
423 ui.status("assuming destination %s\n" % dest)
429 ui.status("assuming destination %s\n" % dest)
424
430
425 # Try to be smart and initialize things when required
431 # Try to be smart and initialize things when required
426 created = False
432 created = False
427 if os.path.isdir(dest):
433 if os.path.isdir(dest):
428 if len(os.listdir(dest)) > 0:
434 if len(os.listdir(dest)) > 0:
429 try:
435 try:
430 hg.repository(ui, dest)
436 hg.repository(ui, dest)
431 ui.status("destination %s is a Mercurial repository\n" % dest)
437 ui.status("destination %s is a Mercurial repository\n" % dest)
432 except hg.RepoError:
438 except hg.RepoError:
433 raise util.Abort(
439 raise util.Abort(
434 "destination directory %s is not empty.\n"
440 "destination directory %s is not empty.\n"
435 "Please specify an empty directory to be initialized\n"
441 "Please specify an empty directory to be initialized\n"
436 "or an already initialized mercurial repository"
442 "or an already initialized mercurial repository"
437 % dest)
443 % dest)
438 else:
444 else:
439 ui.status("initializing destination %s repository\n" % dest)
445 ui.status("initializing destination %s repository\n" % dest)
440 hg.repository(ui, dest, create=True)
446 hg.repository(ui, dest, create=True)
441 created = True
447 created = True
442 elif os.path.exists(dest):
448 elif os.path.exists(dest):
443 raise util.Abort("destination %s exists and is not a directory" % dest)
449 raise util.Abort("destination %s exists and is not a directory" % dest)
444 else:
450 else:
445 ui.status("initializing destination %s repository\n" % dest)
451 ui.status("initializing destination %s repository\n" % dest)
446 hg.repository(ui, dest, create=True)
452 hg.repository(ui, dest, create=True)
447 created = True
453 created = True
448
454
449 destc = convertsink(ui, dest)
455 destc = convertsink(ui, dest)
450
456
451 try:
457 try:
452 srcc = convertsource(ui, src, rev=opts.get('rev'))
458 srcc = convertsource(ui, src, rev=opts.get('rev'))
453 except Exception:
459 except Exception:
454 if created:
460 if created:
455 shutil.rmtree(dest, True)
461 shutil.rmtree(dest, True)
456 raise
462 raise
457
463
458 if not revmapfile:
464 if not revmapfile:
459 try:
465 try:
460 revmapfile = destc.revmapfile()
466 revmapfile = destc.revmapfile()
461 except:
467 except:
462 revmapfile = os.path.join(destc, "map")
468 revmapfile = os.path.join(destc, "map")
463
469
464
470
465 c = converter(ui, srcc, destc, revmapfile, filemapper(ui, opts['filemap']),
471 c = converter(ui, srcc, destc, revmapfile, filemapper(ui, opts['filemap']),
466 opts)
472 opts)
467 c.convert()
473 c.convert()
468
474
469
475
470 cmdtable = {
476 cmdtable = {
471 "convert":
477 "convert":
472 (convert,
478 (convert,
473 [('A', 'authors', '', 'username mapping filename'),
479 [('A', 'authors', '', 'username mapping filename'),
474 ('', 'filemap', '', 'remap file names using contents of file'),
480 ('', 'filemap', '', 'remap file names using contents of file'),
475 ('r', 'rev', '', 'import up to target revision REV'),
481 ('r', 'rev', '', 'import up to target revision REV'),
476 ('', 'datesort', None, 'try to sort changesets by date')],
482 ('', 'datesort', None, 'try to sort changesets by date')],
477 'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
483 'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
478 "debugsvnlog":
484 "debugsvnlog":
479 (debugsvnlog,
485 (debugsvnlog,
480 [],
486 [],
481 'hg debugsvnlog'),
487 'hg debugsvnlog'),
482 }
488 }
483
489
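The convert docstring above only lists the --filemap directives abstractly, so here is a purely illustrative filemap (the paths are hypothetical, not taken from this changeset):

    include src
    exclude src/tests
    rename src .

Going by the filemapper logic above, this maps src/main.py to main.py, drops everything under src/tests, and omits unrelated paths such as docs/notes.txt, because once an include list exists, unmatched names default to excluded.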
@@ -1,145 +1,151 @@
1 # common code for the convert extension
1 # common code for the convert extension
2 import base64
2 import base64
3 import cPickle as pickle
3 import cPickle as pickle
4
4
5 def encodeargs(args):
5 def encodeargs(args):
6 def encodearg(s):
6 def encodearg(s):
7 lines = base64.encodestring(s)
7 lines = base64.encodestring(s)
8 lines = [l.splitlines()[0] for l in lines]
8 lines = [l.splitlines()[0] for l in lines]
9 return ''.join(lines)
9 return ''.join(lines)
10
10
11 s = pickle.dumps(args)
11 s = pickle.dumps(args)
12 return encodearg(s)
12 return encodearg(s)
13
13
14 def decodeargs(s):
14 def decodeargs(s):
15 s = base64.decodestring(s)
15 s = base64.decodestring(s)
16 return pickle.loads(s)
16 return pickle.loads(s)
17
17
18 class NoRepo(Exception): pass
18 class NoRepo(Exception): pass
19
19
20 class commit(object):
20 class commit(object):
21 def __init__(self, author, date, desc, parents, branch=None, rev=None):
21 def __init__(self, author, date, desc, parents, branch=None, rev=None):
22 self.author = author
22 self.author = author
23 self.date = date
23 self.date = date
24 self.desc = desc
24 self.desc = desc
25 self.parents = parents
25 self.parents = parents
26 self.branch = branch
26 self.branch = branch
27 self.rev = rev
27 self.rev = rev
28
28
29 class converter_source(object):
29 class converter_source(object):
30 """Conversion source interface"""
30 """Conversion source interface"""
31
31
32 def __init__(self, ui, path, rev=None):
32 def __init__(self, ui, path, rev=None):
33 """Initialize conversion source (or raise NoRepo("message")
33 """Initialize conversion source (or raise NoRepo("message")
34 exception if path is not a valid repository)"""
34 exception if path is not a valid repository)"""
35 self.ui = ui
35 self.ui = ui
36 self.path = path
36 self.path = path
37 self.rev = rev
37 self.rev = rev
38
38
39 self.encoding = 'utf-8'
39 self.encoding = 'utf-8'
40
40
41 def before(self):
42 pass
43
44 def after(self):
45 pass
46
41 def setrevmap(self, revmap):
47 def setrevmap(self, revmap):
42 """set the map of already-converted revisions"""
48 """set the map of already-converted revisions"""
43 pass
49 pass
44
50
45 def getheads(self):
51 def getheads(self):
46 """Return a list of this repository's heads"""
52 """Return a list of this repository's heads"""
47 raise NotImplementedError()
53 raise NotImplementedError()
48
54
49 def getfile(self, name, rev):
55 def getfile(self, name, rev):
50 """Return file contents as a string"""
56 """Return file contents as a string"""
51 raise NotImplementedError()
57 raise NotImplementedError()
52
58
53 def getmode(self, name, rev):
59 def getmode(self, name, rev):
54 """Return file mode, eg. '', 'x', or 'l'"""
60 """Return file mode, eg. '', 'x', or 'l'"""
55 raise NotImplementedError()
61 raise NotImplementedError()
56
62
57 def getchanges(self, version):
63 def getchanges(self, version):
58 """Returns a tuple of (files, copies)
64 """Returns a tuple of (files, copies)
59 Files is a sorted list of (filename, id) tuples for all files changed
65 Files is a sorted list of (filename, id) tuples for all files changed
60 in version, where id is the source revision id of the file.
66 in version, where id is the source revision id of the file.
61
67
62 copies is a dictionary of dest: source
68 copies is a dictionary of dest: source
63 """
69 """
64 raise NotImplementedError()
70 raise NotImplementedError()
65
71
66 def getcommit(self, version):
72 def getcommit(self, version):
67 """Return the commit object for version"""
73 """Return the commit object for version"""
68 raise NotImplementedError()
74 raise NotImplementedError()
69
75
70 def gettags(self):
76 def gettags(self):
71 """Return the tags as a dictionary of name: revision"""
77 """Return the tags as a dictionary of name: revision"""
72 raise NotImplementedError()
78 raise NotImplementedError()
73
79
74 def recode(self, s, encoding=None):
80 def recode(self, s, encoding=None):
75 if not encoding:
81 if not encoding:
76 encoding = self.encoding or 'utf-8'
82 encoding = self.encoding or 'utf-8'
77
83
78 if isinstance(s, unicode):
84 if isinstance(s, unicode):
79 return s.encode("utf-8")
85 return s.encode("utf-8")
80 try:
86 try:
81 return s.decode(encoding).encode("utf-8")
87 return s.decode(encoding).encode("utf-8")
82 except:
88 except:
83 try:
89 try:
84 return s.decode("latin-1").encode("utf-8")
90 return s.decode("latin-1").encode("utf-8")
85 except:
91 except:
86 return s.decode(encoding, "replace").encode("utf-8")
92 return s.decode(encoding, "replace").encode("utf-8")
87
93
88 class converter_sink(object):
94 class converter_sink(object):
89 """Conversion sink (target) interface"""
95 """Conversion sink (target) interface"""
90
96
91 def __init__(self, ui, path):
97 def __init__(self, ui, path):
92 """Initialize conversion sink (or raise NoRepo("message")
98 """Initialize conversion sink (or raise NoRepo("message")
93 exception if path is not a valid repository)"""
99 exception if path is not a valid repository)"""
94 raise NotImplementedError()
100 raise NotImplementedError()
95
101
96 def getheads(self):
102 def getheads(self):
97 """Return a list of this repository's heads"""
103 """Return a list of this repository's heads"""
98 raise NotImplementedError()
104 raise NotImplementedError()
99
105
100 def revmapfile(self):
106 def revmapfile(self):
101 """Path to a file that will contain lines
107 """Path to a file that will contain lines
102 source_rev_id sink_rev_id
108 source_rev_id sink_rev_id
103 mapping equivalent revision identifiers for each system."""
109 mapping equivalent revision identifiers for each system."""
104 raise NotImplementedError()
110 raise NotImplementedError()
105
111
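A minimal sketch (not part of this changeset) of reading a revmap file in the format described above; readrevmap is a hypothetical helper and assumes one whitespace-separated pair per line:

def readrevmap(path):
    # Parse "source_rev_id sink_rev_id" lines into a dict.
    revmap = {}
    for line in open(path):
        line = line.strip()
        if not line:
            continue
        src, dst = line.split(None, 1)
        revmap[src] = dst
    return revmap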
106 def authorfile(self):
112 def authorfile(self):
107 """Path to a file that will contain lines
113 """Path to a file that will contain lines
108 srcauthor=dstauthor
114 srcauthor=dstauthor
109 mapping equivalent author identifiers for each system.
115 mapping equivalent author identifiers for each system.
110 return None
116 return None
111
117
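A similar hypothetical sketch for the author map format above; readauthormap assumes one srcauthor=dstauthor entry per line, with later entries overriding earlier ones:

def readauthormap(path):
    # Split on the first '=' only, so '=' may still appear in dstauthor.
    authors = {}
    for line in open(path):
        if '=' not in line:
            continue
        src, dst = line.split('=', 1)
        authors[src.strip()] = dst.strip()
    return authors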
112 def putfile(self, f, e, data):
118 def putfile(self, f, e, data):
113 """Put file for next putcommit().
119 """Put file for next putcommit().
114 f: path to file
120 f: path to file
115 e: '', 'x', or 'l' (regular file, executable, or symlink)
121 e: '', 'x', or 'l' (regular file, executable, or symlink)
116 data: file contents"""
122 data: file contents"""
117 raise NotImplementedError()
123 raise NotImplementedError()
118
124
119 def delfile(self, f):
125 def delfile(self, f):
120 """Delete file for next putcommit().
126 """Delete file for next putcommit().
121 f: path to file"""
127 f: path to file"""
122 raise NotImplementedError()
128 raise NotImplementedError()
123
129
124 def putcommit(self, files, parents, commit):
130 def putcommit(self, files, parents, commit):
125 """Create a revision with all changed files listed in 'files'
131 """Create a revision with all changed files listed in 'files'
126 and having listed parents. 'commit' is a commit object containing
132 and having listed parents. 'commit' is a commit object containing
127 at a minimum the author, date, and message for this changeset.
133 at a minimum the author, date, and message for this changeset.
128 Called after putfile() and delfile() calls. Note that the sink
134 Called after putfile() and delfile() calls. Note that the sink
129 repository is not told to update itself to a particular revision
135 repository is not told to update itself to a particular revision
130 (or even what that revision would be) before it receives the
136 (or even what that revision would be) before it receives the
131 file data."""
137 file data."""
132 raise NotImplementedError()
138 raise NotImplementedError()
133
139
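To make the calling order described in putcommit() concrete, here is a hedged sketch of one step of a conversion driver; convertone, source, sink and parentmap are illustrative names rather than part of this interface, and copy handling is omitted:

def convertone(source, sink, rev, parentmap):
    # One step of a hypothetical driver loop: stage changes, then commit.
    files, copies = source.getchanges(rev)        # copies not handled here
    for fname, fid in files:
        try:
            data = source.getfile(fname, fid)
        except IOError:
            sink.delfile(fname)                   # file removed in this rev
        else:
            sink.putfile(fname, source.getmode(fname, fid), data)
    c = source.getcommit(rev)
    parents = [parentmap[p] for p in c.parents]
    parentmap[rev] = sink.putcommit([fname for fname, fid in files],
                                    parents, c)
    return parentmap[rev]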
134 def puttags(self, tags):
140 def puttags(self, tags):
135 """Put tags into sink.
141 """Put tags into sink.
136 tags: {tagname: sink_rev_id, ...}"""
142 tags: {tagname: sink_rev_id, ...}"""
137 raise NotImplementedError()
143 raise NotImplementedError()
138
144
139 def setbranch(self, branch, pbranch, parents):
145 def setbranch(self, branch, pbranch, parents):
140 """Set the current branch name. Called before the first putfile
146 """Set the current branch name. Called before the first putfile
141 on the branch.
147 on the branch.
142 branch: branch name for subsequent commits
148 branch: branch name for subsequent commits
143 pbranch: branch name of parent commit
149 pbranch: branch name of parent commit
144 parents: destination revisions of parent"""
150 parents: destination revisions of parent"""
145 pass
151 pass
@@ -1,210 +1,213 b''
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 # Note for hg->hg conversion: Old versions of Mercurial didn't trim
3 # Note for hg->hg conversion: Old versions of Mercurial didn't trim
4 # the whitespace from the ends of commit messages, but new versions
4 # the whitespace from the ends of commit messages, but new versions
5 # do. Changesets created by those older versions, then converted, may
5 # do. Changesets created by those older versions, then converted, may
6 # thus have different hashes for changesets that are otherwise
6 # thus have different hashes for changesets that are otherwise
7 # identical.
7 # identical.
8
8
9
9
10 import os, time
10 import os, time
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial.node import *
12 from mercurial.node import *
13 from mercurial import hg, lock, revlog, util
13 from mercurial import hg, lock, revlog, util
14
14
15 from common import NoRepo, commit, converter_source, converter_sink
15 from common import NoRepo, commit, converter_source, converter_sink
16
16
17 class mercurial_sink(converter_sink):
17 class mercurial_sink(converter_sink):
18 def __init__(self, ui, path):
18 def __init__(self, ui, path):
19 self.path = path
19 self.path = path
20 self.ui = ui
20 self.ui = ui
21 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
21 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
22 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
22 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
23 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
23 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
24 self.lastbranch = None
24 self.lastbranch = None
25 try:
25 try:
26 self.repo = hg.repository(self.ui, path)
26 self.repo = hg.repository(self.ui, path)
27 except:
27 except:
28 raise NoRepo("could not open hg repo %s as sink" % path)
28 raise NoRepo("could not open hg repo %s as sink" % path)
29 self.lock = None
29 self.lock = None
30 self.wlock = None
30 self.wlock = None
31
31
32 def before(self):
32 def before(self):
33 self.wlock = self.repo.wlock()
33 self.wlock = self.repo.wlock()
34 self.lock = self.repo.lock()
34 self.lock = self.repo.lock()
35 self.repo.dirstate.clear()
35 self.repo.dirstate.clear()
36
36
37 def after(self):
37 def after(self):
38 self.repo.dirstate.invalidate()
38 self.repo.dirstate.invalidate()
39 self.lock = None
39 self.lock = None
40 self.wlock = None
40 self.wlock = None
41
41
42 def revmapfile(self):
42 def revmapfile(self):
43 return os.path.join(self.path, ".hg", "shamap")
43 return os.path.join(self.path, ".hg", "shamap")
44
44
45 def authorfile(self):
45 def authorfile(self):
46 return os.path.join(self.path, ".hg", "authormap")
46 return os.path.join(self.path, ".hg", "authormap")
47
47
48 def getheads(self):
48 def getheads(self):
49 h = self.repo.changelog.heads()
49 h = self.repo.changelog.heads()
50 return [ hex(x) for x in h ]
50 return [ hex(x) for x in h ]
51
51
52 def putfile(self, f, e, data):
52 def putfile(self, f, e, data):
53 self.repo.wwrite(f, data, e)
53 self.repo.wwrite(f, data, e)
54 if f not in self.repo.dirstate:
54 if f not in self.repo.dirstate:
55 self.repo.dirstate.normallookup(f)
55 self.repo.dirstate.normallookup(f)
56
56
57 def copyfile(self, source, dest):
57 def copyfile(self, source, dest):
58 self.repo.copy(source, dest)
58 self.repo.copy(source, dest)
59
59
60 def delfile(self, f):
60 def delfile(self, f):
61 try:
61 try:
62 util.unlink(self.repo.wjoin(f))
62 util.unlink(self.repo.wjoin(f))
63 #self.repo.remove([f])
63 #self.repo.remove([f])
64 except OSError:
64 except OSError:
65 pass
65 pass
66
66
67 def setbranch(self, branch, pbranch, parents):
67 def setbranch(self, branch, pbranch, parents):
68 if (not self.clonebranches) or (branch == self.lastbranch):
68 if (not self.clonebranches) or (branch == self.lastbranch):
69 return
69 return
70
70
71 self.lastbranch = branch
71 self.lastbranch = branch
72 self.after()
72 self.after()
73 if not branch:
73 if not branch:
74 branch = 'default'
74 branch = 'default'
75 if not pbranch:
75 if not pbranch:
76 pbranch = 'default'
76 pbranch = 'default'
77
77
78 branchpath = os.path.join(self.path, branch)
78 branchpath = os.path.join(self.path, branch)
79 try:
79 try:
80 self.repo = hg.repository(self.ui, branchpath)
80 self.repo = hg.repository(self.ui, branchpath)
81 except:
81 except:
82 if not parents:
82 if not parents:
83 self.repo = hg.repository(self.ui, branchpath, create=True)
83 self.repo = hg.repository(self.ui, branchpath, create=True)
84 else:
84 else:
85 self.ui.note(_('cloning branch %s to %s\n') % (pbranch, branch))
85 self.ui.note(_('cloning branch %s to %s\n') % (pbranch, branch))
86 hg.clone(self.ui, os.path.join(self.path, pbranch),
86 hg.clone(self.ui, os.path.join(self.path, pbranch),
87 branchpath, rev=parents, update=False,
87 branchpath, rev=parents, update=False,
88 stream=True)
88 stream=True)
89 self.repo = hg.repository(self.ui, branchpath)
89 self.repo = hg.repository(self.ui, branchpath)
90
90
91 def putcommit(self, files, parents, commit):
91 def putcommit(self, files, parents, commit):
92 seen = {}
92 seen = {}
93 pl = []
93 pl = []
94 for p in parents:
94 for p in parents:
95 if p not in seen:
95 if p not in seen:
96 pl.append(p)
96 pl.append(p)
97 seen[p] = 1
97 seen[p] = 1
98 parents = pl
98 parents = pl
99
99
100 if len(parents) < 2: parents.append("0" * 40)
100 if len(parents) < 2: parents.append("0" * 40)
101 if len(parents) < 2: parents.append("0" * 40)
101 if len(parents) < 2: parents.append("0" * 40)
102 p2 = parents.pop(0)
102 p2 = parents.pop(0)
103
103
104 text = commit.desc
104 text = commit.desc
105 extra = {}
105 extra = {}
106 if self.branchnames and commit.branch:
106 if self.branchnames and commit.branch:
107 extra['branch'] = commit.branch
107 extra['branch'] = commit.branch
108 if commit.rev:
108 if commit.rev:
109 extra['convert_revision'] = commit.rev
109 extra['convert_revision'] = commit.rev
110
110
111 while parents:
111 while parents:
112 p1 = p2
112 p1 = p2
113 p2 = parents.pop(0)
113 p2 = parents.pop(0)
114 a = self.repo.rawcommit(files, text, commit.author, commit.date,
114 a = self.repo.rawcommit(files, text, commit.author, commit.date,
115 bin(p1), bin(p2), extra=extra)
115 bin(p1), bin(p2), extra=extra)
116 self.repo.dirstate.clear()
116 self.repo.dirstate.clear()
117 text = "(octopus merge fixup)\n"
117 text = "(octopus merge fixup)\n"
118 p2 = hg.hex(self.repo.changelog.tip())
118 p2 = hg.hex(self.repo.changelog.tip())
119
119
120 return p2
120 return p2
121
121
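# Illustrative note (not in the original change): for a source changeset
# with parents [A, B, C], the loop in putcommit() above produces a chain
# of two-parent commits instead of a single octopus merge:
#
#   rawcommit(files, commit.desc, ..., A, B)                  -> M1
#   rawcommit(files, "(octopus merge fixup)\n", ..., M1, C)   -> M2
#
# and M2 (the tip after the last commit) is what putcommit() returns.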
122 def puttags(self, tags):
122 def puttags(self, tags):
123 try:
123 try:
124 old = self.repo.wfile(".hgtags").read()
124 old = self.repo.wfile(".hgtags").read()
125 oldlines = old.splitlines(1)
125 oldlines = old.splitlines(1)
126 oldlines.sort()
126 oldlines.sort()
127 except:
127 except:
128 oldlines = []
128 oldlines = []
129
129
130 k = tags.keys()
130 k = tags.keys()
131 k.sort()
131 k.sort()
132 newlines = []
132 newlines = []
133 for tag in k:
133 for tag in k:
134 newlines.append("%s %s\n" % (tags[tag], tag))
134 newlines.append("%s %s\n" % (tags[tag], tag))
135
135
136 newlines.sort()
136 newlines.sort()
137
137
138 if newlines != oldlines:
138 if newlines != oldlines:
139 self.ui.status("updating tags\n")
139 self.ui.status("updating tags\n")
140 f = self.repo.wfile(".hgtags", "w")
140 f = self.repo.wfile(".hgtags", "w")
141 f.write("".join(newlines))
141 f.write("".join(newlines))
142 f.close()
142 f.close()
143 if not oldlines: self.repo.add([".hgtags"])
143 if not oldlines: self.repo.add([".hgtags"])
144 date = "%s 0" % int(time.mktime(time.gmtime()))
144 date = "%s 0" % int(time.mktime(time.gmtime()))
145 extra = {}
145 extra = {}
146 if self.tagsbranch != 'default':
146 if self.tagsbranch != 'default':
147 extra['branch'] = self.tagsbranch
147 extra['branch'] = self.tagsbranch
148 try:
148 try:
149 tagparent = self.repo.changectx(self.tagsbranch).node()
149 tagparent = self.repo.changectx(self.tagsbranch).node()
150 except hg.RepoError, inst:
150 except hg.RepoError, inst:
151 tagparent = nullid
151 tagparent = nullid
152 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
152 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
153 date, tagparent, nullid)
153 date, tagparent, nullid)
154 return hex(self.repo.changelog.tip())
154 return hex(self.repo.changelog.tip())
155
155
156 class mercurial_source(converter_source):
156 class mercurial_source(converter_source):
157 def __init__(self, ui, path, rev=None):
157 def __init__(self, ui, path, rev=None):
158 converter_source.__init__(self, ui, path, rev)
158 converter_source.__init__(self, ui, path, rev)
159 self.repo = hg.repository(self.ui, path)
159 try:
160 self.repo = hg.repository(self.ui, path)
161 except:
162 raise NoRepo("could not open hg repo %s as source" % path)
160 self.lastrev = None
163 self.lastrev = None
161 self.lastctx = None
164 self.lastctx = None
162
165
163 def changectx(self, rev):
166 def changectx(self, rev):
164 if self.lastrev != rev:
167 if self.lastrev != rev:
165 self.lastctx = self.repo.changectx(rev)
168 self.lastctx = self.repo.changectx(rev)
166 self.lastrev = rev
169 self.lastrev = rev
167 return self.lastctx
170 return self.lastctx
168
171
169 def getheads(self):
172 def getheads(self):
170 if self.rev:
173 if self.rev:
171 return [hex(self.repo.changectx(self.rev).node())]
174 return [hex(self.repo.changectx(self.rev).node())]
172 else:
175 else:
173 return [hex(node) for node in self.repo.heads()]
176 return [hex(node) for node in self.repo.heads()]
174
177
175 def getfile(self, name, rev):
178 def getfile(self, name, rev):
176 try:
179 try:
177 return self.changectx(rev).filectx(name).data()
180 return self.changectx(rev).filectx(name).data()
178 except revlog.LookupError, err:
181 except revlog.LookupError, err:
179 raise IOError(err)
182 raise IOError(err)
180
183
181 def getmode(self, name, rev):
184 def getmode(self, name, rev):
182 m = self.changectx(rev).manifest()
185 m = self.changectx(rev).manifest()
183 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
186 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
184
187
185 def getchanges(self, rev):
188 def getchanges(self, rev):
186 ctx = self.changectx(rev)
189 ctx = self.changectx(rev)
187 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
190 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
188 changes = [(name, rev) for name in m + a + r]
191 changes = [(name, rev) for name in m + a + r]
189 changes.sort()
192 changes.sort()
190 return (changes, self.getcopies(ctx, m + a))
193 return (changes, self.getcopies(ctx, m + a))
191
194
192 def getcopies(self, ctx, files):
195 def getcopies(self, ctx, files):
193 copies = {}
196 copies = {}
194 for name in files:
197 for name in files:
195 try:
198 try:
196 copies[name] = ctx.filectx(name).renamed()[0]
199 copies[name] = ctx.filectx(name).renamed()[0]
197 except TypeError:
200 except TypeError:
198 pass
201 pass
199 return copies
202 return copies
200
203
201 def getcommit(self, rev):
204 def getcommit(self, rev):
202 ctx = self.changectx(rev)
205 ctx = self.changectx(rev)
203 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
206 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
204 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
207 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
205 desc=ctx.description(), parents=parents,
208 desc=ctx.description(), parents=parents,
206 branch=ctx.branch())
209 branch=ctx.branch())
207
210
208 def gettags(self):
211 def gettags(self):
209 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
212 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
210 return dict([(name, hex(node)) for name, node in tags])
213 return dict([(name, hex(node)) for name, node in tags])
@@ -1,1696 +1,1696 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob
18
18
19 try:
19 try:
20 set = set
20 set = set
21 frozenset = frozenset
21 frozenset = frozenset
22 except NameError:
22 except NameError:
23 from sets import Set as set, ImmutableSet as frozenset
23 from sets import Set as set, ImmutableSet as frozenset
24
24
25 try:
25 try:
26 _encoding = os.environ.get("HGENCODING")
26 _encoding = os.environ.get("HGENCODING")
27 if sys.platform == 'darwin' and not _encoding:
27 if sys.platform == 'darwin' and not _encoding:
28 # On darwin, getpreferredencoding ignores the locale environment and
28 # On darwin, getpreferredencoding ignores the locale environment and
29 # always returns mac-roman. We override this if the environment is
29 # always returns mac-roman. We override this if the environment is
30 # not C (has been customized by the user).
30 # not C (has been customized by the user).
31 locale.setlocale(locale.LC_CTYPE, '')
31 locale.setlocale(locale.LC_CTYPE, '')
32 _encoding = locale.getlocale()[1]
32 _encoding = locale.getlocale()[1]
33 if not _encoding:
33 if not _encoding:
34 _encoding = locale.getpreferredencoding() or 'ascii'
34 _encoding = locale.getpreferredencoding() or 'ascii'
35 except locale.Error:
35 except locale.Error:
36 _encoding = 'ascii'
36 _encoding = 'ascii'
37 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
37 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
38 _fallbackencoding = 'ISO-8859-1'
38 _fallbackencoding = 'ISO-8859-1'
39
39
40 def tolocal(s):
40 def tolocal(s):
41 """
41 """
42 Convert a string from internal UTF-8 to local encoding
42 Convert a string from internal UTF-8 to local encoding
43
43
44 All internal strings should be UTF-8 but some repos before the
44 All internal strings should be UTF-8 but some repos before the
45 implementation of locale support may contain latin1 or possibly
45 implementation of locale support may contain latin1 or possibly
46 other character sets. We attempt to decode everything strictly
46 other character sets. We attempt to decode everything strictly
47 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
47 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
48 replace unknown characters.
48 replace unknown characters.
49 """
49 """
50 for e in ('UTF-8', _fallbackencoding):
50 for e in ('UTF-8', _fallbackencoding):
51 try:
51 try:
52 u = s.decode(e) # attempt strict decoding
52 u = s.decode(e) # attempt strict decoding
53 return u.encode(_encoding, "replace")
53 return u.encode(_encoding, "replace")
54 except LookupError, k:
54 except LookupError, k:
55 raise Abort(_("%s, please check your locale settings") % k)
55 raise Abort(_("%s, please check your locale settings") % k)
56 except UnicodeDecodeError:
56 except UnicodeDecodeError:
57 pass
57 pass
58 u = s.decode("utf-8", "replace") # last ditch
58 u = s.decode("utf-8", "replace") # last ditch
59 return u.encode(_encoding, "replace")
59 return u.encode(_encoding, "replace")
60
60
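The decoding order described above can be restated as a self-contained sketch; tolocal_demo is illustrative only and takes the target encoding explicitly, so the example does not depend on HGENCODING or the locale:

def tolocal_demo(s, localenc):
    # Strict UTF-8, then the latin-1 fallback, then UTF-8 with "replace".
    for e in ('UTF-8', 'ISO-8859-1'):
        try:
            return s.decode(e).encode(localenc, "replace")
        except UnicodeDecodeError:
            pass
    return s.decode("utf-8", "replace").encode(localenc, "replace")

assert tolocal_demo('caf\xe9', 'ascii') == 'caf?'             # latin-1 fallback
assert tolocal_demo('caf\xc3\xa9', 'UTF-8') == 'caf\xc3\xa9'  # already UTF-8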
61 def fromlocal(s):
61 def fromlocal(s):
62 """
62 """
63 Convert a string from the local character encoding to UTF-8
63 Convert a string from the local character encoding to UTF-8
64
64
65 We attempt to decode strings using the encoding mode set by
65 We attempt to decode strings using the encoding mode set by
66 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
66 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
67 characters will cause an error message. Other modes include
67 characters will cause an error message. Other modes include
68 'replace', which replaces unknown characters with a special
68 'replace', which replaces unknown characters with a special
69 Unicode character, and 'ignore', which drops the character.
69 Unicode character, and 'ignore', which drops the character.
70 """
70 """
71 try:
71 try:
72 return s.decode(_encoding, _encodingmode).encode("utf-8")
72 return s.decode(_encoding, _encodingmode).encode("utf-8")
73 except UnicodeDecodeError, inst:
73 except UnicodeDecodeError, inst:
74 sub = s[max(0, inst.start-10):inst.start+10]
74 sub = s[max(0, inst.start-10):inst.start+10]
75 raise Abort("decoding near '%s': %s!" % (sub, inst))
75 raise Abort("decoding near '%s': %s!" % (sub, inst))
76 except LookupError, k:
76 except LookupError, k:
77 raise Abort(_("%s, please check your locale settings") % k)
77 raise Abort(_("%s, please check your locale settings") % k)
78
78
79 def locallen(s):
79 def locallen(s):
80 """Find the length in characters of a local string"""
80 """Find the length in characters of a local string"""
81 return len(s.decode(_encoding, "replace"))
81 return len(s.decode(_encoding, "replace"))
82
82
83 def localsub(s, a, b=None):
83 def localsub(s, a, b=None):
84 try:
84 try:
85 u = s.decode(_encoding, _encodingmode)
85 u = s.decode(_encoding, _encodingmode)
86 if b is not None:
86 if b is not None:
87 u = u[a:b]
87 u = u[a:b]
88 else:
88 else:
89 u = u[:a]
89 u = u[:a]
90 return u.encode(_encoding, _encodingmode)
90 return u.encode(_encoding, _encodingmode)
91 except UnicodeDecodeError, inst:
91 except UnicodeDecodeError, inst:
92 sub = s[max(0, inst.start-10):inst.start+10]
92 sub = s[max(0, inst.start-10):inst.start+10]
93 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
93 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
94
94
95 # used by parsedate
95 # used by parsedate
96 defaultdateformats = (
96 defaultdateformats = (
97 '%Y-%m-%d %H:%M:%S',
97 '%Y-%m-%d %H:%M:%S',
98 '%Y-%m-%d %I:%M:%S%p',
98 '%Y-%m-%d %I:%M:%S%p',
99 '%Y-%m-%d %H:%M',
99 '%Y-%m-%d %H:%M',
100 '%Y-%m-%d %I:%M%p',
100 '%Y-%m-%d %I:%M%p',
101 '%Y-%m-%d',
101 '%Y-%m-%d',
102 '%m-%d',
102 '%m-%d',
103 '%m/%d',
103 '%m/%d',
104 '%m/%d/%y',
104 '%m/%d/%y',
105 '%m/%d/%Y',
105 '%m/%d/%Y',
106 '%a %b %d %H:%M:%S %Y',
106 '%a %b %d %H:%M:%S %Y',
107 '%a %b %d %I:%M:%S%p %Y',
107 '%a %b %d %I:%M:%S%p %Y',
108 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
108 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
109 '%b %d %H:%M:%S %Y',
109 '%b %d %H:%M:%S %Y',
110 '%b %d %I:%M:%S%p %Y',
110 '%b %d %I:%M:%S%p %Y',
111 '%b %d %H:%M:%S',
111 '%b %d %H:%M:%S',
112 '%b %d %I:%M:%S%p',
112 '%b %d %I:%M:%S%p',
113 '%b %d %H:%M',
113 '%b %d %H:%M',
114 '%b %d %I:%M%p',
114 '%b %d %I:%M%p',
115 '%b %d %Y',
115 '%b %d %Y',
116 '%b %d',
116 '%b %d',
117 '%H:%M:%S',
117 '%H:%M:%S',
118 '%I:%M:%SP',
118 '%I:%M:%SP',
119 '%H:%M',
119 '%H:%M',
120 '%I:%M%p',
120 '%I:%M%p',
121 )
121 )
122
122
123 extendeddateformats = defaultdateformats + (
123 extendeddateformats = defaultdateformats + (
124 "%Y",
124 "%Y",
125 "%Y-%m",
125 "%Y-%m",
126 "%b",
126 "%b",
127 "%b %Y",
127 "%b %Y",
128 )
128 )
129
129
130 class SignalInterrupt(Exception):
130 class SignalInterrupt(Exception):
131 """Exception raised on SIGTERM and SIGHUP."""
131 """Exception raised on SIGTERM and SIGHUP."""
132
132
133 # differences from SafeConfigParser:
133 # differences from SafeConfigParser:
134 # - case-sensitive keys
134 # - case-sensitive keys
135 # - allows values that are not strings (this means that you may not
135 # - allows values that are not strings (this means that you may not
136 # be able to save the configuration to a file)
136 # be able to save the configuration to a file)
137 class configparser(ConfigParser.SafeConfigParser):
137 class configparser(ConfigParser.SafeConfigParser):
138 def optionxform(self, optionstr):
138 def optionxform(self, optionstr):
139 return optionstr
139 return optionstr
140
140
141 def set(self, section, option, value):
141 def set(self, section, option, value):
142 return ConfigParser.ConfigParser.set(self, section, option, value)
142 return ConfigParser.ConfigParser.set(self, section, option, value)
143
143
144 def _interpolate(self, section, option, rawval, vars):
144 def _interpolate(self, section, option, rawval, vars):
145 if not isinstance(rawval, basestring):
145 if not isinstance(rawval, basestring):
146 return rawval
146 return rawval
147 return ConfigParser.SafeConfigParser._interpolate(self, section,
147 return ConfigParser.SafeConfigParser._interpolate(self, section,
148 option, rawval, vars)
148 option, rawval, vars)
149
149
150 def cachefunc(func):
150 def cachefunc(func):
151 '''cache the result of function calls'''
151 '''cache the result of function calls'''
152 # XXX doesn't handle keyword args
152 # XXX doesn't handle keyword args
153 cache = {}
153 cache = {}
154 if func.func_code.co_argcount == 1:
154 if func.func_code.co_argcount == 1:
155 # we gain a small amount of time because
155 # we gain a small amount of time because
156 # we don't need to pack/unpack the list
156 # we don't need to pack/unpack the list
157 def f(arg):
157 def f(arg):
158 if arg not in cache:
158 if arg not in cache:
159 cache[arg] = func(arg)
159 cache[arg] = func(arg)
160 return cache[arg]
160 return cache[arg]
161 else:
161 else:
162 def f(*args):
162 def f(*args):
163 if args not in cache:
163 if args not in cache:
164 cache[args] = func(*args)
164 cache[args] = func(*args)
165 return cache[args]
165 return cache[args]
166
166
167 return f
167 return f
168
168
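A short usage sketch for cachefunc(); fib is only an example workload, and rebinding the name makes the recursive calls hit the cache:

def fib(n):
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)

fib = cachefunc(fib)          # uses the single-argument fast path above
assert fib(30) == 832040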
169 def pipefilter(s, cmd):
169 def pipefilter(s, cmd):
170 '''filter string S through command CMD, returning its output'''
170 '''filter string S through command CMD, returning its output'''
171 (pin, pout) = os.popen2(cmd, 'b')
171 (pin, pout) = os.popen2(cmd, 'b')
172 def writer():
172 def writer():
173 try:
173 try:
174 pin.write(s)
174 pin.write(s)
175 pin.close()
175 pin.close()
176 except IOError, inst:
176 except IOError, inst:
177 if inst.errno != errno.EPIPE:
177 if inst.errno != errno.EPIPE:
178 raise
178 raise
179
179
180 # we should use select instead on UNIX, but this will work on most
180 # we should use select instead on UNIX, but this will work on most
181 # systems, including Windows
181 # systems, including Windows
182 w = threading.Thread(target=writer)
182 w = threading.Thread(target=writer)
183 w.start()
183 w.start()
184 f = pout.read()
184 f = pout.read()
185 pout.close()
185 pout.close()
186 w.join()
186 w.join()
187 return f
187 return f
188
188
189 def tempfilter(s, cmd):
189 def tempfilter(s, cmd):
190 '''filter string S through a pair of temporary files with CMD.
190 '''filter string S through a pair of temporary files with CMD.
191 CMD is used as a template to create the real command to be run,
191 CMD is used as a template to create the real command to be run,
192 with the strings INFILE and OUTFILE replaced by the real names of
192 with the strings INFILE and OUTFILE replaced by the real names of
193 the temporary files generated.'''
193 the temporary files generated.'''
194 inname, outname = None, None
194 inname, outname = None, None
195 try:
195 try:
196 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
196 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
197 fp = os.fdopen(infd, 'wb')
197 fp = os.fdopen(infd, 'wb')
198 fp.write(s)
198 fp.write(s)
199 fp.close()
199 fp.close()
200 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
200 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
201 os.close(outfd)
201 os.close(outfd)
202 cmd = cmd.replace('INFILE', inname)
202 cmd = cmd.replace('INFILE', inname)
203 cmd = cmd.replace('OUTFILE', outname)
203 cmd = cmd.replace('OUTFILE', outname)
204 code = os.system(cmd)
204 code = os.system(cmd)
205 if sys.platform == 'OpenVMS' and code & 1:
205 if sys.platform == 'OpenVMS' and code & 1:
206 code = 0
206 code = 0
207 if code: raise Abort(_("command '%s' failed: %s") %
207 if code: raise Abort(_("command '%s' failed: %s") %
208 (cmd, explain_exit(code)))
208 (cmd, explain_exit(code)))
209 return open(outname, 'rb').read()
209 return open(outname, 'rb').read()
210 finally:
210 finally:
211 try:
211 try:
212 if inname: os.unlink(inname)
212 if inname: os.unlink(inname)
213 except: pass
213 except: pass
214 try:
214 try:
215 if outname: os.unlink(outname)
215 if outname: os.unlink(outname)
216 except: pass
216 except: pass
217
217
218 filtertable = {
218 filtertable = {
219 'tempfile:': tempfilter,
219 'tempfile:': tempfilter,
220 'pipe:': pipefilter,
220 'pipe:': pipefilter,
221 }
221 }
222
222
223 def filter(s, cmd):
223 def filter(s, cmd):
224 "filter a string through a command that transforms its input to its output"
224 "filter a string through a command that transforms its input to its output"
225 for name, fn in filtertable.iteritems():
225 for name, fn in filtertable.iteritems():
226 if cmd.startswith(name):
226 if cmd.startswith(name):
227 return fn(s, cmd[len(name):].lstrip())
227 return fn(s, cmd[len(name):].lstrip())
228 return pipefilter(s, cmd)
228 return pipefilter(s, cmd)
229
229
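A hedged example of the prefix dispatch in filter(); it assumes a POSIX environment where 'tr' is available:

assert filter('abc\n', 'pipe: tr a-z A-Z') == 'ABC\n'
assert filter('abc\n', 'tempfile: tr a-z A-Z <INFILE >OUTFILE') == 'ABC\n'
assert filter('abc\n', 'tr a-z A-Z') == 'ABC\n'    # no prefix: pipefilter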
230 def binary(s):
230 def binary(s):
231 """return true if a string is binary data using diff's heuristic"""
231 """return true if a string is binary data using diff's heuristic"""
232 if s and '\0' in s[:4096]:
232 if s and '\0' in s[:4096]:
233 return True
233 return True
234 return False
234 return False
235
235
236 def unique(g):
236 def unique(g):
237 """return the uniq elements of iterable g"""
237 """return the uniq elements of iterable g"""
238 seen = {}
238 seen = {}
239 l = []
239 l = []
240 for f in g:
240 for f in g:
241 if f not in seen:
241 if f not in seen:
242 seen[f] = 1
242 seen[f] = 1
243 l.append(f)
243 l.append(f)
244 return l
244 return l
245
245
246 class Abort(Exception):
246 class Abort(Exception):
247 """Raised if a command needs to print an error and exit."""
247 """Raised if a command needs to print an error and exit."""
248
248
249 class UnexpectedOutput(Abort):
249 class UnexpectedOutput(Abort):
250 """Raised to print an error with part of output and exit."""
250 """Raised to print an error with part of output and exit."""
251
251
252 def always(fn): return True
252 def always(fn): return True
253 def never(fn): return False
253 def never(fn): return False
254
254
255 def expand_glob(pats):
255 def expand_glob(pats):
256 '''On Windows, expand the implicit globs in a list of patterns'''
256 '''On Windows, expand the implicit globs in a list of patterns'''
257 if os.name != 'nt':
257 if os.name != 'nt':
258 return list(pats)
258 return list(pats)
259 ret = []
259 ret = []
260 for p in pats:
260 for p in pats:
261 kind, name = patkind(p, None)
261 kind, name = patkind(p, None)
262 if kind is None:
262 if kind is None:
263 globbed = glob.glob(name)
263 globbed = glob.glob(name)
264 if globbed:
264 if globbed:
265 ret.extend(globbed)
265 ret.extend(globbed)
266 continue
266 continue
267 # if we couldn't expand the glob, just keep it around
267 # if we couldn't expand the glob, just keep it around
268 ret.append(p)
268 ret.append(p)
269 return ret
269 return ret
270
270
271 def patkind(name, dflt_pat='glob'):
271 def patkind(name, dflt_pat='glob'):
272 """Split a string into an optional pattern kind prefix and the
272 """Split a string into an optional pattern kind prefix and the
273 actual pattern."""
273 actual pattern."""
274 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
274 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
275 if name.startswith(prefix + ':'): return name.split(':', 1)
275 if name.startswith(prefix + ':'): return name.split(':', 1)
276 return dflt_pat, name
276 return dflt_pat, name
277
277
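A few concrete patkind() results; the prefixed forms are unpacked here because the function returns a list for them and a tuple for the default case:

kind, name = patkind('re:ba[rz]')
assert (kind, name) == ('re', 'ba[rz]')
kind, name = patkind('path:foo/bar')
assert (kind, name) == ('path', 'foo/bar')
kind, name = patkind('foo*.c')                  # no prefix: default kind
assert (kind, name) == ('glob', 'foo*.c')
kind, name = patkind('foo*.c', None)
assert kind is None and name == 'foo*.c'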
278 def globre(pat, head='^', tail='$'):
278 def globre(pat, head='^', tail='$'):
279 "convert a glob pattern into a regexp"
279 "convert a glob pattern into a regexp"
280 i, n = 0, len(pat)
280 i, n = 0, len(pat)
281 res = ''
281 res = ''
282 group = False
282 group = False
283 def peek(): return i < n and pat[i]
283 def peek(): return i < n and pat[i]
284 while i < n:
284 while i < n:
285 c = pat[i]
285 c = pat[i]
286 i = i+1
286 i = i+1
287 if c == '*':
287 if c == '*':
288 if peek() == '*':
288 if peek() == '*':
289 i += 1
289 i += 1
290 res += '.*'
290 res += '.*'
291 else:
291 else:
292 res += '[^/]*'
292 res += '[^/]*'
293 elif c == '?':
293 elif c == '?':
294 res += '.'
294 res += '.'
295 elif c == '[':
295 elif c == '[':
296 j = i
296 j = i
297 if j < n and pat[j] in '!]':
297 if j < n and pat[j] in '!]':
298 j += 1
298 j += 1
299 while j < n and pat[j] != ']':
299 while j < n and pat[j] != ']':
300 j += 1
300 j += 1
301 if j >= n:
301 if j >= n:
302 res += '\\['
302 res += '\\['
303 else:
303 else:
304 stuff = pat[i:j].replace('\\','\\\\')
304 stuff = pat[i:j].replace('\\','\\\\')
305 i = j + 1
305 i = j + 1
306 if stuff[0] == '!':
306 if stuff[0] == '!':
307 stuff = '^' + stuff[1:]
307 stuff = '^' + stuff[1:]
308 elif stuff[0] == '^':
308 elif stuff[0] == '^':
309 stuff = '\\' + stuff
309 stuff = '\\' + stuff
310 res = '%s[%s]' % (res, stuff)
310 res = '%s[%s]' % (res, stuff)
311 elif c == '{':
311 elif c == '{':
312 group = True
312 group = True
313 res += '(?:'
313 res += '(?:'
314 elif c == '}' and group:
314 elif c == '}' and group:
315 res += ')'
315 res += ')'
316 group = False
316 group = False
317 elif c == ',' and group:
317 elif c == ',' and group:
318 res += '|'
318 res += '|'
319 elif c == '\\':
319 elif c == '\\':
320 p = peek()
320 p = peek()
321 if p:
321 if p:
322 i += 1
322 i += 1
323 res += re.escape(p)
323 res += re.escape(p)
324 else:
324 else:
325 res += re.escape(c)
325 res += re.escape(c)
326 else:
326 else:
327 res += re.escape(c)
327 res += re.escape(c)
328 return head + res + tail
328 return head + res + tail
329
329
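A quick check of the translation rules in globre() above: '*' stays within one path component, '**' crosses components, and '{a,b}' becomes an alternation.

import re

assert re.match(globre('*.py'), 'setup.py')
assert not re.match(globre('*.py'), 'hgext/convert/darcs.py')
assert re.match(globre('**/darcs.py'), 'hgext/convert/darcs.py')
assert re.match(globre('foo.{c,h}'), 'foo.h')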
330 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
330 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
331
331
332 def pathto(root, n1, n2):
332 def pathto(root, n1, n2):
333 '''return the relative path from one place to another.
333 '''return the relative path from one place to another.
334 root should use os.sep to separate directories
334 root should use os.sep to separate directories
335 n1 should use os.sep to separate directories
335 n1 should use os.sep to separate directories
336 n2 should use "/" to separate directories
336 n2 should use "/" to separate directories
337 returns an os.sep-separated path.
337 returns an os.sep-separated path.
338
338
339 If n1 is a relative path, it is assumed to be
339 If n1 is a relative path, it is assumed to be
340 relative to root.
340 relative to root.
341 n2 should always be relative to root.
341 n2 should always be relative to root.
342 '''
342 '''
343 if not n1: return localpath(n2)
343 if not n1: return localpath(n2)
344 if os.path.isabs(n1):
344 if os.path.isabs(n1):
345 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
345 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
346 return os.path.join(root, localpath(n2))
346 return os.path.join(root, localpath(n2))
347 n2 = '/'.join((pconvert(root), n2))
347 n2 = '/'.join((pconvert(root), n2))
348 a, b = n1.split(os.sep), n2.split('/')
348 a, b = n1.split(os.sep), n2.split('/')
349 a.reverse()
349 a.reverse()
350 b.reverse()
350 b.reverse()
351 while a and b and a[-1] == b[-1]:
351 while a and b and a[-1] == b[-1]:
352 a.pop()
352 a.pop()
353 b.pop()
353 b.pop()
354 b.reverse()
354 b.reverse()
355 return os.sep.join((['..'] * len(a)) + b)
355 return os.sep.join((['..'] * len(a)) + b)
356
356
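Some pathto() results on a POSIX system (os.sep == '/'), shown as a hedged example rather than a specification; root is a placeholder and n1/n2 are both relative to it:

assert pathto('/repo', 'foo/bar', 'foo/baz.c') == '../baz.c'
assert pathto('/repo', 'foo/bar', 'baz/qux.c') == '../../baz/qux.c'
assert pathto('/repo', '', 'baz/qux.c') == 'baz/qux.c'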
357 def canonpath(root, cwd, myname):
357 def canonpath(root, cwd, myname):
358 """return the canonical path of myname, given cwd and root"""
358 """return the canonical path of myname, given cwd and root"""
359 if root == os.sep:
359 if root == os.sep:
360 rootsep = os.sep
360 rootsep = os.sep
361 elif root.endswith(os.sep):
361 elif root.endswith(os.sep):
362 rootsep = root
362 rootsep = root
363 else:
363 else:
364 rootsep = root + os.sep
364 rootsep = root + os.sep
365 name = myname
365 name = myname
366 if not os.path.isabs(name):
366 if not os.path.isabs(name):
367 name = os.path.join(root, cwd, name)
367 name = os.path.join(root, cwd, name)
368 name = os.path.normpath(name)
368 name = os.path.normpath(name)
369 audit_path = path_auditor(root)
369 audit_path = path_auditor(root)
370 if name != rootsep and name.startswith(rootsep):
370 if name != rootsep and name.startswith(rootsep):
371 name = name[len(rootsep):]
371 name = name[len(rootsep):]
372 audit_path(name)
372 audit_path(name)
373 return pconvert(name)
373 return pconvert(name)
374 elif name == root:
374 elif name == root:
375 return ''
375 return ''
376 else:
376 else:
377 # Determine whether `name' is in the hierarchy at or beneath `root',
377 # Determine whether `name' is in the hierarchy at or beneath `root',
378 # by iterating name=dirname(name) until that causes no change (can't
378 # by iterating name=dirname(name) until that causes no change (can't
379 # check name == '/', because that doesn't work on windows). For each
379 # check name == '/', because that doesn't work on windows). For each
380 # `name', compare dev/inode numbers. If they match, the list `rel'
380 # `name', compare dev/inode numbers. If they match, the list `rel'
381 # holds the reversed list of components making up the relative file
381 # holds the reversed list of components making up the relative file
382 # name we want.
382 # name we want.
383 root_st = os.stat(root)
383 root_st = os.stat(root)
384 rel = []
384 rel = []
385 while True:
385 while True:
386 try:
386 try:
387 name_st = os.stat(name)
387 name_st = os.stat(name)
388 except OSError:
388 except OSError:
389 break
389 break
390 if samestat(name_st, root_st):
390 if samestat(name_st, root_st):
391 if not rel:
391 if not rel:
392 # name was actually the same as root (maybe a symlink)
392 # name was actually the same as root (maybe a symlink)
393 return ''
393 return ''
394 rel.reverse()
394 rel.reverse()
395 name = os.path.join(*rel)
395 name = os.path.join(*rel)
396 audit_path(name)
396 audit_path(name)
397 return pconvert(name)
397 return pconvert(name)
398 dirname, basename = os.path.split(name)
398 dirname, basename = os.path.split(name)
399 rel.append(basename)
399 rel.append(basename)
400 if dirname == name:
400 if dirname == name:
401 break
401 break
402 name = dirname
402 name = dirname
403
403
404 raise Abort('%s not under root' % myname)
404 raise Abort('%s not under root' % myname)
405
405
406 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
406 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
407 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
407 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
408
408
409 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
409 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
410 globbed=False, default=None):
410 globbed=False, default=None):
411 default = default or 'relpath'
411 default = default or 'relpath'
412 if default == 'relpath' and not globbed:
412 if default == 'relpath' and not globbed:
413 names = expand_glob(names)
413 names = expand_glob(names)
414 return _matcher(canonroot, cwd, names, inc, exc, default, src)
414 return _matcher(canonroot, cwd, names, inc, exc, default, src)
415
415
416 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
416 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
417 """build a function to match a set of file patterns
417 """build a function to match a set of file patterns
418
418
419 arguments:
419 arguments:
420 canonroot - the canonical root of the tree you're matching against
420 canonroot - the canonical root of the tree you're matching against
421 cwd - the current working directory, if relevant
421 cwd - the current working directory, if relevant
422 names - patterns to find
422 names - patterns to find
423 inc - patterns to include
423 inc - patterns to include
424 exc - patterns to exclude
424 exc - patterns to exclude
425 dflt_pat - if a pattern in names has no explicit type, assume this one
425 dflt_pat - if a pattern in names has no explicit type, assume this one
426 src - where these patterns came from (e.g. .hgignore)
426 src - where these patterns came from (e.g. .hgignore)
427
427
428 a pattern is one of:
428 a pattern is one of:
429 'glob:<glob>' - a glob relative to cwd
429 'glob:<glob>' - a glob relative to cwd
430 're:<regexp>' - a regular expression
430 're:<regexp>' - a regular expression
431 'path:<path>' - a path relative to canonroot
431 'path:<path>' - a path relative to canonroot
432 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
432 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
433 'relpath:<path>' - a path relative to cwd
433 'relpath:<path>' - a path relative to cwd
434 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
434 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
435 '<something>' - one of the cases above, selected by the dflt_pat argument
435 '<something>' - one of the cases above, selected by the dflt_pat argument
436
436
437 returns:
437 returns:
438 a 3-tuple containing
438 a 3-tuple containing
439 - list of roots (places where one should start a recursive walk of the fs);
439 - list of roots (places where one should start a recursive walk of the fs);
440 this often matches the explicit non-pattern names passed in, but also
440 this often matches the explicit non-pattern names passed in, but also
441 includes the initial part of glob: patterns that has no glob characters
441 includes the initial part of glob: patterns that has no glob characters
442 - a bool match(filename) function
442 - a bool match(filename) function
443 - a bool indicating if any patterns were passed in
443 - a bool indicating if any patterns were passed in
444 """
444 """
445
445
446 # a common case: no patterns at all
446 # a common case: no patterns at all
447 if not names and not inc and not exc:
447 if not names and not inc and not exc:
448 return [], always, False
448 return [], always, False
449
449
450 def contains_glob(name):
450 def contains_glob(name):
451 for c in name:
451 for c in name:
452 if c in _globchars: return True
452 if c in _globchars: return True
453 return False
453 return False
454
454
455 def regex(kind, name, tail):
455 def regex(kind, name, tail):
456 '''convert a pattern into a regular expression'''
456 '''convert a pattern into a regular expression'''
457 if not name:
457 if not name:
458 return ''
458 return ''
459 if kind == 're':
459 if kind == 're':
460 return name
460 return name
461 elif kind == 'path':
461 elif kind == 'path':
462 return '^' + re.escape(name) + '(?:/|$)'
462 return '^' + re.escape(name) + '(?:/|$)'
463 elif kind == 'relglob':
463 elif kind == 'relglob':
464 return globre(name, '(?:|.*/)', tail)
464 return globre(name, '(?:|.*/)', tail)
465 elif kind == 'relpath':
465 elif kind == 'relpath':
466 return re.escape(name) + '(?:/|$)'
466 return re.escape(name) + '(?:/|$)'
467 elif kind == 'relre':
467 elif kind == 'relre':
468 if name.startswith('^'):
468 if name.startswith('^'):
469 return name
469 return name
470 return '.*' + name
470 return '.*' + name
471 return globre(name, '', tail)
471 return globre(name, '', tail)
472
472
473 def matchfn(pats, tail):
473 def matchfn(pats, tail):
474 """build a matching function from a set of patterns"""
474 """build a matching function from a set of patterns"""
475 if not pats:
475 if not pats:
476 return
476 return
477 try:
477 try:
478 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
478 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
479 return re.compile(pat).match
479 return re.compile(pat).match
480 except OverflowError:
480 except OverflowError:
481 # We're using a Python with a tiny regex engine and we
481 # We're using a Python with a tiny regex engine and we
482 # made it explode, so we'll divide the pattern list in two
482 # made it explode, so we'll divide the pattern list in two
483 # until it works
483 # until it works
484 l = len(pats)
484 l = len(pats)
485 if l < 2:
485 if l < 2:
486 raise
486 raise
487 a, b = matchfn(pats[:l/2], tail), matchfn(pats[l/2:], tail)
487 a, b = matchfn(pats[:l/2], tail), matchfn(pats[l/2:], tail)
488 return lambda s: a(s) or b(s)
488 return lambda s: a(s) or b(s)
489 except re.error:
489 except re.error:
490 for k, p in pats:
490 for k, p in pats:
491 try:
491 try:
492 re.compile('(?:%s)' % regex(k, p, tail))
492 re.compile('(?:%s)' % regex(k, p, tail))
493 except re.error:
493 except re.error:
494 if src:
494 if src:
495 raise Abort("%s: invalid pattern (%s): %s" %
495 raise Abort("%s: invalid pattern (%s): %s" %
496 (src, k, p))
496 (src, k, p))
497 else:
497 else:
498 raise Abort("invalid pattern (%s): %s" % (k, p))
498 raise Abort("invalid pattern (%s): %s" % (k, p))
499 raise Abort("invalid pattern")
499 raise Abort("invalid pattern")
500
500
501 def globprefix(pat):
501 def globprefix(pat):
502 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
502 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
503 root = []
503 root = []
504 for p in pat.split('/'):
504 for p in pat.split('/'):
505 if contains_glob(p): break
505 if contains_glob(p): break
506 root.append(p)
506 root.append(p)
507 return '/'.join(root) or '.'
507 return '/'.join(root) or '.'
508
508
509 def normalizepats(names, default):
509 def normalizepats(names, default):
510 pats = []
510 pats = []
511 roots = []
511 roots = []
512 anypats = False
512 anypats = False
513 for kind, name in [patkind(p, default) for p in names]:
513 for kind, name in [patkind(p, default) for p in names]:
514 if kind in ('glob', 'relpath'):
514 if kind in ('glob', 'relpath'):
515 name = canonpath(canonroot, cwd, name)
515 name = canonpath(canonroot, cwd, name)
516 elif kind in ('relglob', 'path'):
516 elif kind in ('relglob', 'path'):
517 name = normpath(name)
517 name = normpath(name)
518
518
519 pats.append((kind, name))
519 pats.append((kind, name))
520
520
521 if kind in ('glob', 're', 'relglob', 'relre'):
521 if kind in ('glob', 're', 'relglob', 'relre'):
522 anypats = True
522 anypats = True
523
523
524 if kind == 'glob':
524 if kind == 'glob':
525 root = globprefix(name)
525 root = globprefix(name)
526 roots.append(root)
526 roots.append(root)
527 elif kind in ('relpath', 'path'):
527 elif kind in ('relpath', 'path'):
528 roots.append(name or '.')
528 roots.append(name or '.')
529 elif kind == 'relglob':
529 elif kind == 'relglob':
530 roots.append('.')
530 roots.append('.')
531 return roots, pats, anypats
531 return roots, pats, anypats
532
532
533 roots, pats, anypats = normalizepats(names, dflt_pat)
533 roots, pats, anypats = normalizepats(names, dflt_pat)
534
534
535 patmatch = matchfn(pats, '$') or always
535 patmatch = matchfn(pats, '$') or always
536 incmatch = always
536 incmatch = always
537 if inc:
537 if inc:
538 dummy, inckinds, dummy = normalizepats(inc, 'glob')
538 dummy, inckinds, dummy = normalizepats(inc, 'glob')
539 incmatch = matchfn(inckinds, '(?:/|$)')
539 incmatch = matchfn(inckinds, '(?:/|$)')
540 excmatch = lambda fn: False
540 excmatch = lambda fn: False
541 if exc:
541 if exc:
542 dummy, exckinds, dummy = normalizepats(exc, 'glob')
542 dummy, exckinds, dummy = normalizepats(exc, 'glob')
543 excmatch = matchfn(exckinds, '(?:/|$)')
543 excmatch = matchfn(exckinds, '(?:/|$)')
544
544
545 if not names and inc and not exc:
545 if not names and inc and not exc:
546 # common case: hgignore patterns
546 # common case: hgignore patterns
547 match = incmatch
547 match = incmatch
548 else:
548 else:
549 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
549 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
550
550
551 return (roots, match, (inc or exc or anypats) and True)
551 return (roots, match, (inc or exc or anypats) and True)
552
552
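A usage sketch for the glob-defaulting matcher() wrapper above; '/repo' is a placeholder root (the audit in canonpath() ignores path components that do not exist), and the results follow the pattern rules documented in _matcher():

roots, match, anypats = matcher('/repo', names=['src/*.c', 're:.*\\.h$'])
assert roots == ['src']                # non-glob prefix of 'src/*.c'
assert anypats                         # glob/re patterns were supplied
assert match('src/main.c')
assert not match('src/sub/main.c')     # '*' does not cross '/'
assert match('include/defs.h')         # matched by the re: pattern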
553 _hgexecutable = None
553 _hgexecutable = None
554
554
555 def hgexecutable():
555 def hgexecutable():
556 """return location of the 'hg' executable.
556 """return location of the 'hg' executable.
557
557
558 Defaults to $HG or 'hg' in the search path.
558 Defaults to $HG or 'hg' in the search path.
559 """
559 """
560 if _hgexecutable is None:
560 if _hgexecutable is None:
561 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
561 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
562 return _hgexecutable
562 return _hgexecutable
563
563
564 def set_hgexecutable(path):
564 def set_hgexecutable(path):
565 """set location of the 'hg' executable"""
565 """set location of the 'hg' executable"""
566 global _hgexecutable
566 global _hgexecutable
567 _hgexecutable = path
567 _hgexecutable = path
568
568
569 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
569 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
570 '''enhanced shell command execution.
570 '''enhanced shell command execution.
571 run with environment maybe modified, maybe in different dir.
571 run with environment maybe modified, maybe in different dir.
572
572
573 if command fails and onerr is None, return status. if ui object,
573 if command fails and onerr is None, return status. if ui object,
574 print error message and return status, else raise onerr object as
574 print error message and return status, else raise onerr object as
575 exception.'''
575 exception.'''
576 def py2shell(val):
576 def py2shell(val):
577 'convert python object into string that is useful to shell'
577 'convert python object into string that is useful to shell'
578 if val in (None, False):
578 if val in (None, False):
579 return '0'
579 return '0'
580 if val == True:
580 if val == True:
581 return '1'
581 return '1'
582 return str(val)
582 return str(val)
583 oldenv = {}
583 oldenv = {}
584 for k in environ:
584 for k in environ:
585 oldenv[k] = os.environ.get(k)
585 oldenv[k] = os.environ.get(k)
586 if cwd is not None:
586 if cwd is not None:
587 oldcwd = os.getcwd()
587 oldcwd = os.getcwd()
588 origcmd = cmd
588 origcmd = cmd
589 if os.name == 'nt':
589 if os.name == 'nt':
590 cmd = '"%s"' % cmd
590 cmd = '"%s"' % cmd
591 try:
591 try:
592 for k, v in environ.iteritems():
592 for k, v in environ.iteritems():
593 os.environ[k] = py2shell(v)
593 os.environ[k] = py2shell(v)
594 os.environ['HG'] = hgexecutable()
594 os.environ['HG'] = hgexecutable()
595 if cwd is not None and oldcwd != cwd:
595 if cwd is not None and oldcwd != cwd:
596 os.chdir(cwd)
596 os.chdir(cwd)
597 rc = os.system(cmd)
597 rc = os.system(cmd)
598 if sys.platform == 'OpenVMS' and rc & 1:
598 if sys.platform == 'OpenVMS' and rc & 1:
599 rc = 0
599 rc = 0
600 if rc and onerr:
600 if rc and onerr:
601 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
601 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
602 explain_exit(rc)[0])
602 explain_exit(rc)[0])
603 if errprefix:
603 if errprefix:
604 errmsg = '%s: %s' % (errprefix, errmsg)
604 errmsg = '%s: %s' % (errprefix, errmsg)
605 try:
605 try:
606 onerr.warn(errmsg + '\n')
606 onerr.warn(errmsg + '\n')
607 except AttributeError:
607 except AttributeError:
608 raise onerr(errmsg)
608 raise onerr(errmsg)
609 return rc
609 return rc
610 finally:
610 finally:
611 for k, v in oldenv.iteritems():
611 for k, v in oldenv.iteritems():
612 if v is None:
612 if v is None:
613 del os.environ[k]
613 del os.environ[k]
614 else:
614 else:
615 os.environ[k] = v
615 os.environ[k] = v
616 if cwd is not None and oldcwd != cwd:
616 if cwd is not None and oldcwd != cwd:
617 os.chdir(oldcwd)
617 os.chdir(oldcwd)
618
618
619 # os.path.lexists is not available on python2.3
619 # os.path.lexists is not available on python2.3
620 def lexists(filename):
620 def lexists(filename):
621 "test whether a file with this name exists. does not follow symlinks"
621 "test whether a file with this name exists. does not follow symlinks"
622 try:
622 try:
623 os.lstat(filename)
623 os.lstat(filename)
624 except:
624 except:
625 return False
625 return False
626 return True
626 return True
627
627
628 def rename(src, dst):
628 def rename(src, dst):
629 """forcibly rename a file"""
629 """forcibly rename a file"""
630 try:
630 try:
631 os.rename(src, dst)
631 os.rename(src, dst)
632 except OSError, err: # FIXME: check err (EEXIST ?)
632 except OSError, err: # FIXME: check err (EEXIST ?)
633 # on windows, rename to existing file is not allowed, so we
633 # on windows, rename to existing file is not allowed, so we
634 # must delete destination first. but if file is open, unlink
634 # must delete destination first. but if file is open, unlink
635 # schedules it for delete but does not delete it. rename
635 # schedules it for delete but does not delete it. rename
636 # happens immediately even for open files, so we create
636 # happens immediately even for open files, so we create
637 # temporary file, delete it, rename destination to that name,
637 # temporary file, delete it, rename destination to that name,
638 # then delete that. then rename is safe to do.
638 # then delete that. then rename is safe to do.
639 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
639 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
640 os.close(fd)
640 os.close(fd)
641 os.unlink(temp)
641 os.unlink(temp)
642 os.rename(dst, temp)
642 os.rename(dst, temp)
643 os.unlink(temp)
643 os.unlink(temp)
644 os.rename(src, dst)
644 os.rename(src, dst)
645
645
646 def unlink(f):
646 def unlink(f):
647 """unlink and remove the directory if it is empty"""
647 """unlink and remove the directory if it is empty"""
648 os.unlink(f)
648 os.unlink(f)
649 # try removing directories that might now be empty
649 # try removing directories that might now be empty
650 try:
650 try:
651 os.removedirs(os.path.dirname(f))
651 os.removedirs(os.path.dirname(f))
652 except OSError:
652 except OSError:
653 pass
653 pass
654
654
655 def copyfile(src, dest):
655 def copyfile(src, dest):
656 "copy a file, preserving mode"
656 "copy a file, preserving mode"
657 if os.path.islink(src):
657 if os.path.islink(src):
658 try:
658 try:
659 os.unlink(dest)
659 os.unlink(dest)
660 except:
660 except:
661 pass
661 pass
662 os.symlink(os.readlink(src), dest)
662 os.symlink(os.readlink(src), dest)
663 else:
663 else:
664 try:
664 try:
665 shutil.copyfile(src, dest)
665 shutil.copyfile(src, dest)
666 shutil.copymode(src, dest)
666 shutil.copymode(src, dest)
667 except shutil.Error, inst:
667 except shutil.Error, inst:
668 raise Abort(str(inst))
668 raise Abort(str(inst))
669
669
670 def copyfiles(src, dst, hardlink=None):
670 def copyfiles(src, dst, hardlink=None):
671 """Copy a directory tree using hardlinks if possible"""
671 """Copy a directory tree using hardlinks if possible"""
672
672
673 if hardlink is None:
673 if hardlink is None:
674 hardlink = (os.stat(src).st_dev ==
674 hardlink = (os.stat(src).st_dev ==
675 os.stat(os.path.dirname(dst)).st_dev)
675 os.stat(os.path.dirname(dst)).st_dev)
676
676
677 if os.path.isdir(src):
677 if os.path.isdir(src):
678 os.mkdir(dst)
678 os.mkdir(dst)
679 for name in os.listdir(src):
679 for name in os.listdir(src):
680 srcname = os.path.join(src, name)
680 srcname = os.path.join(src, name)
681 dstname = os.path.join(dst, name)
681 dstname = os.path.join(dst, name)
682 copyfiles(srcname, dstname, hardlink)
682 copyfiles(srcname, dstname, hardlink)
683 else:
683 else:
684 if hardlink:
684 if hardlink:
685 try:
685 try:
686 os_link(src, dst)
686 os_link(src, dst)
687 except (IOError, OSError):
687 except (IOError, OSError):
688 hardlink = False
688 hardlink = False
689 shutil.copy(src, dst)
689 shutil.copy(src, dst)
690 else:
690 else:
691 shutil.copy(src, dst)
691 shutil.copy(src, dst)
692
692
693 class path_auditor(object):
693 class path_auditor(object):
694 '''ensure that a filesystem path contains no banned components.
694 '''ensure that a filesystem path contains no banned components.
695 the following properties of a path are checked:
695 the following properties of a path are checked:
696
696
697 - under top-level .hg
697 - under top-level .hg
698 - starts at the root of a windows drive
698 - starts at the root of a windows drive
699 - contains ".."
699 - contains ".."
700 - traverses a symlink (e.g. a/symlink_here/b)
700 - traverses a symlink (e.g. a/symlink_here/b)
701 - inside a nested repository'''
701 - inside a nested repository'''
702
702
703 def __init__(self, root):
703 def __init__(self, root):
704 self.audited = set()
704 self.audited = set()
705 self.auditeddir = set()
705 self.auditeddir = set()
706 self.root = root
706 self.root = root
707
707
708 def __call__(self, path):
708 def __call__(self, path):
709 if path in self.audited:
709 if path in self.audited:
710 return
710 return
711 normpath = os.path.normcase(path)
711 normpath = os.path.normcase(path)
712 parts = normpath.split(os.sep)
712 parts = normpath.split(os.sep)
713 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
713 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
714 or os.pardir in parts):
714 or os.pardir in parts):
715 raise Abort(_("path contains illegal component: %s") % path)
715 raise Abort(_("path contains illegal component: %s") % path)
716 def check(prefix):
716 def check(prefix):
717 curpath = os.path.join(self.root, prefix)
717 curpath = os.path.join(self.root, prefix)
718 try:
718 try:
719 st = os.lstat(curpath)
719 st = os.lstat(curpath)
720 except OSError, err:
720 except OSError, err:
721 # EINVAL can be raised as invalid path syntax under win32.
721 # EINVAL can be raised as invalid path syntax under win32.
722 # They must be ignored so that patterns can be checked too.
722 # They must be ignored so that patterns can be checked too.
723 if err.errno not in (errno.ENOENT, errno.EINVAL):
723 if err.errno not in (errno.ENOENT, errno.EINVAL):
724 raise
724 raise
725 else:
725 else:
726 if stat.S_ISLNK(st.st_mode):
726 if stat.S_ISLNK(st.st_mode):
727 raise Abort(_('path %r traverses symbolic link %r') %
727 raise Abort(_('path %r traverses symbolic link %r') %
728 (path, prefix))
728 (path, prefix))
729 elif (stat.S_ISDIR(st.st_mode) and
729 elif (stat.S_ISDIR(st.st_mode) and
730 os.path.isdir(os.path.join(curpath, '.hg'))):
730 os.path.isdir(os.path.join(curpath, '.hg'))):
731 raise Abort(_('path %r is inside repo %r') %
731 raise Abort(_('path %r is inside repo %r') %
732 (path, prefix))
732 (path, prefix))
733
733
734 prefixes = []
734 prefixes = []
735 for c in strutil.rfindall(normpath, os.sep):
735 for c in strutil.rfindall(normpath, os.sep):
736 prefix = normpath[:c]
736 prefix = normpath[:c]
737 if prefix in self.auditeddir:
737 if prefix in self.auditeddir:
738 break
738 break
739 check(prefix)
739 check(prefix)
740 prefixes.append(prefix)
740 prefixes.append(prefix)
741
741
742 self.audited.add(path)
742 self.audited.add(path)
743 # only add prefixes to the cache after checking everything: we don't
743 # only add prefixes to the cache after checking everything: we don't
744 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
744 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
745 self.auditeddir.update(prefixes)
745 self.auditeddir.update(prefixes)
746
746
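A usage sketch for path_auditor, assuming this file is importable as mercurial.util and that Abort is the exception class defined earlier in the module; an auditor rooted at a repository rejects paths that escape it, touch .hg, or wander into a nested repository, and caches directories it has already checked:

from mercurial import util

audit = util.path_auditor('/home/me/repo')   # example root
audit('src/module.py')            # passes and is cached
try:
    audit('../outside.txt')       # contains os.pardir
except util.Abort, inst:
    print 'rejected:', inst
try:
    audit('.hg/hgrc')             # under the top-level .hg
except util.Abort, inst:
    print 'rejected:', inst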
747 def _makelock_file(info, pathname):
747 def _makelock_file(info, pathname):
748 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
748 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
749 os.write(ld, info)
749 os.write(ld, info)
750 os.close(ld)
750 os.close(ld)
751
751
752 def _readlock_file(pathname):
752 def _readlock_file(pathname):
753 return posixfile(pathname).read()
753 return posixfile(pathname).read()
754
754
755 def nlinks(pathname):
755 def nlinks(pathname):
756 """Return number of hardlinks for the given file."""
756 """Return number of hardlinks for the given file."""
757 return os.lstat(pathname).st_nlink
757 return os.lstat(pathname).st_nlink
758
758
759 if hasattr(os, 'link'):
759 if hasattr(os, 'link'):
760 os_link = os.link
760 os_link = os.link
761 else:
761 else:
762 def os_link(src, dst):
762 def os_link(src, dst):
763 raise OSError(0, _("Hardlinks not supported"))
763 raise OSError(0, _("Hardlinks not supported"))
764
764
765 def fstat(fp):
765 def fstat(fp):
766 '''stat file object that may not have fileno method.'''
766 '''stat file object that may not have fileno method.'''
767 try:
767 try:
768 return os.fstat(fp.fileno())
768 return os.fstat(fp.fileno())
769 except AttributeError:
769 except AttributeError:
770 return os.stat(fp.name)
770 return os.stat(fp.name)
771
771
772 posixfile = file
772 posixfile = file
773
773
774 def is_win_9x():
774 def is_win_9x():
775 '''return true if run on windows 95, 98 or me.'''
775 '''return true if run on windows 95, 98 or me.'''
776 try:
776 try:
777 return sys.getwindowsversion()[3] == 1
777 return sys.getwindowsversion()[3] == 1
778 except AttributeError:
778 except AttributeError:
779 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
779 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
780
780
781 getuser_fallback = None
781 getuser_fallback = None
782
782
783 def getuser():
783 def getuser():
784 '''return name of current user'''
784 '''return name of current user'''
785 try:
785 try:
786 return getpass.getuser()
786 return getpass.getuser()
787 except ImportError:
787 except ImportError:
788 # import of pwd will fail on windows - try fallback
788 # import of pwd will fail on windows - try fallback
789 if getuser_fallback:
789 if getuser_fallback:
790 return getuser_fallback()
790 return getuser_fallback()
791 # raised if win32api not available
791 # raised if win32api not available
792 raise Abort(_('user name not available - set USERNAME '
792 raise Abort(_('user name not available - set USERNAME '
793 'environment variable'))
793 'environment variable'))
794
794
795 def username(uid=None):
795 def username(uid=None):
796 """Return the name of the user with the given uid.
796 """Return the name of the user with the given uid.
797
797
798 If uid is None, return the name of the current user."""
798 If uid is None, return the name of the current user."""
799 try:
799 try:
800 import pwd
800 import pwd
801 if uid is None:
801 if uid is None:
802 uid = os.getuid()
802 uid = os.getuid()
803 try:
803 try:
804 return pwd.getpwuid(uid)[0]
804 return pwd.getpwuid(uid)[0]
805 except KeyError:
805 except KeyError:
806 return str(uid)
806 return str(uid)
807 except ImportError:
807 except ImportError:
808 return None
808 return None
809
809
810 def groupname(gid=None):
810 def groupname(gid=None):
811 """Return the name of the group with the given gid.
811 """Return the name of the group with the given gid.
812
812
813 If gid is None, return the name of the current group."""
813 If gid is None, return the name of the current group."""
814 try:
814 try:
815 import grp
815 import grp
816 if gid is None:
816 if gid is None:
817 gid = os.getgid()
817 gid = os.getgid()
818 try:
818 try:
819 return grp.getgrgid(gid)[0]
819 return grp.getgrgid(gid)[0]
820 except KeyError:
820 except KeyError:
821 return str(gid)
821 return str(gid)
822 except ImportError:
822 except ImportError:
823 return None
823 return None
824
824
825 # File system features
825 # File system features
826
826
827 def checkfolding(path):
827 def checkfolding(path):
828 """
828 """
829 Check whether the given path is on a case-sensitive filesystem
829 Check whether the given path is on a case-sensitive filesystem
830
830
831 Requires a path (like /foo/.hg) ending with a foldable final
831 Requires a path (like /foo/.hg) ending with a foldable final
832 directory component.
832 directory component.
833 """
833 """
834 s1 = os.stat(path)
834 s1 = os.stat(path)
835 d, b = os.path.split(path)
835 d, b = os.path.split(path)
836 p2 = os.path.join(d, b.upper())
836 p2 = os.path.join(d, b.upper())
837 if path == p2:
837 if path == p2:
838 p2 = os.path.join(d, b.lower())
838 p2 = os.path.join(d, b.lower())
839 try:
839 try:
840 s2 = os.stat(p2)
840 s2 = os.stat(p2)
841 if s2 == s1:
841 if s2 == s1:
842 return False
842 return False
843 return True
843 return True
844 except:
844 except:
845 return True
845 return True
846
846
847 def checkexec(path):
847 def checkexec(path):
848 """
848 """
849 Check whether the given path is on a filesystem with UNIX-like exec flags
849 Check whether the given path is on a filesystem with UNIX-like exec flags
850
850
851 Requires a directory (like /foo/.hg)
851 Requires a directory (like /foo/.hg)
852 """
852 """
853 try:
853 try:
854 fh, fn = tempfile.mkstemp("", "", path)
854 fh, fn = tempfile.mkstemp("", "", path)
855 os.close(fh)
855 os.close(fh)
856 m = os.stat(fn).st_mode
856 m = os.stat(fn).st_mode
857 os.chmod(fn, m ^ 0111)
857 os.chmod(fn, m ^ 0111)
858 r = (os.stat(fn).st_mode != m)
858 r = (os.stat(fn).st_mode != m)
859 os.unlink(fn)
859 os.unlink(fn)
860 except (IOError,OSError):
860 except (IOError,OSError):
861 # we don't care, the user probably won't be able to commit anyway
861 # we don't care, the user probably won't be able to commit anyway
862 return False
862 return False
863 return r
863 return r
864
864
865 def execfunc(path, fallback):
865 def execfunc(path, fallback):
866 '''return an is_exec() function with default to fallback'''
866 '''return an is_exec() function with default to fallback'''
867 if checkexec(path):
867 if checkexec(path):
868 return lambda x: is_exec(os.path.join(path, x))
868 return lambda x: is_exec(os.path.join(path, x))
869 return fallback
869 return fallback
870
870
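checkexec() probes the filesystem once by flipping the exec bit on a scratch file and seeing whether the change sticks, and execfunc() then hands back either a real is_exec() closure or the caller's fallback. A rough usage sketch, assuming the mercurial.util import path and an example repository root:

from mercurial import util

fallback = lambda f: False                 # e.g. trust previously recorded flags
is_exec = util.execfunc('/home/me/repo', fallback)
# on a POSIX filesystem this stats the real file (which must exist);
# on VFAT and friends the call is simply the fallback
print is_exec('bin/script.sh')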
871 def checklink(path):
871 def checklink(path):
872 """check whether the given path is on a symlink-capable filesystem"""
872 """check whether the given path is on a symlink-capable filesystem"""
873 # mktemp is not racy because symlink creation will fail if the
873 # mktemp is not racy because symlink creation will fail if the
874 # file already exists
874 # file already exists
875 name = tempfile.mktemp(dir=path)
875 name = tempfile.mktemp(dir=path)
876 try:
876 try:
877 os.symlink(".", name)
877 os.symlink(".", name)
878 os.unlink(name)
878 os.unlink(name)
879 return True
879 return True
880 except (OSError, AttributeError):
880 except (OSError, AttributeError):
881 return False
881 return False
882
882
883 def linkfunc(path, fallback):
883 def linkfunc(path, fallback):
884 '''return an is_link() function with default to fallback'''
884 '''return an is_link() function with default to fallback'''
885 if checklink(path):
885 if checklink(path):
886 return lambda x: os.path.islink(os.path.join(path, x))
886 return lambda x: os.path.islink(os.path.join(path, x))
887 return fallback
887 return fallback
888
888
889 _umask = os.umask(0)
889 _umask = os.umask(0)
890 os.umask(_umask)
890 os.umask(_umask)
891
891
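The two lines above read the process umask the only way Python exposes it: os.umask() installs a new mask and returns the old one, so the mask is set to 0 and immediately restored. The cached value is later used to derive default file modes, for example:

import os

_umask = os.umask(0)        # returns the previous mask...
os.umask(_umask)            # ...which is put straight back

# what mode would a newly created file get by default?
print oct(0666 & ~_umask)   # 0644 with the common umask 022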
892 def needbinarypatch():
892 def needbinarypatch():
893 """return True if patches should be applied in binary mode by default."""
893 """return True if patches should be applied in binary mode by default."""
894 return os.name == 'nt'
894 return os.name == 'nt'
895
895
896 # Platform specific variants
896 # Platform specific variants
897 if os.name == 'nt':
897 if os.name == 'nt':
898 import msvcrt
898 import msvcrt
899 nulldev = 'NUL:'
899 nulldev = 'NUL:'
900
900
901 class winstdout:
901 class winstdout:
902 '''stdout on windows misbehaves if sent through a pipe'''
902 '''stdout on windows misbehaves if sent through a pipe'''
903
903
904 def __init__(self, fp):
904 def __init__(self, fp):
905 self.fp = fp
905 self.fp = fp
906
906
907 def __getattr__(self, key):
907 def __getattr__(self, key):
908 return getattr(self.fp, key)
908 return getattr(self.fp, key)
909
909
910 def close(self):
910 def close(self):
911 try:
911 try:
912 self.fp.close()
912 self.fp.close()
913 except: pass
913 except: pass
914
914
915 def write(self, s):
915 def write(self, s):
916 try:
916 try:
917 return self.fp.write(s)
917 return self.fp.write(s)
918 except IOError, inst:
918 except IOError, inst:
919 if inst.errno != 0: raise
919 if inst.errno != 0: raise
920 self.close()
920 self.close()
921 raise IOError(errno.EPIPE, 'Broken pipe')
921 raise IOError(errno.EPIPE, 'Broken pipe')
922
922
923 def flush(self):
923 def flush(self):
924 try:
924 try:
925 return self.fp.flush()
925 return self.fp.flush()
926 except IOError, inst:
926 except IOError, inst:
927 if inst.errno != errno.EINVAL: raise
927 if inst.errno != errno.EINVAL: raise
928 self.close()
928 self.close()
929 raise IOError(errno.EPIPE, 'Broken pipe')
929 raise IOError(errno.EPIPE, 'Broken pipe')
930
930
931 sys.stdout = winstdout(sys.stdout)
931 sys.stdout = winstdout(sys.stdout)
932
932
933 def system_rcpath():
933 def system_rcpath():
934 try:
934 try:
935 return system_rcpath_win32()
935 return system_rcpath_win32()
936 except:
936 except:
937 return [r'c:\mercurial\mercurial.ini']
937 return [r'c:\mercurial\mercurial.ini']
938
938
939 def user_rcpath():
939 def user_rcpath():
940 '''return os-specific hgrc search path to the user dir'''
940 '''return os-specific hgrc search path to the user dir'''
941 try:
941 try:
942 userrc = user_rcpath_win32()
942 userrc = user_rcpath_win32()
943 except:
943 except:
944 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
944 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
945 path = [userrc]
945 path = [userrc]
946 userprofile = os.environ.get('USERPROFILE')
946 userprofile = os.environ.get('USERPROFILE')
947 if userprofile:
947 if userprofile:
948 path.append(os.path.join(userprofile, 'mercurial.ini'))
948 path.append(os.path.join(userprofile, 'mercurial.ini'))
949 return path
949 return path
950
950
951 def parse_patch_output(output_line):
951 def parse_patch_output(output_line):
952 """parses the output produced by patch and returns the file name"""
952 """parses the output produced by patch and returns the file name"""
953 pf = output_line[14:]
953 pf = output_line[14:]
954 if pf[0] == '`':
954 if pf[0] == '`':
955 pf = pf[1:-1] # Remove the quotes
955 pf = pf[1:-1] # Remove the quotes
956 return pf
956 return pf
957
957
958 def testpid(pid):
958 def testpid(pid):
959 '''return False if pid dead, True if running or not known'''
959 '''return False if pid dead, True if running or not known'''
960 return True
960 return True
961
961
962 def set_exec(f, mode):
962 def set_exec(f, mode):
963 pass
963 pass
964
964
965 def set_link(f, mode):
965 def set_link(f, mode):
966 pass
966 pass
967
967
968 def set_binary(fd):
968 def set_binary(fd):
969 msvcrt.setmode(fd.fileno(), os.O_BINARY)
969 msvcrt.setmode(fd.fileno(), os.O_BINARY)
970
970
971 def pconvert(path):
971 def pconvert(path):
972 return path.replace("\\", "/")
972 return path.replace("\\", "/")
973
973
974 def localpath(path):
974 def localpath(path):
975 return path.replace('/', '\\')
975 return path.replace('/', '\\')
976
976
977 def normpath(path):
977 def normpath(path):
978 return pconvert(os.path.normpath(path))
978 return pconvert(os.path.normpath(path))
979
979
980 makelock = _makelock_file
980 makelock = _makelock_file
981 readlock = _readlock_file
981 readlock = _readlock_file
982
982
983 def samestat(s1, s2):
983 def samestat(s1, s2):
984 return False
984 return False
985
985
986 # A sequence of backslashes is special iff it precedes a double quote:
986 # A sequence of backslashes is special iff it precedes a double quote:
987 # - if there's an even number of backslashes, the double quote is not
987 # - if there's an even number of backslashes, the double quote is not
988 # quoted (i.e. it ends the quoted region)
988 # quoted (i.e. it ends the quoted region)
989 # - if there's an odd number of backslashes, the double quote is quoted
989 # - if there's an odd number of backslashes, the double quote is quoted
990 # - in both cases, every pair of backslashes is unquoted into a single
990 # - in both cases, every pair of backslashes is unquoted into a single
991 # backslash
991 # backslash
992 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
992 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
993 # So, to quote a string, we must surround it in double quotes, double
993 # So, to quote a string, we must surround it in double quotes, double
994 # the number of backslashes that precede double quotes and add another
994 # the number of backslashes that precede double quotes and add another
995 # backslash before every double quote (being careful with the double
995 # backslash before every double quote (being careful with the double
996 # quote we've appended to the end)
996 # quote we've appended to the end)
997 _quotere = None
997 _quotere = None
998 def shellquote(s):
998 def shellquote(s):
999 global _quotere
999 global _quotere
1000 if _quotere is None:
1000 if _quotere is None:
1001 _quotere = re.compile(r'(\\*)("|\\$)')
1001 _quotere = re.compile(r'(\\*)("|\\$)')
1002 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1002 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1003
1003
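The comment block above describes the MSVCRT rules: only backslashes that sit directly in front of a double quote (or at the end of the argument, just before the closing quote we append) are special, so exactly those are doubled and the quote itself is escaped. A self-contained sketch using the same regex, with a few expected results (winquote is a hypothetical name for the Windows branch of shellquote above):

import re

_quotere = re.compile(r'(\\*)("|\\$)')
def winquote(s):
    return '"%s"' % _quotere.sub(r'\1\1\\\2', s)

print winquote('simple')          # "simple"
print winquote('say "hi"')        # "say \"hi\""
print winquote('C:\\tmp\\')       # "C:\tmp\\"  (trailing backslash doubled)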
1004 def quotecommand(cmd):
1004 def quotecommand(cmd):
1005 """Build a command string suitable for os.popen* calls."""
1005 """Build a command string suitable for os.popen* calls."""
1006 # The extra quotes are needed because popen* runs the command
1006 # The extra quotes are needed because popen* runs the command
1007 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1007 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1008 return '"' + cmd + '"'
1008 return '"' + cmd + '"'
1009
1009
1010 def explain_exit(code):
1010 def explain_exit(code):
1011 return _("exited with status %d") % code, code
1011 return _("exited with status %d") % code, code
1012
1012
1013 # if you change this stub into a real check, please try to implement the
1013 # if you change this stub into a real check, please try to implement the
1014 # username and groupname functions above, too.
1014 # username and groupname functions above, too.
1015 def isowner(fp, st=None):
1015 def isowner(fp, st=None):
1016 return True
1016 return True
1017
1017
1018 def find_in_path(name, path, default=None):
1018 def find_in_path(name, path, default=None):
1019 '''find name in search path. path can be string (will be split
1019 '''find name in search path. path can be string (will be split
1020 with os.pathsep), or iterable thing that returns strings. if name
1020 with os.pathsep), or iterable thing that returns strings. if name
1021 found, return path to name. else return default. name is looked up
1021 found, return path to name. else return default. name is looked up
1022 using cmd.exe rules, using PATHEXT.'''
1022 using cmd.exe rules, using PATHEXT.'''
1023 if isinstance(path, str):
1023 if isinstance(path, str):
1024 path = path.split(os.pathsep)
1024 path = path.split(os.pathsep)
1025
1025
1026 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1026 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1027 pathext = pathext.lower().split(os.pathsep)
1027 pathext = pathext.lower().split(os.pathsep)
1028 isexec = os.path.splitext(name)[1].lower() in pathext
1028 isexec = os.path.splitext(name)[1].lower() in pathext
1029
1029
1030 for p in path:
1030 for p in path:
1031 p_name = os.path.join(p, name)
1031 p_name = os.path.join(p, name)
1032
1032
1033 if isexec and os.path.exists(p_name):
1033 if isexec and os.path.exists(p_name):
1034 return p_name
1034 return p_name
1035
1035
1036 for ext in pathext:
1036 for ext in pathext:
1037 p_name_ext = p_name + ext
1037 p_name_ext = p_name + ext
1038 if os.path.exists(p_name_ext):
1038 if os.path.exists(p_name_ext):
1039 return p_name_ext
1039 return p_name_ext
1040 return default
1040 return default
1041
1041
1042 def set_signal_handler():
1042 def set_signal_handler():
1043 try:
1043 try:
1044 set_signal_handler_win32()
1044 set_signal_handler_win32()
1045 except NameError:
1045 except NameError:
1046 pass
1046 pass
1047
1047
1048 try:
1048 try:
1049 # override functions with win32 versions if possible
1049 # override functions with win32 versions if possible
1050 from util_win32 import *
1050 from util_win32 import *
1051 if not is_win_9x():
1051 if not is_win_9x():
1052 posixfile = posixfile_nt
1052 posixfile = posixfile_nt
1053 except ImportError:
1053 except ImportError:
1054 pass
1054 pass
1055
1055
1056 else:
1056 else:
1057 nulldev = '/dev/null'
1057 nulldev = '/dev/null'
1058
1058
1059 def rcfiles(path):
1059 def rcfiles(path):
1060 rcs = [os.path.join(path, 'hgrc')]
1060 rcs = [os.path.join(path, 'hgrc')]
1061 rcdir = os.path.join(path, 'hgrc.d')
1061 rcdir = os.path.join(path, 'hgrc.d')
1062 try:
1062 try:
1063 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
1063 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
1064 if f.endswith(".rc")])
1064 if f.endswith(".rc")])
1065 except OSError:
1065 except OSError:
1066 pass
1066 pass
1067 return rcs
1067 return rcs
1068
1068
1069 def system_rcpath():
1069 def system_rcpath():
1070 path = []
1070 path = []
1071 # old mod_python does not set sys.argv
1071 # old mod_python does not set sys.argv
1072 if len(getattr(sys, 'argv', [])) > 0:
1072 if len(getattr(sys, 'argv', [])) > 0:
1073 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1073 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1074 '/../etc/mercurial'))
1074 '/../etc/mercurial'))
1075 path.extend(rcfiles('/etc/mercurial'))
1075 path.extend(rcfiles('/etc/mercurial'))
1076 return path
1076 return path
1077
1077
1078 def user_rcpath():
1078 def user_rcpath():
1079 return [os.path.expanduser('~/.hgrc')]
1079 return [os.path.expanduser('~/.hgrc')]
1080
1080
1081 def parse_patch_output(output_line):
1081 def parse_patch_output(output_line):
1082 """parses the output produced by patch and returns the file name"""
1082 """parses the output produced by patch and returns the file name"""
1083 pf = output_line[14:]
1083 pf = output_line[14:]
1084 if os.sys.platform == 'OpenVMS':
1084 if os.sys.platform == 'OpenVMS':
1085 if pf[0] == '`':
1085 if pf[0] == '`':
1086 pf = pf[1:-1] # Remove the quotes
1086 pf = pf[1:-1] # Remove the quotes
1087 else:
1087 else:
1088 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1088 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1089 pf = pf[1:-1] # Remove the quotes
1089 pf = pf[1:-1] # Remove the quotes
1090 return pf
1090 return pf
1091
1091
1092 def is_exec(f):
1092 def is_exec(f):
1093 """check whether a file is executable"""
1093 """check whether a file is executable"""
1094 return (os.lstat(f).st_mode & 0100 != 0)
1094 return (os.lstat(f).st_mode & 0100 != 0)
1095
1095
1096 def set_exec(f, mode):
1096 def set_exec(f, mode):
1097 s = os.lstat(f).st_mode
1097 s = os.lstat(f).st_mode
1098 if stat.S_ISLNK(s) or (s & 0100 != 0) == mode:
1098 if stat.S_ISLNK(s) or (s & 0100 != 0) == mode:
1099 return
1099 return
1100 if mode:
1100 if mode:
1101 # Turn on +x for every +r bit when making a file executable
1101 # Turn on +x for every +r bit when making a file executable
1102 # and obey umask.
1102 # and obey umask.
1103 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1103 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1104 else:
1104 else:
1105 os.chmod(f, s & 0666)
1105 os.chmod(f, s & 0666)
1106
1106
1107 def set_link(f, mode):
1107 def set_link(f, mode):
1108 """make a file a symbolic link/regular file
1108 """make a file a symbolic link/regular file
1109
1109
1110 if a file is changed to a link, its contents become the link data
1110 if a file is changed to a link, its contents become the link data
1111 if a link is changed to a file, its link data become its contents
1111 if a link is changed to a file, its link data become its contents
1112 """
1112 """
1113
1113
1114 m = os.path.islink(f)
1114 m = os.path.islink(f)
1115 if m == bool(mode):
1115 if m == bool(mode):
1116 return
1116 return
1117
1117
1118 if mode: # switch file to link
1118 if mode: # switch file to link
1119 data = file(f).read()
1119 data = file(f).read()
1120 os.unlink(f)
1120 os.unlink(f)
1121 os.symlink(data, f)
1121 os.symlink(data, f)
1122 else:
1122 else:
1123 data = os.readlink(f)
1123 data = os.readlink(f)
1124 os.unlink(f)
1124 os.unlink(f)
1125 file(f, "w").write(data)
1125 file(f, "w").write(data)
1126
1126
1127 def set_binary(fd):
1127 def set_binary(fd):
1128 pass
1128 pass
1129
1129
1130 def pconvert(path):
1130 def pconvert(path):
1131 return path
1131 return path
1132
1132
1133 def localpath(path):
1133 def localpath(path):
1134 return path
1134 return path
1135
1135
1136 normpath = os.path.normpath
1136 normpath = os.path.normpath
1137 samestat = os.path.samestat
1137 samestat = os.path.samestat
1138
1138
1139 def makelock(info, pathname):
1139 def makelock(info, pathname):
1140 try:
1140 try:
1141 os.symlink(info, pathname)
1141 os.symlink(info, pathname)
1142 except OSError, why:
1142 except OSError, why:
1143 if why.errno == errno.EEXIST:
1143 if why.errno == errno.EEXIST:
1144 raise
1144 raise
1145 else:
1145 else:
1146 _makelock_file(info, pathname)
1146 _makelock_file(info, pathname)
1147
1147
1148 def readlock(pathname):
1148 def readlock(pathname):
1149 try:
1149 try:
1150 return os.readlink(pathname)
1150 return os.readlink(pathname)
1151 except OSError, why:
1151 except OSError, why:
1152 if why.errno in (errno.EINVAL, errno.ENOSYS):
1152 if why.errno in (errno.EINVAL, errno.ENOSYS):
1153 return _readlock_file(pathname)
1153 return _readlock_file(pathname)
1154 else:
1154 else:
1155 raise
1155 raise
1156
1156
1157 def shellquote(s):
1157 def shellquote(s):
1158 if os.sys.platform == 'OpenVMS':
1158 if os.sys.platform == 'OpenVMS':
1159 return '"%s"' % s
1159 return '"%s"' % s
1160 else:
1160 else:
1161 return "'%s'" % s.replace("'", "'\\''")
1161 return "'%s'" % s.replace("'", "'\\''")
1162
1162
1163 def quotecommand(cmd):
1163 def quotecommand(cmd):
1164 return cmd
1164 return cmd
1165
1165
1166 def testpid(pid):
1166 def testpid(pid):
1167 '''return False if pid dead, True if running or not sure'''
1167 '''return False if pid dead, True if running or not sure'''
1168 if os.sys.platform == 'OpenVMS':
1168 if os.sys.platform == 'OpenVMS':
1169 return True
1169 return True
1170 try:
1170 try:
1171 os.kill(pid, 0)
1171 os.kill(pid, 0)
1172 return True
1172 return True
1173 except OSError, inst:
1173 except OSError, inst:
1174 return inst.errno != errno.ESRCH
1174 return inst.errno != errno.ESRCH
1175
1175
1176 def explain_exit(code):
1176 def explain_exit(code):
1177 """return a 2-tuple (desc, code) describing a process's status"""
1177 """return a 2-tuple (desc, code) describing a process's status"""
1178 if os.WIFEXITED(code):
1178 if os.WIFEXITED(code):
1179 val = os.WEXITSTATUS(code)
1179 val = os.WEXITSTATUS(code)
1180 return _("exited with status %d") % val, val
1180 return _("exited with status %d") % val, val
1181 elif os.WIFSIGNALED(code):
1181 elif os.WIFSIGNALED(code):
1182 val = os.WTERMSIG(code)
1182 val = os.WTERMSIG(code)
1183 return _("killed by signal %d") % val, val
1183 return _("killed by signal %d") % val, val
1184 elif os.WIFSTOPPED(code):
1184 elif os.WIFSTOPPED(code):
1185 val = os.WSTOPSIG(code)
1185 val = os.WSTOPSIG(code)
1186 return _("stopped by signal %d") % val, val
1186 return _("stopped by signal %d") % val, val
1187 raise ValueError(_("invalid exit code"))
1187 raise ValueError(_("invalid exit code"))
1188
1188
1189 def isowner(fp, st=None):
1189 def isowner(fp, st=None):
1190 """Return True if the file object f belongs to the current user.
1190 """Return True if the file object f belongs to the current user.
1191
1191
1192 The return value of a util.fstat(f) may be passed as the st argument.
1192 The return value of a util.fstat(f) may be passed as the st argument.
1193 """
1193 """
1194 if st is None:
1194 if st is None:
1195 st = fstat(fp)
1195 st = fstat(fp)
1196 return st.st_uid == os.getuid()
1196 return st.st_uid == os.getuid()
1197
1197
1198 def find_in_path(name, path, default=None):
1198 def find_in_path(name, path, default=None):
1199 '''find name in search path. path can be string (will be split
1199 '''find name in search path. path can be string (will be split
1200 with os.pathsep), or iterable thing that returns strings. if name
1200 with os.pathsep), or iterable thing that returns strings. if name
1201 found, return path to name. else return default.'''
1201 found, return path to name. else return default.'''
1202 if isinstance(path, str):
1202 if isinstance(path, str):
1203 path = path.split(os.pathsep)
1203 path = path.split(os.pathsep)
1204 for p in path:
1204 for p in path:
1205 p_name = os.path.join(p, name)
1205 p_name = os.path.join(p, name)
1206 if os.path.exists(p_name):
1206 if os.path.exists(p_name):
1207 return p_name
1207 return p_name
1208 return default
1208 return default
1209
1209
1210 def set_signal_handler():
1210 def set_signal_handler():
1211 pass
1211 pass
1212
1212
1213 def find_exe(name, default=None):
1213 def find_exe(name, default=None):
1214 '''find path of an executable.
1214 '''find path of an executable.
1215 if name contains a path component, return it as is. otherwise,
1215 if name contains a path component, return it as is. otherwise,
1216 use normal executable search path.'''
1216 use normal executable search path.'''
1217
1217
1218 if os.sep in name or sys.platform == 'OpenVMS':
1218 if os.sep in name or sys.platform == 'OpenVMS':
1219 # don't check the executable bit. if the file isn't
1219 # don't check the executable bit. if the file isn't
1220 # executable, whoever tries to actually run it will give a
1220 # executable, whoever tries to actually run it will give a
1221 # much more useful error message.
1221 # much more useful error message.
1222 return name
1222 return name
1223 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1223 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1224
1224
1225 def _buildencodefun():
1225 def _buildencodefun():
1226 e = '_'
1226 e = '_'
1227 win_reserved = [ord(x) for x in '\\:*?"<>|']
1227 win_reserved = [ord(x) for x in '\\:*?"<>|']
1228 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1228 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1229 for x in (range(32) + range(126, 256) + win_reserved):
1229 for x in (range(32) + range(126, 256) + win_reserved):
1230 cmap[chr(x)] = "~%02x" % x
1230 cmap[chr(x)] = "~%02x" % x
1231 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1231 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1232 cmap[chr(x)] = e + chr(x).lower()
1232 cmap[chr(x)] = e + chr(x).lower()
1233 dmap = {}
1233 dmap = {}
1234 for k, v in cmap.iteritems():
1234 for k, v in cmap.iteritems():
1235 dmap[v] = k
1235 dmap[v] = k
1236 def decode(s):
1236 def decode(s):
1237 i = 0
1237 i = 0
1238 while i < len(s):
1238 while i < len(s):
1239 for l in xrange(1, 4):
1239 for l in xrange(1, 4):
1240 try:
1240 try:
1241 yield dmap[s[i:i+l]]
1241 yield dmap[s[i:i+l]]
1242 i += l
1242 i += l
1243 break
1243 break
1244 except KeyError:
1244 except KeyError:
1245 pass
1245 pass
1246 else:
1246 else:
1247 raise KeyError
1247 raise KeyError
1248 return (lambda s: "".join([cmap[c] for c in s]),
1248 return (lambda s: "".join([cmap[c] for c in s]),
1249 lambda s: "".join(list(decode(s))))
1249 lambda s: "".join(list(decode(s))))
1250
1250
1251 encodefilename, decodefilename = _buildencodefun()
1251 encodefilename, decodefilename = _buildencodefun()
1252
1252
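encodefilename() makes store filenames safe on case-folding and Windows filesystems: uppercase letters and '_' turn into '_' plus the lowercase letter, while control bytes, non-ASCII bytes and the reserved characters \ : * ? " < > | become '~' plus two hex digits; decodefilename() reverses the mapping. For example, assuming the mercurial.util import path:

from mercurial import util

enc = util.encodefilename('data/ABC:DEF.i')
print enc                              # data/_a_b_c~3a_d_e_f.i
print util.decodefilename(enc)         # data/ABC:DEF.i  (round trip)
print util.encodefilename('foo_bar')   # foo__bar  ('_' escapes itself)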
1253 def encodedopener(openerfn, fn):
1253 def encodedopener(openerfn, fn):
1254 def o(path, *args, **kw):
1254 def o(path, *args, **kw):
1255 return openerfn(fn(path), *args, **kw)
1255 return openerfn(fn(path), *args, **kw)
1256 return o
1256 return o
1257
1257
1258 def mktempcopy(name, emptyok=False):
1258 def mktempcopy(name, emptyok=False):
1259 """Create a temporary file with the same contents from name
1259 """Create a temporary file with the same contents from name
1260
1260
1261 The permission bits are copied from the original file.
1261 The permission bits are copied from the original file.
1262
1262
1263 If the temporary file is going to be truncated immediately, you
1263 If the temporary file is going to be truncated immediately, you
1264 can use emptyok=True as an optimization.
1264 can use emptyok=True as an optimization.
1265
1265
1266 Returns the name of the temporary file.
1266 Returns the name of the temporary file.
1267 """
1267 """
1268 d, fn = os.path.split(name)
1268 d, fn = os.path.split(name)
1269 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1269 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1270 os.close(fd)
1270 os.close(fd)
1271 # Temporary files are created with mode 0600, which is usually not
1271 # Temporary files are created with mode 0600, which is usually not
1272 # what we want. If the original file already exists, just copy
1272 # what we want. If the original file already exists, just copy
1273 # its mode. Otherwise, manually obey umask.
1273 # its mode. Otherwise, manually obey umask.
1274 try:
1274 try:
1275 st_mode = os.lstat(name).st_mode
1275 st_mode = os.lstat(name).st_mode
1276 except OSError, inst:
1276 except OSError, inst:
1277 if inst.errno != errno.ENOENT:
1277 if inst.errno != errno.ENOENT:
1278 raise
1278 raise
1279 st_mode = 0666 & ~_umask
1279 st_mode = 0666 & ~_umask
1280 os.chmod(temp, st_mode)
1280 os.chmod(temp, st_mode)
1281 if emptyok:
1281 if emptyok:
1282 return temp
1282 return temp
1283 try:
1283 try:
1284 try:
1284 try:
1285 ifp = posixfile(name, "rb")
1285 ifp = posixfile(name, "rb")
1286 except IOError, inst:
1286 except IOError, inst:
1287 if inst.errno == errno.ENOENT:
1287 if inst.errno == errno.ENOENT:
1288 return temp
1288 return temp
1289 if not getattr(inst, 'filename', None):
1289 if not getattr(inst, 'filename', None):
1290 inst.filename = name
1290 inst.filename = name
1291 raise
1291 raise
1292 ofp = posixfile(temp, "wb")
1292 ofp = posixfile(temp, "wb")
1293 for chunk in filechunkiter(ifp):
1293 for chunk in filechunkiter(ifp):
1294 ofp.write(chunk)
1294 ofp.write(chunk)
1295 ifp.close()
1295 ifp.close()
1296 ofp.close()
1296 ofp.close()
1297 except:
1297 except:
1298 try: os.unlink(temp)
1298 try: os.unlink(temp)
1299 except: pass
1299 except: pass
1300 raise
1300 raise
1301 return temp
1301 return temp
1302
1302
1303 class atomictempfile(posixfile):
1303 class atomictempfile(posixfile):
1304 """file-like object that atomically updates a file
1304 """file-like object that atomically updates a file
1305
1305
1306 All writes will be redirected to a temporary copy of the original
1306 All writes will be redirected to a temporary copy of the original
1307 file. When rename is called, the copy is renamed to the original
1307 file. When rename is called, the copy is renamed to the original
1308 name, making the changes visible.
1308 name, making the changes visible.
1309 """
1309 """
1310 def __init__(self, name, mode):
1310 def __init__(self, name, mode):
1311 self.__name = name
1311 self.__name = name
1312 self.temp = mktempcopy(name, emptyok=('w' in mode))
1312 self.temp = mktempcopy(name, emptyok=('w' in mode))
1313 posixfile.__init__(self, self.temp, mode)
1313 posixfile.__init__(self, self.temp, mode)
1314
1314
1315 def rename(self):
1315 def rename(self):
1316 if not self.closed:
1316 if not self.closed:
1317 posixfile.close(self)
1317 posixfile.close(self)
1318 rename(self.temp, localpath(self.__name))
1318 rename(self.temp, localpath(self.__name))
1319
1319
1320 def __del__(self):
1320 def __del__(self):
1321 if not self.closed:
1321 if not self.closed:
1322 try:
1322 try:
1323 os.unlink(self.temp)
1323 os.unlink(self.temp)
1324 except: pass
1324 except: pass
1325 posixfile.close(self)
1325 posixfile.close(self)
1326
1326
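With atomictempfile, all writes land in a mktempcopy() of the target (a hidden '.name-XXXX' file in the same directory); readers never see a half-written file, and only rename() makes the new contents visible. A usage sketch, assuming the mercurial.util import path and an example target path:

from mercurial import util

f = util.atomictempfile('data/state.txt', 'w')
f.write('new contents\n')    # goes to the temporary copy
f.rename()                   # close + rename over data/state.txt
# had an exception been raised before rename(), __del__ would have
# discarded the temporary file and left the original untouched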
1327 class opener(object):
1327 class opener(object):
1328 """Open files relative to a base directory
1328 """Open files relative to a base directory
1329
1329
1330 This class is used to hide the details of COW semantics and
1330 This class is used to hide the details of COW semantics and
1331 remote file access from higher level code.
1331 remote file access from higher level code.
1332 """
1332 """
1333 def __init__(self, base, audit=True):
1333 def __init__(self, base, audit=True):
1334 self.base = base
1334 self.base = base
1335 if audit:
1335 if audit:
1336 self.audit_path = path_auditor(base)
1336 self.audit_path = path_auditor(base)
1337 else:
1337 else:
1338 self.audit_path = always
1338 self.audit_path = always
1339
1339
1340 def __getattr__(self, name):
1340 def __getattr__(self, name):
1341 if name == '_can_symlink':
1341 if name == '_can_symlink':
1342 self._can_symlink = checklink(self.base)
1342 self._can_symlink = checklink(self.base)
1343 return self._can_symlink
1343 return self._can_symlink
1344 raise AttributeError(name)
1344 raise AttributeError(name)
1345
1345
1346 def __call__(self, path, mode="r", text=False, atomictemp=False):
1346 def __call__(self, path, mode="r", text=False, atomictemp=False):
1347 self.audit_path(path)
1347 self.audit_path(path)
1348 f = os.path.join(self.base, path)
1348 f = os.path.join(self.base, path)
1349
1349
1350 if not text and "b" not in mode:
1350 if not text and "b" not in mode:
1351 mode += "b" # for that other OS
1351 mode += "b" # for that other OS
1352
1352
1353 if mode[0] != "r":
1353 if mode[0] != "r":
1354 try:
1354 try:
1355 nlink = nlinks(f)
1355 nlink = nlinks(f)
1356 except OSError:
1356 except OSError:
1357 nlink = 0
1357 nlink = 0
1358 d = os.path.dirname(f)
1358 d = os.path.dirname(f)
1359 if not os.path.isdir(d):
1359 if not os.path.isdir(d):
1360 os.makedirs(d)
1360 os.makedirs(d)
1361 if atomictemp:
1361 if atomictemp:
1362 return atomictempfile(f, mode)
1362 return atomictempfile(f, mode)
1363 if nlink > 1:
1363 if nlink > 1:
1364 rename(mktempcopy(f), f)
1364 rename(mktempcopy(f), f)
1365 return posixfile(f, mode)
1365 return posixfile(f, mode)
1366
1366
1367 def symlink(self, src, dst):
1367 def symlink(self, src, dst):
1368 self.audit_path(dst)
1368 self.audit_path(dst)
1369 linkname = os.path.join(self.base, dst)
1369 linkname = os.path.join(self.base, dst)
1370 try:
1370 try:
1371 os.unlink(linkname)
1371 os.unlink(linkname)
1372 except OSError:
1372 except OSError:
1373 pass
1373 pass
1374
1374
1375 dirname = os.path.dirname(linkname)
1375 dirname = os.path.dirname(linkname)
1376 if not os.path.exists(dirname):
1376 if not os.path.exists(dirname):
1377 os.makedirs(dirname)
1377 os.makedirs(dirname)
1378
1378
1379 if self._can_symlink:
1379 if self._can_symlink:
1380 try:
1380 try:
1381 os.symlink(src, linkname)
1381 os.symlink(src, linkname)
1382 except OSError, err:
1382 except OSError, err:
1383 raise OSError(err.errno, _('could not symlink to %r: %s') %
1383 raise OSError(err.errno, _('could not symlink to %r: %s') %
1384 (src, err.strerror), linkname)
1384 (src, err.strerror), linkname)
1385 else:
1385 else:
1386 f = self(dst, "w")
1386 f = self(dst, "w")
1387 f.write(src)
1387 f.write(src)
1388 f.close()
1388 f.close()
1389
1389
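An opener bundles path auditing, directory creation, forced binary mode and the copy-on-write handling behind one callable rooted at a base directory. A usage sketch with example paths, assuming the mercurial.util import path:

from mercurial import util

op = util.opener('/home/me/repo/.hg')      # paths are audited against this base
f = op('store/data.i', 'a')                # parents created, 'b' appended to mode
f.write('...')
f.close()

f = op('dirstate', 'w', atomictemp=True)   # atomictempfile under the hood
f.write('...')
f.rename()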
1390 class chunkbuffer(object):
1390 class chunkbuffer(object):
1391 """Allow arbitrary sized chunks of data to be efficiently read from an
1391 """Allow arbitrary sized chunks of data to be efficiently read from an
1392 iterator over chunks of arbitrary size."""
1392 iterator over chunks of arbitrary size."""
1393
1393
1394 def __init__(self, in_iter, targetsize = 2**16):
1394 def __init__(self, in_iter, targetsize = 2**16):
1395 """in_iter is the iterator that's iterating over the input chunks.
1395 """in_iter is the iterator that's iterating over the input chunks.
1396 targetsize is how big a buffer to try to maintain."""
1396 targetsize is how big a buffer to try to maintain."""
1397 self.in_iter = iter(in_iter)
1397 self.in_iter = iter(in_iter)
1398 self.buf = ''
1398 self.buf = ''
1399 self.targetsize = int(targetsize)
1399 self.targetsize = int(targetsize)
1400 if self.targetsize <= 0:
1400 if self.targetsize <= 0:
1401 raise ValueError(_("targetsize must be greater than 0, was %d") %
1401 raise ValueError(_("targetsize must be greater than 0, was %d") %
1402 targetsize)
1402 targetsize)
1403 self.iterempty = False
1403 self.iterempty = False
1404
1404
1405 def fillbuf(self):
1405 def fillbuf(self):
1406 """Ignore target size; read every chunk from iterator until empty."""
1406 """Ignore target size; read every chunk from iterator until empty."""
1407 if not self.iterempty:
1407 if not self.iterempty:
1408 collector = cStringIO.StringIO()
1408 collector = cStringIO.StringIO()
1409 collector.write(self.buf)
1409 collector.write(self.buf)
1410 for ch in self.in_iter:
1410 for ch in self.in_iter:
1411 collector.write(ch)
1411 collector.write(ch)
1412 self.buf = collector.getvalue()
1412 self.buf = collector.getvalue()
1413 self.iterempty = True
1413 self.iterempty = True
1414
1414
1415 def read(self, l):
1415 def read(self, l):
1416 """Read L bytes of data from the iterator of chunks of data.
1416 """Read L bytes of data from the iterator of chunks of data.
1417 Returns less than L bytes if the iterator runs dry."""
1417 Returns less than L bytes if the iterator runs dry."""
1418 if l > len(self.buf) and not self.iterempty:
1418 if l > len(self.buf) and not self.iterempty:
1419 # Clamp to a multiple of self.targetsize
1419 # Clamp to a multiple of self.targetsize
1420 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1420 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1421 collector = cStringIO.StringIO()
1421 collector = cStringIO.StringIO()
1422 collector.write(self.buf)
1422 collector.write(self.buf)
1423 collected = len(self.buf)
1423 collected = len(self.buf)
1424 for chunk in self.in_iter:
1424 for chunk in self.in_iter:
1425 collector.write(chunk)
1425 collector.write(chunk)
1426 collected += len(chunk)
1426 collected += len(chunk)
1427 if collected >= targetsize:
1427 if collected >= targetsize:
1428 break
1428 break
1429 if collected < targetsize:
1429 if collected < targetsize:
1430 self.iterempty = True
1430 self.iterempty = True
1431 self.buf = collector.getvalue()
1431 self.buf = collector.getvalue()
1432 s, self.buf = self.buf[:l], buffer(self.buf, l)
1432 s, self.buf = self.buf[:l], buffer(self.buf, l)
1433 return s
1433 return s
1434
1434
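chunkbuffer turns an iterator of arbitrarily sized chunks into something with a read(n) method, pulling only as many chunks as a request needs (rounded up to targetsize) and returning short reads once the iterator runs dry. A small sketch, assuming the mercurial.util import path:

from mercurial import util

chunks = iter(['ab', 'cdef', 'g', 'hij'])
buf = util.chunkbuffer(chunks, targetsize=4)
print buf.read(3)     # 'abc'      (consumed 'ab' and 'cdef' from the iterator)
print buf.read(10)    # 'defghij'  (only 7 bytes left, so a short read)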
1435 def filechunkiter(f, size=65536, limit=None):
1435 def filechunkiter(f, size=65536, limit=None):
1436 """Create a generator that produces the data in the file size
1436 """Create a generator that produces the data in the file size
1437 (default 65536) bytes at a time, up to optional limit (default is
1437 (default 65536) bytes at a time, up to optional limit (default is
1438 to read all data). Chunks may be less than size bytes if the
1438 to read all data). Chunks may be less than size bytes if the
1439 chunk is the last chunk in the file, or the file is a socket or
1439 chunk is the last chunk in the file, or the file is a socket or
1440 some other type of file that sometimes reads less data than is
1440 some other type of file that sometimes reads less data than is
1441 requested."""
1441 requested."""
1442 assert size >= 0
1442 assert size >= 0
1443 assert limit is None or limit >= 0
1443 assert limit is None or limit >= 0
1444 while True:
1444 while True:
1445 if limit is None: nbytes = size
1445 if limit is None: nbytes = size
1446 else: nbytes = min(limit, size)
1446 else: nbytes = min(limit, size)
1447 s = nbytes and f.read(nbytes)
1447 s = nbytes and f.read(nbytes)
1448 if not s: break
1448 if not s: break
1449 if limit: limit -= len(s)
1449 if limit: limit -= len(s)
1450 yield s
1450 yield s
1451
1451
1452 def makedate():
1452 def makedate():
1453 lt = time.localtime()
1453 lt = time.localtime()
1454 if lt[8] == 1 and time.daylight:
1454 if lt[8] == 1 and time.daylight:
1455 tz = time.altzone
1455 tz = time.altzone
1456 else:
1456 else:
1457 tz = time.timezone
1457 tz = time.timezone
1458 return time.mktime(lt), tz
1458 return time.mktime(lt), tz
1459
1459
1460 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
1460 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
1461 """represent a (unixtime, offset) tuple as a localized time.
1461 """represent a (unixtime, offset) tuple as a localized time.
1462 unixtime is seconds since the epoch, and offset is the time zone's
1462 unixtime is seconds since the epoch, and offset is the time zone's
1463 number of seconds away from UTC. if timezone is false, do not
1463 number of seconds away from UTC. if timezone is false, do not
1464 append time zone to string."""
1464 append time zone to string."""
1465 t, tz = date or makedate()
1465 t, tz = date or makedate()
1466 s = time.strftime(format, time.gmtime(float(t) - tz))
1466 s = time.strftime(format, time.gmtime(float(t) - tz))
1467 if timezone:
1467 if timezone:
1468 s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
1468 s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
1469 return s
1469 return s
1470
1470
1471 def strdate(string, format, defaults):
1471 def strdate(string, format, defaults=[]):
1472 """parse a localized time string and return a (unixtime, offset) tuple.
1472 """parse a localized time string and return a (unixtime, offset) tuple.
1473 if the string cannot be parsed, ValueError is raised."""
1473 if the string cannot be parsed, ValueError is raised."""
1474 def timezone(string):
1474 def timezone(string):
1475 tz = string.split()[-1]
1475 tz = string.split()[-1]
1476 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1476 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1477 tz = int(tz)
1477 tz = int(tz)
1478 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1478 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1479 return offset
1479 return offset
1480 if tz == "GMT" or tz == "UTC":
1480 if tz == "GMT" or tz == "UTC":
1481 return 0
1481 return 0
1482 return None
1482 return None
1483
1483
1484 # NOTE: unixtime = localunixtime + offset
1484 # NOTE: unixtime = localunixtime + offset
1485 offset, date = timezone(string), string
1485 offset, date = timezone(string), string
1486 if offset != None:
1486 if offset != None:
1487 date = " ".join(string.split()[:-1])
1487 date = " ".join(string.split()[:-1])
1488
1488
1489 # add missing elements from defaults
1489 # add missing elements from defaults
1490 for part in defaults:
1490 for part in defaults:
1491 found = [True for p in part if ("%"+p) in format]
1491 found = [True for p in part if ("%"+p) in format]
1492 if not found:
1492 if not found:
1493 date += "@" + defaults[part]
1493 date += "@" + defaults[part]
1494 format += "@%" + part[0]
1494 format += "@%" + part[0]
1495
1495
1496 timetuple = time.strptime(date, format)
1496 timetuple = time.strptime(date, format)
1497 localunixtime = int(calendar.timegm(timetuple))
1497 localunixtime = int(calendar.timegm(timetuple))
1498 if offset is None:
1498 if offset is None:
1499 # local timezone
1499 # local timezone
1500 unixtime = int(time.mktime(timetuple))
1500 unixtime = int(time.mktime(timetuple))
1501 offset = unixtime - localunixtime
1501 offset = unixtime - localunixtime
1502 else:
1502 else:
1503 unixtime = localunixtime + offset
1503 unixtime = localunixtime + offset
1504 return unixtime, offset
1504 return unixtime, offset
1505
1505
1506 def parsedate(string, formats=None, defaults=None):
1506 def parsedate(string, formats=None, defaults=None):
1507 """parse a localized time string and return a (unixtime, offset) tuple.
1507 """parse a localized time string and return a (unixtime, offset) tuple.
1508 The date may be a "unixtime offset" string or in one of the specified
1508 The date may be a "unixtime offset" string or in one of the specified
1509 formats."""
1509 formats."""
1510 if not string:
1510 if not string:
1511 return 0, 0
1511 return 0, 0
1512 if not formats:
1512 if not formats:
1513 formats = defaultdateformats
1513 formats = defaultdateformats
1514 string = string.strip()
1514 string = string.strip()
1515 try:
1515 try:
1516 when, offset = map(int, string.split(' '))
1516 when, offset = map(int, string.split(' '))
1517 except ValueError:
1517 except ValueError:
1518 # fill out defaults
1518 # fill out defaults
1519 if not defaults:
1519 if not defaults:
1520 defaults = {}
1520 defaults = {}
1521 now = makedate()
1521 now = makedate()
1522 for part in "d mb yY HI M S".split():
1522 for part in "d mb yY HI M S".split():
1523 if part not in defaults:
1523 if part not in defaults:
1524 if part[0] in "HMS":
1524 if part[0] in "HMS":
1525 defaults[part] = "00"
1525 defaults[part] = "00"
1526 elif part[0] in "dm":
1526 elif part[0] in "dm":
1527 defaults[part] = "1"
1527 defaults[part] = "1"
1528 else:
1528 else:
1529 defaults[part] = datestr(now, "%" + part[0], False)
1529 defaults[part] = datestr(now, "%" + part[0], False)
1530
1530
1531 for format in formats:
1531 for format in formats:
1532 try:
1532 try:
1533 when, offset = strdate(string, format, defaults)
1533 when, offset = strdate(string, format, defaults)
1534 except ValueError:
1534 except ValueError:
1535 pass
1535 pass
1536 else:
1536 else:
1537 break
1537 break
1538 else:
1538 else:
1539 raise Abort(_('invalid date: %r ') % string)
1539 raise Abort(_('invalid date: %r ') % string)
1540 # validate explicit (probably user-specified) date and
1540 # validate explicit (probably user-specified) date and
1541 # time zone offset. values must fit in signed 32 bits for
1541 # time zone offset. values must fit in signed 32 bits for
1542 # current 32-bit linux runtimes. timezones go from UTC-12
1542 # current 32-bit linux runtimes. timezones go from UTC-12
1543 # to UTC+14
1543 # to UTC+14
1544 if abs(when) > 0x7fffffff:
1544 if abs(when) > 0x7fffffff:
1545 raise Abort(_('date exceeds 32 bits: %d') % when)
1545 raise Abort(_('date exceeds 32 bits: %d') % when)
1546 if offset < -50400 or offset > 43200:
1546 if offset < -50400 or offset > 43200:
1547 raise Abort(_('impossible time zone offset: %d') % offset)
1547 raise Abort(_('impossible time zone offset: %d') % offset)
1548 return when, offset
1548 return when, offset
1549
1549
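parsedate() accepts either a raw 'unixtime offset' pair or any of the formats in defaultdateformats (defined earlier in this module), filling missing fields such as seconds from defaults. A sketch, assuming '%Y-%m-%d %H:%M' is among those formats:

from mercurial import util

print util.parsedate('1183068000 -7200')   # (1183068000, -7200), the raw pair
when, offset = util.parsedate('2007-06-28 12:00 +0200')
print offset                               # -7200: east of UTC gives a negative offset
print util.parsedate('')                   # (0, 0)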
1550 def matchdate(date):
1550 def matchdate(date):
1551 """Return a function that matches a given date match specifier
1551 """Return a function that matches a given date match specifier
1552
1552
1553 Formats include:
1553 Formats include:
1554
1554
1555 '{date}' match a given date to the accuracy provided
1555 '{date}' match a given date to the accuracy provided
1556
1556
1557 '<{date}' on or before a given date
1557 '<{date}' on or before a given date
1558
1558
1559 '>{date}' on or after a given date
1559 '>{date}' on or after a given date
1560
1560
1561 """
1561 """
1562
1562
1563 def lower(date):
1563 def lower(date):
1564 return parsedate(date, extendeddateformats)[0]
1564 return parsedate(date, extendeddateformats)[0]
1565
1565
1566 def upper(date):
1566 def upper(date):
1567 d = dict(mb="12", HI="23", M="59", S="59")
1567 d = dict(mb="12", HI="23", M="59", S="59")
1568 for days in "31 30 29".split():
1568 for days in "31 30 29".split():
1569 try:
1569 try:
1570 d["d"] = days
1570 d["d"] = days
1571 return parsedate(date, extendeddateformats, d)[0]
1571 return parsedate(date, extendeddateformats, d)[0]
1572 except:
1572 except:
1573 pass
1573 pass
1574 d["d"] = "28"
1574 d["d"] = "28"
1575 return parsedate(date, extendeddateformats, d)[0]
1575 return parsedate(date, extendeddateformats, d)[0]
1576
1576
1577 if date[0] == "<":
1577 if date[0] == "<":
1578 when = upper(date[1:])
1578 when = upper(date[1:])
1579 return lambda x: x <= when
1579 return lambda x: x <= when
1580 elif date[0] == ">":
1580 elif date[0] == ">":
1581 when = lower(date[1:])
1581 when = lower(date[1:])
1582 return lambda x: x >= when
1582 return lambda x: x >= when
1583 elif date[0] == "-":
1583 elif date[0] == "-":
1584 try:
1584 try:
1585 days = int(date[1:])
1585 days = int(date[1:])
1586 except ValueError:
1586 except ValueError:
1587 raise Abort(_("invalid day spec: %s") % date[1:])
1587 raise Abort(_("invalid day spec: %s") % date[1:])
1588 when = makedate()[0] - days * 3600 * 24
1588 when = makedate()[0] - days * 3600 * 24
1589 return lambda x: x >= when
1589 return lambda x: x >= when
1590 elif " to " in date:
1590 elif " to " in date:
1591 a, b = date.split(" to ")
1591 a, b = date.split(" to ")
1592 start, stop = lower(a), upper(b)
1592 start, stop = lower(a), upper(b)
1593 return lambda x: x >= start and x <= stop
1593 return lambda x: x >= start and x <= stop
1594 else:
1594 else:
1595 start, stop = lower(date), upper(date)
1595 start, stop = lower(date), upper(date)
1596 return lambda x: x >= start and x <= stop
1596 return lambda x: x >= start and x <= stop
1597
1597
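matchdate() compiles one of those specifiers into a predicate over unixtime, which is what 'hg log --date' ultimately evaluates. A sketch, assuming the mercurial.util import path:

from mercurial import util

after = util.matchdate('>2007-06-01')          # on or after that day
print after(util.parsedate('2007-06-28')[0])   # True

inrange = util.matchdate('2007-05-01 to 2007-06-01')
print inrange(util.parsedate('2007-05-15')[0]) # True: the range is inclusive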
1598 def shortuser(user):
1598 def shortuser(user):
1599 """Return a short representation of a user name or email address."""
1599 """Return a short representation of a user name or email address."""
1600 f = user.find('@')
1600 f = user.find('@')
1601 if f >= 0:
1601 if f >= 0:
1602 user = user[:f]
1602 user = user[:f]
1603 f = user.find('<')
1603 f = user.find('<')
1604 if f >= 0:
1604 if f >= 0:
1605 user = user[f+1:]
1605 user = user[f+1:]
1606 f = user.find(' ')
1606 f = user.find(' ')
1607 if f >= 0:
1607 if f >= 0:
1608 user = user[:f]
1608 user = user[:f]
1609 f = user.find('.')
1609 f = user.find('.')
1610 if f >= 0:
1610 if f >= 0:
1611 user = user[:f]
1611 user = user[:f]
1612 return user
1612 return user
1613
1613
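shortuser() trims an address down to a short handle by cutting at '@', '<', space and '.' in that order, for example:

from mercurial import util

print util.shortuser('Foo Bar <foo.bar@example.com>')   # foo
print util.shortuser('baz@example.com')                 # baz
print util.shortuser('plainname')                       # plainname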
1614 def ellipsis(text, maxlength=400):
1614 def ellipsis(text, maxlength=400):
1615 """Trim string to at most maxlength (default: 400) characters."""
1615 """Trim string to at most maxlength (default: 400) characters."""
1616 if len(text) <= maxlength:
1616 if len(text) <= maxlength:
1617 return text
1617 return text
1618 else:
1618 else:
1619 return "%s..." % (text[:maxlength-3])
1619 return "%s..." % (text[:maxlength-3])
1620
1620
1621 def walkrepos(path):
1621 def walkrepos(path):
1622 '''yield every hg repository under path, recursively.'''
1622 '''yield every hg repository under path, recursively.'''
1623 def errhandler(err):
1623 def errhandler(err):
1624 if err.filename == path:
1624 if err.filename == path:
1625 raise err
1625 raise err
1626
1626
1627 for root, dirs, files in os.walk(path, onerror=errhandler):
1627 for root, dirs, files in os.walk(path, onerror=errhandler):
1628 for d in dirs:
1628 for d in dirs:
1629 if d == '.hg':
1629 if d == '.hg':
1630 yield root
1630 yield root
1631 dirs[:] = []
1631 dirs[:] = []
1632 break
1632 break
1633
1633
1634 _rcpath = None
1634 _rcpath = None
1635
1635
1636 def os_rcpath():
1636 def os_rcpath():
1637 '''return default os-specific hgrc search path'''
1637 '''return default os-specific hgrc search path'''
1638 path = system_rcpath()
1638 path = system_rcpath()
1639 path.extend(user_rcpath())
1639 path.extend(user_rcpath())
1640 path = [os.path.normpath(f) for f in path]
1640 path = [os.path.normpath(f) for f in path]
1641 return path
1641 return path
1642
1642
1643 def rcpath():
1643 def rcpath():
1644 '''return hgrc search path. if env var HGRCPATH is set, use it.
1644 '''return hgrc search path. if env var HGRCPATH is set, use it.
1645 for each item in path, if directory, use files ending in .rc,
1645 for each item in path, if directory, use files ending in .rc,
1646 else use item.
1646 else use item.
1647 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1647 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1648 if no HGRCPATH, use default os-specific path.'''
1648 if no HGRCPATH, use default os-specific path.'''
1649 global _rcpath
1649 global _rcpath
1650 if _rcpath is None:
1650 if _rcpath is None:
1651 if 'HGRCPATH' in os.environ:
1651 if 'HGRCPATH' in os.environ:
1652 _rcpath = []
1652 _rcpath = []
1653 for p in os.environ['HGRCPATH'].split(os.pathsep):
1653 for p in os.environ['HGRCPATH'].split(os.pathsep):
1654 if not p: continue
1654 if not p: continue
1655 if os.path.isdir(p):
1655 if os.path.isdir(p):
1656 for f in os.listdir(p):
1656 for f in os.listdir(p):
1657 if f.endswith('.rc'):
1657 if f.endswith('.rc'):
1658 _rcpath.append(os.path.join(p, f))
1658 _rcpath.append(os.path.join(p, f))
1659 else:
1659 else:
1660 _rcpath.append(p)
1660 _rcpath.append(p)
1661 else:
1661 else:
1662 _rcpath = os_rcpath()
1662 _rcpath = os_rcpath()
1663 return _rcpath
1663 return _rcpath
1664
1664
1665 def bytecount(nbytes):
1665 def bytecount(nbytes):
1666 '''return byte count formatted as readable string, with units'''
1666 '''return byte count formatted as readable string, with units'''
1667
1667
1668 units = (
1668 units = (
1669 (100, 1<<30, _('%.0f GB')),
1669 (100, 1<<30, _('%.0f GB')),
1670 (10, 1<<30, _('%.1f GB')),
1670 (10, 1<<30, _('%.1f GB')),
1671 (1, 1<<30, _('%.2f GB')),
1671 (1, 1<<30, _('%.2f GB')),
1672 (100, 1<<20, _('%.0f MB')),
1672 (100, 1<<20, _('%.0f MB')),
1673 (10, 1<<20, _('%.1f MB')),
1673 (10, 1<<20, _('%.1f MB')),
1674 (1, 1<<20, _('%.2f MB')),
1674 (1, 1<<20, _('%.2f MB')),
1675 (100, 1<<10, _('%.0f KB')),
1675 (100, 1<<10, _('%.0f KB')),
1676 (10, 1<<10, _('%.1f KB')),
1676 (10, 1<<10, _('%.1f KB')),
1677 (1, 1<<10, _('%.2f KB')),
1677 (1, 1<<10, _('%.2f KB')),
1678 (1, 1, _('%.0f bytes')),
1678 (1, 1, _('%.0f bytes')),
1679 )
1679 )
1680
1680
1681 for multiplier, divisor, format in units:
1681 for multiplier, divisor, format in units:
1682 if nbytes >= divisor * multiplier:
1682 if nbytes >= divisor * multiplier:
1683 return format % (nbytes / float(divisor))
1683 return format % (nbytes / float(divisor))
1684 return units[-1][2] % nbytes
1684 return units[-1][2] % nbytes
1685
1685
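bytecount() walks the unit table from the largest threshold down, keeping more precision for smaller magnitudes, for example:

from mercurial import util

print util.bytecount(756)            # 756 bytes
print util.bytecount(4096)           # 4.00 KB
print util.bytecount(2300000)        # 2.19 MB
print util.bytecount(3 * (1 << 30))  # 3.00 GB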
1686 def drop_scheme(scheme, path):
1686 def drop_scheme(scheme, path):
1687 sc = scheme + ':'
1687 sc = scheme + ':'
1688 if path.startswith(sc):
1688 if path.startswith(sc):
1689 path = path[len(sc):]
1689 path = path[len(sc):]
1690 if path.startswith('//'):
1690 if path.startswith('//'):
1691 path = path[2:]
1691 path = path[2:]
1692 return path
1692 return path
1693
1693
1694 def uirepr(s):
1694 def uirepr(s):
1695 # Avoid double backslash in Windows path repr()
1695 # Avoid double backslash in Windows path repr()
1696 return repr(s).replace('\\\\', '\\')
1696 return repr(s).replace('\\\\', '\\')