@@ -0,0 +1,39 b'' | |||||
|
1 | Octopus Merge Support | |||
|
2 | ===================== | |||
|
3 | ||||
|
4 | This will be moderately complicated, as we'll need to synthesize phony | |||
|
5 | changeset entries to explode the octopus into "revisions" that only | |||
|
6 | have two parents each. For today, we can probably just do something like | |||
|
7 | ||||
|
8 | aaaaaaaaaaaaaaaaaaXX{20 bytes of exploded node's hex sha} | |||
|
9 | ||||
|
10 | where XX is a counter (so we could have as many as 255 parents in a | |||
|
11 | git commit - more than I think we'd ever see). That means we can | |||
|
12 | install a check in this extension to disallow checking out or | |||
|
13 | otherwise interacting with the `aaaaaaaaaaaaaaaaaa` revisions. | |||
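As a rough sketch (not part of this patch; the helper name and exact layout are hypothetical), the synthetic 40-character hex nodes described above could be generated along these lines:

    from mercurial.node import hex

    def synthetic_nodes(octopus_node, n_extra):
        # octopus_node: the 20-byte binary node of the real octopus commit.
        # n_extra: how many phony two-parent revisions we need to invent.
        suffix = hex(octopus_node)[:20]    # 20 hex chars of the exploded node's sha
        for counter in range(n_extra):     # XX: two hex digits per synthetic node
            yield b'a' * 18 + b'%02x' % counter + suffix

Each yielded value is a full 40-character node hex (18 'a's, 2 counter digits, 20 sha digits), which is what a check against `aaaaaaaaaaaaaaaaaa`-prefixed revisions would key off.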
|
14 | ||||
|
15 | ||||
|
16 | Interface Creation | |||
|
17 | ==================== | |||
|
18 | ||||
|
19 | We at least need an interface definition for `changelog` in core that | |||
|
20 | this extension can satisfy, and another for `basicstore`. | |||
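A hypothetical sketch of what the `changelog` interface could look like in core, using the zope.interface-style helpers the codebase already has (the name and method list here are illustrative, not decided):

    from mercurial.interfaces import util as interfaceutil

    class ichangelogstorage(interfaceutil.Interface):
        """Backend storage for the changelog (illustrative only)."""

        def rev(node):
            """Return the revision number for a binary node."""

        def node(rev):
            """Return the binary node for a revision number."""

        def parentrevs(rev):
            """Return the parents of ``rev`` as a 2-tuple of revision numbers."""

        def tip():
            """Return the binary node of the most recent revision."""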
|
21 | ||||
|
22 | ||||
|
23 | Reason About Locking | |||
|
24 | ==================== | |||
|
25 | ||||
|
26 | We should spend some time thinking hard about locking, especially on | |||
|
27 | .git/index etc. We're probably adequately locking the _git_ | |||
|
28 | repository, but may not have enough locking correctness in places | |||
|
29 | where hg does locking that git isn't aware of (notably the working | |||
|
30 | copy, which I believe Git does not lock.) | |||
|
31 | ||||
|
32 | Clean up requirements | |||
|
33 | ===================== | |||
|
34 | ||||
|
35 | Right now (for historical reasons, mainly) hgext.git uses a | |||
|
36 | .hg/this-is-git file to detect repositories that should be treated as | |||
|
37 | git. We should instead look in .hg/requires for the "git" requirement | |||
|
38 | (we already set this requirement, so it's mostly a matter of keying | |||
|
39 | off that instead of an empty file). |
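A minimal sketch of that cleanup, assuming the requirements set that localrepo already passes to the wrapped makestore() (gitstore here is the class this extension defines in the next file; this is not the final implementation):

    import os

    def _makestore(orig, requirements, storebasepath, vfstype):
        # Key off the "git" requirement we already write, rather than
        # the .hg/this-is-git marker file.
        if b'git' in requirements and os.path.exists(
            os.path.join(storebasepath, b'..', b'.git')
        ):
            return gitstore(storebasepath, vfstype)
        return orig(requirements, storebasepath, vfstype)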
@@ -0,0 +1,259 b'' | |||||
|
1 | """grant Mercurial the ability to operate on Git repositories. (EXPERIMENTAL) | |||
|
2 | ||||
|
3 | This is currently super experimental. It probably will consume your | |||
|
4 | firstborn a la Rumpelstiltskin, etc. | |||
|
5 | """ | |||
|
6 | ||||
|
7 | from __future__ import absolute_import | |||
|
8 | ||||
|
9 | import os | |||
|
10 | ||||
|
11 | import pygit2 | |||
|
12 | ||||
|
13 | from mercurial.i18n import _ | |||
|
14 | ||||
|
15 | from mercurial import ( | |||
|
16 | commands, | |||
|
17 | error, | |||
|
18 | extensions, | |||
|
19 | localrepo, | |||
|
20 | pycompat, | |||
|
21 | store, | |||
|
22 | util, | |||
|
23 | ) | |||
|
24 | ||||
|
25 | from . import ( | |||
|
26 | dirstate, | |||
|
27 | gitlog, | |||
|
28 | gitutil, | |||
|
29 | index, | |||
|
30 | ) | |||
|
31 | ||||
|
32 | ||||
|
33 | # TODO: extract an interface for this in core | |||
|
34 | class gitstore(object): # store.basicstore): | |||
|
35 | def __init__(self, path, vfstype): | |||
|
36 | self.vfs = vfstype(path) | |||
|
37 | self.path = self.vfs.base | |||
|
38 | self.createmode = store._calcmode(self.vfs) | |||
|
39 | # above lines should go away in favor of: | |||
|
40 | # super(gitstore, self).__init__(path, vfstype) | |||
|
41 | ||||
|
42 | self.git = pygit2.Repository( | |||
|
43 | os.path.normpath(os.path.join(path, b'..', b'.git')) | |||
|
44 | ) | |||
|
45 | self._progress_factory = lambda *args, **kwargs: None | |||
|
46 | ||||
|
47 | @util.propertycache | |||
|
48 | def _db(self): | |||
|
49 | # We lazy-create the database because we want to thread a | |||
|
50 | # progress callback down to the indexing process if it's | |||
|
51 | # required, and we don't have a ui handle in makestore(). | |||
|
52 | return index.get_index(self.git, self._progress_factory) | |||
|
53 | ||||
|
54 | def join(self, f): | |||
|
55 | """Fake store.join method for git repositories. | |||
|
56 | ||||
|
57 | For the most part, store.join is used for @storecache | |||
|
58 | decorators to invalidate caches when various files | |||
|
59 | change. We'll map the ones we care about, and ignore the rest. | |||
|
60 | """ | |||
|
61 | if f in (b'00changelog.i', b'00manifest.i'): | |||
|
62 | # This is close enough: in order for the changelog cache | |||
|
63 | # to be invalidated, HEAD will have to change. | |||
|
64 | return os.path.join(self.path, b'HEAD') | |||
|
65 | elif f == b'lock': | |||
|
66 | # TODO: we probably want to map this to a git lock, I | |||
|
67 | # suspect index.lock. We should figure out what the | |||
|
68 | # most-alike file is in git-land. For now we're risking | |||
|
69 | # bad concurrency errors if another git client is used. | |||
|
70 | return os.path.join(self.path, b'hgit-bogus-lock') | |||
|
71 | elif f in (b'obsstore', b'phaseroots', b'narrowspec', b'bookmarks'): | |||
|
72 | return os.path.join(self.path, b'..', b'.hg', f) | |||
|
73 | raise NotImplementedError(b'Need to pick file for %s.' % f) | |||
|
74 | ||||
|
75 | def changelog(self, trypending): | |||
|
76 | # TODO we don't have a plan for trypending in hg's git support yet | |||
|
77 | return gitlog.changelog(self.git, self._db) | |||
|
78 | ||||
|
79 | def manifestlog(self, repo, storenarrowmatch): | |||
|
80 | # TODO handle storenarrowmatch and figure out if we need the repo arg | |||
|
81 | return gitlog.manifestlog(self.git, self._db) | |||
|
82 | ||||
|
83 | def invalidatecaches(self): | |||
|
84 | pass | |||
|
85 | ||||
|
86 | def write(self, tr=None): | |||
|
87 | # normally this handles things like fncache writes, which we don't have | |||
|
88 | pass | |||
|
89 | ||||
|
90 | ||||
|
91 | def _makestore(orig, requirements, storebasepath, vfstype): | |||
|
92 | if os.path.exists( | |||
|
93 | os.path.join(storebasepath, b'this-is-git') | |||
|
94 | ) and os.path.exists(os.path.join(storebasepath, b'..', b'.git')): | |||
|
95 | return gitstore(storebasepath, vfstype) | |||
|
96 | return orig(requirements, storebasepath, vfstype) | |||
|
97 | ||||
|
98 | ||||
|
99 | class gitfilestorage(object): | |||
|
100 | def file(self, path): | |||
|
101 | if path[0:1] == b'/': | |||
|
102 | path = path[1:] | |||
|
103 | return gitlog.filelog(self.store.git, self.store._db, path) | |||
|
104 | ||||
|
105 | ||||
|
106 | def _makefilestorage(orig, requirements, features, **kwargs): | |||
|
107 | store = kwargs['store'] | |||
|
108 | if isinstance(store, gitstore): | |||
|
109 | return gitfilestorage | |||
|
110 | return orig(requirements, features, **kwargs) | |||
|
111 | ||||
|
112 | ||||
|
113 | def _setupdothg(ui, path): | |||
|
114 | dothg = os.path.join(path, b'.hg') | |||
|
115 | if os.path.exists(dothg): | |||
|
116 | ui.warn(_(b'git repo already initialized for hg\n')) | |||
|
117 | else: | |||
|
118 | os.mkdir(os.path.join(path, b'.hg')) | |||
|
119 | # TODO is it ok to extend .git/info/exclude like this? | |||
|
120 | with open( | |||
|
121 | os.path.join(path, b'.git', b'info', b'exclude'), 'ab' | |||
|
122 | ) as exclude: | |||
|
123 | exclude.write(b'\n.hg\n') | |||
|
124 | with open(os.path.join(dothg, b'this-is-git'), 'wb') as f: | |||
|
125 | pass | |||
|
126 | with open(os.path.join(dothg, b'requirements'), 'wb') as f: | |||
|
127 | f.write(b'git\n') | |||
|
128 | ||||
|
129 | ||||
|
130 | _BMS_PREFIX = 'refs/heads/' | |||
|
131 | ||||
|
132 | ||||
|
133 | class gitbmstore(object): | |||
|
134 | def __init__(self, gitrepo): | |||
|
135 | self.gitrepo = gitrepo | |||
|
136 | ||||
|
137 | def __contains__(self, name): | |||
|
138 | return ( | |||
|
139 | _BMS_PREFIX + pycompat.fsdecode(name) | |||
|
140 | ) in self.gitrepo.references | |||
|
141 | ||||
|
142 | def __iter__(self): | |||
|
143 | for r in self.gitrepo.listall_references(): | |||
|
144 | if r.startswith(_BMS_PREFIX): | |||
|
145 | yield pycompat.fsencode(r[len(_BMS_PREFIX) :]) | |||
|
146 | ||||
|
147 | def __getitem__(self, k): | |||
|
148 | return ( | |||
|
149 | self.gitrepo.references[_BMS_PREFIX + pycompat.fsdecode(k)] | |||
|
150 | .peel() | |||
|
151 | .id.raw | |||
|
152 | ) | |||
|
153 | ||||
|
154 | def get(self, k, default=None): | |||
|
155 | try: | |||
|
156 | if k in self: | |||
|
157 | return self[k] | |||
|
158 | return default | |||
|
159 | except pygit2.InvalidSpecError: | |||
|
160 | return default | |||
|
161 | ||||
|
162 | @property | |||
|
163 | def active(self): | |||
|
164 | h = self.gitrepo.references['HEAD'] | |||
|
165 | if not isinstance(h.target, str) or not h.target.startswith( | |||
|
166 | _BMS_PREFIX | |||
|
167 | ): | |||
|
168 | return None | |||
|
169 | return pycompat.fsencode(h.target[len(_BMS_PREFIX) :]) | |||
|
170 | ||||
|
171 | @active.setter | |||
|
172 | def active(self, mark): | |||
|
173 | raise NotImplementedError | |||
|
174 | ||||
|
175 | def names(self, node): | |||
|
176 | r = [] | |||
|
177 | for ref in self.gitrepo.listall_references(): | |||
|
178 | if not ref.startswith(_BMS_PREFIX): | |||
|
179 | continue | |||
|
180 | if self.gitrepo.references[ref].peel().id.raw != node: | |||
|
181 | continue | |||
|
182 | r.append(pycompat.fsencode(ref[len(_BMS_PREFIX) :])) | |||
|
183 | return r | |||
|
184 | ||||
|
185 | # Cleanup opportunity: this is *identical* to core's bookmarks store. | |||
|
186 | def expandname(self, bname): | |||
|
187 | if bname == b'.': | |||
|
188 | if self.active: | |||
|
189 | return self.active | |||
|
190 | raise error.RepoLookupError(_(b"no active bookmark")) | |||
|
191 | return bname | |||
|
192 | ||||
|
193 | def applychanges(self, repo, tr, changes): | |||
|
194 | """Apply a list of changes to bookmarks | |||
|
195 | """ | |||
|
196 | # TODO: this should respect transactions, but that's going to | |||
|
197 | # require enlarging the gitbmstore to know how to do in-memory | |||
|
198 | # temporary writes and read those back prior to transaction | |||
|
199 | # finalization. | |||
|
200 | for name, node in changes: | |||
|
201 | if node is None: | |||
|
202 | self.gitrepo.references.delete( | |||
|
203 | _BMS_PREFIX + pycompat.fsdecode(name) | |||
|
204 | ) | |||
|
205 | else: | |||
|
206 | self.gitrepo.references.create( | |||
|
207 | _BMS_PREFIX + pycompat.fsdecode(name), | |||
|
208 | gitutil.togitnode(node), | |||
|
209 | force=True, | |||
|
210 | ) | |||
|
211 | ||||
|
212 | ||||
|
213 | def init(orig, ui, dest=b'.', **opts): | |||
|
214 | if opts.get('git', False): | |||
|
215 | path = os.path.abspath(dest) | |||
|
216 | # TODO: walk up looking for the git repo | |||
|
217 | _setupdothg(ui, path) | |||
|
218 | return 0 | |||
|
219 | return orig(ui, dest=dest, **opts) | |||
|
220 | ||||
|
221 | ||||
|
222 | def reposetup(ui, repo): | |||
|
223 | if isinstance(repo.store, gitstore): | |||
|
224 | orig = repo.__class__ | |||
|
225 | repo.store._progress_factory = repo.ui.makeprogress | |||
|
226 | ||||
|
227 | class gitlocalrepo(orig): | |||
|
228 | def _makedirstate(self): | |||
|
229 | # TODO narrow support here | |||
|
230 | return dirstate.gitdirstate( | |||
|
231 | self.ui, self.vfs.base, self.store.git | |||
|
232 | ) | |||
|
233 | ||||
|
234 | def commit(self, *args, **kwargs): | |||
|
235 | ret = orig.commit(self, *args, **kwargs) | |||
|
236 | tid = self.store.git[gitutil.togitnode(ret)].tree.id | |||
|
237 | # DANGER! This will flush any writes staged to the | |||
|
238 | # index in Git, but we're sidestepping the index in a | |||
|
239 | # way that confuses git when we commit. Alas. | |||
|
240 | self.store.git.index.read_tree(tid) | |||
|
241 | self.store.git.index.write() | |||
|
242 | return ret | |||
|
243 | ||||
|
244 | @property | |||
|
245 | def _bookmarks(self): | |||
|
246 | return gitbmstore(self.store.git) | |||
|
247 | ||||
|
248 | repo.__class__ = gitlocalrepo | |||
|
249 | return repo | |||
|
250 | ||||
|
251 | ||||
|
252 | def extsetup(ui): | |||
|
253 | extensions.wrapfunction(localrepo, b'makestore', _makestore) | |||
|
254 | extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage) | |||
|
255 | # Inject --git flag for `hg init` | |||
|
256 | entry = extensions.wrapcommand(commands.table, b'init', init) | |||
|
257 | entry[1].extend( | |||
|
258 | [(b'', b'git', None, b'set up a git repository instead of hg')] | |||
|
259 | ) |
@@ -0,0 +1,295 b'' | |||||
|
1 | from __future__ import absolute_import | |||
|
2 | ||||
|
3 | import contextlib | |||
|
4 | import errno | |||
|
5 | import os | |||
|
6 | ||||
|
7 | import pygit2 | |||
|
8 | ||||
|
9 | from mercurial import ( | |||
|
10 | error, | |||
|
11 | extensions, | |||
|
12 | match as matchmod, | |||
|
13 | node as nodemod, | |||
|
14 | pycompat, | |||
|
15 | scmutil, | |||
|
16 | util, | |||
|
17 | ) | |||
|
18 | from mercurial.interfaces import ( | |||
|
19 | dirstate as intdirstate, | |||
|
20 | util as interfaceutil, | |||
|
21 | ) | |||
|
22 | ||||
|
23 | from . import gitutil | |||
|
24 | ||||
|
25 | ||||
|
26 | def readpatternfile(orig, filepath, warn, sourceinfo=False): | |||
|
27 | if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')): | |||
|
28 | return orig(filepath, warn, sourceinfo=False) | |||
|
29 | result = [] | |||
|
30 | warnings = [] | |||
|
31 | with open(filepath, b'rb') as fp: | |||
|
32 | for l in fp: | |||
|
33 | l = l.strip() | |||
|
34 | if not l or l.startswith(b'#'): | |||
|
35 | continue | |||
|
36 | if l.startswith(b'!'): | |||
|
37 | warnings.append(b'unsupported ignore pattern %s' % l) | |||
|
38 | continue | |||
|
39 | if l.startswith(b'/'): | |||
|
40 | result.append(b'rootglob:' + l[1:]) | |||
|
41 | else: | |||
|
42 | result.append(b'relglob:' + l) | |||
|
43 | return result, warnings | |||
|
44 | ||||
|
45 | ||||
|
46 | extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile) | |||
|
47 | ||||
|
48 | ||||
|
49 | _STATUS_MAP = { | |||
|
50 | pygit2.GIT_STATUS_CONFLICTED: b'm', | |||
|
51 | pygit2.GIT_STATUS_CURRENT: b'n', | |||
|
52 | pygit2.GIT_STATUS_IGNORED: b'?', | |||
|
53 | pygit2.GIT_STATUS_INDEX_DELETED: b'r', | |||
|
54 | pygit2.GIT_STATUS_INDEX_MODIFIED: b'n', | |||
|
55 | pygit2.GIT_STATUS_INDEX_NEW: b'a', | |||
|
56 | pygit2.GIT_STATUS_INDEX_RENAMED: b'a', | |||
|
57 | pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n', | |||
|
58 | pygit2.GIT_STATUS_WT_DELETED: b'r', | |||
|
59 | pygit2.GIT_STATUS_WT_MODIFIED: b'n', | |||
|
60 | pygit2.GIT_STATUS_WT_NEW: b'?', | |||
|
61 | pygit2.GIT_STATUS_WT_RENAMED: b'a', | |||
|
62 | pygit2.GIT_STATUS_WT_TYPECHANGE: b'n', | |||
|
63 | pygit2.GIT_STATUS_WT_UNREADABLE: b'?', | |||
|
64 | pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm', | |||
|
65 | } | |||
|
66 | ||||
|
67 | ||||
|
68 | @interfaceutil.implementer(intdirstate.idirstate) | |||
|
69 | class gitdirstate(object): | |||
|
70 | def __init__(self, ui, root, gitrepo): | |||
|
71 | self._ui = ui | |||
|
72 | self._root = os.path.dirname(root) | |||
|
73 | self.git = gitrepo | |||
|
74 | self._plchangecallbacks = {} | |||
|
75 | ||||
|
76 | def p1(self): | |||
|
77 | return self.git.head.peel().id.raw | |||
|
78 | ||||
|
79 | def p2(self): | |||
|
80 | # TODO: MERGE_HEAD? something like that, right? | |||
|
81 | return nodemod.nullid | |||
|
82 | ||||
|
83 | def setparents(self, p1, p2=nodemod.nullid): | |||
|
84 | assert p2 == nodemod.nullid, b'TODO merging support' | |||
|
85 | self.git.head.set_target(gitutil.togitnode(p1)) | |||
|
86 | ||||
|
87 | @util.propertycache | |||
|
88 | def identity(self): | |||
|
89 | return util.filestat.frompath( | |||
|
90 | os.path.join(self._root, b'.git', b'index') | |||
|
91 | ) | |||
|
92 | ||||
|
93 | def branch(self): | |||
|
94 | return b'default' | |||
|
95 | ||||
|
96 | def parents(self): | |||
|
97 | # TODO how on earth do we find p2 if a merge is in flight? | |||
|
98 | return self.p1(), nodemod.nullid | |||
|
99 | ||||
|
100 | def __iter__(self): | |||
|
101 | return (pycompat.fsencode(f.path) for f in self.git.index) | |||
|
102 | ||||
|
103 | def items(self): | |||
|
104 | for ie in self.git.index: | |||
|
105 | yield ie.path, None # value should be a dirstatetuple | |||
|
106 | ||||
|
107 | # py2,3 compat forward | |||
|
108 | iteritems = items | |||
|
109 | ||||
|
110 | def __getitem__(self, filename): | |||
|
111 | try: | |||
|
112 | gs = self.git.status_file(filename) | |||
|
113 | except KeyError: | |||
|
114 | return b'?' | |||
|
115 | return _STATUS_MAP[gs] | |||
|
116 | ||||
|
117 | def __contains__(self, filename): | |||
|
118 | try: | |||
|
119 | gs = self.git.status_file(filename) | |||
|
120 | return _STATUS_MAP[gs] != b'?' | |||
|
121 | except KeyError: | |||
|
122 | return False | |||
|
123 | ||||
|
124 | def status(self, match, subrepos, ignored, clean, unknown): | |||
|
125 | # TODO handling of clean files - can we get that from git.status()? | |||
|
126 | modified, added, removed, deleted, unknown, ignored, clean = ( | |||
|
127 | [], | |||
|
128 | [], | |||
|
129 | [], | |||
|
130 | [], | |||
|
131 | [], | |||
|
132 | [], | |||
|
133 | [], | |||
|
134 | ) | |||
|
135 | gstatus = self.git.status() | |||
|
136 | for path, status in gstatus.items(): | |||
|
137 | path = pycompat.fsencode(path) | |||
|
138 | if status == pygit2.GIT_STATUS_IGNORED: | |||
|
139 | if path.endswith(b'/'): | |||
|
140 | continue | |||
|
141 | ignored.append(path) | |||
|
142 | elif status in ( | |||
|
143 | pygit2.GIT_STATUS_WT_MODIFIED, | |||
|
144 | pygit2.GIT_STATUS_INDEX_MODIFIED, | |||
|
145 | pygit2.GIT_STATUS_WT_MODIFIED | |||
|
146 | | pygit2.GIT_STATUS_INDEX_MODIFIED, | |||
|
147 | ): | |||
|
148 | modified.append(path) | |||
|
149 | elif status == pygit2.GIT_STATUS_INDEX_NEW: | |||
|
150 | added.append(path) | |||
|
151 | elif status == pygit2.GIT_STATUS_WT_NEW: | |||
|
152 | unknown.append(path) | |||
|
153 | elif status == pygit2.GIT_STATUS_WT_DELETED: | |||
|
154 | deleted.append(path) | |||
|
155 | elif status == pygit2.GIT_STATUS_INDEX_DELETED: | |||
|
156 | removed.append(path) | |||
|
157 | else: | |||
|
158 | raise error.Abort( | |||
|
159 | b'unhandled case: status for %r is %r' % (path, status) | |||
|
160 | ) | |||
|
161 | ||||
|
162 | # TODO are we really always sure of status here? | |||
|
163 | return ( | |||
|
164 | False, | |||
|
165 | scmutil.status( | |||
|
166 | modified, added, removed, deleted, unknown, ignored, clean | |||
|
167 | ), | |||
|
168 | ) | |||
|
169 | ||||
|
170 | def flagfunc(self, buildfallback): | |||
|
171 | # TODO we can do better | |||
|
172 | return buildfallback() | |||
|
173 | ||||
|
174 | def getcwd(self): | |||
|
175 | # TODO is this a good way to do this? | |||
|
176 | return os.path.dirname( | |||
|
177 | os.path.dirname(pycompat.fsencode(self.git.path)) | |||
|
178 | ) | |||
|
179 | ||||
|
180 | def normalize(self, path): | |||
|
181 | normed = util.normcase(path) | |||
|
182 | assert normed == path, b"TODO handling of case folding: %s != %s" % ( | |||
|
183 | normed, | |||
|
184 | path, | |||
|
185 | ) | |||
|
186 | return path | |||
|
187 | ||||
|
188 | @property | |||
|
189 | def _checklink(self): | |||
|
190 | return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path))) | |||
|
191 | ||||
|
192 | def copies(self): | |||
|
193 | # TODO support copies? | |||
|
194 | return {} | |||
|
195 | ||||
|
196 | # # TODO what the heck is this | |||
|
197 | _filecache = set() | |||
|
198 | ||||
|
199 | def pendingparentchange(self): | |||
|
200 | # TODO: we need to implement the context manager bits and | |||
|
201 | # correctly stage/revert index edits. | |||
|
202 | return False | |||
|
203 | ||||
|
204 | def write(self, tr): | |||
|
205 | # TODO: call parent change callbacks | |||
|
206 | ||||
|
207 | if tr: | |||
|
208 | ||||
|
209 | def writeinner(category): | |||
|
210 | self.git.index.write() | |||
|
211 | ||||
|
212 | tr.addpending(b'gitdirstate', writeinner) | |||
|
213 | else: | |||
|
214 | self.git.index.write() | |||
|
215 | ||||
|
216 | def pathto(self, f, cwd=None): | |||
|
217 | if cwd is None: | |||
|
218 | cwd = self.getcwd() | |||
|
219 | # TODO core dirstate does something about slashes here | |||
|
220 | assert isinstance(f, bytes) | |||
|
221 | r = util.pathto(self._root, cwd, f) | |||
|
222 | return r | |||
|
223 | ||||
|
224 | def matches(self, match): | |||
|
225 | for x in self.git.index: | |||
|
226 | p = pycompat.fsencode(x.path) | |||
|
227 | if match(p): | |||
|
228 | yield p | |||
|
229 | ||||
|
230 | def normal(self, f, parentfiledata=None): | |||
|
231 | """Mark a file normal and clean.""" | |||
|
232 | # TODO: for now we just let libgit2 re-stat the file. We can | |||
|
233 | # clearly do better. | |||
|
234 | ||||
|
235 | def normallookup(self, f): | |||
|
236 | """Mark a file normal, but possibly dirty.""" | |||
|
237 | # TODO: for now we just let libgit2 re-stat the file. We can | |||
|
238 | # clearly do better. | |||
|
239 | ||||
|
240 | def walk(self, match, subrepos, unknown, ignored, full=True): | |||
|
241 | # TODO: we need to use .status() and not iterate the index, | |||
|
242 | # because the index doesn't force a re-walk and so `hg add` of | |||
|
243 | # a new file without an intervening call to status will | |||
|
244 | # silently do nothing. | |||
|
245 | r = {} | |||
|
246 | cwd = self.getcwd() | |||
|
247 | for path, status in self.git.status().items(): | |||
|
248 | if path.startswith('.hg/'): | |||
|
249 | continue | |||
|
250 | path = pycompat.fsencode(path) | |||
|
251 | if not match(path): | |||
|
252 | continue | |||
|
253 | # TODO construct the stat info from the status object? | |||
|
254 | try: | |||
|
255 | s = os.stat(os.path.join(cwd, path)) | |||
|
256 | except OSError as e: | |||
|
257 | if e.errno != errno.ENOENT: | |||
|
258 | raise | |||
|
259 | continue | |||
|
260 | r[path] = s | |||
|
261 | return r | |||
|
262 | ||||
|
263 | def savebackup(self, tr, backupname): | |||
|
264 | # TODO: figure out a strategy for saving index backups. | |||
|
265 | pass | |||
|
266 | ||||
|
267 | def restorebackup(self, tr, backupname): | |||
|
268 | # TODO: figure out a strategy for saving index backups. | |||
|
269 | pass | |||
|
270 | ||||
|
271 | def add(self, f): | |||
|
272 | self.git.index.add(pycompat.fsdecode(f)) | |||
|
273 | ||||
|
274 | def drop(self, f): | |||
|
275 | self.git.index.remove(pycompat.fsdecode(f)) | |||
|
276 | ||||
|
277 | def remove(self, f): | |||
|
278 | self.git.index.remove(pycompat.fsdecode(f)) | |||
|
279 | ||||
|
280 | def copied(self, path): | |||
|
281 | # TODO: track copies? | |||
|
282 | return None | |||
|
283 | ||||
|
284 | @contextlib.contextmanager | |||
|
285 | def parentchange(self): | |||
|
286 | # TODO: track this maybe? | |||
|
287 | yield | |||
|
288 | ||||
|
289 | def addparentchangecallback(self, category, callback): | |||
|
290 | # TODO: should this be added to the dirstate interface? | |||
|
291 | self._plchangecallbacks[category] = callback | |||
|
292 | ||||
|
293 | def clearbackup(self, tr, backupname): | |||
|
294 | # TODO | |||
|
295 | pass |
@@ -0,0 +1,463 b'' | |||||
|
1 | from __future__ import absolute_import | |||
|
2 | ||||
|
3 | import pygit2 | |||
|
4 | ||||
|
5 | from mercurial.i18n import _ | |||
|
6 | ||||
|
7 | from mercurial import ( | |||
|
8 | ancestor, | |||
|
9 | changelog as hgchangelog, | |||
|
10 | dagop, | |||
|
11 | encoding, | |||
|
12 | error, | |||
|
13 | manifest, | |||
|
14 | node as nodemod, | |||
|
15 | pycompat, | |||
|
16 | ) | |||
|
17 | from mercurial.interfaces import ( | |||
|
18 | repository, | |||
|
19 | util as interfaceutil, | |||
|
20 | ) | |||
|
21 | from mercurial.utils import stringutil | |||
|
22 | from . import ( | |||
|
23 | gitutil, | |||
|
24 | index, | |||
|
25 | manifest as gitmanifest, | |||
|
26 | ) | |||
|
27 | ||||
|
28 | ||||
|
29 | class baselog(object): # revlog.revlog): | |||
|
30 | """Common implementations between changelog and manifestlog.""" | |||
|
31 | ||||
|
32 | def __init__(self, gr, db): | |||
|
33 | self.gitrepo = gr | |||
|
34 | self._db = db | |||
|
35 | ||||
|
36 | def __len__(self): | |||
|
37 | return int( | |||
|
38 | self._db.execute('SELECT COUNT(*) FROM changelog').fetchone()[0] | |||
|
39 | ) | |||
|
40 | ||||
|
41 | def rev(self, n): | |||
|
42 | if n == nodemod.nullid: | |||
|
43 | return -1 | |||
|
44 | t = self._db.execute( | |||
|
45 | 'SELECT rev FROM changelog WHERE node = ?', (gitutil.togitnode(n),) | |||
|
46 | ).fetchone() | |||
|
47 | if t is None: | |||
|
48 | raise error.LookupError(n, b'00changelog.i', _(b'no node %d')) | |||
|
49 | return t[0] | |||
|
50 | ||||
|
51 | def node(self, r): | |||
|
52 | if r == nodemod.nullrev: | |||
|
53 | return nodemod.nullid | |||
|
54 | t = self._db.execute( | |||
|
55 | 'SELECT node FROM changelog WHERE rev = ?', (r,) | |||
|
56 | ).fetchone() | |||
|
57 | if t is None: | |||
|
58 | raise error.LookupError(r, b'00changelog.i', _(b'no node')) | |||
|
59 | return nodemod.bin(t[0]) | |||
|
60 | ||||
|
61 | def hasnode(self, n): | |||
|
62 | t = self._db.execute( | |||
|
63 | 'SELECT node FROM changelog WHERE node = ?', (n,) | |||
|
64 | ).fetchone() | |||
|
65 | return t is not None | |||
|
66 | ||||
|
67 | ||||
|
68 | class baselogindex(object): | |||
|
69 | def __init__(self, log): | |||
|
70 | self._log = log | |||
|
71 | ||||
|
72 | def has_node(self, n): | |||
|
73 | return self._log.rev(n) != -1 | |||
|
74 | ||||
|
75 | def __len__(self): | |||
|
76 | return len(self._log) | |||
|
77 | ||||
|
78 | def __getitem__(self, idx): | |||
|
79 | p1rev, p2rev = self._log.parentrevs(idx) | |||
|
80 | # TODO: it's messy that the index leaks so far out of the | |||
|
81 | # storage layer that we have to implement things like reading | |||
|
82 | # this raw tuple, which exposes revlog internals. | |||
|
83 | return ( | |||
|
84 | # Pretend offset is just the index, since we don't really care. | |||
|
85 | idx, | |||
|
86 | # Same with lengths | |||
|
87 | idx, # length | |||
|
88 | idx, # rawsize | |||
|
89 | -1, # delta base | |||
|
90 | idx, # linkrev TODO is this right? | |||
|
91 | p1rev, | |||
|
92 | p2rev, | |||
|
93 | self._log.node(idx), | |||
|
94 | ) | |||
|
95 | ||||
|
96 | ||||
|
97 | # TODO: an interface for the changelog type? | |||
|
98 | class changelog(baselog): | |||
|
99 | def __contains__(self, rev): | |||
|
100 | try: | |||
|
101 | self.node(rev) | |||
|
102 | return True | |||
|
103 | except error.LookupError: | |||
|
104 | return False | |||
|
105 | ||||
|
106 | @property | |||
|
107 | def filteredrevs(self): | |||
|
108 | # TODO: we should probably add a refs/hg/ namespace for hidden | |||
|
109 | # heads etc, but that's an idea for later. | |||
|
110 | return set() | |||
|
111 | ||||
|
112 | @property | |||
|
113 | def index(self): | |||
|
114 | return baselogindex(self) | |||
|
115 | ||||
|
116 | @property | |||
|
117 | def nodemap(self): | |||
|
118 | r = { | |||
|
119 | nodemod.bin(v[0]): v[1] | |||
|
120 | for v in self._db.execute('SELECT node, rev FROM changelog') | |||
|
121 | } | |||
|
122 | r[nodemod.nullid] = nodemod.nullrev | |||
|
123 | return r | |||
|
124 | ||||
|
125 | def tip(self): | |||
|
126 | t = self._db.execute( | |||
|
127 | 'SELECT node FROM changelog ORDER BY rev DESC LIMIT 1' | |||
|
128 | ).fetchone() | |||
|
129 | if t: | |||
|
130 | return nodemod.bin(t[0]) | |||
|
131 | return nodemod.nullid | |||
|
132 | ||||
|
133 | def revs(self, start=0, stop=None): | |||
|
134 | if stop is None: | |||
|
135 | stop = self.tip() | |||
|
136 | t = self._db.execute( | |||
|
137 | 'SELECT rev FROM changelog ' | |||
|
138 | 'WHERE rev >= ? AND rev <= ? ' | |||
|
139 | 'ORDER BY REV ASC', | |||
|
140 | (start, stop), | |||
|
141 | ) | |||
|
142 | return (int(r[0]) for r in t) | |||
|
143 | ||||
|
144 | def _partialmatch(self, id): | |||
|
145 | if nodemod.wdirhex.startswith(id): | |||
|
146 | raise error.WdirUnsupported | |||
|
147 | candidates = [ | |||
|
148 | nodemod.bin(x[0]) | |||
|
149 | for x in self._db.execute( | |||
|
150 | 'SELECT node FROM changelog WHERE node LIKE ?', (id + b'%',) | |||
|
151 | ) | |||
|
152 | ] | |||
|
153 | if nodemod.nullhex.startswith(id): | |||
|
154 | candidates.append(nodemod.nullid) | |||
|
155 | if len(candidates) > 1: | |||
|
156 | raise error.AmbiguousPrefixLookupError( | |||
|
157 | id, b'00changelog.i', _(b'ambiguous identifier') | |||
|
158 | ) | |||
|
159 | if candidates: | |||
|
160 | return candidates[0] | |||
|
161 | return None | |||
|
162 | ||||
|
163 | def flags(self, rev): | |||
|
164 | return 0 | |||
|
165 | ||||
|
166 | def shortest(self, node, minlength=1): | |||
|
167 | nodehex = nodemod.hex(node) | |||
|
168 | for attempt in pycompat.xrange(minlength, len(nodehex) + 1): | |||
|
169 | candidate = nodehex[:attempt] | |||
|
170 | matches = int( | |||
|
171 | self._db.execute( | |||
|
172 | 'SELECT COUNT(*) FROM changelog WHERE node LIKE ?', | |||
|
173 | (pycompat.sysstr(candidate + b'%'),), | |||
|
174 | ).fetchone()[0] | |||
|
175 | ) | |||
|
176 | if matches == 1: | |||
|
177 | return candidate | |||
|
178 | return nodehex | |||
|
179 | ||||
|
180 | def headrevs(self, revs=None): | |||
|
181 | realheads = [ | |||
|
182 | int(x[0]) | |||
|
183 | for x in self._db.execute( | |||
|
184 | 'SELECT rev FROM changelog ' | |||
|
185 | 'INNER JOIN heads ON changelog.node = heads.node' | |||
|
186 | ) | |||
|
187 | ] | |||
|
188 | if revs: | |||
|
189 | return sorted([r for r in revs if r in realheads]) | |||
|
190 | return sorted(realheads) | |||
|
191 | ||||
|
192 | def changelogrevision(self, nodeorrev): | |||
|
193 | # Ensure we have a node id | |||
|
194 | if isinstance(nodeorrev, int): | |||
|
195 | n = self.node(nodeorrev) | |||
|
196 | else: | |||
|
197 | n = nodeorrev | |||
|
198 | # handle looking up nullid | |||
|
199 | if n == nodemod.nullid: | |||
|
200 | return hgchangelog._changelogrevision(extra={}) | |||
|
201 | hn = gitutil.togitnode(n) | |||
|
202 | # We've got a real commit! | |||
|
203 | files = [ | |||
|
204 | r[0] | |||
|
205 | for r in self._db.execute( | |||
|
206 | 'SELECT filename FROM changedfiles ' | |||
|
207 | 'WHERE node = ? and filenode != ?', | |||
|
208 | (hn, gitutil.nullgit), | |||
|
209 | ) | |||
|
210 | ] | |||
|
211 | filesremoved = [ | |||
|
212 | r[0] | |||
|
213 | for r in self._db.execute( | |||
|
214 | 'SELECT filename FROM changedfiles ' | |||
|
215 | 'WHERE node = ? and filenode = ?', | |||
|
216 | (hn, nodemod.nullhex), | |||
|
217 | ) | |||
|
218 | ] | |||
|
219 | c = self.gitrepo[hn] | |||
|
220 | return hgchangelog._changelogrevision( | |||
|
221 | manifest=n, # pretend manifest the same as the commit node | |||
|
222 | user=b'%s <%s>' | |||
|
223 | % (c.author.name.encode('utf8'), c.author.email.encode('utf8')), | |||
|
224 | date=(c.author.time, -c.author.offset * 60), | |||
|
225 | files=files, | |||
|
226 | # TODO filesadded in the index | |||
|
227 | filesremoved=filesremoved, | |||
|
228 | description=c.message.encode('utf8'), | |||
|
229 | # TODO do we want to handle extra? how? | |||
|
230 | extra={b'branch': b'default'}, | |||
|
231 | ) | |||
|
232 | ||||
|
233 | def ancestors(self, revs, stoprev=0, inclusive=False): | |||
|
234 | revs = list(revs) | |||
|
235 | tip = self.rev(self.tip()) | |||
|
236 | for r in revs: | |||
|
237 | if r > tip: | |||
|
238 | raise IndexError(b'Invalid rev %r' % r) | |||
|
239 | return ancestor.lazyancestors( | |||
|
240 | self.parentrevs, revs, stoprev=stoprev, inclusive=inclusive | |||
|
241 | ) | |||
|
242 | ||||
|
243 | # Cleanup opportunity: this is *identical* to the revlog.py version | |||
|
244 | def descendants(self, revs): | |||
|
245 | return dagop.descendantrevs(revs, self.revs, self.parentrevs) | |||
|
246 | ||||
|
247 | def reachableroots(self, minroot, heads, roots, includepath=False): | |||
|
248 | return dagop._reachablerootspure( | |||
|
249 | self.parentrevs, minroot, roots, heads, includepath | |||
|
250 | ) | |||
|
251 | ||||
|
252 | # Cleanup opportunity: this is *identical* to the revlog.py version | |||
|
253 | def isancestor(self, a, b): | |||
|
254 | a, b = self.rev(a), self.rev(b) | |||
|
255 | return self.isancestorrev(a, b) | |||
|
256 | ||||
|
257 | # Cleanup opportunity: this is *identical* to the revlog.py version | |||
|
258 | def isancestorrev(self, a, b): | |||
|
259 | if a == nodemod.nullrev: | |||
|
260 | return True | |||
|
261 | elif a == b: | |||
|
262 | return True | |||
|
263 | elif a > b: | |||
|
264 | return False | |||
|
265 | return bool(self.reachableroots(a, [b], [a], includepath=False)) | |||
|
266 | ||||
|
267 | def parentrevs(self, rev): | |||
|
268 | n = self.node(rev) | |||
|
269 | hn = gitutil.togitnode(n) | |||
|
270 | c = self.gitrepo[hn] | |||
|
271 | p1 = p2 = nodemod.nullrev | |||
|
272 | if c.parents: | |||
|
273 | p1 = self.rev(c.parents[0].id.raw) | |||
|
274 | if len(c.parents) > 2: | |||
|
275 | raise error.Abort(b'TODO octopus merge handling') | |||
|
276 | if len(c.parents) == 2: | |||
|
277 | p2 = self.rev(c.parents[1].id.raw) | |||
|
278 | return p1, p2 | |||
|
279 | ||||
|
280 | # Private method is used at least by the tags code. | |||
|
281 | _uncheckedparentrevs = parentrevs | |||
|
282 | ||||
|
283 | def commonancestorsheads(self, a, b): | |||
|
284 | # TODO the revlog version of this has a C path, so we probably | |||
|
285 | # need to optimize this... | |||
|
286 | a, b = self.rev(a), self.rev(b) | |||
|
287 | return [ | |||
|
288 | self.node(n) | |||
|
289 | for n in ancestor.commonancestorsheads(self.parentrevs, a, b) | |||
|
290 | ] | |||
|
291 | ||||
|
292 | def branchinfo(self, rev): | |||
|
293 | """Git doesn't do named branches, so just put everything on default.""" | |||
|
294 | return b'default', False | |||
|
295 | ||||
|
296 | def delayupdate(self, tr): | |||
|
297 | # TODO: I think we can elide this because we're just dropping | |||
|
298 | # an object in the git repo? | |||
|
299 | pass | |||
|
300 | ||||
|
301 | def add( | |||
|
302 | self, | |||
|
303 | manifest, | |||
|
304 | files, | |||
|
305 | desc, | |||
|
306 | transaction, | |||
|
307 | p1, | |||
|
308 | p2, | |||
|
309 | user, | |||
|
310 | date=None, | |||
|
311 | extra=None, | |||
|
312 | p1copies=None, | |||
|
313 | p2copies=None, | |||
|
314 | filesadded=None, | |||
|
315 | filesremoved=None, | |||
|
316 | ): | |||
|
317 | parents = [] | |||
|
318 | hp1, hp2 = gitutil.togitnode(p1), gitutil.togitnode(p2) | |||
|
319 | if p1 != nodemod.nullid: | |||
|
320 | parents.append(hp1) | |||
|
321 | if p2 and p2 != nodemod.nullid: | |||
|
322 | parents.append(hp2) | |||
|
323 | assert date is not None | |||
|
324 | timestamp, tz = date | |||
|
325 | sig = pygit2.Signature( | |||
|
326 | encoding.unifromlocal(stringutil.person(user)), | |||
|
327 | encoding.unifromlocal(stringutil.email(user)), | |||
|
328 | timestamp, | |||
|
329 | -(tz // 60), | |||
|
330 | ) | |||
|
331 | oid = self.gitrepo.create_commit( | |||
|
332 | None, sig, sig, desc, gitutil.togitnode(manifest), parents | |||
|
333 | ) | |||
|
334 | # Set up an internal reference to force the commit into the | |||
|
335 | # changelog. Hypothetically, we could even use this refs/hg/ | |||
|
336 | # namespace to allow for anonymous heads on git repos, which | |||
|
337 | # would be neat. | |||
|
338 | self.gitrepo.references.create( | |||
|
339 | 'refs/hg/internal/latest-commit', oid, force=True | |||
|
340 | ) | |||
|
341 | # Reindex now to pick up changes. We omit the progress | |||
|
342 | # callback because this will be very quick. | |||
|
343 | index._index_repo(self.gitrepo, self._db) | |||
|
344 | return oid.raw | |||
|
345 | ||||
|
346 | ||||
|
347 | class manifestlog(baselog): | |||
|
348 | def __getitem__(self, node): | |||
|
349 | return self.get(b'', node) | |||
|
350 | ||||
|
351 | def get(self, relpath, node): | |||
|
352 | if node == nodemod.nullid: | |||
|
353 | # TODO: this should almost certainly be a memgittreemanifestctx | |||
|
354 | return manifest.memtreemanifestctx(self, relpath) | |||
|
355 | commit = self.gitrepo[gitutil.togitnode(node)] | |||
|
356 | t = commit.tree | |||
|
357 | if relpath: | |||
|
358 | parts = relpath.split(b'/') | |||
|
359 | for p in parts: | |||
|
360 | te = t[p] | |||
|
361 | t = self.gitrepo[te.id] | |||
|
362 | return gitmanifest.gittreemanifestctx(self.gitrepo, t) | |||
|
363 | ||||
|
364 | ||||
|
365 | @interfaceutil.implementer(repository.ifilestorage) | |||
|
366 | class filelog(baselog): | |||
|
367 | def __init__(self, gr, db, path): | |||
|
368 | super(filelog, self).__init__(gr, db) | |||
|
369 | assert isinstance(path, bytes) | |||
|
370 | self.path = path | |||
|
371 | ||||
|
372 | def read(self, node): | |||
|
373 | if node == nodemod.nullid: | |||
|
374 | return b'' | |||
|
375 | return self.gitrepo[gitutil.togitnode(node)].data | |||
|
376 | ||||
|
377 | def lookup(self, node): | |||
|
378 | if len(node) not in (20, 40): | |||
|
379 | node = int(node) | |||
|
380 | if isinstance(node, int): | |||
|
381 | assert False, b'todo revnums for nodes' | |||
|
382 | if len(node) == 40: | |||
|
383 | node = nodemod.bin(node) | |||
|
384 | hnode = gitutil.togitnode(node) | |||
|
385 | if hnode in self.gitrepo: | |||
|
386 | return node | |||
|
387 | raise error.LookupError(self.path, node, _(b'no match found')) | |||
|
388 | ||||
|
389 | def cmp(self, node, text): | |||
|
390 | """Returns True if text is different than content at `node`.""" | |||
|
391 | return self.read(node) != text | |||
|
392 | ||||
|
393 | def add(self, text, meta, transaction, link, p1=None, p2=None): | |||
|
394 | assert not meta # Should we even try to handle this? | |||
|
395 | return self.gitrepo.create_blob(text).raw | |||
|
396 | ||||
|
397 | def __iter__(self): | |||
|
398 | for clrev in self._db.execute( | |||
|
399 | ''' | |||
|
400 | SELECT rev FROM changelog | |||
|
401 | INNER JOIN changedfiles ON changelog.node = changedfiles.node | |||
|
402 | WHERE changedfiles.filename = ? AND changedfiles.filenode != ? | |||
|
403 | ''', | |||
|
404 | (pycompat.fsdecode(self.path), gitutil.nullgit), | |||
|
405 | ): | |||
|
406 | yield clrev[0] | |||
|
407 | ||||
|
408 | def linkrev(self, fr): | |||
|
409 | return fr | |||
|
410 | ||||
|
411 | def rev(self, node): | |||
|
412 | row = self._db.execute( | |||
|
413 | ''' | |||
|
414 | SELECT rev FROM changelog | |||
|
415 | INNER JOIN changedfiles ON changelog.node = changedfiles.node | |||
|
416 | WHERE changedfiles.filename = ? AND changedfiles.filenode = ?''', | |||
|
417 | (pycompat.fsdecode(self.path), gitutil.togitnode(node)), | |||
|
418 | ).fetchone() | |||
|
419 | if row is None: | |||
|
420 | raise error.LookupError(self.path, node, _(b'no such node')) | |||
|
421 | return int(row[0]) | |||
|
422 | ||||
|
423 | def node(self, rev): | |||
|
424 | maybe = self._db.execute( | |||
|
425 | '''SELECT filenode FROM changedfiles | |||
|
426 | INNER JOIN changelog ON changelog.node = changedfiles.node | |||
|
427 | WHERE changelog.rev = ? AND filename = ? | |||
|
428 | ''', | |||
|
429 | (rev, pycompat.fsdecode(self.path)), | |||
|
430 | ).fetchone() | |||
|
431 | if maybe is None: | |||
|
432 | raise IndexError('gitlog %r out of range %d' % (self.path, rev)) | |||
|
433 | return nodemod.bin(maybe[0]) | |||
|
434 | ||||
|
435 | def parents(self, node): | |||
|
436 | gn = gitutil.togitnode(node) | |||
|
437 | gp = pycompat.fsdecode(self.path) | |||
|
438 | ps = [] | |||
|
439 | for p in self._db.execute( | |||
|
440 | '''SELECT p1filenode, p2filenode FROM changedfiles | |||
|
441 | WHERE filenode = ? AND filename = ? | |||
|
442 | ''', | |||
|
443 | (gn, gp), | |||
|
444 | ).fetchone(): | |||
|
445 | if p is None: | |||
|
446 | commit = self._db.execute( | |||
|
447 | "SELECT node FROM changedfiles " | |||
|
448 | "WHERE filenode = ? AND filename = ?", | |||
|
449 | (gn, gp), | |||
|
450 | ).fetchone()[0] | |||
|
451 | # This filelog is missing some data. Build the | |||
|
452 | # filelog, then recurse (which will always find data). | |||
|
453 | if pycompat.ispy3: | |||
|
454 | commit = commit.decode('ascii') | |||
|
455 | index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn) | |||
|
456 | return self.parents(node) | |||
|
457 | else: | |||
|
458 | ps.append(nodemod.bin(p)) | |||
|
459 | return ps | |||
|
460 | ||||
|
461 | def renamed(self, node): | |||
|
462 | # TODO: renames/copies | |||
|
463 | return False |
@@ -0,0 +1,26 b'' | |||||
|
1 | """utilities to assist in working with pygit2""" | |||
|
2 | from __future__ import absolute_import | |||
|
3 | ||||
|
4 | from mercurial.node import bin, hex, nullid | |||
|
5 | ||||
|
6 | from mercurial import pycompat | |||
|
7 | ||||
|
8 | ||||
|
9 | def togitnode(n): | |||
|
10 | """Wrapper to convert a Mercurial binary node to a unicode hexlified node. | |||
|
11 | ||||
|
12 | pygit2 and sqlite both need nodes as strings, not bytes. | |||
|
13 | """ | |||
|
14 | assert len(n) == 20 | |||
|
15 | return pycompat.sysstr(hex(n)) | |||
|
16 | ||||
|
17 | ||||
|
18 | def fromgitnode(n): | |||
|
19 | """Opposite of togitnode.""" | |||
|
20 | assert len(n) == 40 | |||
|
21 | if pycompat.ispy3: | |||
|
22 | return bin(n.encode('ascii')) | |||
|
23 | return bin(n) | |||
|
24 | ||||
|
25 | ||||
|
26 | nullgit = togitnode(nullid) |
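For reference, a quick illustration of how these helpers round-trip between Mercurial's binary nodes and the str form that pygit2 and sqlite expect (shown with the null node only):

    from mercurial.node import nullid

    assert togitnode(nullid) == '0' * 40             # str, not bytes, on Python 3
    assert fromgitnode(togitnode(nullid)) == nullid  # back to the 20-byte node
    assert nullgit == '0' * 40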
@@ -0,0 +1,346 b'' | |||||
|
1 | from __future__ import absolute_import | |||
|
2 | ||||
|
3 | import collections | |||
|
4 | import os | |||
|
5 | import sqlite3 | |||
|
6 | ||||
|
7 | import pygit2 | |||
|
8 | ||||
|
9 | from mercurial.i18n import _ | |||
|
10 | ||||
|
11 | from mercurial import ( | |||
|
12 | encoding, | |||
|
13 | error, | |||
|
14 | node as nodemod, | |||
|
15 | pycompat, | |||
|
16 | ) | |||
|
17 | ||||
|
18 | from . import gitutil | |||
|
19 | ||||
|
20 | ||||
|
21 | _CURRENT_SCHEMA_VERSION = 1 | |||
|
22 | _SCHEMA = ( | |||
|
23 | """ | |||
|
24 | CREATE TABLE refs ( | |||
|
25 | -- node and name are unique together. There may be more than one name for | |||
|
26 | -- a given node, and there may be no name at all for a given node (in the | |||
|
27 | -- case of an anonymous hg head). | |||
|
28 | node TEXT NOT NULL, | |||
|
29 | name TEXT | |||
|
30 | ); | |||
|
31 | ||||
|
32 | -- The "possible heads" of the repository, which we use to figure out | |||
|
33 | -- if we need to re-walk the changelog. | |||
|
34 | CREATE TABLE possible_heads ( | |||
|
35 | node TEXT NOT NULL | |||
|
36 | ); | |||
|
37 | ||||
|
38 | -- The topological heads of the changelog, which hg depends on. | |||
|
39 | CREATE TABLE heads ( | |||
|
40 | node TEXT NOT NULL | |||
|
41 | ); | |||
|
42 | ||||
|
43 | -- A total ordering of the changelog | |||
|
44 | CREATE TABLE changelog ( | |||
|
45 | rev INTEGER NOT NULL PRIMARY KEY, | |||
|
46 | node TEXT NOT NULL, | |||
|
47 | p1 TEXT, | |||
|
48 | p2 TEXT | |||
|
49 | ); | |||
|
50 | ||||
|
51 | CREATE UNIQUE INDEX changelog_node_idx ON changelog(node); | |||
|
52 | CREATE UNIQUE INDEX changelog_node_rev_idx ON changelog(rev, node); | |||
|
53 | ||||
|
54 | -- Changed files for each commit, which lets us dynamically build | |||
|
55 | -- filelogs. | |||
|
56 | CREATE TABLE changedfiles ( | |||
|
57 | node TEXT NOT NULL, | |||
|
58 | filename TEXT NOT NULL, | |||
|
59 | -- 40 zeroes for deletions | |||
|
60 | filenode TEXT NOT NULL, | |||
|
61 | -- to handle filelog parentage: | |||
|
62 | p1node TEXT, | |||
|
63 | p1filenode TEXT, | |||
|
64 | p2node TEXT, | |||
|
65 | p2filenode TEXT | |||
|
66 | ); | |||
|
67 | ||||
|
68 | CREATE INDEX changedfiles_nodes_idx | |||
|
69 | ON changedfiles(node); | |||
|
70 | ||||
|
71 | PRAGMA user_version=%d | |||
|
72 | """ | |||
|
73 | % _CURRENT_SCHEMA_VERSION | |||
|
74 | ) | |||
|
75 | ||||
|
76 | ||||
|
77 | def _createdb(path): | |||
|
78 | # print('open db', path) | |||
|
79 | # import traceback | |||
|
80 | # traceback.print_stack() | |||
|
81 | db = sqlite3.connect(encoding.strfromlocal(path)) | |||
|
82 | db.text_factory = bytes | |||
|
83 | ||||
|
84 | res = db.execute('PRAGMA user_version').fetchone()[0] | |||
|
85 | ||||
|
86 | # New database. | |||
|
87 | if res == 0: | |||
|
88 | for statement in _SCHEMA.split(';'): | |||
|
89 | db.execute(statement.strip()) | |||
|
90 | ||||
|
91 | db.commit() | |||
|
92 | ||||
|
93 | elif res == _CURRENT_SCHEMA_VERSION: | |||
|
94 | pass | |||
|
95 | ||||
|
96 | else: | |||
|
97 | raise error.Abort(_(b'sqlite database has unrecognized version')) | |||
|
98 | ||||
|
99 | db.execute('PRAGMA journal_mode=WAL') | |||
|
100 | ||||
|
101 | return db | |||
|
102 | ||||
|
103 | ||||
|
104 | _OUR_ORDER = ( | |||
|
105 | pygit2.GIT_SORT_TOPOLOGICAL | pygit2.GIT_SORT_TIME | pygit2.GIT_SORT_REVERSE | |||
|
106 | ) | |||
|
107 | ||||
|
108 | _DIFF_FLAGS = 1 << 21 # GIT_DIFF_FORCE_BINARY, which isn't exposed by pygit2 | |||
|
109 | ||||
|
110 | ||||
|
111 | def _find_nearest_ancestor_introducing_node( | |||
|
112 | db, gitrepo, file_path, walk_start, filenode | |||
|
113 | ): | |||
|
114 | """Find the nearest ancestor that introduces a file node. | |||
|
115 | ||||
|
116 | Args: | |||
|
117 | db: a handle to our sqlite database. | |||
|
118 | gitrepo: A pygit2.Repository instance. | |||
|
119 | file_path: the path of a file in the repo | |||
|
120 | walk_start: a pygit2.Oid that is a commit where we should start walking | |||
|
121 | for our nearest ancestor. | |||
|
122 | ||||
|
123 | Returns: | |||
|
124 | A hexlified SHA that is the commit ID of the next-nearest parent. | |||
|
125 | """ | |||
|
126 | assert isinstance(file_path, str), 'file_path must be str, got %r' % type( | |||
|
127 | file_path | |||
|
128 | ) | |||
|
129 | assert isinstance(filenode, str), 'filenode must be str, got %r' % type( | |||
|
130 | filenode | |||
|
131 | ) | |||
|
132 | parent_options = { | |||
|
133 | row[0].decode('ascii') | |||
|
134 | for row in db.execute( | |||
|
135 | 'SELECT node FROM changedfiles ' | |||
|
136 | 'WHERE filename = ? AND filenode = ?', | |||
|
137 | (file_path, filenode), | |||
|
138 | ) | |||
|
139 | } | |||
|
140 | inner_walker = gitrepo.walk(walk_start, _OUR_ORDER) | |||
|
141 | for w in inner_walker: | |||
|
142 | if w.id.hex in parent_options: | |||
|
143 | return w.id.hex | |||
|
144 | raise error.ProgrammingError( | |||
|
145 | 'Unable to find introducing commit for %s node %s from %s' | |||
|
146 | % (file_path, filenode, walk_start) | |||
|
147 | ) | |||
|
148 | ||||
|
149 | ||||
|
150 | def fill_in_filelog(gitrepo, db, startcommit, path, startfilenode): | |||
|
151 | """Given a starting commit and path, fill in a filelog's parent pointers. | |||
|
152 | ||||
|
153 | Args: | |||
|
154 | gitrepo: a pygit2.Repository | |||
|
155 | db: a handle to our sqlite database | |||
|
156 | startcommit: a hexlified node id for the commit to start at | |||
|
157 | path: the path of the file whose parent pointers we should fill in. | |||
|
158 | filenode: the hexlified node id of the file at startcommit | |||
|
159 | ||||
|
160 | TODO: make filenode optional | |||
|
161 | """ | |||
|
162 | assert isinstance( | |||
|
163 | startcommit, str | |||
|
164 | ), 'startcommit must be str, got %r' % type(startcommit) | |||
|
165 | assert isinstance( | |||
|
166 | startfilenode, str | |||
|
167 | ), 'startfilenode must be str, got %r' % type(startfilenode) | |||
|
168 | visit = collections.deque([(startcommit, startfilenode)]) | |||
|
169 | while visit: | |||
|
170 | cnode, filenode = visit.popleft() | |||
|
171 | commit = gitrepo[cnode] | |||
|
172 | parents = [] | |||
|
173 | for parent in commit.parents: | |||
|
174 | t = parent.tree | |||
|
175 | for comp in path.split('/'): | |||
|
176 | try: | |||
|
177 | t = gitrepo[t[comp].id] | |||
|
178 | except KeyError: | |||
|
179 | break | |||
|
180 | else: | |||
|
181 | introducer = _find_nearest_ancestor_introducing_node( | |||
|
182 | db, gitrepo, path, parent.id, t.id.hex | |||
|
183 | ) | |||
|
184 | parents.append((introducer, t.id.hex)) | |||
|
185 | p1node = p1fnode = p2node = p2fnode = gitutil.nullgit | |||
|
186 | for par, parfnode in parents: | |||
|
187 | found = int( | |||
|
188 | db.execute( | |||
|
189 | 'SELECT COUNT(*) FROM changedfiles WHERE ' | |||
|
190 | 'node = ? AND filename = ? AND filenode = ? AND ' | |||
|
191 | 'p1node NOT NULL', | |||
|
192 | (par, path, parfnode), | |||
|
193 | ).fetchone()[0] | |||
|
194 | ) | |||
|
195 | if found == 0: | |||
|
196 | assert par is not None | |||
|
197 | visit.append((par, parfnode)) | |||
|
198 | if parents: | |||
|
199 | p1node, p1fnode = parents[0] | |||
|
200 | if len(parents) == 2: | |||
|
201 | p2node, p2fnode = parents[1] | |||
|
202 | if len(parents) > 2: | |||
|
203 | raise error.ProgrammingError( | |||
|
204 | b"git support can't handle octopus merges" | |||
|
205 | ) | |||
|
206 | db.execute( | |||
|
207 | 'UPDATE changedfiles SET ' | |||
|
208 | 'p1node = ?, p1filenode = ?, p2node = ?, p2filenode = ? ' | |||
|
209 | 'WHERE node = ? AND filename = ? AND filenode = ?', | |||
|
210 | (p1node, p1fnode, p2node, p2fnode, commit.id.hex, path, filenode), | |||
|
211 | ) | |||
|
212 | db.commit() | |||
|
213 | ||||
|
214 | ||||
|
215 | def _index_repo(gitrepo, db, progress_factory=lambda *args, **kwargs: None): | |||
|
216 | # Identify all references so we can tell the walker to visit all of them. | |||
|
217 | all_refs = gitrepo.listall_references() | |||
|
218 | possible_heads = set() | |||
|
219 | prog = progress_factory(b'refs') | |||
|
220 | for pos, ref in enumerate(all_refs): | |||
|
221 | if prog is not None: | |||
|
222 | prog.update(pos) | |||
|
223 | if not ( | |||
|
224 | ref.startswith('refs/heads/') # local branch | |||
|
225 | or ref.startswith('refs/tags/') # tag | |||
|
226 | or ref.startswith('refs/remotes/') # remote branch | |||
|
227 | or ref.startswith('refs/hg/') # from this extension | |||
|
228 | ): | |||
|
229 | continue | |||
|
230 | try: | |||
|
231 | start = gitrepo.lookup_reference(ref).peel(pygit2.GIT_OBJ_COMMIT) | |||
|
232 | except ValueError: | |||
|
233 | # No commit to be found, so we don't care for hg's purposes. | |||
|
234 | continue | |||
|
235 | possible_heads.add(start.id) | |||
|
236 | # Optimization: if the list of heads hasn't changed, don't | |||
|
237 | # reindex the changelog. This doesn't matter on small | |||
|
238 | # repositories, but on even moderately deep histories (eg cpython) | |||
|
239 | # this is a very important performance win. | |||
|
240 | # | |||
|
241 | # TODO: we should figure out how to incrementally index history | |||
|
242 | # (preferably by detecting rewinds!) so that we don't have to do a | |||
|
243 | # full changelog walk every time a new commit is created. | |||
|
244 | cache_heads = {x[0].decode('ascii') for x in db.execute('SELECT node FROM possible_heads')} | |||
|
245 | walker = None | |||
|
246 | cur_cache_heads = {h.hex for h in possible_heads} | |||
|
247 | if cur_cache_heads == cache_heads: | |||
|
248 | return | |||
|
249 | for start in possible_heads: | |||
|
250 | if walker is None: | |||
|
251 | walker = gitrepo.walk(start, _OUR_ORDER) | |||
|
252 | else: | |||
|
253 | walker.push(start) | |||
|
254 | ||||
|
255 | # Empty out the existing changelog. Even for large-ish histories | |||
|
256 | # we can do the top-level "walk all the commits" dance very | |||
|
257 | # quickly as long as we don't need to figure out the changed files | |||
|
258 | # list. | |||
|
259 | db.execute('DELETE FROM changelog') | |||
|
260 | if prog is not None: | |||
|
261 | prog.complete() | |||
|
262 | prog = progress_factory(b'commits') | |||
|
263 | # This walker is sure to visit all the revisions in history, but | |||
|
264 | # only once. | |||
|
265 | for pos, commit in enumerate(walker): | |||
|
266 | if prog is not None: | |||
|
267 | prog.update(pos) | |||
|
268 | p1 = p2 = nodemod.nullhex | |||
|
269 | if len(commit.parents) > 2: | |||
|
270 | raise error.ProgrammingError( | |||
|
271 | ( | |||
|
272 | b"git support can't handle octopus merges, " | |||
|
273 | b"found a commit with %d parents :(" | |||
|
274 | ) | |||
|
275 | % len(commit.parents) | |||
|
276 | ) | |||
|
277 | if commit.parents: | |||
|
278 | p1 = commit.parents[0].id.hex | |||
|
279 | if len(commit.parents) == 2: | |||
|
280 | p2 = commit.parents[1].id.hex | |||
|
281 | db.execute( | |||
|
282 | 'INSERT INTO changelog (rev, node, p1, p2) VALUES(?, ?, ?, ?)', | |||
|
283 | (pos, commit.id.hex, p1, p2), | |||
|
284 | ) | |||
|
285 | ||||
|
286 | num_changedfiles = db.execute( | |||
|
287 | "SELECT COUNT(*) from changedfiles WHERE node = ?", | |||
|
288 | (commit.id.hex,), | |||
|
289 | ).fetchone()[0] | |||
|
290 | if not num_changedfiles: | |||
|
291 | files = {} | |||
|
292 | # I *think* we only need to check p1 for changed files | |||
|
293 | # (and therefore linkrevs), because any node that would | |||
|
294 | # actually have this commit as a linkrev would be | |||
|
295 | # completely new in this rev. | |||
|
296 | p1 = commit.parents[0].id.hex if commit.parents else None | |||
|
297 | if p1 is not None: | |||
|
298 | patchgen = gitrepo.diff(p1, commit.id.hex, flags=_DIFF_FLAGS) | |||
|
299 | else: | |||
|
300 | patchgen = commit.tree.diff_to_tree( | |||
|
301 | swap=True, flags=_DIFF_FLAGS | |||
|
302 | ) | |||
|
303 | new_files = (p.delta.new_file for p in patchgen) | |||
|
304 | files = { | |||
|
305 | nf.path: nf.id.hex | |||
|
306 | for nf in new_files | |||
|
307 | if nf.id.raw != nodemod.nullid | |||
|
308 | } | |||
|
309 | for p, n in files.items(): | |||
|
310 | # We intentionally set NULLs for any file parentage | |||
|
311 | # information so it'll get demand-computed later. We | |||
|
312 | # used to do it right here, and it was _very_ slow. | |||
|
313 | db.execute( | |||
|
314 | 'INSERT INTO changedfiles (' | |||
|
315 | 'node, filename, filenode, p1node, p1filenode, p2node, ' | |||
|
316 | 'p2filenode) VALUES(?, ?, ?, ?, ?, ?, ?)', | |||
|
317 | (commit.id.hex, p, n, None, None, None, None), | |||
|
318 | ) | |||
|
319 | db.execute('DELETE FROM heads') | |||
|
320 | db.execute('DELETE FROM possible_heads') | |||
|
321 | for hid in possible_heads: | |||
|
322 | h = hid.hex | |||
|
323 | db.execute('INSERT INTO possible_heads (node) VALUES(?)', (h,)) | |||
|
324 | haschild = db.execute( | |||
|
325 | 'SELECT COUNT(*) FROM changelog WHERE p1 = ? OR p2 = ?', (h, h) | |||
|
326 | ).fetchone()[0] | |||
|
327 | if not haschild: | |||
|
328 | db.execute('INSERT INTO heads (node) VALUES(?)', (h,)) | |||
|
329 | ||||
|
330 | db.commit() | |||
|
331 | if prog is not None: | |||
|
332 | prog.complete() | |||
|
333 | ||||
|
334 | ||||
|
335 | def get_index(gitrepo, progress_factory=lambda *args, **kwargs: None): | |||
|
336 | cachepath = os.path.join( | |||
|
337 | pycompat.fsencode(gitrepo.path), b'..', b'.hg', b'cache' | |||
|
338 | ) | |||
|
339 | if not os.path.exists(cachepath): | |||
|
340 | os.makedirs(cachepath) | |||
|
341 | dbpath = os.path.join(cachepath, b'git-commits.sqlite') | |||
|
342 | db = _createdb(dbpath) | |||
|
343 | # TODO check against gitrepo heads before doing a full index | |||
|
344 | # TODO thread a ui.progress call into this layer | |||
|
345 | _index_repo(gitrepo, db, progress_factory) | |||
|
346 | return db |
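To make the schema above concrete, here is a hedged example of inspecting the resulting cache database by hand; the path and queries follow _SCHEMA and get_index(), and it assumes it is run from the repository root:

    import sqlite3

    db = sqlite3.connect('.hg/cache/git-commits.sqlite')
    db.text_factory = bytes  # match what _createdb() does

    # Tip of the total ordering built by _index_repo().
    tip_rev, tip_node = db.execute(
        'SELECT rev, node FROM changelog ORDER BY rev DESC LIMIT 1'
    ).fetchone()

    # Topological heads, as used by changelog.headrevs().
    heads = [row[0] for row in db.execute('SELECT node FROM heads')]

    # Files touched by the tip commit (filenode is 40 zeroes for deletions).
    files = db.execute(
        'SELECT filename, filenode FROM changedfiles WHERE node = ?',
        (tip_node.decode('ascii'),),
    ).fetchall()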
@@ -0,0 +1,293 b'' | |||||
|
1 | from __future__ import absolute_import | |||
|
2 | ||||
|
3 | import pygit2 | |||
|
4 | ||||
|
5 | from mercurial import ( | |||
|
6 | match as matchmod, | |||
|
7 | pathutil, | |||
|
8 | pycompat, | |||
|
9 | util, | |||
|
10 | ) | |||
|
11 | from mercurial.interfaces import ( | |||
|
12 | repository, | |||
|
13 | util as interfaceutil, | |||
|
14 | ) | |||
|
15 | from . import gitutil | |||
|
16 | ||||
|
17 | ||||
|
18 | @interfaceutil.implementer(repository.imanifestdict) | |||
|
19 | class gittreemanifest(object): | |||
|
20 | """Expose git trees (and optionally a builder's overlay) as a manifestdict. | |||
|
21 | ||||
|
22 | Very similar to mercurial.manifest.treemanifest. | |||
|
23 | """ | |||
|
24 | ||||
|
25 | def __init__(self, git_repo, root_tree, pending_changes): | |||
|
26 | """Initializer. | |||
|
27 | ||||
|
28 | Args: | |||
|
29 | git_repo: The git_repo we're walking (required to look up child | |||
|
30 | trees). | |||
|
31 | root_tree: The root Git tree object for this manifest. | |||
|
32 | pending_changes: A dict in which pending changes will be | |||
|
33 | tracked. The enclosing memgittreemanifestctx will use this to | |||
|
34 | construct any required Tree objects in Git during its | |||
|
35 | `write()` method. | |||
|
36 | """ | |||
|
37 | self._git_repo = git_repo | |||
|
38 | self._tree = root_tree | |||
|
39 | if pending_changes is None: | |||
|
40 | pending_changes = {} | |||
|
41 | # dict of path: Optional[Tuple(node, flags)] | |||
|
42 | self._pending_changes = pending_changes | |||
|
43 | ||||
|
44 | def _resolve_entry(self, path): | |||
|
45 | """Given a path, load its node and flags, or raise KeyError if missing. | |||
|
46 | ||||
|
47 | This takes into account any pending writes in the builder. | |||
|
48 | """ | |||
|
49 | upath = pycompat.fsdecode(path) | |||
|
50 | ent = None | |||
|
51 | if path in self._pending_changes: | |||
|
52 | val = self._pending_changes[path] | |||
|
53 | if val is None: | |||
|
54 | raise KeyError | |||
|
55 | return val | |||
|
56 | t = self._tree | |||
|
57 | comps = upath.split('/') | |||
|
58 | for comp in comps[:-1]: | |||
|
59 | te = t[comp] | |||
|
60 | t = self._git_repo[te.id] | |||
|
61 | ent = t[comps[-1]] | |||
|
62 | if ent.filemode == pygit2.GIT_FILEMODE_BLOB: | |||
|
63 | flags = b'' | |||
|
64 | elif ent.filemode == pygit2.GIT_FILEMODE_BLOB_EXECUTABLE: | |||
|
65 | flags = b'x' | |||
|
66 | elif ent.filemode == pygit2.GIT_FILEMODE_LINK: | |||
|
67 | flags = b'l' | |||
|
68 | else: | |||
|
69 | raise ValueError('unsupported mode %s' % oct(ent.filemode)) | |||
|
70 | return ent.id.raw, flags | |||
|
71 | ||||
|
72 | def __getitem__(self, path): | |||
|
73 | return self._resolve_entry(path)[0] | |||
|
74 | ||||
|
75 | def find(self, path): | |||
|
76 | return self._resolve_entry(path) | |||
|
77 | ||||
|
78 | def __len__(self): | |||
|
79 | return len(list(self.walk(matchmod.always()))) | |||
|
80 | ||||
|
81 | def __nonzero__(self): | |||
|
82 | try: | |||
|
83 | next(iter(self)) | |||
|
84 | return True | |||
|
85 | except StopIteration: | |||
|
86 | return False | |||
|
87 | ||||
|
88 | __bool__ = __nonzero__ | |||
|
89 | ||||
|
90 | def __contains__(self, path): | |||
|
91 | try: | |||
|
92 | self._resolve_entry(path) | |||
|
93 | return True | |||
|
94 | except KeyError: | |||
|
95 | return False | |||
|
96 | ||||
|
97 | def iterkeys(self): | |||
|
98 | return self.walk(matchmod.always()) | |||
|
99 | ||||
|
100 | def keys(self): | |||
|
101 | return list(self.iterkeys()) | |||
|
102 | ||||
|
103 | def __iter__(self): | |||
|
104 | return self.iterkeys() | |||
|
105 | ||||
|
106 | def __setitem__(self, path, node): | |||
|
107 | self._pending_changes[path] = node, self.flags(path) | |||
|
108 | ||||
|
109 | def __delitem__(self, path): | |||
|
110 | # TODO: should probably raise KeyError for already-deleted files? | |||
|
111 | self._pending_changes[path] = None | |||
|
112 | ||||
|
113 | def filesnotin(self, other, match=None): | |||
|
114 | if match is not None: | |||
|
115 | match = matchmod.badmatch(match, lambda path, msg: None) | |||
|
116 | sm2 = set(other.walk(match)) | |||
|
117 | return {f for f in self.walk(match) if f not in sm2} | |||
|
118 | return {f for f in self if f not in other} | |||
|
119 | ||||
|
120 | @util.propertycache | |||
|
121 | def _dirs(self): | |||
|
122 | return pathutil.dirs(self) | |||
|
123 | ||||
|
124 | def hasdir(self, dir): | |||
|
125 | return dir in self._dirs | |||
|
126 | ||||
|
127 | def diff(self, other, match=None, clean=False): | |||
|
128 | # TODO | |||
|
129 | assert False | |||
|
130 | ||||
|
131 | def setflag(self, path, flag): | |||
|
132 | node, unused_flag = self._resolve_entry(path) | |||
|
133 | self._pending_changes[path] = node, flag | |||
|
134 | ||||
|
135 | def get(self, path, default=None): | |||
|
136 | try: | |||
|
137 | return self._resolve_entry(path)[0] | |||
|
138 | except KeyError: | |||
|
139 | return default | |||
|
140 | ||||
|
141 | def flags(self, path): | |||
|
142 | try: | |||
|
143 | return self._resolve_entry(path)[1] | |||
|
144 | except KeyError: | |||
|
145 | return b'' | |||
|
146 | ||||
|
147 | def copy(self): | |||
|
148 | pass | |||
|
149 | ||||
|
150 | def items(self): | |||
|
151 | for f in self: | |||
|
152 | # TODO: build a proper iterator version of this | |||
|
153 | yield f, self[f] | |||
|
154 | ||||
|
155 | def iteritems(self): | |||
|
156 | return self.items() | |||
|
157 | ||||
|
158 | def iterentries(self): | |||
|
159 | for f in self: | |||
|
160 | # TODO: build a proper iterator version of this | |||
|
161 | yield (f,) + self._resolve_entry(f) | |||
|
162 | ||||
|
163 | def text(self): | |||
|
164 | assert False # TODO can this method move out of the manifest iface? | |||
|
165 | ||||
|
166 | def _walkonetree(self, tree, match, subdir): | |||
|
167 | for te in tree: | |||
|
168 | # TODO: can we prune dir walks with the matcher? | |||
|
169 | realname = subdir + pycompat.fsencode(te.name) | |||
|
170 | if te.type == r'tree': | |||
|
171 | for inner in self._walkonetree( | |||
|
172 | self._git_repo[te.id], match, realname + b'/' | |||
|
173 | ): | |||
|
174 | yield inner | |||
|
175 | if not match(realname): | |||
|
176 | continue | |||
|
177 | yield pycompat.fsencode(realname) | |||
|
178 | ||||
|
179 | def walk(self, match): | |||
|
180 | # TODO: this is a very lazy way to merge in the pending | |||
|
181 | # changes. There is absolutely room for optimization here by | |||
|
182 | # being clever about walking over the sets... | |||
|
183 | baseline = set(self._walkonetree(self._tree, match, b'')) | |||
|
184 | deleted = {p for p, v in self._pending_changes.items() if v is None} | |||
|
185 | pend = {p for p in self._pending_changes if match(p)} | |||
|
186 | return iter(sorted((baseline | pend) - deleted)) | |||
|
187 | ||||
|
188 | ||||
|
189 | @interfaceutil.implementer(repository.imanifestrevisionstored) | |||
|
190 | class gittreemanifestctx(object): | |||
|
191 | def __init__(self, repo, gittree): | |||
|
192 | self._repo = repo | |||
|
193 | self._tree = gittree | |||
|
194 | ||||
|
195 | def read(self): | |||
|
196 | return gittreemanifest(self._repo, self._tree, None) | |||
|
197 | ||||
|
198 | def copy(self): | |||
|
199 | # NB: it's important that we return a memgittreemanifestctx | |||
|
200 | # because the caller expects a mutable manifest. | |||
|
201 | return memgittreemanifestctx(self._repo, self._tree) | |||
|
202 | ||||
|
203 | def find(self, path): | |||
|
204 | return self.read()[path] | |||
|
205 | ||||
|
206 | ||||
|
207 | @interfaceutil.implementer(repository.imanifestrevisionwritable) | |||
|
208 | class memgittreemanifestctx(object): | |||
|
209 | def __init__(self, repo, tree): | |||
|
210 | self._repo = repo | |||
|
211 | self._tree = tree | |||
|
212 | # dict of path: Optional[Tuple(node, flags)] | |||
|
213 | self._pending_changes = {} | |||
|
214 | ||||
|
215 | def read(self): | |||
|
216 | return gittreemanifest(self._repo, self._tree, self._pending_changes) | |||
|
217 | ||||
|
218 | def copy(self): | |||
|
219 | # TODO: if we have a builder in play, what should happen here? | |||
|
220 | # Maybe we can shuffle copy() into the immutable interface. | |||
|
221 | return memgittreemanifestctx(self._repo, self._tree) | |||
|
222 | ||||
|
223 | def write(self, transaction, link, p1, p2, added, removed, match=None): | |||
|
224 | # We're not (for now, anyway) going to audit filenames, so we | |||
|
225 | # can ignore added and removed. | |||
|
226 | ||||
|
227 | # TODO what does this match argument get used for? hopefully | |||
|
228 | # just narrow? | |||
|
229 | assert not match or isinstance(match, matchmod.alwaysmatcher) | |||
|
230 | ||||
|
231 | touched_dirs = pathutil.dirs(self._pending_changes) | |||
|
232 | trees = { | |||
|
233 | b'': self._tree, | |||
|
234 | } | |||
|
235 | # path: treebuilder | |||
|
236 | builders = { | |||
|
237 | b'': self._repo.TreeBuilder(self._tree), | |||
|
238 | } | |||
|
239 | # get a TreeBuilder for every tree in the touched_dirs set | |||
|
240 | for d in sorted(touched_dirs, key=lambda x: (len(x), x)): | |||
|
241 | if d == b'': | |||
|
242 | # loaded root tree above | |||
|
243 | continue | |||
|
244 | comps = d.split(b'/') | |||
|
245 | full = b'' | |||
|
246 | for part in comps: | |||
|
247 | parent = trees[full] | |||
|
248 | try: | |||
|
249 | new = self._repo[parent[pycompat.fsdecode(part)]] | |||
|
250 | except KeyError: | |||
|
251 | # new directory | |||
|
252 | new = None | |||
|
253 | full += b'/' + part | |||
|
254 | if new is not None: | |||
|
255 | # existing directory | |||
|
256 | trees[full] = new | |||
|
257 | builders[full] = self._repo.TreeBuilder(new) | |||
|
258 | else: | |||
|
259 | # new directory, use an empty dict to easily | |||
|
260 | # generate KeyError as any nested new dirs get | |||
|
261 | # created. | |||
|
262 | trees[full] = {} | |||
|
263 | builders[full] = self._repo.TreeBuilder() | |||
|
264 | for f, info in self._pending_changes.items(): | |||
|
265 | if b'/' not in f: | |||
|
266 | dirname = b'' | |||
|
267 | basename = f | |||
|
268 | else: | |||
|
269 | dirname, basename = f.rsplit(b'/', 1) | |||
|
270 | dirname = b'/' + dirname | |||
|
271 | if info is None: | |||
|
272 | builders[dirname].remove(pycompat.fsdecode(basename)) | |||
|
273 | else: | |||
|
274 | n, fl = info | |||
|
275 | mode = { | |||
|
276 | b'': pygit2.GIT_FILEMODE_BLOB, | |||
|
277 | b'x': pygit2.GIT_FILEMODE_BLOB_EXECUTABLE, | |||
|
278 | b'l': pygit2.GIT_FILEMODE_LINK, | |||
|
279 | }[fl] | |||
|
280 | builders[dirname].insert( | |||
|
281 | pycompat.fsdecode(basename), gitutil.togitnode(n), mode | |||
|
282 | ) | |||
|
283 | # This visits the buffered TreeBuilders in deepest-first | |||
|
284 | # order, bubbling up the edits. | |||
|
285 | for b in sorted(builders, key=len, reverse=True): | |||
|
286 | if b == b'': | |||
|
287 | break | |||
|
288 | cb = builders[b] | |||
|
289 | dn, bn = b.rsplit(b'/', 1) | |||
|
290 | builders[dn].insert( | |||
|
291 | pycompat.fsdecode(bn), cb.write(), pygit2.GIT_FILEMODE_TREE | |||
|
292 | ) | |||
|
293 | return builders[b''].write().raw |
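
The bottom-up flush at the end of `write()` is easier to see in isolation. The following is a minimal standalone sketch (not the extension's code) of the same pygit2 TreeBuilder pattern: the deepest tree is written first, and each written subtree is then inserted into its parent with `GIT_FILEMODE_TREE` before the parent itself is written. It assumes an existing `pygit2.Repository` and root `Tree`, and handles only a single directory level to stay short:

    import pygit2

    def add_file_in_subdir(repo, root_tree, dirname, filename, data):
        """Return the Oid of a new root tree with dirname/filename set to data."""
        blob_id = repo.create_blob(data)

        # Seed the subdirectory builder from the existing subtree when the
        # directory is already present, otherwise start from an empty builder
        # (the analogue of the "new directory" KeyError handling in write()).
        try:
            sub = repo.TreeBuilder(repo[root_tree[dirname].id])
        except KeyError:
            sub = repo.TreeBuilder()
        sub.insert(filename, blob_id, pygit2.GIT_FILEMODE_BLOB)

        # Deepest-first: write the child tree, insert the written child into
        # its parent, then write the parent.
        root = repo.TreeBuilder(root_tree)
        root.insert(dirname, sub.write(), pygit2.GIT_FILEMODE_TREE)
        return root.write()

In the extension itself the file node passed to `insert()` additionally goes through `gitutil.togitnode()` first, as in the `write()` body above.
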
@@ -0,0 +1,223 b'' | |||||
|
1 | This test requires pygit2: | |||
|
2 | > $PYTHON -c 'import pygit2' || exit 80 | |||
|
3 | ||||
|
4 | Setup: | |||
|
5 | > GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME | |||
|
6 | > GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL | |||
|
7 | > GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0000"; export GIT_AUTHOR_DATE | |||
|
8 | > GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME | |||
|
9 | > GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL | |||
|
10 | > GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE | |||
|
11 | ||||
|
12 | > count=10 | |||
|
13 | > gitcommit() { | |||
|
14 | > GIT_AUTHOR_DATE="2007-01-01 00:00:$count +0000"; | |||
|
15 | > GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" | |||
|
16 | > git commit "$@" >/dev/null 2>/dev/null || echo "git commit error" | |||
|
17 | > count=`expr $count + 1` | |||
|
18 | > } | |||
|
19 | ||||
|
20 | > echo "[extensions]" >> $HGRCPATH | |||
|
21 | > echo "git=" >> $HGRCPATH | |||
|
22 | ||||
|
23 | Make a new repo with git: | |||
|
24 | $ mkdir foo | |||
|
25 | $ cd foo | |||
|
26 | $ git init | |||
|
27 | Initialized empty Git repository in $TESTTMP/foo/.git/ | |||
|
28 | Ignore the .hg directory within git: | |||
|
29 | $ echo .hg >> .git/info/exclude | |||
|
30 | $ echo alpha > alpha | |||
|
31 | $ git add alpha | |||
|
32 | $ gitcommit -am 'Add alpha' | |||
|
33 | $ echo beta > beta | |||
|
34 | $ git add beta | |||
|
35 | $ gitcommit -am 'Add beta' | |||
|
36 | $ echo gamma > gamma | |||
|
37 | $ git status | |||
|
38 | On branch master | |||
|
39 | Untracked files: | |||
|
40 | (use "git add <file>..." to include in what will be committed) | |||
|
41 | gamma | |||
|
42 | ||||
|
43 | nothing added to commit but untracked files present (use "git add" to track) | |||
|
44 | ||||
|
45 | Without creating the .hg, hg status fails: | |||
|
46 | $ hg status | |||
|
47 | abort: no repository found in '$TESTTMP/foo' (.hg not found)! | |||
|
48 | [255] | |||
|
49 | But if you run hg init --git, it works: | |||
|
50 | $ hg init --git | |||
|
51 | $ hg id --traceback | |||
|
52 | 3d9be8deba43 tip master | |||
|
53 | $ hg status | |||
|
54 | ? gamma | |||
|
55 | Log works too: | |||
|
56 | $ hg log | |||
|
57 | changeset: 1:3d9be8deba43 | |||
|
58 | bookmark: master | |||
|
59 | tag: tip | |||
|
60 | user: test <test@example.org> | |||
|
61 | date: Mon Jan 01 00:00:11 2007 +0000 | |||
|
62 | summary: Add beta | |||
|
63 | ||||
|
64 | changeset: 0:c5864c9d16fb | |||
|
65 | user: test <test@example.org> | |||
|
66 | date: Mon Jan 01 00:00:10 2007 +0000 | |||
|
67 | summary: Add alpha | |||
|
68 | ||||
|
69 | ||||
|
70 | ||||
|
71 | and bookmarks: | |||
|
72 | $ hg bookmarks | |||
|
73 | * master 1:3d9be8deba43 | |||
|
74 | ||||
|
75 | diff even works transparently in both systems: | |||
|
76 | $ echo blah >> alpha | |||
|
77 | $ git diff | |||
|
78 | diff --git a/alpha b/alpha | |||
|
79 | index 4a58007..faed1b7 100644 | |||
|
80 | --- a/alpha | |||
|
81 | +++ b/alpha | |||
|
82 | @@ -1* +1,2 @@ (glob) | |||
|
83 | alpha | |||
|
84 | +blah | |||
|
85 | $ hg diff --git | |||
|
86 | diff --git a/alpha b/alpha | |||
|
87 | --- a/alpha | |||
|
88 | +++ b/alpha | |||
|
89 | @@ -1,1 +1,2 @@ | |||
|
90 | alpha | |||
|
91 | +blah | |||
|
92 | ||||
|
93 | Remove a file, it shows as such: | |||
|
94 | $ rm alpha | |||
|
95 | $ hg status | |||
|
96 | ! alpha | |||
|
97 | ? gamma | |||
|
98 | ||||
|
99 | Revert works: | |||
|
100 | $ hg revert alpha --traceback | |||
|
101 | $ hg status | |||
|
102 | ? gamma | |||
|
103 | $ git status | |||
|
104 | On branch master | |||
|
105 | Untracked files: | |||
|
106 | (use "git add <file>..." to include in what will be committed) | |||
|
107 | gamma | |||
|
108 | ||||
|
109 | nothing added to commit but untracked files present (use "git add" to track) | |||
|
110 | ||||
|
111 | Add shows sanely in both: | |||
|
112 | $ hg add gamma | |||
|
113 | $ hg status | |||
|
114 | A gamma | |||
|
115 | $ hg files | |||
|
116 | alpha | |||
|
117 | beta | |||
|
118 | gamma | |||
|
119 | $ git ls-files | |||
|
120 | alpha | |||
|
121 | beta | |||
|
122 | gamma | |||
|
123 | $ git status | |||
|
124 | On branch master | |||
|
125 | Changes to be committed: | |||
|
126 | (use "git restore --staged <file>..." to unstage) | |||
|
127 | new file: gamma | |||
|
128 | ||||
|
129 | ||||
|
130 | forget does what it should as well: | |||
|
131 | $ hg forget gamma | |||
|
132 | $ hg status | |||
|
133 | ? gamma | |||
|
134 | $ git status | |||
|
135 | On branch master | |||
|
136 | Untracked files: | |||
|
137 | (use "git add <file>..." to include in what will be committed) | |||
|
138 | gamma | |||
|
139 | ||||
|
140 | nothing added to commit but untracked files present (use "git add" to track) | |||
|
141 | ||||
|
142 | clean up untracked file | |||
|
143 | $ rm gamma | |||
|
144 | ||||
|
145 | hg log FILE | |||
|
146 | ||||
|
147 | $ echo a >> alpha | |||
|
148 | $ hg ci -m 'more alpha' --traceback --date '1583522787 18000' | |||
|
149 | $ echo b >> beta | |||
|
150 | $ hg ci -m 'more beta' | |||
|
151 | $ echo a >> alpha | |||
|
152 | $ hg ci -m 'even more alpha' | |||
|
153 | $ hg log -G alpha | |||
|
154 | @ changeset: 4:6626247b7dc8 | |||
|
155 | : bookmark: master | |||
|
156 | : tag: tip | |||
|
157 | : user: test <test> | |||
|
158 | : date: Thu Jan 01 00:00:00 1970 +0000 | |||
|
159 | : summary: even more alpha | |||
|
160 | : | |||
|
161 | o changeset: 2:a1983dd7fb19 | |||
|
162 | : user: test <test> | |||
|
163 | : date: Fri Mar 06 14:26:27 2020 -0500 | |||
|
164 | : summary: more alpha | |||
|
165 | : | |||
|
166 | o changeset: 0:c5864c9d16fb | |||
|
167 | user: test <test@example.org> | |||
|
168 | date: Mon Jan 01 00:00:10 2007 +0000 | |||
|
169 | summary: Add alpha | |||
|
170 | ||||
|
171 | $ hg log -G beta | |||
|
172 | o changeset: 3:d8ee22687733 | |||
|
173 | : user: test <test> | |||
|
174 | : date: Thu Jan 01 00:00:00 1970 +0000 | |||
|
175 | : summary: more beta | |||
|
176 | : | |||
|
177 | o changeset: 1:3d9be8deba43 | |||
|
178 | | user: test <test@example.org> | |||
|
179 | ~ date: Mon Jan 01 00:00:11 2007 +0000 | |||
|
180 | summary: Add beta | |||
|
181 | ||||
|
182 | ||||
|
183 | node|shortest works correctly | |||
|
184 | $ hg log -r tip --template "{node|shortest}\n" | |||
|
185 | 6626 | |||
|
186 | ||||
|
187 | hg annotate | |||
|
188 | ||||
|
189 | $ hg annotate alpha | |||
|
190 | 0: alpha | |||
|
191 | 2: a | |||
|
192 | 4: a | |||
|
193 | $ hg annotate beta | |||
|
194 | 1: beta | |||
|
195 | 3: b | |||
|
196 | ||||
|
197 | ||||
|
198 | Files in subdirectories. TODO: case-folding support, make this `A` | |||
|
199 | instead of `a`. | |||
|
200 | ||||
|
201 | $ mkdir a | |||
|
202 | $ echo "This is file mu." > a/mu | |||
|
203 | $ hg ci -A -m 'Introduce file a/mu' | |||
|
204 | adding a/mu | |||
|
205 | ||||
|
206 | Both hg and git agree a/mu is part of the repo | |||
|
207 | ||||
|
208 | $ git ls-files | |||
|
209 | a/mu | |||
|
210 | alpha | |||
|
211 | beta | |||
|
212 | $ hg files | |||
|
213 | a/mu | |||
|
214 | alpha | |||
|
215 | beta | |||
|
216 | ||||
|
217 | hg and git status both clean | |||
|
218 | ||||
|
219 | $ git status | |||
|
220 | On branch master | |||
|
221 | nothing to commit, working tree clean | |||
|
222 | $ hg status | |||
|
223 |
@@ -1,1739 +1,1740 b'' | |||||
1 | # |
|
1 | # | |
2 | # This is the mercurial setup script. |
|
2 | # This is the mercurial setup script. | |
3 | # |
|
3 | # | |
4 | # 'python setup.py install', or |
|
4 | # 'python setup.py install', or | |
5 | # 'python setup.py --help' for more options |
|
5 | # 'python setup.py --help' for more options | |
6 |
|
6 | |||
7 | import os |
|
7 | import os | |
8 |
|
8 | |||
9 | # Mercurial will never work on Python 3 before 3.5 due to a lack |
|
9 | # Mercurial will never work on Python 3 before 3.5 due to a lack | |
10 | # of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1 |
|
10 | # of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1 | |
11 | # due to a bug in % formatting in bytestrings. |
|
11 | # due to a bug in % formatting in bytestrings. | |
12 | # We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of a bug in |
|
12 | # We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of a bug in | |
13 | # codecs.escape_encode() where it raises SystemError on empty bytestring |
|
13 | # codecs.escape_encode() where it raises SystemError on empty bytestring | |
14 | # bug link: https://bugs.python.org/issue25270 |
|
14 | # bug link: https://bugs.python.org/issue25270 | |
15 | supportedpy = ','.join( |
|
15 | supportedpy = ','.join( | |
16 | [ |
|
16 | [ | |
17 | '>=2.7', |
|
17 | '>=2.7', | |
18 | '!=3.0.*', |
|
18 | '!=3.0.*', | |
19 | '!=3.1.*', |
|
19 | '!=3.1.*', | |
20 | '!=3.2.*', |
|
20 | '!=3.2.*', | |
21 | '!=3.3.*', |
|
21 | '!=3.3.*', | |
22 | '!=3.4.*', |
|
22 | '!=3.4.*', | |
23 | '!=3.5.0', |
|
23 | '!=3.5.0', | |
24 | '!=3.5.1', |
|
24 | '!=3.5.1', | |
25 | '!=3.5.2', |
|
25 | '!=3.5.2', | |
26 | '!=3.6.0', |
|
26 | '!=3.6.0', | |
27 | '!=3.6.1', |
|
27 | '!=3.6.1', | |
28 | ] |
|
28 | ] | |
29 | ) |
|
29 | ) | |
30 |
|
30 | |||
31 | import sys, platform |
|
31 | import sys, platform | |
32 | import sysconfig |
|
32 | import sysconfig | |
33 |
|
33 | |||
34 | if sys.version_info[0] >= 3: |
|
34 | if sys.version_info[0] >= 3: | |
35 | printf = eval('print') |
|
35 | printf = eval('print') | |
36 | libdir_escape = 'unicode_escape' |
|
36 | libdir_escape = 'unicode_escape' | |
37 |
|
37 | |||
38 | def sysstr(s): |
|
38 | def sysstr(s): | |
39 | return s.decode('latin-1') |
|
39 | return s.decode('latin-1') | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | else: |
|
42 | else: | |
43 | libdir_escape = 'string_escape' |
|
43 | libdir_escape = 'string_escape' | |
44 |
|
44 | |||
45 | def printf(*args, **kwargs): |
|
45 | def printf(*args, **kwargs): | |
46 | f = kwargs.get('file', sys.stdout) |
|
46 | f = kwargs.get('file', sys.stdout) | |
47 | end = kwargs.get('end', '\n') |
|
47 | end = kwargs.get('end', '\n') | |
48 | f.write(b' '.join(args) + end) |
|
48 | f.write(b' '.join(args) + end) | |
49 |
|
49 | |||
50 | def sysstr(s): |
|
50 | def sysstr(s): | |
51 | return s |
|
51 | return s | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | # Attempt to guide users to a modern pip - this means that 2.6 users |
|
54 | # Attempt to guide users to a modern pip - this means that 2.6 users | |
55 | # should have a chance of getting a 4.2 release, and when we ratchet |
|
55 | # should have a chance of getting a 4.2 release, and when we ratchet | |
56 | # the version requirement forward again hopefully everyone will get |
|
56 | # the version requirement forward again hopefully everyone will get | |
57 | # something that works for them. |
|
57 | # something that works for them. | |
58 | if sys.version_info < (2, 7, 0, 'final'): |
|
58 | if sys.version_info < (2, 7, 0, 'final'): | |
59 | pip_message = ( |
|
59 | pip_message = ( | |
60 | 'This may be due to an out of date pip. ' |
|
60 | 'This may be due to an out of date pip. ' | |
61 | 'Make sure you have pip >= 9.0.1.' |
|
61 | 'Make sure you have pip >= 9.0.1.' | |
62 | ) |
|
62 | ) | |
63 | try: |
|
63 | try: | |
64 | import pip |
|
64 | import pip | |
65 |
|
65 | |||
66 | pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]]) |
|
66 | pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]]) | |
67 | if pip_version < (9, 0, 1): |
|
67 | if pip_version < (9, 0, 1): | |
68 | pip_message = ( |
|
68 | pip_message = ( | |
69 | 'Your pip version is out of date, please install ' |
|
69 | 'Your pip version is out of date, please install ' | |
70 | 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__) |
|
70 | 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__) | |
71 | ) |
|
71 | ) | |
72 | else: |
|
72 | else: | |
73 | # pip is new enough - it must be something else |
|
73 | # pip is new enough - it must be something else | |
74 | pip_message = '' |
|
74 | pip_message = '' | |
75 | except Exception: |
|
75 | except Exception: | |
76 | pass |
|
76 | pass | |
77 | error = """ |
|
77 | error = """ | |
78 | Mercurial does not support Python older than 2.7. |
|
78 | Mercurial does not support Python older than 2.7. | |
79 | Python {py} detected. |
|
79 | Python {py} detected. | |
80 | {pip} |
|
80 | {pip} | |
81 | """.format( |
|
81 | """.format( | |
82 | py=sys.version_info, pip=pip_message |
|
82 | py=sys.version_info, pip=pip_message | |
83 | ) |
|
83 | ) | |
84 | printf(error, file=sys.stderr) |
|
84 | printf(error, file=sys.stderr) | |
85 | sys.exit(1) |
|
85 | sys.exit(1) | |
86 |
|
86 | |||
87 | if sys.version_info[0] >= 3: |
|
87 | if sys.version_info[0] >= 3: | |
88 | DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX'] |
|
88 | DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX'] | |
89 | else: |
|
89 | else: | |
90 | # deprecated in Python 3 |
|
90 | # deprecated in Python 3 | |
91 | DYLIB_SUFFIX = sysconfig.get_config_vars()['SO'] |
|
91 | DYLIB_SUFFIX = sysconfig.get_config_vars()['SO'] | |
92 |
|
92 | |||
93 | # Solaris Python packaging brain damage |
|
93 | # Solaris Python packaging brain damage | |
94 | try: |
|
94 | try: | |
95 | import hashlib |
|
95 | import hashlib | |
96 |
|
96 | |||
97 | sha = hashlib.sha1() |
|
97 | sha = hashlib.sha1() | |
98 | except ImportError: |
|
98 | except ImportError: | |
99 | try: |
|
99 | try: | |
100 | import sha |
|
100 | import sha | |
101 |
|
101 | |||
102 | sha.sha # silence unused import warning |
|
102 | sha.sha # silence unused import warning | |
103 | except ImportError: |
|
103 | except ImportError: | |
104 | raise SystemExit( |
|
104 | raise SystemExit( | |
105 | "Couldn't import standard hashlib (incomplete Python install)." |
|
105 | "Couldn't import standard hashlib (incomplete Python install)." | |
106 | ) |
|
106 | ) | |
107 |
|
107 | |||
108 | try: |
|
108 | try: | |
109 | import zlib |
|
109 | import zlib | |
110 |
|
110 | |||
111 | zlib.compressobj # silence unused import warning |
|
111 | zlib.compressobj # silence unused import warning | |
112 | except ImportError: |
|
112 | except ImportError: | |
113 | raise SystemExit( |
|
113 | raise SystemExit( | |
114 | "Couldn't import standard zlib (incomplete Python install)." |
|
114 | "Couldn't import standard zlib (incomplete Python install)." | |
115 | ) |
|
115 | ) | |
116 |
|
116 | |||
117 | # The base IronPython distribution (as of 2.7.1) doesn't support bz2 |
|
117 | # The base IronPython distribution (as of 2.7.1) doesn't support bz2 | |
118 | isironpython = False |
|
118 | isironpython = False | |
119 | try: |
|
119 | try: | |
120 | isironpython = ( |
|
120 | isironpython = ( | |
121 | platform.python_implementation().lower().find("ironpython") != -1 |
|
121 | platform.python_implementation().lower().find("ironpython") != -1 | |
122 | ) |
|
122 | ) | |
123 | except AttributeError: |
|
123 | except AttributeError: | |
124 | pass |
|
124 | pass | |
125 |
|
125 | |||
126 | if isironpython: |
|
126 | if isironpython: | |
127 | sys.stderr.write("warning: IronPython detected (no bz2 support)\n") |
|
127 | sys.stderr.write("warning: IronPython detected (no bz2 support)\n") | |
128 | else: |
|
128 | else: | |
129 | try: |
|
129 | try: | |
130 | import bz2 |
|
130 | import bz2 | |
131 |
|
131 | |||
132 | bz2.BZ2Compressor # silence unused import warning |
|
132 | bz2.BZ2Compressor # silence unused import warning | |
133 | except ImportError: |
|
133 | except ImportError: | |
134 | raise SystemExit( |
|
134 | raise SystemExit( | |
135 | "Couldn't import standard bz2 (incomplete Python install)." |
|
135 | "Couldn't import standard bz2 (incomplete Python install)." | |
136 | ) |
|
136 | ) | |
137 |
|
137 | |||
138 | ispypy = "PyPy" in sys.version |
|
138 | ispypy = "PyPy" in sys.version | |
139 |
|
139 | |||
140 | hgrustext = os.environ.get('HGWITHRUSTEXT') |
|
140 | hgrustext = os.environ.get('HGWITHRUSTEXT') | |
141 | # TODO record it for proper rebuild upon changes |
|
141 | # TODO record it for proper rebuild upon changes | |
142 | # (see mercurial/__modulepolicy__.py) |
|
142 | # (see mercurial/__modulepolicy__.py) | |
143 | if hgrustext != 'cpython' and hgrustext is not None: |
|
143 | if hgrustext != 'cpython' and hgrustext is not None: | |
144 | hgrustext = 'direct-ffi' |
|
144 | hgrustext = 'direct-ffi' | |
145 |
|
145 | |||
146 | import ctypes |
|
146 | import ctypes | |
147 | import errno |
|
147 | import errno | |
148 | import stat, subprocess, time |
|
148 | import stat, subprocess, time | |
149 | import re |
|
149 | import re | |
150 | import shutil |
|
150 | import shutil | |
151 | import tempfile |
|
151 | import tempfile | |
152 | from distutils import log |
|
152 | from distutils import log | |
153 |
|
153 | |||
154 | # We have issues with setuptools on some platforms and builders. Until |
|
154 | # We have issues with setuptools on some platforms and builders. Until | |
155 | # those are resolved, setuptools is opt-in except for platforms where |
|
155 | # those are resolved, setuptools is opt-in except for platforms where | |
156 | # we don't have issues. |
|
156 | # we don't have issues. | |
157 | issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ |
|
157 | issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ | |
158 | if issetuptools: |
|
158 | if issetuptools: | |
159 | from setuptools import setup |
|
159 | from setuptools import setup | |
160 | else: |
|
160 | else: | |
161 | from distutils.core import setup |
|
161 | from distutils.core import setup | |
162 | from distutils.ccompiler import new_compiler |
|
162 | from distutils.ccompiler import new_compiler | |
163 | from distutils.core import Command, Extension |
|
163 | from distutils.core import Command, Extension | |
164 | from distutils.dist import Distribution |
|
164 | from distutils.dist import Distribution | |
165 | from distutils.command.build import build |
|
165 | from distutils.command.build import build | |
166 | from distutils.command.build_ext import build_ext |
|
166 | from distutils.command.build_ext import build_ext | |
167 | from distutils.command.build_py import build_py |
|
167 | from distutils.command.build_py import build_py | |
168 | from distutils.command.build_scripts import build_scripts |
|
168 | from distutils.command.build_scripts import build_scripts | |
169 | from distutils.command.install import install |
|
169 | from distutils.command.install import install | |
170 | from distutils.command.install_lib import install_lib |
|
170 | from distutils.command.install_lib import install_lib | |
171 | from distutils.command.install_scripts import install_scripts |
|
171 | from distutils.command.install_scripts import install_scripts | |
172 | from distutils.spawn import spawn, find_executable |
|
172 | from distutils.spawn import spawn, find_executable | |
173 | from distutils import file_util |
|
173 | from distutils import file_util | |
174 | from distutils.errors import ( |
|
174 | from distutils.errors import ( | |
175 | CCompilerError, |
|
175 | CCompilerError, | |
176 | DistutilsError, |
|
176 | DistutilsError, | |
177 | DistutilsExecError, |
|
177 | DistutilsExecError, | |
178 | ) |
|
178 | ) | |
179 | from distutils.sysconfig import get_python_inc, get_config_var |
|
179 | from distutils.sysconfig import get_python_inc, get_config_var | |
180 | from distutils.version import StrictVersion |
|
180 | from distutils.version import StrictVersion | |
181 |
|
181 | |||
182 | # Explain to distutils.StrictVersion how our release candidates are versioned |
|
182 | # Explain to distutils.StrictVersion how our release candidates are versioned | |
183 | StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$') |
|
183 | StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$') | |
184 |
|
184 | |||
185 |
|
185 | |||
186 | def write_if_changed(path, content): |
|
186 | def write_if_changed(path, content): | |
187 | """Write content to a file iff the content hasn't changed.""" |
|
187 | """Write content to a file iff the content hasn't changed.""" | |
188 | if os.path.exists(path): |
|
188 | if os.path.exists(path): | |
189 | with open(path, 'rb') as fh: |
|
189 | with open(path, 'rb') as fh: | |
190 | current = fh.read() |
|
190 | current = fh.read() | |
191 | else: |
|
191 | else: | |
192 | current = b'' |
|
192 | current = b'' | |
193 |
|
193 | |||
194 | if current != content: |
|
194 | if current != content: | |
195 | with open(path, 'wb') as fh: |
|
195 | with open(path, 'wb') as fh: | |
196 | fh.write(content) |
|
196 | fh.write(content) | |
197 |
|
197 | |||
198 |
|
198 | |||
199 | scripts = ['hg'] |
|
199 | scripts = ['hg'] | |
200 | if os.name == 'nt': |
|
200 | if os.name == 'nt': | |
201 | # We remove hg.bat if we are able to build hg.exe. |
|
201 | # We remove hg.bat if we are able to build hg.exe. | |
202 | scripts.append('contrib/win32/hg.bat') |
|
202 | scripts.append('contrib/win32/hg.bat') | |
203 |
|
203 | |||
204 |
|
204 | |||
205 | def cancompile(cc, code): |
|
205 | def cancompile(cc, code): | |
206 | tmpdir = tempfile.mkdtemp(prefix='hg-install-') |
|
206 | tmpdir = tempfile.mkdtemp(prefix='hg-install-') | |
207 | devnull = oldstderr = None |
|
207 | devnull = oldstderr = None | |
208 | try: |
|
208 | try: | |
209 | fname = os.path.join(tmpdir, 'testcomp.c') |
|
209 | fname = os.path.join(tmpdir, 'testcomp.c') | |
210 | f = open(fname, 'w') |
|
210 | f = open(fname, 'w') | |
211 | f.write(code) |
|
211 | f.write(code) | |
212 | f.close() |
|
212 | f.close() | |
213 | # Redirect stderr to /dev/null to hide any error messages |
|
213 | # Redirect stderr to /dev/null to hide any error messages | |
214 | # from the compiler. |
|
214 | # from the compiler. | |
215 | # This will have to be changed if we ever have to check |
|
215 | # This will have to be changed if we ever have to check | |
216 | # for a function on Windows. |
|
216 | # for a function on Windows. | |
217 | devnull = open('/dev/null', 'w') |
|
217 | devnull = open('/dev/null', 'w') | |
218 | oldstderr = os.dup(sys.stderr.fileno()) |
|
218 | oldstderr = os.dup(sys.stderr.fileno()) | |
219 | os.dup2(devnull.fileno(), sys.stderr.fileno()) |
|
219 | os.dup2(devnull.fileno(), sys.stderr.fileno()) | |
220 | objects = cc.compile([fname], output_dir=tmpdir) |
|
220 | objects = cc.compile([fname], output_dir=tmpdir) | |
221 | cc.link_executable(objects, os.path.join(tmpdir, "a.out")) |
|
221 | cc.link_executable(objects, os.path.join(tmpdir, "a.out")) | |
222 | return True |
|
222 | return True | |
223 | except Exception: |
|
223 | except Exception: | |
224 | return False |
|
224 | return False | |
225 | finally: |
|
225 | finally: | |
226 | if oldstderr is not None: |
|
226 | if oldstderr is not None: | |
227 | os.dup2(oldstderr, sys.stderr.fileno()) |
|
227 | os.dup2(oldstderr, sys.stderr.fileno()) | |
228 | if devnull is not None: |
|
228 | if devnull is not None: | |
229 | devnull.close() |
|
229 | devnull.close() | |
230 | shutil.rmtree(tmpdir) |
|
230 | shutil.rmtree(tmpdir) | |
231 |
|
231 | |||
232 |
|
232 | |||
233 | # simplified version of distutils.ccompiler.CCompiler.has_function |
|
233 | # simplified version of distutils.ccompiler.CCompiler.has_function | |
234 | # that actually removes its temporary files. |
|
234 | # that actually removes its temporary files. | |
235 | def hasfunction(cc, funcname): |
|
235 | def hasfunction(cc, funcname): | |
236 | code = 'int main(void) { %s(); }\n' % funcname |
|
236 | code = 'int main(void) { %s(); }\n' % funcname | |
237 | return cancompile(cc, code) |
|
237 | return cancompile(cc, code) | |
238 |
|
238 | |||
239 |
|
239 | |||
240 | def hasheader(cc, headername): |
|
240 | def hasheader(cc, headername): | |
241 | code = '#include <%s>\nint main(void) { return 0; }\n' % headername |
|
241 | code = '#include <%s>\nint main(void) { return 0; }\n' % headername | |
242 | return cancompile(cc, code) |
|
242 | return cancompile(cc, code) | |
243 |
|
243 | |||
244 |
|
244 | |||
245 | # py2exe needs to be installed to work |
|
245 | # py2exe needs to be installed to work | |
246 | try: |
|
246 | try: | |
247 | import py2exe |
|
247 | import py2exe | |
248 |
|
248 | |||
249 | py2exe.Distribution # silence unused import warning |
|
249 | py2exe.Distribution # silence unused import warning | |
250 | py2exeloaded = True |
|
250 | py2exeloaded = True | |
251 | # import py2exe's patched Distribution class |
|
251 | # import py2exe's patched Distribution class | |
252 | from distutils.core import Distribution |
|
252 | from distutils.core import Distribution | |
253 | except ImportError: |
|
253 | except ImportError: | |
254 | py2exeloaded = False |
|
254 | py2exeloaded = False | |
255 |
|
255 | |||
256 |
|
256 | |||
257 | def runcmd(cmd, env, cwd=None): |
|
257 | def runcmd(cmd, env, cwd=None): | |
258 | p = subprocess.Popen( |
|
258 | p = subprocess.Popen( | |
259 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd |
|
259 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd | |
260 | ) |
|
260 | ) | |
261 | out, err = p.communicate() |
|
261 | out, err = p.communicate() | |
262 | return p.returncode, out, err |
|
262 | return p.returncode, out, err | |
263 |
|
263 | |||
264 |
|
264 | |||
265 | class hgcommand(object): |
|
265 | class hgcommand(object): | |
266 | def __init__(self, cmd, env): |
|
266 | def __init__(self, cmd, env): | |
267 | self.cmd = cmd |
|
267 | self.cmd = cmd | |
268 | self.env = env |
|
268 | self.env = env | |
269 |
|
269 | |||
270 | def run(self, args): |
|
270 | def run(self, args): | |
271 | cmd = self.cmd + args |
|
271 | cmd = self.cmd + args | |
272 | returncode, out, err = runcmd(cmd, self.env) |
|
272 | returncode, out, err = runcmd(cmd, self.env) | |
273 | err = filterhgerr(err) |
|
273 | err = filterhgerr(err) | |
274 | if err or returncode != 0: |
|
274 | if err or returncode != 0: | |
275 | printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr) |
|
275 | printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr) | |
276 | printf(err, file=sys.stderr) |
|
276 | printf(err, file=sys.stderr) | |
277 | return '' |
|
277 | return '' | |
278 | return out |
|
278 | return out | |
279 |
|
279 | |||
280 |
|
280 | |||
281 | def filterhgerr(err): |
|
281 | def filterhgerr(err): | |
282 | # If root is executing setup.py, but the repository is owned by |
|
282 | # If root is executing setup.py, but the repository is owned by | |
283 | # another user (as in "sudo python setup.py install") we will get |
|
283 | # another user (as in "sudo python setup.py install") we will get | |
284 | # trust warnings since the .hg/hgrc file is untrusted. That is |
|
284 | # trust warnings since the .hg/hgrc file is untrusted. That is | |
285 | # fine, we don't want to load it anyway. Python may warn about |
|
285 | # fine, we don't want to load it anyway. Python may warn about | |
286 | # a missing __init__.py in mercurial/locale, we also ignore that. |
|
286 | # a missing __init__.py in mercurial/locale, we also ignore that. | |
287 | err = [ |
|
287 | err = [ | |
288 | e |
|
288 | e | |
289 | for e in err.splitlines() |
|
289 | for e in err.splitlines() | |
290 | if ( |
|
290 | if ( | |
291 | not e.startswith(b'not trusting file') |
|
291 | not e.startswith(b'not trusting file') | |
292 | and not e.startswith(b'warning: Not importing') |
|
292 | and not e.startswith(b'warning: Not importing') | |
293 | and not e.startswith(b'obsolete feature not enabled') |
|
293 | and not e.startswith(b'obsolete feature not enabled') | |
294 | and not e.startswith(b'*** failed to import extension') |
|
294 | and not e.startswith(b'*** failed to import extension') | |
295 | and not e.startswith(b'devel-warn:') |
|
295 | and not e.startswith(b'devel-warn:') | |
296 | and not ( |
|
296 | and not ( | |
297 | e.startswith(b'(third party extension') |
|
297 | e.startswith(b'(third party extension') | |
298 | and e.endswith(b'or newer of Mercurial; disabling)') |
|
298 | and e.endswith(b'or newer of Mercurial; disabling)') | |
299 | ) |
|
299 | ) | |
300 | ) |
|
300 | ) | |
301 | ] |
|
301 | ] | |
302 | return b'\n'.join(b' ' + e for e in err) |
|
302 | return b'\n'.join(b' ' + e for e in err) | |
303 |
|
303 | |||
304 |
|
304 | |||
305 | def findhg(): |
|
305 | def findhg(): | |
306 | """Try to figure out how we should invoke hg for examining the local |
|
306 | """Try to figure out how we should invoke hg for examining the local | |
307 | repository contents. |
|
307 | repository contents. | |
308 |
|
308 | |||
309 | Returns an hgcommand object.""" |
|
309 | Returns an hgcommand object.""" | |
310 | # By default, prefer the "hg" command in the user's path. This was |
|
310 | # By default, prefer the "hg" command in the user's path. This was | |
311 | # presumably the hg command that the user used to create this repository. |
|
311 | # presumably the hg command that the user used to create this repository. | |
312 | # |
|
312 | # | |
313 | # This repository may require extensions or other settings that would not |
|
313 | # This repository may require extensions or other settings that would not | |
314 | # be enabled by running the hg script directly from this local repository. |
|
314 | # be enabled by running the hg script directly from this local repository. | |
315 | hgenv = os.environ.copy() |
|
315 | hgenv = os.environ.copy() | |
316 | # Use HGPLAIN to disable hgrc settings that would change output formatting, |
|
316 | # Use HGPLAIN to disable hgrc settings that would change output formatting, | |
317 | # and disable localization for the same reasons. |
|
317 | # and disable localization for the same reasons. | |
318 | hgenv['HGPLAIN'] = '1' |
|
318 | hgenv['HGPLAIN'] = '1' | |
319 | hgenv['LANGUAGE'] = 'C' |
|
319 | hgenv['LANGUAGE'] = 'C' | |
320 | hgcmd = ['hg'] |
|
320 | hgcmd = ['hg'] | |
321 | # Run a simple "hg log" command just to see if using hg from the user's |
|
321 | # Run a simple "hg log" command just to see if using hg from the user's | |
322 | # path works and can successfully interact with this repository. Windows |
|
322 | # path works and can successfully interact with this repository. Windows | |
323 | # gives precedence to hg.exe in the current directory, so fall back to the |
|
323 | # gives precedence to hg.exe in the current directory, so fall back to the | |
324 | # python invocation of local hg, where pythonXY.dll can always be found. |
|
324 | # python invocation of local hg, where pythonXY.dll can always be found. | |
325 | check_cmd = ['log', '-r.', '-Ttest'] |
|
325 | check_cmd = ['log', '-r.', '-Ttest'] | |
326 | if os.name != 'nt' or not os.path.exists("hg.exe"): |
|
326 | if os.name != 'nt' or not os.path.exists("hg.exe"): | |
327 | try: |
|
327 | try: | |
328 | retcode, out, err = runcmd(hgcmd + check_cmd, hgenv) |
|
328 | retcode, out, err = runcmd(hgcmd + check_cmd, hgenv) | |
329 | except EnvironmentError: |
|
329 | except EnvironmentError: | |
330 | retcode = -1 |
|
330 | retcode = -1 | |
331 | if retcode == 0 and not filterhgerr(err): |
|
331 | if retcode == 0 and not filterhgerr(err): | |
332 | return hgcommand(hgcmd, hgenv) |
|
332 | return hgcommand(hgcmd, hgenv) | |
333 |
|
333 | |||
334 | # Fall back to trying the local hg installation. |
|
334 | # Fall back to trying the local hg installation. | |
335 | hgenv = localhgenv() |
|
335 | hgenv = localhgenv() | |
336 | hgcmd = [sys.executable, 'hg'] |
|
336 | hgcmd = [sys.executable, 'hg'] | |
337 | try: |
|
337 | try: | |
338 | retcode, out, err = runcmd(hgcmd + check_cmd, hgenv) |
|
338 | retcode, out, err = runcmd(hgcmd + check_cmd, hgenv) | |
339 | except EnvironmentError: |
|
339 | except EnvironmentError: | |
340 | retcode = -1 |
|
340 | retcode = -1 | |
341 | if retcode == 0 and not filterhgerr(err): |
|
341 | if retcode == 0 and not filterhgerr(err): | |
342 | return hgcommand(hgcmd, hgenv) |
|
342 | return hgcommand(hgcmd, hgenv) | |
343 |
|
343 | |||
344 | raise SystemExit( |
|
344 | raise SystemExit( | |
345 | 'Unable to find a working hg binary to extract the ' |
|
345 | 'Unable to find a working hg binary to extract the ' | |
346 | 'version from the repository tags' |
|
346 | 'version from the repository tags' | |
347 | ) |
|
347 | ) | |
348 |
|
348 | |||
349 |
|
349 | |||
350 | def localhgenv(): |
|
350 | def localhgenv(): | |
351 | """Get an environment dictionary to use for invoking or importing |
|
351 | """Get an environment dictionary to use for invoking or importing | |
352 | mercurial from the local repository.""" |
|
352 | mercurial from the local repository.""" | |
353 | # Execute hg out of this directory with a custom environment which takes |
|
353 | # Execute hg out of this directory with a custom environment which takes | |
354 | # care to not use any hgrc files and do no localization. |
|
354 | # care to not use any hgrc files and do no localization. | |
355 | env = { |
|
355 | env = { | |
356 | 'HGMODULEPOLICY': 'py', |
|
356 | 'HGMODULEPOLICY': 'py', | |
357 | 'HGRCPATH': '', |
|
357 | 'HGRCPATH': '', | |
358 | 'LANGUAGE': 'C', |
|
358 | 'LANGUAGE': 'C', | |
359 | 'PATH': '', |
|
359 | 'PATH': '', | |
360 | } # make pypi modules that use os.environ['PATH'] happy |
|
360 | } # make pypi modules that use os.environ['PATH'] happy | |
361 | if 'LD_LIBRARY_PATH' in os.environ: |
|
361 | if 'LD_LIBRARY_PATH' in os.environ: | |
362 | env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH'] |
|
362 | env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH'] | |
363 | if 'SystemRoot' in os.environ: |
|
363 | if 'SystemRoot' in os.environ: | |
364 | # SystemRoot is required by Windows to load various DLLs. See: |
|
364 | # SystemRoot is required by Windows to load various DLLs. See: | |
365 | # https://bugs.python.org/issue13524#msg148850 |
|
365 | # https://bugs.python.org/issue13524#msg148850 | |
366 | env['SystemRoot'] = os.environ['SystemRoot'] |
|
366 | env['SystemRoot'] = os.environ['SystemRoot'] | |
367 | return env |
|
367 | return env | |
368 |
|
368 | |||
369 |
|
369 | |||
370 | version = '' |
|
370 | version = '' | |
371 |
|
371 | |||
372 | if os.path.isdir('.hg'): |
|
372 | if os.path.isdir('.hg'): | |
373 | hg = findhg() |
|
373 | hg = findhg() | |
374 | cmd = ['log', '-r', '.', '--template', '{tags}\n'] |
|
374 | cmd = ['log', '-r', '.', '--template', '{tags}\n'] | |
375 | numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()] |
|
375 | numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()] | |
376 | hgid = sysstr(hg.run(['id', '-i'])).strip() |
|
376 | hgid = sysstr(hg.run(['id', '-i'])).strip() | |
377 | if not hgid: |
|
377 | if not hgid: | |
378 | # Bail out if hg is having problems interacting with this repository, |
|
378 | # Bail out if hg is having problems interacting with this repository, | |
379 | # rather than falling through and producing a bogus version number. |
|
379 | # rather than falling through and producing a bogus version number. | |
380 | # Continuing with an invalid version number will break extensions |
|
380 | # Continuing with an invalid version number will break extensions | |
381 | # that define minimumhgversion. |
|
381 | # that define minimumhgversion. | |
382 | raise SystemExit('Unable to determine hg version from local repository') |
|
382 | raise SystemExit('Unable to determine hg version from local repository') | |
383 | if numerictags: # tag(s) found |
|
383 | if numerictags: # tag(s) found | |
384 | version = numerictags[-1] |
|
384 | version = numerictags[-1] | |
385 | if hgid.endswith('+'): # propagate the dirty status to the tag |
|
385 | if hgid.endswith('+'): # propagate the dirty status to the tag | |
386 | version += '+' |
|
386 | version += '+' | |
387 | else: # no tag found |
|
387 | else: # no tag found | |
388 | ltagcmd = ['parents', '--template', '{latesttag}'] |
|
388 | ltagcmd = ['parents', '--template', '{latesttag}'] | |
389 | ltag = sysstr(hg.run(ltagcmd)) |
|
389 | ltag = sysstr(hg.run(ltagcmd)) | |
390 | changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag] |
|
390 | changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag] | |
391 | changessince = len(hg.run(changessincecmd).splitlines()) |
|
391 | changessince = len(hg.run(changessincecmd).splitlines()) | |
392 | version = '%s+%s-%s' % (ltag, changessince, hgid) |
|
392 | version = '%s+%s-%s' % (ltag, changessince, hgid) | |
393 | if version.endswith('+'): |
|
393 | if version.endswith('+'): | |
394 | version += time.strftime('%Y%m%d') |
|
394 | version += time.strftime('%Y%m%d') | |
395 | elif os.path.exists('.hg_archival.txt'): |
|
395 | elif os.path.exists('.hg_archival.txt'): | |
396 | kw = dict( |
|
396 | kw = dict( | |
397 | [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')] |
|
397 | [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')] | |
398 | ) |
|
398 | ) | |
399 | if 'tag' in kw: |
|
399 | if 'tag' in kw: | |
400 | version = kw['tag'] |
|
400 | version = kw['tag'] | |
401 | elif 'latesttag' in kw: |
|
401 | elif 'latesttag' in kw: | |
402 | if 'changessincelatesttag' in kw: |
|
402 | if 'changessincelatesttag' in kw: | |
403 | version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw |
|
403 | version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw | |
404 | else: |
|
404 | else: | |
405 | version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw |
|
405 | version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw | |
406 | else: |
|
406 | else: | |
407 | version = kw.get('node', '')[:12] |
|
407 | version = kw.get('node', '')[:12] | |
408 |
|
408 | |||
409 | if version: |
|
409 | if version: | |
410 | versionb = version |
|
410 | versionb = version | |
411 | if not isinstance(versionb, bytes): |
|
411 | if not isinstance(versionb, bytes): | |
412 | versionb = versionb.encode('ascii') |
|
412 | versionb = versionb.encode('ascii') | |
413 |
|
413 | |||
414 | write_if_changed( |
|
414 | write_if_changed( | |
415 | 'mercurial/__version__.py', |
|
415 | 'mercurial/__version__.py', | |
416 | b''.join( |
|
416 | b''.join( | |
417 | [ |
|
417 | [ | |
418 | b'# this file is autogenerated by setup.py\n' |
|
418 | b'# this file is autogenerated by setup.py\n' | |
419 | b'version = b"%s"\n' % versionb, |
|
419 | b'version = b"%s"\n' % versionb, | |
420 | ] |
|
420 | ] | |
421 | ), |
|
421 | ), | |
422 | ) |
|
422 | ) | |
423 |
|
423 | |||
424 | try: |
|
424 | try: | |
425 | oldpolicy = os.environ.get('HGMODULEPOLICY', None) |
|
425 | oldpolicy = os.environ.get('HGMODULEPOLICY', None) | |
426 | os.environ['HGMODULEPOLICY'] = 'py' |
|
426 | os.environ['HGMODULEPOLICY'] = 'py' | |
427 | from mercurial import __version__ |
|
427 | from mercurial import __version__ | |
428 |
|
428 | |||
429 | version = __version__.version |
|
429 | version = __version__.version | |
430 | except ImportError: |
|
430 | except ImportError: | |
431 | version = b'unknown' |
|
431 | version = b'unknown' | |
432 | finally: |
|
432 | finally: | |
433 | if oldpolicy is None: |
|
433 | if oldpolicy is None: | |
434 | del os.environ['HGMODULEPOLICY'] |
|
434 | del os.environ['HGMODULEPOLICY'] | |
435 | else: |
|
435 | else: | |
436 | os.environ['HGMODULEPOLICY'] = oldpolicy |
|
436 | os.environ['HGMODULEPOLICY'] = oldpolicy | |
437 |
|
437 | |||
438 |
|
438 | |||
439 | class hgbuild(build): |
|
439 | class hgbuild(build): | |
440 | # Insert hgbuildmo first so that files in mercurial/locale/ are found |
|
440 | # Insert hgbuildmo first so that files in mercurial/locale/ are found | |
441 | # when build_py is run next. |
|
441 | # when build_py is run next. | |
442 | sub_commands = [('build_mo', None)] + build.sub_commands |
|
442 | sub_commands = [('build_mo', None)] + build.sub_commands | |
443 |
|
443 | |||
444 |
|
444 | |||
445 | class hgbuildmo(build): |
|
445 | class hgbuildmo(build): | |
446 |
|
446 | |||
447 | description = "build translations (.mo files)" |
|
447 | description = "build translations (.mo files)" | |
448 |
|
448 | |||
449 | def run(self): |
|
449 | def run(self): | |
450 | if not find_executable('msgfmt'): |
|
450 | if not find_executable('msgfmt'): | |
451 | self.warn( |
|
451 | self.warn( | |
452 | "could not find msgfmt executable, no translations " |
|
452 | "could not find msgfmt executable, no translations " | |
453 | "will be built" |
|
453 | "will be built" | |
454 | ) |
|
454 | ) | |
455 | return |
|
455 | return | |
456 |
|
456 | |||
457 | podir = 'i18n' |
|
457 | podir = 'i18n' | |
458 | if not os.path.isdir(podir): |
|
458 | if not os.path.isdir(podir): | |
459 | self.warn("could not find %s/ directory" % podir) |
|
459 | self.warn("could not find %s/ directory" % podir) | |
460 | return |
|
460 | return | |
461 |
|
461 | |||
462 | join = os.path.join |
|
462 | join = os.path.join | |
463 | for po in os.listdir(podir): |
|
463 | for po in os.listdir(podir): | |
464 | if not po.endswith('.po'): |
|
464 | if not po.endswith('.po'): | |
465 | continue |
|
465 | continue | |
466 | pofile = join(podir, po) |
|
466 | pofile = join(podir, po) | |
467 | modir = join('locale', po[:-3], 'LC_MESSAGES') |
|
467 | modir = join('locale', po[:-3], 'LC_MESSAGES') | |
468 | mofile = join(modir, 'hg.mo') |
|
468 | mofile = join(modir, 'hg.mo') | |
469 | mobuildfile = join('mercurial', mofile) |
|
469 | mobuildfile = join('mercurial', mofile) | |
470 | cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile] |
|
470 | cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile] | |
471 | if sys.platform != 'sunos5': |
|
471 | if sys.platform != 'sunos5': | |
472 | # msgfmt on Solaris does not know about -c |
|
472 | # msgfmt on Solaris does not know about -c | |
473 | cmd.append('-c') |
|
473 | cmd.append('-c') | |
474 | self.mkpath(join('mercurial', modir)) |
|
474 | self.mkpath(join('mercurial', modir)) | |
475 | self.make_file([pofile], mobuildfile, spawn, (cmd,)) |
|
475 | self.make_file([pofile], mobuildfile, spawn, (cmd,)) | |
476 |
|
476 | |||
477 |
|
477 | |||
478 | class hgdist(Distribution): |
|
478 | class hgdist(Distribution): | |
479 | pure = False |
|
479 | pure = False | |
480 | rust = hgrustext is not None |
|
480 | rust = hgrustext is not None | |
481 | cffi = ispypy |
|
481 | cffi = ispypy | |
482 |
|
482 | |||
483 | global_options = Distribution.global_options + [ |
|
483 | global_options = Distribution.global_options + [ | |
484 | ('pure', None, "use pure (slow) Python code instead of C extensions"), |
|
484 | ('pure', None, "use pure (slow) Python code instead of C extensions"), | |
485 | ('rust', None, "use Rust extensions additionally to C extensions"), |
|
485 | ('rust', None, "use Rust extensions additionally to C extensions"), | |
486 | ] |
|
486 | ] | |
487 |
|
487 | |||
488 | def has_ext_modules(self): |
|
488 | def has_ext_modules(self): | |
489 | # self.ext_modules is emptied in hgbuildpy.finalize_options which is |
|
489 | # self.ext_modules is emptied in hgbuildpy.finalize_options which is | |
490 | # too late for some cases |
|
490 | # too late for some cases | |
491 | return not self.pure and Distribution.has_ext_modules(self) |
|
491 | return not self.pure and Distribution.has_ext_modules(self) | |
492 |
|
492 | |||
493 |
|
493 | |||
494 | # This is ugly as a one-liner. So use a variable. |
|
494 | # This is ugly as a one-liner. So use a variable. | |
495 | buildextnegops = dict(getattr(build_ext, 'negative_options', {})) |
|
495 | buildextnegops = dict(getattr(build_ext, 'negative_options', {})) | |
496 | buildextnegops['no-zstd'] = 'zstd' |
|
496 | buildextnegops['no-zstd'] = 'zstd' | |
497 | buildextnegops['no-rust'] = 'rust' |
|
497 | buildextnegops['no-rust'] = 'rust' | |
498 |
|
498 | |||
499 |
|
499 | |||
500 | class hgbuildext(build_ext): |
|
500 | class hgbuildext(build_ext): | |
501 | user_options = build_ext.user_options + [ |
|
501 | user_options = build_ext.user_options + [ | |
502 | ('zstd', None, 'compile zstd bindings [default]'), |
|
502 | ('zstd', None, 'compile zstd bindings [default]'), | |
503 | ('no-zstd', None, 'do not compile zstd bindings'), |
|
503 | ('no-zstd', None, 'do not compile zstd bindings'), | |
504 | ( |
|
504 | ( | |
505 | 'rust', |
|
505 | 'rust', | |
506 | None, |
|
506 | None, | |
507 | 'compile Rust extensions if they are in use ' |
|
507 | 'compile Rust extensions if they are in use ' | |
508 | '(requires Cargo) [default]', |
|
508 | '(requires Cargo) [default]', | |
509 | ), |
|
509 | ), | |
510 | ('no-rust', None, 'do not compile Rust extensions'), |
|
510 | ('no-rust', None, 'do not compile Rust extensions'), | |
511 | ] |
|
511 | ] | |
512 |
|
512 | |||
513 | boolean_options = build_ext.boolean_options + ['zstd', 'rust'] |
|
513 | boolean_options = build_ext.boolean_options + ['zstd', 'rust'] | |
514 | negative_opt = buildextnegops |
|
514 | negative_opt = buildextnegops | |
515 |
|
515 | |||
516 | def initialize_options(self): |
|
516 | def initialize_options(self): | |
517 | self.zstd = True |
|
517 | self.zstd = True | |
518 | self.rust = True |
|
518 | self.rust = True | |
519 |
|
519 | |||
520 | return build_ext.initialize_options(self) |
|
520 | return build_ext.initialize_options(self) | |
521 |
|
521 | |||
522 | def finalize_options(self): |
|
522 | def finalize_options(self): | |
523 | # Unless overridden by the end user, build extensions in parallel. |
|
523 | # Unless overridden by the end user, build extensions in parallel. | |
524 | # Only influences behavior on Python 3.5+. |
|
524 | # Only influences behavior on Python 3.5+. | |
525 | if getattr(self, 'parallel', None) is None: |
|
525 | if getattr(self, 'parallel', None) is None: | |
526 | self.parallel = True |
|
526 | self.parallel = True | |
527 |
|
527 | |||
528 | return build_ext.finalize_options(self) |
|
528 | return build_ext.finalize_options(self) | |
529 |
|
529 | |||
530 | def build_extensions(self): |
|
530 | def build_extensions(self): | |
531 | ruststandalones = [ |
|
531 | ruststandalones = [ | |
532 | e for e in self.extensions if isinstance(e, RustStandaloneExtension) |
|
532 | e for e in self.extensions if isinstance(e, RustStandaloneExtension) | |
533 | ] |
|
533 | ] | |
534 | self.extensions = [ |
|
534 | self.extensions = [ | |
535 | e for e in self.extensions if e not in ruststandalones |
|
535 | e for e in self.extensions if e not in ruststandalones | |
536 | ] |
|
536 | ] | |
537 | # Filter out zstd if disabled via argument. |
|
537 | # Filter out zstd if disabled via argument. | |
538 | if not self.zstd: |
|
538 | if not self.zstd: | |
539 | self.extensions = [ |
|
539 | self.extensions = [ | |
540 | e for e in self.extensions if e.name != 'mercurial.zstd' |
|
540 | e for e in self.extensions if e.name != 'mercurial.zstd' | |
541 | ] |
|
541 | ] | |
542 |
|
542 | |||
543 | # Build Rust standalone extensions if it'll be used |
|
543 | # Build Rust standalone extensions if it'll be used | |
544 | # and its build is not explicitly disabled (for external build |
|
544 | # and its build is not explicitly disabled (for external build | |
545 | # as Linux distributions would do) |
|
545 | # as Linux distributions would do) | |
546 | if self.distribution.rust and self.rust and hgrustext != 'direct-ffi': |
|
546 | if self.distribution.rust and self.rust and hgrustext != 'direct-ffi': | |
547 | for rustext in ruststandalones: |
|
547 | for rustext in ruststandalones: | |
548 | rustext.build('' if self.inplace else self.build_lib) |
|
548 | rustext.build('' if self.inplace else self.build_lib) | |
549 |
|
549 | |||
550 | return build_ext.build_extensions(self) |
|
550 | return build_ext.build_extensions(self) | |
551 |
|
551 | |||
552 | def build_extension(self, ext): |
|
552 | def build_extension(self, ext): | |
553 | if ( |
|
553 | if ( | |
554 | self.distribution.rust |
|
554 | self.distribution.rust | |
555 | and self.rust |
|
555 | and self.rust | |
556 | and isinstance(ext, RustExtension) |
|
556 | and isinstance(ext, RustExtension) | |
557 | ): |
|
557 | ): | |
558 | ext.rustbuild() |
|
558 | ext.rustbuild() | |
559 | try: |
|
559 | try: | |
560 | build_ext.build_extension(self, ext) |
|
560 | build_ext.build_extension(self, ext) | |
561 | except CCompilerError: |
|
561 | except CCompilerError: | |
562 | if not getattr(ext, 'optional', False): |
|
562 | if not getattr(ext, 'optional', False): | |
563 | raise |
|
563 | raise | |
564 | log.warn( |
|
564 | log.warn( | |
565 | "Failed to build optional extension '%s' (skipping)", ext.name |
|
565 | "Failed to build optional extension '%s' (skipping)", ext.name | |
566 | ) |
|
566 | ) | |
567 |
|
567 | |||
568 |
|
568 | |||
569 | class hgbuildscripts(build_scripts): |
|
569 | class hgbuildscripts(build_scripts): | |
570 | def run(self): |
|
570 | def run(self): | |
571 | if os.name != 'nt' or self.distribution.pure: |
|
571 | if os.name != 'nt' or self.distribution.pure: | |
572 | return build_scripts.run(self) |
|
572 | return build_scripts.run(self) | |
573 |
|
573 | |||
574 | exebuilt = False |
|
574 | exebuilt = False | |
575 | try: |
|
575 | try: | |
576 | self.run_command('build_hgexe') |
|
576 | self.run_command('build_hgexe') | |
577 | exebuilt = True |
|
577 | exebuilt = True | |
578 | except (DistutilsError, CCompilerError): |
|
578 | except (DistutilsError, CCompilerError): | |
579 | log.warn('failed to build optional hg.exe') |
|
579 | log.warn('failed to build optional hg.exe') | |
580 |
|
580 | |||
581 | if exebuilt: |
|
581 | if exebuilt: | |
582 | # Copying hg.exe to the scripts build directory ensures it is |
|
582 | # Copying hg.exe to the scripts build directory ensures it is | |
583 | # installed by the install_scripts command. |
|
583 | # installed by the install_scripts command. | |
584 | hgexecommand = self.get_finalized_command('build_hgexe') |
|
584 | hgexecommand = self.get_finalized_command('build_hgexe') | |
585 | dest = os.path.join(self.build_dir, 'hg.exe') |
|
585 | dest = os.path.join(self.build_dir, 'hg.exe') | |
586 | self.mkpath(self.build_dir) |
|
586 | self.mkpath(self.build_dir) | |
587 | self.copy_file(hgexecommand.hgexepath, dest) |
|
587 | self.copy_file(hgexecommand.hgexepath, dest) | |
588 |
|
588 | |||
589 | # Remove hg.bat because it is redundant with hg.exe. |
|
589 | # Remove hg.bat because it is redundant with hg.exe. | |
590 | self.scripts.remove('contrib/win32/hg.bat') |
|
590 | self.scripts.remove('contrib/win32/hg.bat') | |
591 |
|
591 | |||
592 | return build_scripts.run(self) |
|
592 | return build_scripts.run(self) | |
593 |
|
593 | |||
594 |
|
594 | |||
595 | class hgbuildpy(build_py): |
|
595 | class hgbuildpy(build_py): | |
596 | def finalize_options(self): |
|
596 | def finalize_options(self): | |
597 | build_py.finalize_options(self) |
|
597 | build_py.finalize_options(self) | |
598 |
|
598 | |||
599 | if self.distribution.pure: |
|
599 | if self.distribution.pure: | |
600 | self.distribution.ext_modules = [] |
|
600 | self.distribution.ext_modules = [] | |
601 | elif self.distribution.cffi: |
|
601 | elif self.distribution.cffi: | |
602 | from mercurial.cffi import ( |
|
602 | from mercurial.cffi import ( | |
603 | bdiffbuild, |
|
603 | bdiffbuild, | |
604 | mpatchbuild, |
|
604 | mpatchbuild, | |
605 | ) |
|
605 | ) | |
606 |
|
606 | |||
607 | exts = [ |
|
607 | exts = [ | |
608 | mpatchbuild.ffi.distutils_extension(), |
|
608 | mpatchbuild.ffi.distutils_extension(), | |
609 | bdiffbuild.ffi.distutils_extension(), |
|
609 | bdiffbuild.ffi.distutils_extension(), | |
610 | ] |
|
610 | ] | |
611 | # cffi modules go here |
|
611 | # cffi modules go here | |
612 | if sys.platform == 'darwin': |
|
612 | if sys.platform == 'darwin': | |
613 | from mercurial.cffi import osutilbuild |
|
613 | from mercurial.cffi import osutilbuild | |
614 |
|
614 | |||
615 | exts.append(osutilbuild.ffi.distutils_extension()) |
|
615 | exts.append(osutilbuild.ffi.distutils_extension()) | |
616 | self.distribution.ext_modules = exts |
|
616 | self.distribution.ext_modules = exts | |
617 | else: |
|
617 | else: | |
618 | h = os.path.join(get_python_inc(), 'Python.h') |
|
618 | h = os.path.join(get_python_inc(), 'Python.h') | |
619 | if not os.path.exists(h): |
|
619 | if not os.path.exists(h): | |
620 | raise SystemExit( |
|
620 | raise SystemExit( | |
621 | 'Python headers are required to build ' |
|
621 | 'Python headers are required to build ' | |
622 | 'Mercurial but weren\'t found in %s' % h |
|
622 | 'Mercurial but weren\'t found in %s' % h | |
623 | ) |
|
623 | ) | |
624 |
|
624 | |||
625 | def run(self): |
|
625 | def run(self): | |
626 | basepath = os.path.join(self.build_lib, 'mercurial') |
|
626 | basepath = os.path.join(self.build_lib, 'mercurial') | |
627 | self.mkpath(basepath) |
|
627 | self.mkpath(basepath) | |
628 |
|
628 | |||
629 | rust = self.distribution.rust |
|
629 | rust = self.distribution.rust | |
630 | if self.distribution.pure: |
|
630 | if self.distribution.pure: | |
631 | modulepolicy = 'py' |
|
631 | modulepolicy = 'py' | |
632 | elif self.build_lib == '.': |
|
632 | elif self.build_lib == '.': | |
633 | # in-place build should run without rebuilding the C and Rust extensions |
|
633 | # in-place build should run without rebuilding the C and Rust extensions | |
634 | modulepolicy = 'rust+c-allow' if rust else 'allow' |
|
634 | modulepolicy = 'rust+c-allow' if rust else 'allow' | |
635 | else: |
|
635 | else: | |
636 | modulepolicy = 'rust+c' if rust else 'c' |
|
636 | modulepolicy = 'rust+c' if rust else 'c' | |
637 |
|
637 | |||
638 | content = b''.join( |
|
638 | content = b''.join( | |
639 | [ |
|
639 | [ | |
640 | b'# this file is autogenerated by setup.py\n', |
|
640 | b'# this file is autogenerated by setup.py\n', | |
641 | b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'), |
|
641 | b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'), | |
642 | ] |
|
642 | ] | |
643 | ) |
|
643 | ) | |
644 | write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content) |
|
644 | write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content) | |
645 |
|
645 | |||
646 | build_py.run(self) |
|
646 | build_py.run(self) | |
647 |
|
647 | |||
648 |
|
648 | |||
649 | class buildhgextindex(Command): |
|
649 | class buildhgextindex(Command): | |
650 | description = 'generate prebuilt index of hgext (for frozen package)' |
|
650 | description = 'generate prebuilt index of hgext (for frozen package)' | |
651 | user_options = [] |
|
651 | user_options = [] | |
652 | _indexfilename = 'hgext/__index__.py' |
|
652 | _indexfilename = 'hgext/__index__.py' | |
653 |
|
653 | |||
654 | def initialize_options(self): |
|
654 | def initialize_options(self): | |
655 | pass |
|
655 | pass | |
656 |
|
656 | |||
657 | def finalize_options(self): |
|
657 | def finalize_options(self): | |
658 | pass |
|
658 | pass | |
659 |
|
659 | |||
660 | def run(self): |
|
660 | def run(self): | |
661 | if os.path.exists(self._indexfilename): |
|
661 | if os.path.exists(self._indexfilename): | |
662 | with open(self._indexfilename, 'w') as f: |
|
662 | with open(self._indexfilename, 'w') as f: | |
663 | f.write('# empty\n') |
|
663 | f.write('# empty\n') | |
664 |
|
664 | |||
665 | # no extensions are enabled here, so disabled() lists everything |
|
665 | # no extensions are enabled here, so disabled() lists everything | |
666 | code = ( |
|
666 | code = ( | |
667 | 'import pprint; from mercurial import extensions; ' |
|
667 | 'import pprint; from mercurial import extensions; ' | |
668 | 'ext = extensions.disabled();' |
|
668 | 'ext = extensions.disabled();' | |
669 | 'ext.pop("__index__", None);' |
|
669 | 'ext.pop("__index__", None);' | |
670 | 'pprint.pprint(ext)' |
|
670 | 'pprint.pprint(ext)' | |
671 | ) |
|
671 | ) | |
672 | returncode, out, err = runcmd( |
|
672 | returncode, out, err = runcmd( | |
673 | [sys.executable, '-c', code], localhgenv() |
|
673 | [sys.executable, '-c', code], localhgenv() | |
674 | ) |
|
674 | ) | |
675 | if err or returncode != 0: |
|
675 | if err or returncode != 0: | |
676 | raise DistutilsExecError(err) |
|
676 | raise DistutilsExecError(err) | |
677 |
|
677 | |||
678 | with open(self._indexfilename, 'wb') as f: |
|
678 | with open(self._indexfilename, 'wb') as f: | |
679 | f.write(b'# this file is autogenerated by setup.py\n') |
|
679 | f.write(b'# this file is autogenerated by setup.py\n') | |
680 | f.write(b'docs = ') |
|
680 | f.write(b'docs = ') | |
681 | f.write(out) |
|
681 | f.write(out) | |
682 |
|
682 | |||
683 |
|
683 | |||
684 | class buildhgexe(build_ext): |
|
684 | class buildhgexe(build_ext): | |
685 | description = 'compile hg.exe from mercurial/exewrapper.c' |
|
685 | description = 'compile hg.exe from mercurial/exewrapper.c' | |
686 | user_options = build_ext.user_options + [ |
|
686 | user_options = build_ext.user_options + [ | |
687 | ( |
|
687 | ( | |
688 | 'long-paths-support', |
|
688 | 'long-paths-support', | |
689 | None, |
|
689 | None, | |
690 | 'enable support for long paths on ' |
|
690 | 'enable support for long paths on ' | |
691 | 'Windows (off by default and ' |
|
691 | 'Windows (off by default and ' | |
692 | 'experimental)', |
|
692 | 'experimental)', | |
693 | ), |
|
693 | ), | |
694 | ] |
|
694 | ] | |
695 |
|
695 | |||
696 | LONG_PATHS_MANIFEST = """ |
|
696 | LONG_PATHS_MANIFEST = """ | |
697 | <?xml version="1.0" encoding="UTF-8" standalone="yes"?> |
|
697 | <?xml version="1.0" encoding="UTF-8" standalone="yes"?> | |
698 | <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> |
|
698 | <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> | |
699 | <application> |
|
699 | <application> | |
700 | <windowsSettings |
|
700 | <windowsSettings | |
701 | xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings"> |
|
701 | xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings"> | |
702 | <ws2:longPathAware>true</ws2:longPathAware> |
|
702 | <ws2:longPathAware>true</ws2:longPathAware> | |
703 | </windowsSettings> |
|
703 | </windowsSettings> | |
704 | </application> |
|
704 | </application> | |
705 | </assembly>""" |
|
705 | </assembly>""" | |
706 |
|
706 | |||
707 | def initialize_options(self): |
|
707 | def initialize_options(self): | |
708 | build_ext.initialize_options(self) |
|
708 | build_ext.initialize_options(self) | |
709 | self.long_paths_support = False |
|
709 | self.long_paths_support = False | |
710 |
|
710 | |||
711 | def build_extensions(self): |
|
711 | def build_extensions(self): | |
712 | if os.name != 'nt': |
|
712 | if os.name != 'nt': | |
713 | return |
|
713 | return | |
714 | if isinstance(self.compiler, HackedMingw32CCompiler): |
|
714 | if isinstance(self.compiler, HackedMingw32CCompiler): | |
715 | self.compiler.compiler_so = self.compiler.compiler # no -mdll |
|
715 | self.compiler.compiler_so = self.compiler.compiler # no -mdll | |
716 | self.compiler.dll_libraries = [] # no -lmsrvc90 |
|
716 | self.compiler.dll_libraries = [] # no -lmsrvc90 | |
717 |
|
717 | |||
718 | pythonlib = None |
|
718 | pythonlib = None | |
719 |
|
719 | |||
720 | if getattr(sys, 'dllhandle', None): |
|
720 | if getattr(sys, 'dllhandle', None): | |
721 | # Different Python installs can have different Python library |
|
721 | # Different Python installs can have different Python library | |
722 | # names. e.g. the official CPython distribution uses pythonXY.dll |
|
722 | # names. e.g. the official CPython distribution uses pythonXY.dll | |
723 | # and MinGW uses libpythonX.Y.dll. |
|
723 | # and MinGW uses libpythonX.Y.dll. | |
724 | _kernel32 = ctypes.windll.kernel32 |
|
724 | _kernel32 = ctypes.windll.kernel32 | |
725 | _kernel32.GetModuleFileNameA.argtypes = [ |
|
725 | _kernel32.GetModuleFileNameA.argtypes = [ | |
726 | ctypes.c_void_p, |
|
726 | ctypes.c_void_p, | |
727 | ctypes.c_void_p, |
|
727 | ctypes.c_void_p, | |
728 | ctypes.c_ulong, |
|
728 | ctypes.c_ulong, | |
729 | ] |
|
729 | ] | |
730 | _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong |
|
730 | _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong | |
731 | size = 1000 |
|
731 | size = 1000 | |
732 | buf = ctypes.create_string_buffer(size + 1) |
|
732 | buf = ctypes.create_string_buffer(size + 1) | |
733 | filelen = _kernel32.GetModuleFileNameA( |
|
733 | filelen = _kernel32.GetModuleFileNameA( | |
734 | sys.dllhandle, ctypes.byref(buf), size |
|
734 | sys.dllhandle, ctypes.byref(buf), size | |
735 | ) |
|
735 | ) | |
736 |
|
736 | |||
737 | if filelen > 0 and filelen != size: |
|
737 | if filelen > 0 and filelen != size: | |
738 | dllbasename = os.path.basename(buf.value) |
|
738 | dllbasename = os.path.basename(buf.value) | |
739 | if not dllbasename.lower().endswith(b'.dll'): |
|
739 | if not dllbasename.lower().endswith(b'.dll'): | |
740 | raise SystemExit( |
|
740 | raise SystemExit( | |
741 | 'Python DLL does not end with .dll: %s' % dllbasename |
|
741 | 'Python DLL does not end with .dll: %s' % dllbasename | |
742 | ) |
|
742 | ) | |
743 | pythonlib = dllbasename[:-4] |
|
743 | pythonlib = dllbasename[:-4] | |
744 |
|
744 | |||
745 | if not pythonlib: |
|
745 | if not pythonlib: | |
746 | log.warn( |
|
746 | log.warn( | |
747 | 'could not determine Python DLL filename; assuming pythonXY' |
|
747 | 'could not determine Python DLL filename; assuming pythonXY' | |
748 | ) |
|
748 | ) | |
749 |
|
749 | |||
750 | hv = sys.hexversion |
|
750 | hv = sys.hexversion | |
751 | pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF) |
|
751 | pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF) | |
752 |
|
752 | |||
753 | log.info('using %s as Python library name' % pythonlib) |
|
753 | log.info('using %s as Python library name' % pythonlib) | |
754 | with open('mercurial/hgpythonlib.h', 'wb') as f: |
|
754 | with open('mercurial/hgpythonlib.h', 'wb') as f: | |
755 | f.write(b'/* this file is autogenerated by setup.py */\n') |
|
755 | f.write(b'/* this file is autogenerated by setup.py */\n') | |
756 | f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib) |
|
756 | f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib) | |
757 |
|
757 | |||
758 | macros = None |
|
758 | macros = None | |
759 | if sys.version_info[0] >= 3: |
|
759 | if sys.version_info[0] >= 3: | |
760 | macros = [('_UNICODE', None), ('UNICODE', None)] |
|
760 | macros = [('_UNICODE', None), ('UNICODE', None)] | |
761 |
|
761 | |||
762 | objects = self.compiler.compile( |
|
762 | objects = self.compiler.compile( | |
763 | ['mercurial/exewrapper.c'], |
|
763 | ['mercurial/exewrapper.c'], | |
764 | output_dir=self.build_temp, |
|
764 | output_dir=self.build_temp, | |
765 | macros=macros, |
|
765 | macros=macros, | |
766 | ) |
|
766 | ) | |
767 | dir = os.path.dirname(self.get_ext_fullpath('dummy')) |
|
767 | dir = os.path.dirname(self.get_ext_fullpath('dummy')) | |
768 | self.hgtarget = os.path.join(dir, 'hg') |
|
768 | self.hgtarget = os.path.join(dir, 'hg') | |
769 | self.compiler.link_executable( |
|
769 | self.compiler.link_executable( | |
770 | objects, self.hgtarget, libraries=[], output_dir=self.build_temp |
|
770 | objects, self.hgtarget, libraries=[], output_dir=self.build_temp | |
771 | ) |
|
771 | ) | |
772 | if self.long_paths_support: |
|
772 | if self.long_paths_support: | |
773 | self.addlongpathsmanifest() |
|
773 | self.addlongpathsmanifest() | |
774 |
|
774 | |||
775 | def addlongpathsmanifest(self): |
|
775 | def addlongpathsmanifest(self): | |
776 | r"""Add manifest pieces so that hg.exe understands long paths |
|
776 | r"""Add manifest pieces so that hg.exe understands long paths | |
777 |
|
777 | |||
778 | This is an EXPERIMENTAL feature, use with care. |
|
778 | This is an EXPERIMENTAL feature, use with care. | |
779 | To enable long paths support, one needs to do two things: |
|
779 | To enable long paths support, one needs to do two things: | |
780 | - build Mercurial with --long-paths-support option |
|
780 | - build Mercurial with --long-paths-support option | |
781 | - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\ |
|
781 | - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\ | |
782 | LongPathsEnabled to have value 1. |
|
782 | LongPathsEnabled to have value 1. | |
783 |
|
783 | |||
784 | Please ignore 'warning 81010002: Unrecognized Element "longPathAware"'; |
|
784 | Please ignore 'warning 81010002: Unrecognized Element "longPathAware"'; | |
785 | it happens because Mercurial uses mt.exe circa 2008, which is not |
|
785 | it happens because Mercurial uses mt.exe circa 2008, which is not | |
786 | yet aware of long paths support in the manifest (I think so at least). |
|
786 | yet aware of long paths support in the manifest (I think so at least). | |
787 | This does not stop mt.exe from embedding/merging the XML properly. |
|
787 | This does not stop mt.exe from embedding/merging the XML properly. | |
788 |
|
788 | |||
789 | Why resource #1 should be used for .exe manifests, I don't know; I |
|
789 | Why resource #1 should be used for .exe manifests, I don't know; I | |
790 | wasn't able to find an explanation for mortals. But it seems to work. |
|
790 | wasn't able to find an explanation for mortals. But it seems to work. | |
791 | """ |
|
791 | """ | |
792 | exefname = self.compiler.executable_filename(self.hgtarget) |
|
792 | exefname = self.compiler.executable_filename(self.hgtarget) | |
793 | fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest') |
|
793 | fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest') | |
794 | os.close(fdauto) |
|
794 | os.close(fdauto) | |
795 | with open(manfname, 'w') as f: |
|
795 | with open(manfname, 'w') as f: | |
796 | f.write(self.LONG_PATHS_MANIFEST) |
|
796 | f.write(self.LONG_PATHS_MANIFEST) | |
797 | log.info("long paths manifest is written to '%s'" % manfname) |
|
797 | log.info("long paths manifest is written to '%s'" % manfname) | |
798 | inputresource = '-inputresource:%s;#1' % exefname |
|
798 | inputresource = '-inputresource:%s;#1' % exefname | |
799 | outputresource = '-outputresource:%s;#1' % exefname |
|
799 | outputresource = '-outputresource:%s;#1' % exefname | |
800 | log.info("running mt.exe to update hg.exe's manifest in-place") |
|
800 | log.info("running mt.exe to update hg.exe's manifest in-place") | |
801 | # supplying both -manifest and -inputresource to mt.exe makes |
|
801 | # supplying both -manifest and -inputresource to mt.exe makes | |
802 | # it merge the embedded and supplied manifests in the -outputresource |
|
802 | # it merge the embedded and supplied manifests in the -outputresource | |
803 | self.spawn( |
|
803 | self.spawn( | |
804 | [ |
|
804 | [ | |
805 | 'mt.exe', |
|
805 | 'mt.exe', | |
806 | '-nologo', |
|
806 | '-nologo', | |
807 | '-manifest', |
|
807 | '-manifest', | |
808 | manfname, |
|
808 | manfname, | |
809 | inputresource, |
|
809 | inputresource, | |
810 | outputresource, |
|
810 | outputresource, | |
811 | ] |
|
811 | ] | |
812 | ) |
|
812 | ) | |
813 | log.info("done updating hg.exe's manifest") |
|
813 | log.info("done updating hg.exe's manifest") | |
814 | os.remove(manfname) |
|
814 | os.remove(manfname) | |
815 |
|
815 | |||
816 | @property |
|
816 | @property | |
817 | def hgexepath(self): |
|
817 | def hgexepath(self): | |
818 | dir = os.path.dirname(self.get_ext_fullpath('dummy')) |
|
818 | dir = os.path.dirname(self.get_ext_fullpath('dummy')) | |
819 | return os.path.join(self.build_temp, dir, 'hg.exe') |
|
819 | return os.path.join(self.build_temp, dir, 'hg.exe') | |
820 |
|
820 | |||
821 |
|
821 | |||
822 | class hgbuilddoc(Command): |
|
822 | class hgbuilddoc(Command): | |
823 | description = 'build documentation' |
|
823 | description = 'build documentation' | |
824 | user_options = [ |
|
824 | user_options = [ | |
825 | ('man', None, 'generate man pages'), |
|
825 | ('man', None, 'generate man pages'), | |
826 | ('html', None, 'generate html pages'), |
|
826 | ('html', None, 'generate html pages'), | |
827 | ] |
|
827 | ] | |
828 |
|
828 | |||
829 | def initialize_options(self): |
|
829 | def initialize_options(self): | |
830 | self.man = None |
|
830 | self.man = None | |
831 | self.html = None |
|
831 | self.html = None | |
832 |
|
832 | |||
833 | def finalize_options(self): |
|
833 | def finalize_options(self): | |
834 | # If --man or --html are set, only generate what we're told to. |
|
834 | # If --man or --html are set, only generate what we're told to. | |
835 | # Otherwise generate everything. |
|
835 | # Otherwise generate everything. | |
836 | have_subset = self.man is not None or self.html is not None |
|
836 | have_subset = self.man is not None or self.html is not None | |
837 |
|
837 | |||
838 | if have_subset: |
|
838 | if have_subset: | |
839 | self.man = True if self.man else False |
|
839 | self.man = True if self.man else False | |
840 | self.html = True if self.html else False |
|
840 | self.html = True if self.html else False | |
841 | else: |
|
841 | else: | |
842 | self.man = True |
|
842 | self.man = True | |
843 | self.html = True |
|
843 | self.html = True | |
844 |
|
844 | |||
845 | def run(self): |
|
845 | def run(self): | |
846 | def normalizecrlf(p): |
|
846 | def normalizecrlf(p): | |
847 | with open(p, 'rb') as fh: |
|
847 | with open(p, 'rb') as fh: | |
848 | orig = fh.read() |
|
848 | orig = fh.read() | |
849 |
|
849 | |||
850 | if b'\r\n' not in orig: |
|
850 | if b'\r\n' not in orig: | |
851 | return |
|
851 | return | |
852 |
|
852 | |||
853 | log.info('normalizing %s to LF line endings' % p) |
|
853 | log.info('normalizing %s to LF line endings' % p) | |
854 | with open(p, 'wb') as fh: |
|
854 | with open(p, 'wb') as fh: | |
855 | fh.write(orig.replace(b'\r\n', b'\n')) |
|
855 | fh.write(orig.replace(b'\r\n', b'\n')) | |
856 |
|
856 | |||
857 | def gentxt(root): |
|
857 | def gentxt(root): | |
858 | txt = 'doc/%s.txt' % root |
|
858 | txt = 'doc/%s.txt' % root | |
859 | log.info('generating %s' % txt) |
|
859 | log.info('generating %s' % txt) | |
860 | res, out, err = runcmd( |
|
860 | res, out, err = runcmd( | |
861 | [sys.executable, 'gendoc.py', root], os.environ, cwd='doc' |
|
861 | [sys.executable, 'gendoc.py', root], os.environ, cwd='doc' | |
862 | ) |
|
862 | ) | |
863 | if res: |
|
863 | if res: | |
864 | raise SystemExit( |
|
864 | raise SystemExit( | |
865 | 'error running gendoc.py: %s' % '\n'.join([out, err]) |
|
865 | 'error running gendoc.py: %s' % '\n'.join([out, err]) | |
866 | ) |
|
866 | ) | |
867 |
|
867 | |||
868 | with open(txt, 'wb') as fh: |
|
868 | with open(txt, 'wb') as fh: | |
869 | fh.write(out) |
|
869 | fh.write(out) | |
870 |
|
870 | |||
871 | def gengendoc(root): |
|
871 | def gengendoc(root): | |
872 | gendoc = 'doc/%s.gendoc.txt' % root |
|
872 | gendoc = 'doc/%s.gendoc.txt' % root | |
873 |
|
873 | |||
874 | log.info('generating %s' % gendoc) |
|
874 | log.info('generating %s' % gendoc) | |
875 | res, out, err = runcmd( |
|
875 | res, out, err = runcmd( | |
876 | [sys.executable, 'gendoc.py', '%s.gendoc' % root], |
|
876 | [sys.executable, 'gendoc.py', '%s.gendoc' % root], | |
877 | os.environ, |
|
877 | os.environ, | |
878 | cwd='doc', |
|
878 | cwd='doc', | |
879 | ) |
|
879 | ) | |
880 | if res: |
|
880 | if res: | |
881 | raise SystemExit( |
|
881 | raise SystemExit( | |
882 | 'error running gendoc: %s' % '\n'.join([out, err]) |
|
882 | 'error running gendoc: %s' % '\n'.join([out, err]) | |
883 | ) |
|
883 | ) | |
884 |
|
884 | |||
885 | with open(gendoc, 'wb') as fh: |
|
885 | with open(gendoc, 'wb') as fh: | |
886 | fh.write(out) |
|
886 | fh.write(out) | |
887 |
|
887 | |||
888 | def genman(root): |
|
888 | def genman(root): | |
889 | log.info('generating doc/%s' % root) |
|
889 | log.info('generating doc/%s' % root) | |
890 | res, out, err = runcmd( |
|
890 | res, out, err = runcmd( | |
891 | [ |
|
891 | [ | |
892 | sys.executable, |
|
892 | sys.executable, | |
893 | 'runrst', |
|
893 | 'runrst', | |
894 | 'hgmanpage', |
|
894 | 'hgmanpage', | |
895 | '--halt', |
|
895 | '--halt', | |
896 | 'warning', |
|
896 | 'warning', | |
897 | '--strip-elements-with-class', |
|
897 | '--strip-elements-with-class', | |
898 | 'htmlonly', |
|
898 | 'htmlonly', | |
899 | '%s.txt' % root, |
|
899 | '%s.txt' % root, | |
900 | root, |
|
900 | root, | |
901 | ], |
|
901 | ], | |
902 | os.environ, |
|
902 | os.environ, | |
903 | cwd='doc', |
|
903 | cwd='doc', | |
904 | ) |
|
904 | ) | |
905 | if res: |
|
905 | if res: | |
906 | raise SystemExit( |
|
906 | raise SystemExit( | |
907 | 'error running runrst: %s' % '\n'.join([out, err]) |
|
907 | 'error running runrst: %s' % '\n'.join([out, err]) | |
908 | ) |
|
908 | ) | |
909 |
|
909 | |||
910 | normalizecrlf('doc/%s' % root) |
|
910 | normalizecrlf('doc/%s' % root) | |
911 |
|
911 | |||
912 | def genhtml(root): |
|
912 | def genhtml(root): | |
913 | log.info('generating doc/%s.html' % root) |
|
913 | log.info('generating doc/%s.html' % root) | |
914 | res, out, err = runcmd( |
|
914 | res, out, err = runcmd( | |
915 | [ |
|
915 | [ | |
916 | sys.executable, |
|
916 | sys.executable, | |
917 | 'runrst', |
|
917 | 'runrst', | |
918 | 'html', |
|
918 | 'html', | |
919 | '--halt', |
|
919 | '--halt', | |
920 | 'warning', |
|
920 | 'warning', | |
921 | '--link-stylesheet', |
|
921 | '--link-stylesheet', | |
922 | '--stylesheet-path', |
|
922 | '--stylesheet-path', | |
923 | 'style.css', |
|
923 | 'style.css', | |
924 | '%s.txt' % root, |
|
924 | '%s.txt' % root, | |
925 | '%s.html' % root, |
|
925 | '%s.html' % root, | |
926 | ], |
|
926 | ], | |
927 | os.environ, |
|
927 | os.environ, | |
928 | cwd='doc', |
|
928 | cwd='doc', | |
929 | ) |
|
929 | ) | |
930 | if res: |
|
930 | if res: | |
931 | raise SystemExit( |
|
931 | raise SystemExit( | |
932 | 'error running runrst: %s' % '\n'.join([out, err]) |
|
932 | 'error running runrst: %s' % '\n'.join([out, err]) | |
933 | ) |
|
933 | ) | |
934 |
|
934 | |||
935 | normalizecrlf('doc/%s.html' % root) |
|
935 | normalizecrlf('doc/%s.html' % root) | |
936 |
|
936 | |||
937 | # This logic is duplicated in doc/Makefile. |
|
937 | # This logic is duplicated in doc/Makefile. | |
938 | sources = { |
|
938 | sources = { | |
939 | f |
|
939 | f | |
940 | for f in os.listdir('mercurial/helptext') |
|
940 | for f in os.listdir('mercurial/helptext') | |
941 | if re.search(r'[0-9]\.txt$', f) |
|
941 | if re.search(r'[0-9]\.txt$', f) | |
942 | } |
|
942 | } | |
943 |
|
943 | |||
944 | # common.txt is a one-off. |
|
944 | # common.txt is a one-off. | |
945 | gentxt('common') |
|
945 | gentxt('common') | |
946 |
|
946 | |||
947 | for source in sorted(sources): |
|
947 | for source in sorted(sources): | |
948 | assert source[-4:] == '.txt' |
|
948 | assert source[-4:] == '.txt' | |
949 | root = source[:-4] |
|
949 | root = source[:-4] | |
950 |
|
950 | |||
951 | gentxt(root) |
|
951 | gentxt(root) | |
952 | gengendoc(root) |
|
952 | gengendoc(root) | |
953 |
|
953 | |||
954 | if self.man: |
|
954 | if self.man: | |
955 | genman(root) |
|
955 | genman(root) | |
956 | if self.html: |
|
956 | if self.html: | |
957 | genhtml(root) |
|
957 | genhtml(root) | |
958 |
|
958 | |||
959 |
|
959 | |||
960 | class hginstall(install): |
|
960 | class hginstall(install): | |
961 |
|
961 | |||
962 | user_options = install.user_options + [ |
|
962 | user_options = install.user_options + [ | |
963 | ( |
|
963 | ( | |
964 | 'old-and-unmanageable', |
|
964 | 'old-and-unmanageable', | |
965 | None, |
|
965 | None, | |
966 | 'noop, present for eggless setuptools compat', |
|
966 | 'noop, present for eggless setuptools compat', | |
967 | ), |
|
967 | ), | |
968 | ( |
|
968 | ( | |
969 | 'single-version-externally-managed', |
|
969 | 'single-version-externally-managed', | |
970 | None, |
|
970 | None, | |
971 | 'noop, present for eggless setuptools compat', |
|
971 | 'noop, present for eggless setuptools compat', | |
972 | ), |
|
972 | ), | |
973 | ] |
|
973 | ] | |
974 |
|
974 | |||
975 | # Also helps setuptools not be sad while we refuse to create eggs. |
|
975 | # Also helps setuptools not be sad while we refuse to create eggs. | |
976 | single_version_externally_managed = True |
|
976 | single_version_externally_managed = True | |
977 |
|
977 | |||
978 | def get_sub_commands(self): |
|
978 | def get_sub_commands(self): | |
979 | # Screen out egg related commands to prevent egg generation. But allow |
|
979 | # Screen out egg related commands to prevent egg generation. But allow | |
980 | # mercurial.egg-info generation, since that is part of modern |
|
980 | # mercurial.egg-info generation, since that is part of modern | |
981 | # packaging. |
|
981 | # packaging. | |
982 | excl = {'bdist_egg'} |
|
982 | excl = {'bdist_egg'} | |
983 | return filter(lambda x: x not in excl, install.get_sub_commands(self)) |
|
983 | return filter(lambda x: x not in excl, install.get_sub_commands(self)) | |
984 |
|
984 | |||
985 |
|
985 | |||
986 | class hginstalllib(install_lib): |
|
986 | class hginstalllib(install_lib): | |
987 | ''' |
|
987 | ''' | |
988 | This is a specialization of install_lib that replaces the copy_file used |
|
988 | This is a specialization of install_lib that replaces the copy_file used | |
989 | there so that it supports setting the mode of files after copying them, |
|
989 | there so that it supports setting the mode of files after copying them, | |
990 | instead of just preserving the mode that the files originally had. If your |
|
990 | instead of just preserving the mode that the files originally had. If your | |
991 | system has a umask of something like 027, preserving the permissions when |
|
991 | system has a umask of something like 027, preserving the permissions when | |
992 | copying will lead to a broken install. |
|
992 | copying will lead to a broken install. | |
993 |
|
993 | |||
994 | Note that just passing keep_permissions=False to copy_file would be |
|
994 | Note that just passing keep_permissions=False to copy_file would be | |
995 | insufficient, as it might still be applying a umask. |
|
995 | insufficient, as it might still be applying a umask. | |
996 | ''' |
|
996 | ''' | |
997 |
|
997 | |||
998 | def run(self): |
|
998 | def run(self): | |
999 | realcopyfile = file_util.copy_file |
|
999 | realcopyfile = file_util.copy_file | |
1000 |
|
1000 | |||
1001 | def copyfileandsetmode(*args, **kwargs): |
|
1001 | def copyfileandsetmode(*args, **kwargs): | |
1002 | src, dst = args[0], args[1] |
|
1002 | src, dst = args[0], args[1] | |
1003 | dst, copied = realcopyfile(*args, **kwargs) |
|
1003 | dst, copied = realcopyfile(*args, **kwargs) | |
1004 | if copied: |
|
1004 | if copied: | |
1005 | st = os.stat(src) |
|
1005 | st = os.stat(src) | |
1006 | # Persist executable bit (apply it to group and other if user |
|
1006 | # Persist executable bit (apply it to group and other if user | |
1007 | # has it) |
|
1007 | # has it) | |
1008 | if st[stat.ST_MODE] & stat.S_IXUSR: |
|
1008 | if st[stat.ST_MODE] & stat.S_IXUSR: | |
1009 | setmode = int('0755', 8) |
|
1009 | setmode = int('0755', 8) | |
1010 | else: |
|
1010 | else: | |
1011 | setmode = int('0644', 8) |
|
1011 | setmode = int('0644', 8) | |
1012 | m = stat.S_IMODE(st[stat.ST_MODE]) |
|
1012 | m = stat.S_IMODE(st[stat.ST_MODE]) | |
1013 | m = (m & ~int('0777', 8)) | setmode |
|
1013 | m = (m & ~int('0777', 8)) | setmode | |
1014 | os.chmod(dst, m) |
|
1014 | os.chmod(dst, m) | |
1015 |
|
1015 | |||
1016 | file_util.copy_file = copyfileandsetmode |
|
1016 | file_util.copy_file = copyfileandsetmode | |
1017 | try: |
|
1017 | try: | |
1018 | install_lib.run(self) |
|
1018 | install_lib.run(self) | |
1019 | finally: |
|
1019 | finally: | |
1020 | file_util.copy_file = realcopyfile |
|
1020 | file_util.copy_file = realcopyfile | |
1021 |
|
1021 | |||
1022 |
|
1022 | |||
1023 | class hginstallscripts(install_scripts): |
|
1023 | class hginstallscripts(install_scripts): | |
1024 | ''' |
|
1024 | ''' | |
1025 | This is a specialization of install_scripts that replaces the @LIBDIR@ with |
|
1025 | This is a specialization of install_scripts that replaces the @LIBDIR@ with | |
1026 | the configured directory for modules. If possible, the path is made relative |
|
1026 | the configured directory for modules. If possible, the path is made relative | |
1027 | to the directory for scripts. |
|
1027 | to the directory for scripts. | |
1028 | ''' |
|
1028 | ''' | |
1029 |
|
1029 | |||
1030 | def initialize_options(self): |
|
1030 | def initialize_options(self): | |
1031 | install_scripts.initialize_options(self) |
|
1031 | install_scripts.initialize_options(self) | |
1032 |
|
1032 | |||
1033 | self.install_lib = None |
|
1033 | self.install_lib = None | |
1034 |
|
1034 | |||
1035 | def finalize_options(self): |
|
1035 | def finalize_options(self): | |
1036 | install_scripts.finalize_options(self) |
|
1036 | install_scripts.finalize_options(self) | |
1037 | self.set_undefined_options('install', ('install_lib', 'install_lib')) |
|
1037 | self.set_undefined_options('install', ('install_lib', 'install_lib')) | |
1038 |
|
1038 | |||
1039 | def run(self): |
|
1039 | def run(self): | |
1040 | install_scripts.run(self) |
|
1040 | install_scripts.run(self) | |
1041 |
|
1041 | |||
1042 | # It only makes sense to replace @LIBDIR@ with the install path if |
|
1042 | # It only makes sense to replace @LIBDIR@ with the install path if | |
1043 | # the install path is known. For wheels, the logic below calculates |
|
1043 | # the install path is known. For wheels, the logic below calculates | |
1044 | # the libdir to be "../..". This is because the internal layout of a |
|
1044 | # the libdir to be "../..". This is because the internal layout of a | |
1045 | # wheel archive looks like: |
|
1045 | # wheel archive looks like: | |
1046 | # |
|
1046 | # | |
1047 | # mercurial-3.6.1.data/scripts/hg |
|
1047 | # mercurial-3.6.1.data/scripts/hg | |
1048 | # mercurial/__init__.py |
|
1048 | # mercurial/__init__.py | |
1049 | # |
|
1049 | # | |
1050 | # When installing wheels, the subdirectories of the "<pkg>.data" |
|
1050 | # When installing wheels, the subdirectories of the "<pkg>.data" | |
1051 | # directory are translated to system local paths and files therein |
|
1051 | # directory are translated to system local paths and files therein | |
1052 | # are copied in place. The mercurial/* files are installed into the |
|
1052 | # are copied in place. The mercurial/* files are installed into the | |
1053 | # site-packages directory. However, the site-packages directory |
|
1053 | # site-packages directory. However, the site-packages directory | |
1054 | # isn't known until wheel install time. This means we have no clue |
|
1054 | # isn't known until wheel install time. This means we have no clue | |
1055 | # at wheel generation time what the installed site-packages directory |
|
1055 | # at wheel generation time what the installed site-packages directory | |
1056 | # will be. And, wheels don't appear to provide the ability to register |
|
1056 | # will be. And, wheels don't appear to provide the ability to register | |
1057 | # custom code to run during wheel installation. This all means that |
|
1057 | # custom code to run during wheel installation. This all means that | |
1058 | # we can't reliably set the libdir in wheels: the default behavior |
|
1058 | # we can't reliably set the libdir in wheels: the default behavior | |
1059 | # of looking in sys.path must do. |
|
1059 | # of looking in sys.path must do. | |
1060 |
|
1060 | |||
1061 | if ( |
|
1061 | if ( | |
1062 | os.path.splitdrive(self.install_dir)[0] |
|
1062 | os.path.splitdrive(self.install_dir)[0] | |
1063 | != os.path.splitdrive(self.install_lib)[0] |
|
1063 | != os.path.splitdrive(self.install_lib)[0] | |
1064 | ): |
|
1064 | ): | |
1065 | # can't make relative paths from one drive to another, so use an |
|
1065 | # can't make relative paths from one drive to another, so use an | |
1066 | # absolute path instead |
|
1066 | # absolute path instead | |
1067 | libdir = self.install_lib |
|
1067 | libdir = self.install_lib | |
1068 | else: |
|
1068 | else: | |
1069 | libdir = os.path.relpath(self.install_lib, self.install_dir) |
|
1069 | libdir = os.path.relpath(self.install_lib, self.install_dir) | |
1070 |
|
1070 | |||
1071 | for outfile in self.outfiles: |
|
1071 | for outfile in self.outfiles: | |
1072 | with open(outfile, 'rb') as fp: |
|
1072 | with open(outfile, 'rb') as fp: | |
1073 | data = fp.read() |
|
1073 | data = fp.read() | |
1074 |
|
1074 | |||
1075 | # skip binary files |
|
1075 | # skip binary files | |
1076 | if b'\0' in data: |
|
1076 | if b'\0' in data: | |
1077 | continue |
|
1077 | continue | |
1078 |
|
1078 | |||
1079 | # During local installs, the shebang will be rewritten to the final |
|
1079 | # During local installs, the shebang will be rewritten to the final | |
1080 | # install path. During wheel packaging, the shebang has a special |
|
1080 | # install path. During wheel packaging, the shebang has a special | |
1081 | # value. |
|
1081 | # value. | |
1082 | if data.startswith(b'#!python'): |
|
1082 | if data.startswith(b'#!python'): | |
1083 | log.info( |
|
1083 | log.info( | |
1084 | 'not rewriting @LIBDIR@ in %s because install path ' |
|
1084 | 'not rewriting @LIBDIR@ in %s because install path ' | |
1085 | 'not known' % outfile |
|
1085 | 'not known' % outfile | |
1086 | ) |
|
1086 | ) | |
1087 | continue |
|
1087 | continue | |
1088 |
|
1088 | |||
1089 | data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape)) |
|
1089 | data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape)) | |
1090 | with open(outfile, 'wb') as fp: |
|
1090 | with open(outfile, 'wb') as fp: | |
1091 | fp.write(data) |
|
1091 | fp.write(data) | |
1092 |
|
1092 | |||
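The comment in hginstallscripts.run above notes that, for a wheel, the computed libdir comes out as "../..", because the hg script is unpacked from "<pkg>.data/scripts/" while the mercurial package sits at the archive root. A quick sketch of that path arithmetic (illustrative paths only, not part of this diff):

    import os

    # layout inside a wheel archive, as described in the comment above
    install_dir = 'mercurial-3.6.1.data/scripts'  # where the hg script lives
    install_lib = '.'                             # mercurial/ sits at the archive root

    print(os.path.relpath(install_lib, install_dir))  # prints ../..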
1093 |
|
1093 | |||
1094 | # virtualenv installs custom distutils/__init__.py and |
|
1094 | # virtualenv installs custom distutils/__init__.py and | |
1095 | # distutils/distutils.cfg files which essentially proxy back to the |
|
1095 | # distutils/distutils.cfg files which essentially proxy back to the | |
1096 | # "real" distutils in the main Python install. The presence of this |
|
1096 | # "real" distutils in the main Python install. The presence of this | |
1097 | # directory causes py2exe to pick up the "hacked" distutils package |
|
1097 | # directory causes py2exe to pick up the "hacked" distutils package | |
1098 | # from the virtualenv and "import distutils" will fail from the py2exe |
|
1098 | # from the virtualenv and "import distutils" will fail from the py2exe | |
1099 | # build because the "real" distutils files can't be located. |
|
1099 | # build because the "real" distutils files can't be located. | |
1100 | # |
|
1100 | # | |
1101 | # We work around this by monkeypatching the py2exe code finding Python |
|
1101 | # We work around this by monkeypatching the py2exe code finding Python | |
1102 | # modules to replace the found virtualenv distutils modules with the |
|
1102 | # modules to replace the found virtualenv distutils modules with the | |
1103 | # original versions via filesystem scanning. This is a bit hacky. But |
|
1103 | # original versions via filesystem scanning. This is a bit hacky. But | |
1104 | # it allows us to use virtualenvs for py2exe packaging, which is more |
|
1104 | # it allows us to use virtualenvs for py2exe packaging, which is more | |
1105 | # deterministic and reproducible. |
|
1105 | # deterministic and reproducible. | |
1106 | # |
|
1106 | # | |
1107 | # It's worth noting that the common StackOverflow suggestions for this |
|
1107 | # It's worth noting that the common StackOverflow suggestions for this | |
1108 | # problem involve copying the original distutils files into the |
|
1108 | # problem involve copying the original distutils files into the | |
1109 | # virtualenv or into the staging directory after setup() is invoked. |
|
1109 | # virtualenv or into the staging directory after setup() is invoked. | |
1110 | # The former is very brittle and can easily break setup(). Our hacking |
|
1110 | # The former is very brittle and can easily break setup(). Our hacking | |
1111 | # of the found modules routine has a similar result as copying the files |
|
1111 | # of the found modules routine has a similar result as copying the files | |
1112 | # manually. But it makes fewer assumptions about how py2exe works and |
|
1112 | # manually. But it makes fewer assumptions about how py2exe works and | |
1113 | # is less brittle. |
|
1113 | # is less brittle. | |
1114 |
|
1114 | |||
1115 | # This only catches virtualenvs made with virtualenv (as opposed to |
|
1115 | # This only catches virtualenvs made with virtualenv (as opposed to | |
1116 | # venv, which is likely what Python 3 uses). |
|
1116 | # venv, which is likely what Python 3 uses). | |
1117 | py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None |
|
1117 | py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None | |
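As the comment above notes, this check only catches environments created with virtualenv, which expose the original interpreter prefix as sys.real_prefix; PEP 405 venvs record it in sys.base_prefix instead. A minimal sketch of the distinction (the helper names are illustrative, not part of this diff):

    import sys

    def in_virtualenv():
        # classic virtualenv records the real interpreter prefix here
        return getattr(sys, 'real_prefix', None) is not None

    def in_venv():
        # PEP 405 venvs keep the original install location in sys.base_prefix
        return getattr(sys, 'base_prefix', sys.prefix) != sys.prefix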
1118 |
|
1118 | |||
1119 | if py2exehacked: |
|
1119 | if py2exehacked: | |
1120 | from distutils.command.py2exe import py2exe as buildpy2exe |
|
1120 | from distutils.command.py2exe import py2exe as buildpy2exe | |
1121 | from py2exe.mf import Module as py2exemodule |
|
1121 | from py2exe.mf import Module as py2exemodule | |
1122 |
|
1122 | |||
1123 | class hgbuildpy2exe(buildpy2exe): |
|
1123 | class hgbuildpy2exe(buildpy2exe): | |
1124 | def find_needed_modules(self, mf, files, modules): |
|
1124 | def find_needed_modules(self, mf, files, modules): | |
1125 | res = buildpy2exe.find_needed_modules(self, mf, files, modules) |
|
1125 | res = buildpy2exe.find_needed_modules(self, mf, files, modules) | |
1126 |
|
1126 | |||
1127 | # Replace virtualenv's distutils modules with the real ones. |
|
1127 | # Replace virtualenv's distutils modules with the real ones. | |
1128 | modules = {} |
|
1128 | modules = {} | |
1129 | for k, v in res.modules.items(): |
|
1129 | for k, v in res.modules.items(): | |
1130 | if k != 'distutils' and not k.startswith('distutils.'): |
|
1130 | if k != 'distutils' and not k.startswith('distutils.'): | |
1131 | modules[k] = v |
|
1131 | modules[k] = v | |
1132 |
|
1132 | |||
1133 | res.modules = modules |
|
1133 | res.modules = modules | |
1134 |
|
1134 | |||
1135 | import opcode |
|
1135 | import opcode | |
1136 |
|
1136 | |||
1137 | distutilsreal = os.path.join( |
|
1137 | distutilsreal = os.path.join( | |
1138 | os.path.dirname(opcode.__file__), 'distutils' |
|
1138 | os.path.dirname(opcode.__file__), 'distutils' | |
1139 | ) |
|
1139 | ) | |
1140 |
|
1140 | |||
1141 | for root, dirs, files in os.walk(distutilsreal): |
|
1141 | for root, dirs, files in os.walk(distutilsreal): | |
1142 | for f in sorted(files): |
|
1142 | for f in sorted(files): | |
1143 | if not f.endswith('.py'): |
|
1143 | if not f.endswith('.py'): | |
1144 | continue |
|
1144 | continue | |
1145 |
|
1145 | |||
1146 | full = os.path.join(root, f) |
|
1146 | full = os.path.join(root, f) | |
1147 |
|
1147 | |||
1148 | parents = ['distutils'] |
|
1148 | parents = ['distutils'] | |
1149 |
|
1149 | |||
1150 | if root != distutilsreal: |
|
1150 | if root != distutilsreal: | |
1151 | rel = os.path.relpath(root, distutilsreal) |
|
1151 | rel = os.path.relpath(root, distutilsreal) | |
1152 | parents.extend(p for p in rel.split(os.sep)) |
|
1152 | parents.extend(p for p in rel.split(os.sep)) | |
1153 |
|
1153 | |||
1154 | modname = '%s.%s' % ('.'.join(parents), f[:-3]) |
|
1154 | modname = '%s.%s' % ('.'.join(parents), f[:-3]) | |
1155 |
|
1155 | |||
1156 | if modname.startswith('distutils.tests.'): |
|
1156 | if modname.startswith('distutils.tests.'): | |
1157 | continue |
|
1157 | continue | |
1158 |
|
1158 | |||
1159 | if modname.endswith('.__init__'): |
|
1159 | if modname.endswith('.__init__'): | |
1160 | modname = modname[: -len('.__init__')] |
|
1160 | modname = modname[: -len('.__init__')] | |
1161 | path = os.path.dirname(full) |
|
1161 | path = os.path.dirname(full) | |
1162 | else: |
|
1162 | else: | |
1163 | path = None |
|
1163 | path = None | |
1164 |
|
1164 | |||
1165 | res.modules[modname] = py2exemodule( |
|
1165 | res.modules[modname] = py2exemodule( | |
1166 | modname, full, path=path |
|
1166 | modname, full, path=path | |
1167 | ) |
|
1167 | ) | |
1168 |
|
1168 | |||
1169 | if 'distutils' not in res.modules: |
|
1169 | if 'distutils' not in res.modules: | |
1170 | raise SystemExit('could not find distutils modules') |
|
1170 | raise SystemExit('could not find distutils modules') | |
1171 |
|
1171 | |||
1172 | return res |
|
1172 | return res | |
1173 |
|
1173 | |||
1174 |
|
1174 | |||
1175 | cmdclass = { |
|
1175 | cmdclass = { | |
1176 | 'build': hgbuild, |
|
1176 | 'build': hgbuild, | |
1177 | 'build_doc': hgbuilddoc, |
|
1177 | 'build_doc': hgbuilddoc, | |
1178 | 'build_mo': hgbuildmo, |
|
1178 | 'build_mo': hgbuildmo, | |
1179 | 'build_ext': hgbuildext, |
|
1179 | 'build_ext': hgbuildext, | |
1180 | 'build_py': hgbuildpy, |
|
1180 | 'build_py': hgbuildpy, | |
1181 | 'build_scripts': hgbuildscripts, |
|
1181 | 'build_scripts': hgbuildscripts, | |
1182 | 'build_hgextindex': buildhgextindex, |
|
1182 | 'build_hgextindex': buildhgextindex, | |
1183 | 'install': hginstall, |
|
1183 | 'install': hginstall, | |
1184 | 'install_lib': hginstalllib, |
|
1184 | 'install_lib': hginstalllib, | |
1185 | 'install_scripts': hginstallscripts, |
|
1185 | 'install_scripts': hginstallscripts, | |
1186 | 'build_hgexe': buildhgexe, |
|
1186 | 'build_hgexe': buildhgexe, | |
1187 | } |
|
1187 | } | |
1188 |
|
1188 | |||
1189 | if py2exehacked: |
|
1189 | if py2exehacked: | |
1190 | cmdclass['py2exe'] = hgbuildpy2exe |
|
1190 | cmdclass['py2exe'] = hgbuildpy2exe | |
1191 |
|
1191 | |||
1192 | packages = [ |
|
1192 | packages = [ | |
1193 | 'mercurial', |
|
1193 | 'mercurial', | |
1194 | 'mercurial.cext', |
|
1194 | 'mercurial.cext', | |
1195 | 'mercurial.cffi', |
|
1195 | 'mercurial.cffi', | |
1196 | 'mercurial.defaultrc', |
|
1196 | 'mercurial.defaultrc', | |
1197 | 'mercurial.helptext', |
|
1197 | 'mercurial.helptext', | |
1198 | 'mercurial.helptext.internals', |
|
1198 | 'mercurial.helptext.internals', | |
1199 | 'mercurial.hgweb', |
|
1199 | 'mercurial.hgweb', | |
1200 | 'mercurial.interfaces', |
|
1200 | 'mercurial.interfaces', | |
1201 | 'mercurial.pure', |
|
1201 | 'mercurial.pure', | |
1202 | 'mercurial.thirdparty', |
|
1202 | 'mercurial.thirdparty', | |
1203 | 'mercurial.thirdparty.attr', |
|
1203 | 'mercurial.thirdparty.attr', | |
1204 | 'mercurial.thirdparty.zope', |
|
1204 | 'mercurial.thirdparty.zope', | |
1205 | 'mercurial.thirdparty.zope.interface', |
|
1205 | 'mercurial.thirdparty.zope.interface', | |
1206 | 'mercurial.utils', |
|
1206 | 'mercurial.utils', | |
1207 | 'mercurial.revlogutils', |
|
1207 | 'mercurial.revlogutils', | |
1208 | 'mercurial.testing', |
|
1208 | 'mercurial.testing', | |
1209 | 'hgext', |
|
1209 | 'hgext', | |
1210 | 'hgext.convert', |
|
1210 | 'hgext.convert', | |
1211 | 'hgext.fsmonitor', |
|
1211 | 'hgext.fsmonitor', | |
1212 | 'hgext.fastannotate', |
|
1212 | 'hgext.fastannotate', | |
1213 | 'hgext.fsmonitor.pywatchman', |
|
1213 | 'hgext.fsmonitor.pywatchman', | |
|
1214 | 'hgext.git', | |||
1214 | 'hgext.highlight', |
|
1215 | 'hgext.highlight', | |
1215 | 'hgext.hooklib', |
|
1216 | 'hgext.hooklib', | |
1216 | 'hgext.infinitepush', |
|
1217 | 'hgext.infinitepush', | |
1217 | 'hgext.largefiles', |
|
1218 | 'hgext.largefiles', | |
1218 | 'hgext.lfs', |
|
1219 | 'hgext.lfs', | |
1219 | 'hgext.narrow', |
|
1220 | 'hgext.narrow', | |
1220 | 'hgext.remotefilelog', |
|
1221 | 'hgext.remotefilelog', | |
1221 | 'hgext.zeroconf', |
|
1222 | 'hgext.zeroconf', | |
1222 | 'hgext3rd', |
|
1223 | 'hgext3rd', | |
1223 | 'hgdemandimport', |
|
1224 | 'hgdemandimport', | |
1224 | ] |
|
1225 | ] | |
1225 | if sys.version_info[0] == 2: |
|
1226 | if sys.version_info[0] == 2: | |
1226 | packages.extend( |
|
1227 | packages.extend( | |
1227 | [ |
|
1228 | [ | |
1228 | 'mercurial.thirdparty.concurrent', |
|
1229 | 'mercurial.thirdparty.concurrent', | |
1229 | 'mercurial.thirdparty.concurrent.futures', |
|
1230 | 'mercurial.thirdparty.concurrent.futures', | |
1230 | ] |
|
1231 | ] | |
1231 | ) |
|
1232 | ) | |
1232 |
|
1233 | |||
1233 | if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ: |
|
1234 | if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ: | |
1234 | # py2exe can't cope with namespace packages very well, so we have to |
|
1235 | # py2exe can't cope with namespace packages very well, so we have to | |
1235 | # install any hgext3rd.* extensions that we want in the final py2exe |
|
1236 | # install any hgext3rd.* extensions that we want in the final py2exe | |
1236 | # image here. This is gross, but you gotta do what you gotta do. |
|
1237 | # image here. This is gross, but you gotta do what you gotta do. | |
1237 | packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' ')) |
|
1238 | packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' ')) | |
1238 |
|
1239 | |||
1239 | common_depends = [ |
|
1240 | common_depends = [ | |
1240 | 'mercurial/bitmanipulation.h', |
|
1241 | 'mercurial/bitmanipulation.h', | |
1241 | 'mercurial/compat.h', |
|
1242 | 'mercurial/compat.h', | |
1242 | 'mercurial/cext/util.h', |
|
1243 | 'mercurial/cext/util.h', | |
1243 | ] |
|
1244 | ] | |
1244 | common_include_dirs = ['mercurial'] |
|
1245 | common_include_dirs = ['mercurial'] | |
1245 |
|
1246 | |||
1246 | osutil_cflags = [] |
|
1247 | osutil_cflags = [] | |
1247 | osutil_ldflags = [] |
|
1248 | osutil_ldflags = [] | |
1248 |
|
1249 | |||
1249 | # platform specific macros |
|
1250 | # platform specific macros | |
1250 | for plat, func in [('bsd', 'setproctitle')]: |
|
1251 | for plat, func in [('bsd', 'setproctitle')]: | |
1251 | if re.search(plat, sys.platform) and hasfunction(new_compiler(), func): |
|
1252 | if re.search(plat, sys.platform) and hasfunction(new_compiler(), func): | |
1252 | osutil_cflags.append('-DHAVE_%s' % func.upper()) |
|
1253 | osutil_cflags.append('-DHAVE_%s' % func.upper()) | |
1253 |
|
1254 | |||
1254 | for plat, macro, code in [ |
|
1255 | for plat, macro, code in [ | |
1255 | ( |
|
1256 | ( | |
1256 | 'bsd|darwin', |
|
1257 | 'bsd|darwin', | |
1257 | 'BSD_STATFS', |
|
1258 | 'BSD_STATFS', | |
1258 | ''' |
|
1259 | ''' | |
1259 | #include <sys/param.h> |
|
1260 | #include <sys/param.h> | |
1260 | #include <sys/mount.h> |
|
1261 | #include <sys/mount.h> | |
1261 | int main() { struct statfs s; return sizeof(s.f_fstypename); } |
|
1262 | int main() { struct statfs s; return sizeof(s.f_fstypename); } | |
1262 | ''', |
|
1263 | ''', | |
1263 | ), |
|
1264 | ), | |
1264 | ( |
|
1265 | ( | |
1265 | 'linux', |
|
1266 | 'linux', | |
1266 | 'LINUX_STATFS', |
|
1267 | 'LINUX_STATFS', | |
1267 | ''' |
|
1268 | ''' | |
1268 | #include <linux/magic.h> |
|
1269 | #include <linux/magic.h> | |
1269 | #include <sys/vfs.h> |
|
1270 | #include <sys/vfs.h> | |
1270 | int main() { struct statfs s; return sizeof(s.f_type); } |
|
1271 | int main() { struct statfs s; return sizeof(s.f_type); } | |
1271 | ''', |
|
1272 | ''', | |
1272 | ), |
|
1273 | ), | |
1273 | ]: |
|
1274 | ]: | |
1274 | if re.search(plat, sys.platform) and cancompile(new_compiler(), code): |
|
1275 | if re.search(plat, sys.platform) and cancompile(new_compiler(), code): | |
1275 | osutil_cflags.append('-DHAVE_%s' % macro) |
|
1276 | osutil_cflags.append('-DHAVE_%s' % macro) | |
1276 |
|
1277 | |||
1277 | if sys.platform == 'darwin': |
|
1278 | if sys.platform == 'darwin': | |
1278 | osutil_ldflags += ['-framework', 'ApplicationServices'] |
|
1279 | osutil_ldflags += ['-framework', 'ApplicationServices'] | |
1279 |
|
1280 | |||
1280 | xdiff_srcs = [ |
|
1281 | xdiff_srcs = [ | |
1281 | 'mercurial/thirdparty/xdiff/xdiffi.c', |
|
1282 | 'mercurial/thirdparty/xdiff/xdiffi.c', | |
1282 | 'mercurial/thirdparty/xdiff/xprepare.c', |
|
1283 | 'mercurial/thirdparty/xdiff/xprepare.c', | |
1283 | 'mercurial/thirdparty/xdiff/xutils.c', |
|
1284 | 'mercurial/thirdparty/xdiff/xutils.c', | |
1284 | ] |
|
1285 | ] | |
1285 |
|
1286 | |||
1286 | xdiff_headers = [ |
|
1287 | xdiff_headers = [ | |
1287 | 'mercurial/thirdparty/xdiff/xdiff.h', |
|
1288 | 'mercurial/thirdparty/xdiff/xdiff.h', | |
1288 | 'mercurial/thirdparty/xdiff/xdiffi.h', |
|
1289 | 'mercurial/thirdparty/xdiff/xdiffi.h', | |
1289 | 'mercurial/thirdparty/xdiff/xinclude.h', |
|
1290 | 'mercurial/thirdparty/xdiff/xinclude.h', | |
1290 | 'mercurial/thirdparty/xdiff/xmacros.h', |
|
1291 | 'mercurial/thirdparty/xdiff/xmacros.h', | |
1291 | 'mercurial/thirdparty/xdiff/xprepare.h', |
|
1292 | 'mercurial/thirdparty/xdiff/xprepare.h', | |
1292 | 'mercurial/thirdparty/xdiff/xtypes.h', |
|
1293 | 'mercurial/thirdparty/xdiff/xtypes.h', | |
1293 | 'mercurial/thirdparty/xdiff/xutils.h', |
|
1294 | 'mercurial/thirdparty/xdiff/xutils.h', | |
1294 | ] |
|
1295 | ] | |
1295 |
|
1296 | |||
1296 |
|
1297 | |||
1297 | class RustCompilationError(CCompilerError): |
|
1298 | class RustCompilationError(CCompilerError): | |
1298 | """Exception class for Rust compilation errors.""" |
|
1299 | """Exception class for Rust compilation errors.""" | |
1299 |
|
1300 | |||
1300 |
|
1301 | |||
1301 | class RustExtension(Extension): |
|
1302 | class RustExtension(Extension): | |
1302 | """Base classes for concrete Rust Extension classes. |
|
1303 | """Base classes for concrete Rust Extension classes. | |
1303 | """ |
|
1304 | """ | |
1304 |
|
1305 | |||
1305 | rusttargetdir = os.path.join('rust', 'target', 'release') |
|
1306 | rusttargetdir = os.path.join('rust', 'target', 'release') | |
1306 |
|
1307 | |||
1307 | def __init__( |
|
1308 | def __init__( | |
1308 | self, mpath, sources, rustlibname, subcrate, py3_features=None, **kw |
|
1309 | self, mpath, sources, rustlibname, subcrate, py3_features=None, **kw | |
1309 | ): |
|
1310 | ): | |
1310 | Extension.__init__(self, mpath, sources, **kw) |
|
1311 | Extension.__init__(self, mpath, sources, **kw) | |
1311 | srcdir = self.rustsrcdir = os.path.join('rust', subcrate) |
|
1312 | srcdir = self.rustsrcdir = os.path.join('rust', subcrate) | |
1312 | self.py3_features = py3_features |
|
1313 | self.py3_features = py3_features | |
1313 |
|
1314 | |||
1314 | # adding Rust source and control files to depends so that the extension |
|
1315 | # adding Rust source and control files to depends so that the extension | |
1315 | # gets rebuilt if they've changed |
|
1316 | # gets rebuilt if they've changed | |
1316 | self.depends.append(os.path.join(srcdir, 'Cargo.toml')) |
|
1317 | self.depends.append(os.path.join(srcdir, 'Cargo.toml')) | |
1317 | cargo_lock = os.path.join(srcdir, 'Cargo.lock') |
|
1318 | cargo_lock = os.path.join(srcdir, 'Cargo.lock') | |
1318 | if os.path.exists(cargo_lock): |
|
1319 | if os.path.exists(cargo_lock): | |
1319 | self.depends.append(cargo_lock) |
|
1320 | self.depends.append(cargo_lock) | |
1320 | for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')): |
|
1321 | for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')): | |
1321 | self.depends.extend( |
|
1322 | self.depends.extend( | |
1322 | os.path.join(dirpath, fname) |
|
1323 | os.path.join(dirpath, fname) | |
1323 | for fname in fnames |
|
1324 | for fname in fnames | |
1324 | if os.path.splitext(fname)[1] == '.rs' |
|
1325 | if os.path.splitext(fname)[1] == '.rs' | |
1325 | ) |
|
1326 | ) | |
1326 |
|
1327 | |||
1327 | @staticmethod |
|
1328 | @staticmethod | |
1328 | def rustdylibsuffix(): |
|
1329 | def rustdylibsuffix(): | |
1329 | """Return the suffix for shared libraries produced by rustc. |
|
1330 | """Return the suffix for shared libraries produced by rustc. | |
1330 |
|
1331 | |||
1331 | See also: https://doc.rust-lang.org/reference/linkage.html |
|
1332 | See also: https://doc.rust-lang.org/reference/linkage.html | |
1332 | """ |
|
1333 | """ | |
1333 | if sys.platform == 'darwin': |
|
1334 | if sys.platform == 'darwin': | |
1334 | return '.dylib' |
|
1335 | return '.dylib' | |
1335 | elif os.name == 'nt': |
|
1336 | elif os.name == 'nt': | |
1336 | return '.dll' |
|
1337 | return '.dll' | |
1337 | else: |
|
1338 | else: | |
1338 | return '.so' |
|
1339 | return '.so' | |
1339 |
|
1340 | |||
1340 | def rustbuild(self): |
|
1341 | def rustbuild(self): | |
1341 | env = os.environ.copy() |
|
1342 | env = os.environ.copy() | |
1342 | if 'HGTEST_RESTOREENV' in env: |
|
1343 | if 'HGTEST_RESTOREENV' in env: | |
1343 | # Mercurial tests change HOME to a temporary directory, |
|
1344 | # Mercurial tests change HOME to a temporary directory, | |
1344 | # but, if installed with rustup, the Rust toolchain needs |
|
1345 | # but, if installed with rustup, the Rust toolchain needs | |
1345 | # HOME to be correct (otherwise the 'no default toolchain' |
|
1346 | # HOME to be correct (otherwise the 'no default toolchain' | |
1346 | # error message is issued and the build fails). |
|
1347 | # error message is issued and the build fails). | |
1347 | # This happens currently with test-hghave.t, which does |
|
1348 | # This happens currently with test-hghave.t, which does | |
1348 | # invoke this build. |
|
1349 | # invoke this build. | |
1349 |
|
1350 | |||
1350 | # Unix only fix (os.path.expanduser not really reliable if |
|
1351 | # Unix only fix (os.path.expanduser not really reliable if | |
1351 | # HOME is shadowed like this) |
|
1352 | # HOME is shadowed like this) | |
1352 | import pwd |
|
1353 | import pwd | |
1353 |
|
1354 | |||
1354 | env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir |
|
1355 | env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir | |
1355 |
|
1356 | |||
1356 | cargocmd = ['cargo', 'rustc', '-vv', '--release'] |
|
1357 | cargocmd = ['cargo', 'rustc', '-vv', '--release'] | |
1357 |
|
1358 | |||
1358 | feature_flags = [] |
|
1359 | feature_flags = [] | |
1359 |
|
1360 | |||
1360 | if sys.version_info[0] == 3 and self.py3_features is not None: |
|
1361 | if sys.version_info[0] == 3 and self.py3_features is not None: | |
1361 | feature_flags.append(self.py3_features) |
|
1362 | feature_flags.append(self.py3_features) | |
1362 | cargocmd.append('--no-default-features') |
|
1363 | cargocmd.append('--no-default-features') | |
1363 |
|
1364 | |||
1364 | rust_features = env.get("HG_RUST_FEATURES") |
|
1365 | rust_features = env.get("HG_RUST_FEATURES") | |
1365 | if rust_features: |
|
1366 | if rust_features: | |
1366 | feature_flags.append(rust_features) |
|
1367 | feature_flags.append(rust_features) | |
1367 |
|
1368 | |||
1368 | cargocmd.extend(('--features', " ".join(feature_flags))) |
|
1369 | cargocmd.extend(('--features', " ".join(feature_flags))) | |
1369 |
|
1370 | |||
1370 | cargocmd.append('--') |
|
1371 | cargocmd.append('--') | |
1371 | if sys.platform == 'darwin': |
|
1372 | if sys.platform == 'darwin': | |
1372 | cargocmd.extend( |
|
1373 | cargocmd.extend( | |
1373 | ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup") |
|
1374 | ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup") | |
1374 | ) |
|
1375 | ) | |
1375 | try: |
|
1376 | try: | |
1376 | subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir) |
|
1377 | subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir) | |
1377 | except OSError as exc: |
|
1378 | except OSError as exc: | |
1378 | if exc.errno == errno.ENOENT: |
|
1379 | if exc.errno == errno.ENOENT: | |
1379 | raise RustCompilationError("Cargo not found") |
|
1380 | raise RustCompilationError("Cargo not found") | |
1380 | elif exc.errno == errno.EACCES: |
|
1381 | elif exc.errno == errno.EACCES: | |
1381 | raise RustCompilationError( |
|
1382 | raise RustCompilationError( | |
1382 | "Cargo found, but permisssion to execute it is denied" |
|
1383 | "Cargo found, but permisssion to execute it is denied" | |
1383 | ) |
|
1384 | ) | |
1384 | else: |
|
1385 | else: | |
1385 | raise |
|
1386 | raise | |
1386 | except subprocess.CalledProcessError: |
|
1387 | except subprocess.CalledProcessError: | |
1387 | raise RustCompilationError( |
|
1388 | raise RustCompilationError( | |
1388 | "Cargo failed. Working directory: %r, " |
|
1389 | "Cargo failed. Working directory: %r, " | |
1389 | "command: %r, environment: %r" |
|
1390 | "command: %r, environment: %r" | |
1390 | % (self.rustsrcdir, cargocmd, env) |
|
1391 | % (self.rustsrcdir, cargocmd, env) | |
1391 | ) |
|
1392 | ) | |
1392 |
|
1393 | |||
1393 |
|
1394 | |||
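
# Illustrative sketch, not part of the setup.py code above or below:
# rustbuild() only shells out to Cargo, so something in the build machinery
# still has to call it for each Rust-aware extension. One hypothetical way to
# wire that in is a build_ext subclass like the one here; the class name and
# its (unshown) registration in cmdclass are assumptions for illustration,
# not the actual Mercurial wiring.
from distutils.command.build_ext import build_ext


class buildrustext(build_ext):  # hypothetical name
    def build_extension(self, ext):
        # Run Cargo first for Rust-aware extensions, then let the normal
        # distutils machinery compile any C sources the extension still has.
        if isinstance(ext, RustExtension):
            ext.rustbuild()
        build_ext.build_extension(self, ext)
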
class RustEnhancedExtension(RustExtension):
    """A C Extension, conditionally enhanced with Rust code.

    If the HGWITHRUSTEXT environment variable is set to something other
    than 'cpython', the Rust sources get compiled and linked within
    the C target shared library object.
    """

    def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
        RustExtension.__init__(
            self, mpath, sources, rustlibname, subcrate, **kw
        )
        if hgrustext != 'direct-ffi':
            return
        self.extra_compile_args.append('-DWITH_RUST')
        self.libraries.append(rustlibname)
        self.library_dirs.append(self.rusttargetdir)

    def rustbuild(self):
        if hgrustext == 'direct-ffi':
            RustExtension.rustbuild(self)


class RustStandaloneExtension(RustExtension):
    def __init__(self, pydottedname, rustcrate, dylibname, **kw):
        RustExtension.__init__(
            self, pydottedname, [], dylibname, rustcrate, **kw
        )
        self.dylibname = dylibname

    def build(self, target_dir):
        self.rustbuild()
        target = [target_dir]
        target.extend(self.name.split('.'))
        target[-1] += DYLIB_SUFFIX
        shutil.copy2(
            os.path.join(
                self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
            ),
            os.path.join(*target),
        )

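
# Illustrative sketch, not part of the setup.py code above or below:
# a RustStandaloneExtension carries no C sources at all; rustbuild() runs
# Cargo for the named crate and build() copies the resulting shared library
# (dylibname + rustdylibsuffix()) into the package tree under target_dir.
# The real instance lives in extmodules below; the target directory string
# here is made up.
_demo_ext = RustStandaloneExtension(
    'mercurial.rustext',  # importable dotted name
    'hg-cpython',         # crate directory under rust/
    'librusthg',          # library name produced by Cargo
)
# _demo_ext.build('build/lib.linux-x86_64-3.8')  # would run Cargo, then copy2
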
extmodules = [
    Extension(
        'mercurial.cext.base85',
        ['mercurial/cext/base85.c'],
        include_dirs=common_include_dirs,
        depends=common_depends,
    ),
    Extension(
        'mercurial.cext.bdiff',
        ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
        include_dirs=common_include_dirs,
        depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
    ),
    Extension(
        'mercurial.cext.mpatch',
        ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
        include_dirs=common_include_dirs,
        depends=common_depends,
    ),
    RustEnhancedExtension(
        'mercurial.cext.parsers',
        [
            'mercurial/cext/charencode.c',
            'mercurial/cext/dirs.c',
            'mercurial/cext/manifest.c',
            'mercurial/cext/parsers.c',
            'mercurial/cext/pathencode.c',
            'mercurial/cext/revlog.c',
        ],
        'hgdirectffi',
        'hg-direct-ffi',
        include_dirs=common_include_dirs,
        depends=common_depends
        + [
            'mercurial/cext/charencode.h',
            'mercurial/cext/revlog.h',
            'rust/hg-core/src/ancestors.rs',
            'rust/hg-core/src/lib.rs',
        ],
    ),
    Extension(
        'mercurial.cext.osutil',
        ['mercurial/cext/osutil.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=osutil_cflags,
        extra_link_args=osutil_ldflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
        [
            'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
        ],
    ),
    Extension(
        'mercurial.thirdparty.sha1dc',
        [
            'mercurial/thirdparty/sha1dc/cext.c',
            'mercurial/thirdparty/sha1dc/lib/sha1.c',
            'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
        ],
    ),
    Extension(
        'hgext.fsmonitor.pywatchman.bser', ['hgext/fsmonitor/pywatchman/bser.c']
    ),
    RustStandaloneExtension(
        'mercurial.rustext', 'hg-cpython', 'librusthg', py3_features='python3'
    ),
]

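
# Illustrative sketch, not part of the setup.py code above or below:
# extmodules is an ordinary list, so optional extensions can be appended
# conditionally after the fact; the python-zstandard block right below does
# exactly that. The same pattern for a purely hypothetical optional module
# ('mercurial.cext.fictional' and its source file do not exist):
if os.path.exists('mercurial/cext/fictional.c'):
    extmodules.append(
        Extension(
            'mercurial.cext.fictional',
            ['mercurial/cext/fictional.c'],
            include_dirs=common_include_dirs,
            depends=common_depends,
        )
    )
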
sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd

extmodules.append(
    setup_zstd.get_c_extension(
        name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
    )
)

try:
    from distutils import cygwinccompiler

    # the -mno-cygwin option has been deprecated for years
    mingw32compilerclass = cygwinccompiler.Mingw32CCompiler

    class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
        def __init__(self, *args, **kwargs):
            mingw32compilerclass.__init__(self, *args, **kwargs)
            for i in 'compiler compiler_so linker_exe linker_so'.split():
                try:
                    getattr(self, i).remove('-mno-cygwin')
                except ValueError:
                    pass

    cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
except ImportError:
    # the cygwinccompiler package is not available on some Python
    # distributions like the ones from the optware project for Synology
    # DiskStation boxes
    class HackedMingw32CCompiler(object):
        pass


if os.name == 'nt':
    # Allow compiler/linker flags to be added to Visual Studio builds. Passing
    # extra_link_args to distutils.extensions.Extension() doesn't have any
    # effect.
    from distutils import msvccompiler

    msvccompilerclass = msvccompiler.MSVCCompiler

    class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
        def initialize(self):
            msvccompilerclass.initialize(self)
            # "warning LNK4197: export 'func' specified multiple times"
            self.ldflags_shared.append('/ignore:4197')
            self.ldflags_shared_debug.append('/ignore:4197')

    msvccompiler.MSVCCompiler = HackedMSVCCompiler

packagedata = {
    'mercurial': [
        'locale/*/LC_MESSAGES/hg.mo',
        'defaultrc/*.rc',
        'dummycert.pem',
    ],
    'mercurial.helptext': ['*.txt',],
    'mercurial.helptext.internals': ['*.txt',],
}


def ordinarypath(p):
    return p and p[0] != '.' and p[-1] != '~'


for root in ('templates',):
    for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
        curdir = curdir.split(os.sep, 1)[1]
        dirs[:] = filter(ordinarypath, dirs)
        for f in filter(ordinarypath, files):
            f = os.path.join(curdir, f)
            packagedata['mercurial'].append(f)

datafiles = []

# distutils expects version to be str/unicode. Converting it to
# unicode on Python 2 still works because it won't contain any
# non-ascii bytes and will be implicitly converted back to bytes
# when operated on.
assert isinstance(version, bytes)
setupversion = version.decode('ascii')

extra = {}

py2exepackages = [
    'hgdemandimport',
    'hgext3rd',
    'hgext',
    'email',
    # implicitly imported per module policy
    # (cffi wouldn't be used as a frozen exe)
    'mercurial.cext',
    #'mercurial.cffi',
    'mercurial.pure',
]

py2exeexcludes = []
py2exedllexcludes = ['crypt32.dll']

if issetuptools:
    extra['python_requires'] = supportedpy

if py2exeloaded:
    extra['console'] = [
        {
            'script': 'hg',
            'copyright': 'Copyright (C) 2005-2020 Matt Mackall and others',
            'product_version': version,
        }
    ]
    # Sub command of 'build' because 'py2exe' does not handle sub_commands.
    # Need to override hgbuild because it has a private copy of
    # build.sub_commands.
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
    # put dlls in sub directory so that they won't pollute PATH
    extra['zipfile'] = 'lib/library.zip'

# We allow some configuration to be supplemented via environment
# variables. This is better than setup.cfg files because it allows
# supplementing configs instead of replacing them.
extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
if extrapackages:
    py2exepackages.extend(extrapackages.split(' '))

excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
if excludes:
    py2exeexcludes.extend(excludes.split(' '))

dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
if dllexcludes:
    py2exedllexcludes.extend(dllexcludes.split(' '))

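
# Illustrative sketch, not part of the setup.py code above or below:
# the HG_PY2EXE_EXTRA_* variables handled above supplement the defaults
# rather than replacing them. A small self-contained demonstration of that
# behaviour; the variable value and the package names are made up.
_demo_env = {'HG_PY2EXE_EXTRA_PACKAGES': 'keyring win32com'}
_demo_packages = list(py2exepackages)
_demo_extra = _demo_env.get('HG_PY2EXE_EXTRA_PACKAGES')
if _demo_extra:
    _demo_packages.extend(_demo_extra.split(' '))
# _demo_packages keeps every default entry and gains 'keyring' and 'win32com'.
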
if os.name == 'nt':
    # Windows binary file versions for exe/dll files must have the
    # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
    setupversion = setupversion.split(r'+', 1)[0]

if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
    version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
    if version:
        version = version[0]
        if sys.version_info[0] == 3:
            version = version.decode('utf-8')
        xcode4 = version.startswith('Xcode') and StrictVersion(
            version.split()[1]
        ) >= StrictVersion('4.0')
        xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
    else:
        # xcodebuild returns empty on OS X Lion with XCode 4.3 not
        # installed, but instead with only command-line tools. Assume
        # that only happens on >= Lion, thus no PPC support.
        xcode4 = True
        xcode51 = False

    # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
    # distutils.sysconfig
    if xcode4:
        os.environ['ARCHFLAGS'] = ''

    # XCode 5.1 changes clang such that it now fails to compile if the
    # -mno-fused-madd flag is passed, but the version of Python shipped with
    # OS X 10.9 Mavericks includes this flag. This causes problems in all
    # C extension modules, and a bug has been filed upstream at
    # http://bugs.python.org/issue21244. We also need to patch this here
    # so Mercurial can continue to compile in the meantime.
    if xcode51:
        cflags = get_config_var('CFLAGS')
        if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
            os.environ['CFLAGS'] = (
                os.environ.get('CFLAGS', '') + ' -Qunused-arguments'
            )

setup(
    name='mercurial',
    version=setupversion,
    author='Matt Mackall and many others',
    author_email='mercurial@mercurial-scm.org',
    url='https://mercurial-scm.org/',
    download_url='https://mercurial-scm.org/release/',
    description=(
        'Fast scalable distributed SCM (revision control, version '
        'control) system'
    ),
    long_description=(
        'Mercurial is a distributed SCM tool written in Python.'
        ' It is used by a number of large projects that require'
        ' fast, reliable distributed revision control, such as '
        'Mozilla.'
    ),
    license='GNU GPLv2 or any later version',
    classifiers=[
        'Development Status :: 6 - Mature',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Natural Language :: Danish',
        'Natural Language :: English',
        'Natural Language :: German',
        'Natural Language :: Italian',
        'Natural Language :: Japanese',
        'Natural Language :: Portuguese (Brazilian)',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: OS Independent',
        'Operating System :: POSIX',
        'Programming Language :: C',
        'Programming Language :: Python',
        'Topic :: Software Development :: Version Control',
    ],
    scripts=scripts,
    packages=packages,
    ext_modules=extmodules,
    data_files=datafiles,
    package_data=packagedata,
    cmdclass=cmdclass,
    distclass=hgdist,
    options={
        'py2exe': {
            'bundle_files': 3,
            'dll_excludes': py2exedllexcludes,
            'excludes': py2exeexcludes,
            'packages': py2exepackages,
        },
        'bdist_mpkg': {
            'zipdist': False,
            'license': 'COPYING',
            'readme': 'contrib/packaging/macosx/Readme.html',
            'welcome': 'contrib/packaging/macosx/Welcome.html',
        },
    },
    **extra
)