Makefile
@@ -1,72 +1,72 @@
|
1 | 1 | |
|
2 | 2 | .PHONY: clean docs docs-clean docs-cleanup test test-clean test-only test-only-postgres test-only-mysql web-build generate-pkgs pip-packages build-nix |
|
3 | 3 | |
|
4 | 4 | NODE_PATH=./node_modules |
|
5 | 5 | WEBPACK=./node_binaries/webpack |
|
6 | 6 | GRUNT=./node_binaries/grunt |
|
7 | 7 | # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py |
|
8 | 8 | OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES} |
|
9 | 9 | |
|
10 | 10 | clean: |
|
11 | 11 | make test-clean |
|
12 | 12 | find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';' |
|
13 | 13 | |
|
14 | 14 | test: |
|
15 | 15 | make test-clean |
|
16 | 16 | make test-only |
|
17 | 17 | |
|
18 | 18 | test-clean: |
|
19 | 19 | rm -rf coverage.xml htmlcov junit.xml pylint.log result |
|
20 | 20 | find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';' |
|
21 | 21 | find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';' |
|
22 | 22 | |
|
23 | 23 | test-only: |
|
24 | 24 | PYTHONHASHSEED=random \ |
|
25 | 25 | py.test -x -vv -r xw -p no:sugar --cov=rhodecode \ |
|
26 | 26 | --cov-report=term-missing --cov-report=html \ |
|
27 | 27 | rhodecode |
|
28 | 28 | |
|
29 | 29 | test-only-mysql: |
|
30 | 30 | PYTHONHASHSEED=random \ |
|
31 | 31 | py.test -x -vv -r xw -p no:sugar --cov=rhodecode \ |
|
32 | 32 | --cov-report=term-missing --cov-report=html \ |
|
33 | 33 | --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \ |
|
34 | 34 | rhodecode |
|
35 | 35 | |
|
36 | 36 | test-only-postgres: |
|
37 | 37 | PYTHONHASHSEED=random \ |
|
38 | 38 | py.test -x -vv -r xw -p no:sugar --cov=rhodecode \ |
|
39 | 39 | --cov-report=term-missing --cov-report=html \ |
|
40 | 40 | --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \ |
|
41 | 41 | rhodecode |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | docs: |
|
45 | 45 | (cd docs; nix-build default.nix -o result; make clean html) |
|
46 | 46 | |
|
47 | 47 | docs-clean: |
|
48 | 48 | (cd docs; make clean) |
|
49 | 49 | |
|
50 | 50 | docs-cleanup: |
|
51 | 51 | (cd docs; make cleanup) |
|
52 | 52 | |
|
53 | 53 | web-build: |
|
54 | 54 | NODE_PATH=$(NODE_PATH) $(GRUNT) |
|
55 | 55 | |
|
56 | 56 | generate-pkgs: |
|
57 | 57 | nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses --no-binary :all:" |
|
58 | 58 | |
|
59 | 59 | pip-packages: |
|
60 | 60 | python ${OUTDATED_PACKAGES} |
|
61 | 61 | |
|
62 | 62 | generate-js-pkgs: |
|
63 | 63 | rm -rf node_modules && \ |
|
64 | nix-shell pkgs/shell-generate.nix --command "node2nix --input package.json -o pkgs/node-packages.nix -e pkgs/node-env.nix -c pkgs/node-default.nix -d --flatten --nodejs- |
|
64 | nix-shell pkgs/shell-generate.nix --command "node2nix --input package.json -o pkgs/node-packages.nix -e pkgs/node-env.nix -c pkgs/node-default.nix -d --flatten --nodejs-12" && \ | |
|
65 | 65 | sed -i -e 's/http:\/\//https:\/\//g' pkgs/node-packages.nix |
|
66 | 66 | |
|
67 | 67 | generate-license-meta: |
|
68 | 68 | nix-build pkgs/license-generate.nix -o result-license && \ |
|
69 | 69 | cat result-license/licenses.json | python -m json.tool > rhodecode/config/licenses.json |
|
70 | 70 | |
|
71 | 71 | build-nix: |
|
72 | 72 | nix-build --show-trace --option sandbox false --option max-jobs 4 --option cores 4 |
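For reference, these targets are typically driven as sketched below; the DB credentials are the ones hard-coded in the test-only-* recipes above, and a locally running MySQL/PostgreSQL with a rhodecode_test database is assumed:

    # full test pipeline: test-clean followed by test-only
    make test

    # same suite, but pointed at PostgreSQL via py.test's --ini-config-override
    make test-only-postgres

    # pip-packages needs the helper script location exported first
    PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py make pip-packages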
pkgs/node-env.nix
@@ -1,542 +1,542 @@
|
1 | 1 | # This file originates from node2nix |
|
2 | 2 | |
|
3 | 3 | {stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}: |
|
4 | 4 | |
|
5 | 5 | let |
|
6 | 6 | python = if nodejs ? python then nodejs.python else python2; |
|
7 | 7 | |
|
8 | 8 | # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise |
|
9 | 9 | tarWrapper = runCommand "tarWrapper" {} '' |
|
10 | 10 | mkdir -p $out/bin |
|
11 | 11 | |
|
12 | 12 | cat > $out/bin/tar <<EOF |
|
13 | 13 | #! ${stdenv.shell} -e |
|
14 | $(type -p tar) "\$@" --warning=no-unknown-keyword | |
|
14 | $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore | |
|
15 | 15 | EOF |
|
16 | 16 | |
|
17 | 17 | chmod +x $out/bin/tar |
|
18 | 18 | ''; |
|
19 | 19 | |
|
20 | 20 | # Function that generates a TGZ file from a NPM project |
|
21 | 21 | buildNodeSourceDist = |
|
22 | 22 | { name, version, src, ... }: |
|
23 | 23 | |
|
24 | 24 | stdenv.mkDerivation { |
|
25 | 25 | name = "node-tarball-${name}-${version}"; |
|
26 | 26 | inherit src; |
|
27 | 27 | buildInputs = [ nodejs ]; |
|
28 | 28 | buildPhase = '' |
|
29 | 29 | export HOME=$TMPDIR |
|
30 | 30 | tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts) |
|
31 | 31 | ''; |
|
32 | 32 | installPhase = '' |
|
33 | 33 | mkdir -p $out/tarballs |
|
34 | 34 | mv $tgzFile $out/tarballs |
|
35 | 35 | mkdir -p $out/nix-support |
|
36 | 36 | echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products |
|
37 | 37 | ''; |
|
38 | 38 | }; |
|
39 | 39 | |
|
40 | 40 | includeDependencies = {dependencies}: |
|
41 | 41 | stdenv.lib.optionalString (dependencies != []) |
|
42 | 42 | (stdenv.lib.concatMapStrings (dependency: |
|
43 | 43 | '' |
|
44 | 44 | # Bundle the dependencies of the package |
|
45 | 45 | mkdir -p node_modules |
|
46 | 46 | cd node_modules |
|
47 | 47 | |
|
48 | 48 | # Only include dependencies if they don't exist. They may also be bundled in the package. |
|
49 | 49 | if [ ! -e "${dependency.name}" ] |
|
50 | 50 | then |
|
51 | 51 | ${composePackage dependency} |
|
52 | 52 | fi |
|
53 | 53 | |
|
54 | 54 | cd .. |
|
55 | 55 | '' |
|
56 | 56 | ) dependencies); |
|
57 | 57 | |
|
58 | 58 | # Recursively composes the dependencies of a package |
|
59 | 59 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: |
|
60 | '' | |
|
60 | builtins.addErrorContext "while evaluating node package '${packageName}'" '' | |
|
61 | 61 | DIR=$(pwd) |
|
62 | 62 | cd $TMPDIR |
|
63 | 63 | |
|
64 | 64 | unpackFile ${src} |
|
65 | 65 | |
|
66 | 66 | # Make the base dir in which the target dependency resides first |
|
67 | 67 | mkdir -p "$(dirname "$DIR/${packageName}")" |
|
68 | 68 | |
|
69 | 69 | if [ -f "${src}" ] |
|
70 | 70 | then |
|
71 | 71 | # Figure out what directory has been unpacked |
|
72 | 72 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" |
|
73 | 73 | |
|
74 | 74 | # Restore write permissions to make building work |
|
75 | find "$packageDir" -type d - |
|
75 | find "$packageDir" -type d -exec chmod u+x {} \; | |
|
76 | 76 | chmod -R u+w "$packageDir" |
|
77 | 77 | |
|
78 | 78 | # Move the extracted tarball into the output folder |
|
79 | 79 | mv "$packageDir" "$DIR/${packageName}" |
|
80 | 80 | elif [ -d "${src}" ] |
|
81 | 81 | then |
|
82 | 82 | # Get a stripped name (without hash) of the source directory. |
|
83 | 83 | # On old nixpkgs it's already set internally. |
|
84 | 84 | if [ -z "$strippedName" ] |
|
85 | 85 | then |
|
86 | 86 | strippedName="$(stripHash ${src})" |
|
87 | 87 | fi |
|
88 | 88 | |
|
89 | 89 | # Restore write permissions to make building work |
|
90 | 90 | chmod -R u+w "$strippedName" |
|
91 | 91 | |
|
92 | 92 | # Move the extracted directory into the output folder |
|
93 | 93 | mv "$strippedName" "$DIR/${packageName}" |
|
94 | 94 | fi |
|
95 | 95 | |
|
96 | 96 | # Unset the stripped name to not confuse the next unpack step |
|
97 | 97 | unset strippedName |
|
98 | 98 | |
|
99 | 99 | # Include the dependencies of the package |
|
100 | 100 | cd "$DIR/${packageName}" |
|
101 | 101 | ${includeDependencies { inherit dependencies; }} |
|
102 | 102 | cd .. |
|
103 | 103 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} |
|
104 | 104 | ''; |
|
105 | 105 | |
|
106 | 106 | pinpointDependencies = {dependencies, production}: |
|
107 | 107 | let |
|
108 | 108 | pinpointDependenciesFromPackageJSON = writeTextFile { |
|
109 | 109 | name = "pinpointDependencies.js"; |
|
110 | 110 | text = '' |
|
111 | 111 | var fs = require('fs'); |
|
112 | 112 | var path = require('path'); |
|
113 | 113 | |
|
114 | 114 | function resolveDependencyVersion(location, name) { |
|
115 | 115 | if(location == process.env['NIX_STORE']) { |
|
116 | 116 | return null; |
|
117 | 117 | } else { |
|
118 | 118 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); |
|
119 | 119 | |
|
120 | 120 | if(fs.existsSync(dependencyPackageJSON)) { |
|
121 | 121 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); |
|
122 | 122 | |
|
123 | 123 | if(dependencyPackageObj.name == name) { |
|
124 | 124 | return dependencyPackageObj.version; |
|
125 | 125 | } |
|
126 | 126 | } else { |
|
127 | 127 | return resolveDependencyVersion(path.resolve(location, ".."), name); |
|
128 | 128 | } |
|
129 | 129 | } |
|
130 | 130 | } |
|
131 | 131 | |
|
132 | 132 | function replaceDependencies(dependencies) { |
|
133 | 133 | if(typeof dependencies == "object" && dependencies !== null) { |
|
134 | 134 | for(var dependency in dependencies) { |
|
135 | 135 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); |
|
136 | 136 | |
|
137 | 137 | if(resolvedVersion === null) { |
|
138 | 138 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); |
|
139 | 139 | } else { |
|
140 | 140 | dependencies[dependency] = resolvedVersion; |
|
141 | 141 | } |
|
142 | 142 | } |
|
143 | 143 | } |
|
144 | 144 | } |
|
145 | 145 | |
|
146 | 146 | /* Read the package.json configuration */ |
|
147 | 147 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); |
|
148 | 148 | |
|
149 | 149 | /* Pinpoint all dependencies */ |
|
150 | 150 | replaceDependencies(packageObj.dependencies); |
|
151 | 151 | if(process.argv[2] == "development") { |
|
152 | 152 | replaceDependencies(packageObj.devDependencies); |
|
153 | 153 | } |
|
154 | 154 | replaceDependencies(packageObj.optionalDependencies); |
|
155 | 155 | |
|
156 | 156 | /* Write the fixed package.json file */ |
|
157 | 157 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); |
|
158 | 158 | ''; |
|
159 | 159 | }; |
|
160 | 160 | in |
|
161 | 161 | '' |
|
162 | 162 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} |
|
163 | 163 | |
|
164 | 164 | ${stdenv.lib.optionalString (dependencies != []) |
|
165 | 165 | '' |
|
166 | 166 | if [ -d node_modules ] |
|
167 | 167 | then |
|
168 | 168 | cd node_modules |
|
169 | 169 | ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} |
|
170 | 170 | cd .. |
|
171 | 171 | fi |
|
172 | 172 | ''} |
|
173 | 173 | ''; |
|
174 | 174 | |
|
175 | 175 | # Recursively traverses all dependencies of a package and pinpoints all |
|
176 | 176 | # dependencies in the package.json file to the versions that are actually |
|
177 | 177 | # being used. |
|
178 | 178 | |
|
179 | 179 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: |
|
180 | 180 | '' |
|
181 | 181 | if [ -d "${packageName}" ] |
|
182 | 182 | then |
|
183 | 183 | cd "${packageName}" |
|
184 | 184 | ${pinpointDependencies { inherit dependencies production; }} |
|
185 | 185 | cd .. |
|
186 | 186 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} |
|
187 | 187 | fi |
|
188 | 188 | ''; |
|
189 | 189 | |
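The effect of the pinpoint pass is easiest to see on a single dependency entry; the package name and versions below are made up, and the script file name stands in for its Nix store path:

    # before: "dependencies": { "lodash": "^4.17.0" }    <- registry range
    # after:  "dependencies": { "lodash": "4.17.21" }    <- exact version found in node_modules
    node pinpointDependencies.js production    # passing "development" also rewrites devDependencies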
|
190 | 190 | # Extract the Node.js source code which is used to compile packages with |
|
191 | 191 | # native bindings |
|
192 | 192 | nodeSources = runCommand "node-sources" {} '' |
|
193 | 193 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} |
|
194 | 194 | mv node-* $out |
|
195 | 195 | ''; |
|
196 | 196 | |
|
197 | 197 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) |
|
198 | 198 | addIntegrityFieldsScript = writeTextFile { |
|
199 | 199 | name = "addintegrityfields.js"; |
|
200 | 200 | text = '' |
|
201 | 201 | var fs = require('fs'); |
|
202 | 202 | var path = require('path'); |
|
203 | 203 | |
|
204 | 204 | function augmentDependencies(baseDir, dependencies) { |
|
205 | 205 | for(var dependencyName in dependencies) { |
|
206 | 206 | var dependency = dependencies[dependencyName]; |
|
207 | 207 | |
|
208 | 208 | // Open package.json and augment metadata fields |
|
209 | 209 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); |
|
210 | 210 | var packageJSONPath = path.join(packageJSONDir, "package.json"); |
|
211 | 211 | |
|
212 | 212 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored |
|
213 | 213 | console.log("Adding metadata fields to: "+packageJSONPath); |
|
214 | 214 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); |
|
215 | 215 | |
|
216 | 216 | if(dependency.integrity) { |
|
217 | 217 | packageObj["_integrity"] = dependency.integrity; |
|
218 | 218 | } else { |
|
219 | 219 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. |
|
220 | 220 | } |
|
221 | 221 | |
|
222 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. | |
|
222 | if(dependency.resolved) { | |
|
223 | packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided | |
|
224 | } else { | |
|
225 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. | |
|
226 | } | |
|
227 | ||
|
228 | if(dependency.from !== undefined) { // Adopt from property if one has been provided | |
|
229 | packageObj["_from"] = dependency.from; | |
|
230 | } | |
|
231 | ||
|
223 | 232 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); |
|
224 | 233 | } |
|
225 | 234 | |
|
226 | 235 | // Augment transitive dependencies |
|
227 | 236 | if(dependency.dependencies !== undefined) { |
|
228 | 237 | augmentDependencies(packageJSONDir, dependency.dependencies); |
|
229 | 238 | } |
|
230 | 239 | } |
|
231 | 240 | } |
|
232 | 241 | |
|
233 | 242 | if(fs.existsSync("./package-lock.json")) { |
|
234 | 243 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); |
|
235 | 244 | |
|
236 | 245 | if(packageLock.lockfileVersion !== 1) { |
|
237 | 246 | process.stderr.write("Sorry, I only understand lock file version 1!\n"); |
|
238 | 247 | process.exit(1); |
|
239 | 248 | } |
|
240 | 249 | |
|
241 | 250 | if(packageLock.dependencies !== undefined) { |
|
242 | 251 | augmentDependencies(".", packageLock.dependencies); |
|
243 | 252 | } |
|
244 | 253 | } |
|
245 | 254 | ''; |
|
246 | 255 | }; |
|
247 | 256 | |
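Once this script has run, every package.json under node_modules carries the metadata NPM would otherwise look up in its (empty) cache. A quick way to inspect the result; the dependency name is hypothetical and jq is assumed to be available:

    node addintegrityfields.js    # the build invokes the script via its Nix store path
    jq '{_integrity, _resolved, _from}' node_modules/some-dep/package.json
    # "_integrity": real hash from the lock file, or the dummy sha1-000...= for Git dependencies
    # "_resolved":  dependency.resolved when provided, otherwise the version identifier
    # "_from":      only present when the lock file supplies a "from" property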
|
248 | 257 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes |
|
249 | 258 | reconstructPackageLock = writeTextFile { |
|
250 | 259 | name = "addintegrityfields.js"; |
|
251 | 260 | text = '' |
|
252 | 261 | var fs = require('fs'); |
|
253 | 262 | var path = require('path'); |
|
254 | 263 | |
|
255 | 264 | var packageObj = JSON.parse(fs.readFileSync("package.json")); |
|
256 | 265 | |
|
257 | 266 | var lockObj = { |
|
258 | 267 | name: packageObj.name, |
|
259 | 268 | version: packageObj.version, |
|
260 | 269 | lockfileVersion: 1, |
|
261 | 270 | requires: true, |
|
262 | 271 | dependencies: {} |
|
263 | 272 | }; |
|
264 | 273 | |
|
265 | 274 | function augmentPackageJSON(filePath, dependencies) { |
|
266 | 275 | var packageJSON = path.join(filePath, "package.json"); |
|
267 | 276 | if(fs.existsSync(packageJSON)) { |
|
268 | 277 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); |
|
269 | 278 | dependencies[packageObj.name] = { |
|
270 | 279 | version: packageObj.version, |
|
271 | 280 | integrity: "sha1-000000000000000000000000000=", |
|
272 | 281 | dependencies: {} |
|
273 | 282 | }; |
|
274 | 283 | processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); |
|
275 | 284 | } |
|
276 | 285 | } |
|
277 | 286 | |
|
278 | 287 | function processDependencies(dir, dependencies) { |
|
279 | 288 | if(fs.existsSync(dir)) { |
|
280 | 289 | var files = fs.readdirSync(dir); |
|
281 | 290 | |
|
282 | 291 | files.forEach(function(entry) { |
|
283 | 292 | var filePath = path.join(dir, entry); |
|
284 | 293 | var stats = fs.statSync(filePath); |
|
285 | 294 | |
|
286 | 295 | if(stats.isDirectory()) { |
|
287 | 296 | if(entry.substr(0, 1) == "@") { |
|
288 | 297 | // When we encounter a namespace folder, augment all packages belonging to the scope |
|
289 | 298 | var pkgFiles = fs.readdirSync(filePath); |
|
290 | 299 | |
|
291 | 300 | pkgFiles.forEach(function(entry) { |
|
292 | 301 | if(stats.isDirectory()) { |
|
293 | 302 | var pkgFilePath = path.join(filePath, entry); |
|
294 | 303 | augmentPackageJSON(pkgFilePath, dependencies); |
|
295 | 304 | } |
|
296 | 305 | }); |
|
297 | 306 | } else { |
|
298 | 307 | augmentPackageJSON(filePath, dependencies); |
|
299 | 308 | } |
|
300 | 309 | } |
|
301 | 310 | }); |
|
302 | 311 | } |
|
303 | 312 | } |
|
304 | 313 | |
|
305 | 314 | processDependencies("node_modules", lockObj.dependencies); |
|
306 | 315 | |
|
307 | 316 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); |
|
308 | 317 | ''; |
|
309 | 318 | }; |
|
310 | 319 | |
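The reconstructed lock file simply mirrors the on-disk node_modules/ tree, with the dummy integrity value seen above for every entry. A sketch of its shape (script file name and dependency name are hypothetical):

    node reconstruct-package-lock.js && head package-lock.json
    # {
    #   "name": "<from package.json>",
    #   "version": "<from package.json>",
    #   "lockfileVersion": 1,
    #   "requires": true,
    #   "dependencies": {
    #     "some-dep": { "version": "1.2.3", "integrity": "sha1-000000000000000000000000000=", "dependencies": {} }
    #   }
    # }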
|
311 | # Builds and composes an NPM package including all its dependencies | |
|
312 | buildNodePackage = | |
|
313 | { name | |
|
314 | , packageName | |
|
315 | , version | |
|
316 | , dependencies ? [] | |
|
317 | , buildInputs ? [] | |
|
318 | , production ? true | |
|
319 | , npmFlags ? "" | |
|
320 | , dontNpmInstall ? false | |
|
321 | , bypassCache ? false | |
|
322 | , preRebuild ? "" | |
|
323 | , dontStrip ? true | |
|
324 | , unpackPhase ? "true" | |
|
325 | , buildPhase ? "true" | |
|
326 | , ... }@args: | |
|
327 | ||
|
320 | prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: | |
|
328 | 321 | let |
|
329 | 322 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; |
|
330 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; | |
|
331 | 323 | in |
|
332 | stdenv.mkDerivation ({ | |
|
333 | name = "node-${name}-${version}"; | |
|
334 | buildInputs = [ tarWrapper python nodejs ] | |
|
335 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux | |
|
336 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool | |
|
337 | ++ buildInputs; | |
|
338 | ||
|
339 | inherit dontStrip; # Stripping may fail a build for some package deployments | |
|
340 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; | |
|
341 | ||
|
342 | compositionScript = composePackage args; | |
|
343 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; | |
|
344 | ||
|
345 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; | |
|
346 | ||
|
347 | installPhase = '' | |
|
348 | # Create and enter a root node_modules/ folder | |
|
349 | mkdir -p $out/lib/node_modules | |
|
350 | cd $out/lib/node_modules | |
|
351 | ||
|
352 | # Compose the package and all its dependencies | |
|
353 | source $compositionScriptPath | |
|
354 | ||
|
324 | '' | |
|
355 | 325 | # Pinpoint the versions of all dependencies to the ones that are actually being used |
|
356 | 326 | echo "pinpointing versions of dependencies..." |
|
357 | 327 | source $pinpointDependenciesScriptPath |
|
358 | 328 | |
|
359 | 329 | # Patch the shebangs of the bundled modules to prevent them from |
|
360 | 330 | # calling executables outside the Nix store as much as possible |
|
361 | 331 | patchShebangs . |
|
362 | 332 | |
|
363 | 333 | # Deploy the Node.js package by running npm install. Since the |
|
364 | 334 | # dependencies have been provided already by ourselves, it should not |
|
365 | 335 | # attempt to install them again, which is good, because we want to make |
|
366 | 336 | # it Nix's responsibility. If it needs to install any dependencies |
|
367 | 337 | # anyway (e.g. because the dependency parameters are |
|
368 | 338 | # incomplete/incorrect), it fails. |
|
369 | 339 | # |
|
370 | 340 | # The other responsibilities of NPM are kept -- version checks, build |
|
371 | 341 | # steps, postprocessing etc. |
|
372 | 342 | |
|
373 | 343 | export HOME=$TMPDIR |
|
374 | 344 | cd "${packageName}" |
|
375 | 345 | runHook preRebuild |
|
376 | 346 | |
|
377 | 347 | ${stdenv.lib.optionalString bypassCache '' |
|
378 | if [ ! -f package-lock.json ] | |
|
379 | then | |
|
380 | echo "No package-lock.json file found, reconstructing..." | |
|
381 | node ${reconstructPackageLock} | |
|
382 | fi | |
|
348 | ${stdenv.lib.optionalString reconstructLock '' | |
|
349 | if [ -f package-lock.json ] | |
|
350 | then | |
|
351 | echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" | |
|
352 | echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!" | |
|
353 | rm package-lock.json | |
|
354 | else | |
|
355 | echo "No package-lock.json file found, reconstructing..." | |
|
356 | fi | |
|
357 | ||
|
358 | node ${reconstructPackageLock} | |
|
359 | ''} | |
|
383 | 360 | |
|
384 | 361 | node ${addIntegrityFieldsScript} |
|
385 | 362 | ''} |
|
386 | 363 | |
|
387 | 364 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild |
|
388 | 365 | |
|
389 | if [ "$dontNpmInstall" != "1" ] | |
|
366 | if [ "''${dontNpmInstall-}" != "1" ] | |
|
390 | 367 | then |
|
391 | 368 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. |
|
392 | 369 | rm -f npm-shrinkwrap.json |
|
393 | 370 | |
|
394 | 371 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install |
|
395 | 372 | fi |
|
373 | ''; | |
|
374 | ||
|
375 | # Builds and composes an NPM package including all its dependencies | |
|
376 | buildNodePackage = | |
|
377 | { name | |
|
378 | , packageName | |
|
379 | , version | |
|
380 | , dependencies ? [] | |
|
381 | , buildInputs ? [] | |
|
382 | , production ? true | |
|
383 | , npmFlags ? "" | |
|
384 | , dontNpmInstall ? false | |
|
385 | , bypassCache ? false | |
|
386 | , reconstructLock ? false | |
|
387 | , preRebuild ? "" | |
|
388 | , dontStrip ? true | |
|
389 | , unpackPhase ? "true" | |
|
390 | , buildPhase ? "true" | |
|
391 | , ... }@args: | |
|
392 | ||
|
393 | let | |
|
394 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; | |
|
395 | in | |
|
396 | stdenv.mkDerivation ({ | |
|
397 | name = "node_${name}-${version}"; | |
|
398 | buildInputs = [ tarWrapper python nodejs ] | |
|
399 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux | |
|
400 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool | |
|
401 | ++ buildInputs; | |
|
402 | ||
|
403 | inherit nodejs; | |
|
404 | ||
|
405 | inherit dontStrip; # Stripping may fail a build for some package deployments | |
|
406 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; | |
|
407 | ||
|
408 | compositionScript = composePackage args; | |
|
409 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; | |
|
410 | ||
|
411 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; | |
|
412 | ||
|
413 | installPhase = '' | |
|
414 | # Create and enter a root node_modules/ folder | |
|
415 | mkdir -p $out/lib/node_modules | |
|
416 | cd $out/lib/node_modules | |
|
417 | ||
|
418 | # Compose the package and all its dependencies | |
|
419 | source $compositionScriptPath | |
|
420 | ||
|
421 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} | |
|
396 | 422 | |
|
397 | 423 | # Create symlink to the deployed executable folder, if applicable |
|
398 | 424 | if [ -d "$out/lib/node_modules/.bin" ] |
|
399 | 425 | then |
|
400 | 426 | ln -s $out/lib/node_modules/.bin $out/bin |
|
401 | 427 | fi |
|
402 | 428 | |
|
403 | 429 | # Create symlinks to the deployed manual page folders, if applicable |
|
404 | 430 | if [ -d "$out/lib/node_modules/${packageName}/man" ] |
|
405 | 431 | then |
|
406 | 432 | mkdir -p $out/share |
|
407 | 433 | for dir in "$out/lib/node_modules/${packageName}/man/"* |
|
408 | 434 | do |
|
409 | 435 | mkdir -p $out/share/man/$(basename "$dir") |
|
410 | 436 | for page in "$dir"/* |
|
411 | 437 | do |
|
412 | 438 | ln -s $page $out/share/man/$(basename "$dir") |
|
413 | 439 | done |
|
414 | 440 | done |
|
415 | 441 | fi |
|
416 | 442 | |
|
417 | 443 | # Run post install hook, if provided |
|
418 | 444 | runHook postInstall |
|
419 | 445 | ''; |
|
420 | 446 | } // extraArgs); |
|
421 | 447 | |
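Because buildNodePackage is wrapped in stdenv.lib.makeOverridable (see the end of this file), the new reconstructLock flag can be toggled per package without regenerating the package set. A sketch, assuming the conventional node2nix attribute names in pkgs/node-default.nix:

    nix-build -E '(import ./pkgs/node-default.nix {}).package.override { reconstructLock = true; }'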
|
422 | 448 | # Builds a development shell |
|
423 | 449 | buildNodeShell = |
|
424 | 450 | { name |
|
425 | 451 | , packageName |
|
426 | 452 | , version |
|
427 | 453 | , src |
|
428 | 454 | , dependencies ? [] |
|
429 | 455 | , buildInputs ? [] |
|
430 | 456 | , production ? true |
|
431 | 457 | , npmFlags ? "" |
|
432 | 458 | , dontNpmInstall ? false |
|
433 | 459 | , bypassCache ? false |
|
460 | , reconstructLock ? false | |
|
434 | 461 | , dontStrip ? true |
|
435 | 462 | , unpackPhase ? "true" |
|
436 | 463 | , buildPhase ? "true" |
|
437 | 464 | , ... }@args: |
|
438 | 465 | |
|
439 | 466 | let |
|
440 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; | |
|
441 | ||
|
442 | 467 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; |
|
443 | 468 | |
|
444 | 469 | nodeDependencies = stdenv.mkDerivation ({ |
|
445 | 470 | name = "node-dependencies-${name}-${version}"; |
|
446 | 471 | |
|
447 | 472 | buildInputs = [ tarWrapper python nodejs ] |
|
448 | 473 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux |
|
449 | 474 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool |
|
450 | 475 | ++ buildInputs; |
|
451 | 476 | |
|
452 | 477 | inherit dontStrip; # Stripping may fail a build for some package deployments |
|
453 | 478 | inherit dontNpmInstall unpackPhase buildPhase; |
|
454 | 479 | |
|
455 | 480 | includeScript = includeDependencies { inherit dependencies; }; |
|
456 | 481 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; |
|
457 | 482 | |
|
458 | 483 | passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; |
|
459 | 484 | |
|
460 | 485 | installPhase = '' |
|
461 | 486 | mkdir -p $out/${packageName} |
|
462 | 487 | cd $out/${packageName} |
|
463 | 488 | |
|
464 | 489 | source $includeScriptPath |
|
465 | 490 | |
|
466 | 491 | # Create fake package.json to make the npm commands work properly |
|
467 | 492 | cp ${src}/package.json . |
|
468 | 493 | chmod 644 package.json |
|
469 | 494 | ${stdenv.lib.optionalString bypassCache '' |
|
470 | 495 | if [ -f ${src}/package-lock.json ] |
|
471 | 496 | then |
|
472 | 497 | cp ${src}/package-lock.json . |
|
473 | 498 | fi |
|
474 | 499 | ''} |
|
475 | 500 | |
|
476 | # Pinpoint the versions of all dependencies to the ones that are actually being used | |
|
477 | echo "pinpointing versions of dependencies..." | |
|
501 | # Go to the parent folder to make sure that all packages are pinpointed | |
|
478 | 502 | cd .. |
|
479 | 503 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} |
|
480 | 504 | |
|
481 | source $pinpointDependenciesScriptPath | |
|
482 | cd ${packageName} | |
|
483 | ||
|
484 | # Patch the shebangs of the bundled modules to prevent them from | |
|
485 | # calling executables outside the Nix store as much as possible | |
|
486 | patchShebangs . | |
|
487 | ||
|
488 | export HOME=$PWD | |
|
505 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} | |
|
489 | 506 | |
|
490 | ${stdenv.lib.optionalString bypassCache '' | |
|
491 | if [ ! -f package-lock.json ] | |
|
492 | then | |
|
493 | echo "No package-lock.json file found, reconstructing..." | |
|
494 | node ${reconstructPackageLock} | |
|
495 | fi | |
|
496 | ||
|
497 | node ${addIntegrityFieldsScript} | |
|
498 | ''} | |
|
499 | ||
|
500 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild | |
|
501 | ||
|
502 | ${stdenv.lib.optionalString (!dontNpmInstall) '' | |
|
503 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. | |
|
504 | rm -f npm-shrinkwrap.json | |
|
505 | ||
|
506 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install | |
|
507 | ''} | |
|
508 | ||
|
507 | # Expose the executables that were installed | |
|
509 | 508 | cd .. |
|
510 | 509 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} |
|
511 | 510 | |
|
512 | 511 | mv ${packageName} lib |
|
513 | 512 | ln -s $out/lib/node_modules/.bin $out/bin |
|
514 | 513 | ''; |
|
515 | 514 | } // extraArgs); |
|
516 | 515 | in |
|
517 | 516 | stdenv.mkDerivation { |
|
518 | 517 | name = "node-shell-${name}-${version}"; |
|
519 | 518 | |
|
520 | 519 | buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs; |
|
521 | 520 | buildCommand = '' |
|
522 | 521 | mkdir -p $out/bin |
|
523 | 522 | cat > $out/bin/shell <<EOF |
|
524 | 523 | #! ${stdenv.shell} -e |
|
525 | 524 | $shellHook |
|
526 | 525 | exec ${stdenv.shell} |
|
527 | 526 | EOF |
|
528 | 527 | chmod +x $out/bin/shell |
|
529 | 528 | ''; |
|
530 | 529 | |
|
531 | 530 | # Provide the dependencies in a development shell through the NODE_PATH environment variable |
|
532 | 531 | inherit nodeDependencies; |
|
533 | 532 | shellHook = stdenv.lib.optionalString (dependencies != []) '' |
|
534 | export NODE_PATH=$nodeDependencies/lib/node_modules | |
|
533 | export NODE_PATH=${nodeDependencies}/lib/node_modules | |
|
534 | export PATH="${nodeDependencies}/bin:$PATH" | |
|
535 | 535 | ''; |
|
536 | 536 | }; |
|
537 | 537 | in |
|
538 | 538 | { |
|
539 | 539 | buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist; |
|
540 | 540 | buildNodePackage = stdenv.lib.makeOverridable buildNodePackage; |
|
541 | 541 | buildNodeShell = stdenv.lib.makeOverridable buildNodeShell; |
|
542 | 542 | } |
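With the updated shellHook, entering the generated development shell now exports both variables. A sketch of exercising it, again assuming node2nix's conventional shell attribute:

    nix-shell -E '(import ./pkgs/node-default.nix {}).shell'
    echo $NODE_PATH    # <nodeDependencies>/lib/node_modules
    echo $PATH         # now starts with <nodeDependencies>/bin, so declared CLI tools resolve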
|
webpack.config.js
@@ -1,83 +1,83 @@
|
1 | 1 | /* webpack.config.js */ |
|
2 | 2 | require('style-loader'); |
|
3 | 3 | require('css-loader'); |
|
4 | 4 | var path = require('path'); |
|
5 | 5 | |
|
6 | 6 | const projectName = 'rhodecode-components'; |
|
7 | 7 | let destinationDirectory = path.join(process.cwd(), 'rhodecode', 'public', 'js') |
|
8 | 8 | |
|
9 | 9 | if (process.env.RC_STATIC_DIR) { |
|
10 | 10 | destinationDirectory = process.env.RC_STATIC_DIR; |
|
11 | 11 | } |
|
12 | 12 | |
|
13 | 13 | // doing it this way because it seems that plugin via grunt does not pick up .babelrc |
|
14 | 14 | let babelRCOptions = { |
|
15 | 15 | "presets": [ |
|
16 | 16 | ["env", { |
|
17 | 17 | "targets": { |
|
18 | 18 | "browsers": ["last 2 versions"] |
|
19 | 19 | } |
|
20 | 20 | }] |
|
21 | 21 | ], |
|
22 | 22 | "plugins": ["transform-object-rest-spread"] |
|
23 | } | |
|
23 | }; | |
|
24 | 24 | |
|
25 | 25 | module.exports = { |
|
26 | 26 | // Tell Webpack which file kicks off our app. |
|
27 | 27 | entry: { |
|
28 | 28 | main: path.resolve(__dirname, 'rhodecode/public/js/src/components/index.js'), |
|
29 | 29 | }, |
|
30 | 30 | output: { |
|
31 | 31 | filename: 'rhodecode-components.js', |
|
32 | 32 | path: path.resolve(destinationDirectory) |
|
33 | 33 | }, |
|
34 | 34 | // Tell Webpack which directories to look in to resolve import statements. |
|
35 | 35 | // Normally Webpack will look in node_modules by default but since weβre overriding |
|
36 | // the property weβll need to tell it to look there. | |
|
36 | 37 | resolve: { |
|
37 | 38 | modules: [ |
|
38 | 39 | path.resolve(__dirname, 'node_modules'), |
|
39 | 40 | ] |
|
40 | 41 | }, |
|
41 | 42 | // These rules tell Webpack how to process different module types. |
|
42 | 43 | // Remember, *everything* is a module in Webpack. That includes |
|
43 | 44 | // CSS, and (thanks to our loader) HTML. |
|
44 | 45 | module: { |
|
45 | 46 | rules: [ |
|
46 | 47 | { |
|
47 | 48 | test: /style-polymer.css/, |
|
48 | 49 | use: 'raw-loader' |
|
49 | 50 | }, |
|
50 | 51 | { |
|
51 | 52 | // If you see a file that ends in .html, send it to these loaders. |
|
52 | 53 | test: /\.html$/, |
|
53 | 54 | // This is an example of chained loaders in Webpack. |
|
54 | 55 | // Chained loaders run last to first. So it will run |
|
55 | 56 | // polymer-webpack-loader, and hand the output to |
|
56 | 57 | // babel-loader. This lets us transpile JS in our `<script>` elements.
|
57 | 58 | use: [ |
|
58 | {loader: 'babel-loader', | |
|
59 | options: babelRCOptions}, | |
|
59 | {loader: 'babel-loader', options: babelRCOptions}, | |
|
60 | 60 | {loader: 'polymer-webpack-loader', |
|
61 | 61 | options: { |
|
62 | 62 | processStyleLinks: true, |
|
63 | 63 | } |
|
64 | 64 | } |
|
65 | 65 | ], |
|
66 | 66 | }, |
|
67 | 67 | { |
|
68 | 68 | // If you see a file that ends in .js, just send it to the babel-loader. |
|
69 | 69 | test: /\.js$/, |
|
70 | 70 | use: {loader: 'babel-loader', options: babelRCOptions} |
|
71 | 71 | // Optionally exclude node_modules from transpilation except for polymer-webpack-loader: |
|
72 | 72 | // exclude: /node_modules\/(?!polymer-webpack-loader\/).*/ |
|
73 | 73 | }, |
|
74 | 74 | // this is required because of bug: |
|
75 | 75 | // https://github.com/webpack-contrib/polymer-webpack-loader/issues/49 |
|
76 | 76 | { |
|
77 | 77 | test: /intl-messageformat.min.js/, |
|
78 | 78 | use: 'imports-loader?this=>window' |
|
79 | 79 | } |
|
80 | 80 | ] |
|
81 | 81 | }, |
|
82 | 82 | plugins: [] |
|
83 | 83 | }; |
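In this repository the config is normally driven through Grunt (the Makefile's web-build target), but it can also be handed to webpack directly; RC_STATIC_DIR then redirects the bundle output as handled at the top of this file (the /tmp path is illustrative):

    NODE_PATH=./node_modules ./node_binaries/grunt    # what "make web-build" runs
    RC_STATIC_DIR=/tmp/rc-static ./node_binaries/webpack --config webpack.config.js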