@@ -0,0 +1,55 @@
+
+==============================
+Generate the Nix expressions
+==============================
+
+Details can be found in the repository of `RhodeCode Enterprise CE`_ inside of
+the file `docs/contributing/dependencies.rst`.
+
+Start the environment as follows:
+
+.. code:: shell
+
+   nix-shell pkgs/shell-generate.nix
+
+
+
+Python dependencies
+===================
+
+.. code:: shell
+
+   pip2nix generate --licenses
+   # or
+   nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
+
+
+NodeJS dependencies
+===================
+
+.. code:: shell
+
+   # switch to pkgs dir
+   pushd pkgs
+   node2nix --input ../package.json \
+     -o node-packages.nix \
+     -e node-env.nix \
+     -c node-default.nix \
+     -d --flatten --nodejs-6
+   popd
+
+
+
+Bower dependencies
+==================
+
+.. code:: shell
+
+   bower2nix bower.json pkgs/bower-packages.nix
+   # or
+   nix-shell pkgs/shell-generate.nix --command "bower2nix bower.json pkgs/bower-packages.nix"
+
+
+.. Links
+
+.. _RhodeCode Enterprise CE: https://code.rhodecode.com/rhodecode-enterprise-ce
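The three sections above each refresh one slice of the pinned dependencies. As a rough end-to-end sketch (an assumption of how the pieces combine, not something this changeset spells out; it presumes the commands are run from the repository root and that pip2nix writes its output under `pkgs/`), a full regeneration pass could look like:

.. code:: shell

   nix-shell pkgs/shell-generate.nix --command '
     pip2nix generate --licenses
     pushd pkgs
     node2nix --input ../package.json -o node-packages.nix \
       -e node-env.nix -c node-default.nix -d --flatten --nodejs-6
     popd
     bower2nix bower.json pkgs/bower-packages.nix
   '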
@@ -0,0 +1,17 @@
+{ pkgs
+, pythonPackages
+}:
+
+rec {
+  pip2nix-src = pkgs.fetchzip {
+    url = https://github.com/johbo/pip2nix/archive/51e6fdae34d0e8ded9efeef7a8601730249687a6.tar.gz;
+    sha256 = "02a4jjgi7lsvf8mhrxsd56s9a3yg20081rl9bgc2m84w60v2gbz2";
+  };
+
+  pip2nix = import pip2nix-src {
+    inherit
+      pkgs
+      pythonPackages;
+  };
+
+}
@@ -0,0 +1,52 @@
+{ pkgs ? (import <nixpkgs> {})
+, pythonPackages ? "python27Packages"
+}:
+
+with pkgs.lib;
+
+let _pythonPackages = pythonPackages; in
+let
+  pythonPackages = getAttr _pythonPackages pkgs;
+
+  pip2nix = import ./nix-common/pip2nix.nix {
+    inherit
+      pkgs
+      pythonPackages;
+  };
+
+in
+
+pkgs.stdenv.mkDerivation {
+  name = "pip2nix-generated";
+  buildInputs = [
+    # Allows to generate python packages
+    pip2nix.pip2nix
+    pythonPackages.pip-tools
+
+    # Allows to generate bower dependencies
+    pkgs.nodePackages.bower2nix
+
+    # Allows to generate node dependencies
+    pkgs.nodePackages.node2nix
+
+    # We need mysql_config to be around
+    pkgs.mysql
+
+    # We need postgresql to be around
+    pkgs.postgresql
+
+    # Curl is needed for pycurl
+    pkgs.curl
+  ];
+
+  shellHook = ''
+    runHook preShellHook
+    runHook postShellHook
+  '';
+
+  preShellHook = ''
+    echo "Starting Generate Shell"
+    # Custom prompt to distinguish from other dev envs.
+    export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
+  '';
+}
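The buildInputs comments above describe what the generator shell is expected to provide. A quick, hypothetical sanity check from inside that shell (not part of the changeset) would be:

.. code:: shell

   nix-shell pkgs/shell-generate.nix
   # at the "[Generate-shell:...]" prompt:
   which pip2nix node2nix bower2nix mysql_config pg_config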
@@ -10,7 +10,7 @@
     "paper-tooltip": "PolymerElements/paper-tooltip#^1.1.2",
     "paper-toast": "PolymerElements/paper-toast#^1.3.0",
     "paper-toggle-button": "PolymerElements/paper-toggle-button#^1.2.0",
-    "iron-ajax": "PolymerElements/iron-ajax#^1.4.
+    "iron-ajax": "PolymerElements/iron-ajax#^1.4.4",
     "iron-autogrow-textarea": "PolymerElements/iron-autogrow-textarea#^1.0.13",
    "iron-a11y-keys": "PolymerElements/iron-a11y-keys#^1.0.6"
   }
@@ -1,41 +1,40 @@
 # Nix environment for the community edition
 #
-# This shall be as lean as possible, just producing the
+# This shall be as lean as possible, just producing the enterprise-ce
 # derivation. For advanced tweaks to pimp up the development environment we use
 # "shell.nix" so that it does not have to clutter this file.
+#
+# Configuration, set values in "~/.nixpkgs/config.nix".
+# example
+#   {
+#     # Thoughts on how to configure the dev environment
+#     rc = {
+#       codeInternalUrl = "https://usr:token@internal-code.rhodecode.com";
+#       sources = {
+#         rhodecode-vcsserver = "/home/user/work/rhodecode-vcsserver";
+#         rhodecode-enterprise-ce = "/home/user/work/rhodecode-enterprise-ce";
+#         rhodecode-enterprise-ee = "/home/user/work/rhodecode-enterprise-ee";
+#       };
+#     };
+#   }
 
 args@
 { pythonPackages ? "python27Packages"
 , pythonExternalOverrides ? self: super: {}
-, doCheck ?
+, doCheck ? false
 , ...
 }:
 
 let
-
   # Use nixpkgs from args or import them. We use this indirect approach
   # through args to be able to use the name `pkgs` for our customized packages.
   # Otherwise we will end up with an infinite recursion.
+  pkgs = args.pkgs or (import <nixpkgs> { });
 
-  # johbo: Interim bridge which allows us to build with the upcoming
-  # nixos.16.09 branch (unstable at the moment of writing this note) and the
-  # current stable nixos-16.03.
-  backwardsCompatibleFetchgit = { ... }@args:
-    let
-      origSources = nixpkgs.fetchgit args;
-    in
-    nixpkgs.lib.overrideDerivation origSources (oldAttrs: {
-      NIX_PREFETCH_GIT_CHECKOUT_HOOK = ''
-        find $out -name '.git*' -print0 | xargs -0 rm -rf
-      '';
-    });
-
-  # Create a customized version of nixpkgs which should be used throughout the
-  # rest of this file.
-  pkgs = nixpkgs.overridePackages (self: super: {
-    fetchgit = backwardsCompatibleFetchgit;
-  });
+  # Works with the new python-packages, still can fallback to the old
+  # variant.
+  basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
+    self: basePythonPackages.override (a: { inherit self; }));
 
   # Evaluates to the last segment of a file system path.
   basename = path: with pkgs.lib; last (splitString "/" path);
@@ -46,7 +45,7 @@ let
       ext = last (splitString "." path);
     in
       !builtins.elem (basename path) [
-        ".git" ".hg" "__pycache__" ".eggs"
+        ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
         "bower_components" "node_modules"
         "build" "data" "result" "tmp"] &&
       !builtins.elem ext ["egg-info" "pyc"] &&
@@ -54,18 +53,20 @@ let
   # it would still be good to restore it since we want to ignore "result-*".
   !hasPrefix "result" path;
 
-  basePythonPackages = with builtins; if isAttrs pythonPackages
-    then pythonPackages
-    else getAttr pythonPackages pkgs;
+  sources =
+    let
+      inherit (pkgs.lib) all isString attrValues;
+      sourcesConfig = pkgs.config.rc.sources or {};
+    in
+      # Ensure that sources are configured as strings. Using a path
+      # would result in a copy into the nix store.
+      assert all isString (attrValues sourcesConfig);
+      sourcesConfig;
 
-  buildBowerComponents =
-    pkgs.buildBowerComponents or
-    (import ./pkgs/backport-16.03-build-bower-components.nix { inherit pkgs; });
-
-  sources = pkgs.config.rc.sources or {};
-  version = builtins.readFile ./rhodecode/VERSION;
+  version = builtins.readFile "${rhodecode-enterprise-ce-src}/rhodecode/VERSION";
   rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
 
+  buildBowerComponents = pkgs.buildBowerComponents;
   nodeEnv = import ./pkgs/node-default.nix {
     inherit pkgs;
   };
@@ -77,133 +78,145 @@ let
     src = rhodecode-enterprise-ce-src;
   };
 
-  pythonGeneratedPackages = self: basePythonPackages.override (a: {
-    inherit self;
-  })
-  // (scopedImport {
-    self = self;
-    super = basePythonPackages;
-    inherit pkgs;
-    inherit (pkgs) fetchurl fetchgit;
-  } ./pkgs/python-packages.nix);
+  rhodecode-testdata-src = sources.rhodecode-testdata or (
+    pkgs.fetchhg {
+      url = "https://code.rhodecode.com/upstream/rc_testdata";
+      rev = "v0.10.0";
+      sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
+    });
 
-  pythonOverrides = import ./pkgs/python-packages-overrides.nix {
-    inherit
-      basePythonPackages
-      pkgs
-  };
+  rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
+    inherit
+      doCheck
+      pkgs
+      pythonPackages;
+  };
 
   pythonLocalOverrides = self: super: {
     rhodecode-enterprise-ce =
       let
         linkNodeAndBowerPackages = ''
-          echo "Export RhodeCode CE path"
           export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
-          echo "Link node packages"
+
+          echo "[BEGIN]: Link node packages"
          rm -fr node_modules
           mkdir node_modules
           # johbo: Linking individual packages allows us to run "npm install"
           # inside of a shell to try things out. Re-entering the shell will
           # restore a clean environment.
           ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
+          echo "[DONE]: Link node packages"
 
-          echo "
-
-          echo "Link bower packages"
+          echo "[BEGIN]: Link bower packages"
           rm -fr bower_components
           mkdir bower_components
+          ln -s ${bowerComponents}/bower_components/* bower_components/
+          echo "[DONE]: Link bower packages"
+        '';
 
-          ln -s ${bowerComponents}/bower_components/* bower_components/
-          echo "DONE: Link bower packages"
-        '';
+        releaseName = "RhodeCodeEnterpriseCE-${version}";
       in super.rhodecode-enterprise-ce.override (attrs: {
-
         inherit
           doCheck
           version;
+
         name = "rhodecode-enterprise-ce-${version}";
-        releaseName = "RhodeCodeEnterpriseCE-${version}";
+        releaseName = releaseName;
         src = rhodecode-enterprise-ce-src;
         dontStrip = true; # prevent strip, we don't need it.
 
-        buildInputs =
-          attrs.buildInputs ++
-          (with self; [
-            pkgs.nodePackages.bower
-            pkgs.nodePackages.grunt-cli
-            pkgs.subversion
-            rhodecode-testdata
-          ]);
-
-        #TODO: either move this into overrides, OR use the new machanics from
-        # pip2nix and requiremtn.txt file
-        propagatedBuildInputs = attrs.propagatedBuildInputs ++ (with self; [
-          rhodecode-tools
-        ]);
-
-        # TODO: johbo: Make a nicer way to expose the parts. Maybe
-        # pkgs/default.nix?
+        # expose following attributed outside
         passthru = {
           inherit
+            rhodecode-testdata
             bowerComponents
             linkNodeAndBowerPackages
             myPythonPackagesUnfix
-            pythonLocalOverrides
+            pythonLocalOverrides
+            pythonCommunityOverrides;
+
           pythonPackages = self;
         };
 
+        buildInputs =
+          attrs.buildInputs or [] ++ [
+            rhodecode-testdata
+            pkgs.nodePackages.bower
+            pkgs.nodePackages.grunt-cli
+          ];
+
+        #NOTE: option to inject additional propagatedBuildInputs
+        propagatedBuildInputs =
+          attrs.propagatedBuildInputs or [] ++ [
+
+          ];
+
         LC_ALL = "en_US.UTF-8";
         LOCALE_ARCHIVE =
-          if pkgs.stdenv
+          if pkgs.stdenv.isLinux
           then "${pkgs.glibcLocales}/lib/locale/locale-archive"
           else "";
 
+        # Add bin directory to path so that tests can find 'rhodecode'.
         preCheck = ''
           export PATH="$out/bin:$PATH"
         '';
 
+        # custom check phase for testing
+        checkPhase = ''
+          runHook preCheck
+          PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=rhodecode --cov-report=term-missing rhodecode
+          runHook postCheck
+        '';
+
         postCheck = ''
-          rm -rf $out/lib/${self.python.libPrefix}/site-packages/pytest_pylons
+          echo "Cleanup of rhodecode/tests"
           rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
         '';
 
-        preBuild = linkNodeAndBowerPackages + ''
+        preBuild = ''
+
+          echo "Building frontend assets"
+          ${linkNodeAndBowerPackages}
           grunt
           rm -fr node_modules
         '';
 
         postInstall = ''
-          echo "Writing meta information for rccontrol to nix-support/rccontrol"
+          echo "Writing enterprise-ce meta information for rccontrol to nix-support/rccontrol"
           mkdir -p $out/nix-support/rccontrol
           cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
-          echo "
+          echo "[DONE]: enterprise-ce meta information for rccontrol written"
+
+          mkdir -p $out/etc
+          cp configs/production.ini $out/etc
+          echo "[DONE]: saved enterprise-ce production.ini into $out/etc"
 
           # python based programs need to be wrapped
-          #ln -s ${self.python}/bin/* $out/bin/
+          mkdir -p $out/bin
+          # rhodecode-tools
+          ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
+
+          # required binaries from dependencies
+          #ln -s ${self.python}/bin/python $out/bin
           ln -s ${self.pyramid}/bin/* $out/bin/
           ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
           ln -s ${self.supervisor}/bin/supervisor* $out/bin/
           ln -s ${self.pastescript}/bin/paster $out/bin/
           ln -s ${self.channelstream}/bin/channelstream $out/bin/
           ln -s ${self.celery}/bin/celery $out/bin/
+          echo "[DONE]: created symlinks into $out/bin"
 
-          # rhodecode-tools
-          ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
-
-          # note that condition should be restricted when adding further tools
           for file in $out/bin/*;
           do
            wrapProgram $file \
+              --prefix PATH : $PATH \
+              --prefix PYTHONPATH : $PYTHONPATH \
+              --set PYTHONHASHSEED random
           done
 
-          mkdir $out/etc
-          cp configs/production.ini $out/etc
+          echo "[DONE]: enterprise-ce binary wrapping"
 
-
-          # TODO: johbo: Make part of ac-tests
           if [ ! -f rhodecode/public/js/scripts.js ]; then
             echo "Missing scripts.js"
             exit 1
@@ -213,31 +226,33 @@ let
             exit 1
           fi
         '';
-
       });
 
-    rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
-      inherit
-        doCheck
-        pkgs
-        pythonPackages;
-    };
-
   };
 
-  rhodecode-testdata-src = sources.rhodecode-testdata or (
-    pkgs.fetchhg {
-      url = "https://code.rhodecode.com/upstream/rc_testdata";
-      rev = "v0.10.0";
-      sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
-    });
+  basePythonPackages = with builtins;
+    if isAttrs pythonPackages then
+      pythonPackages
+    else
+      getAttr pythonPackages pkgs;
+
+  pythonGeneratedPackages = import ./pkgs/python-packages.nix {
+    inherit pkgs;
+    inherit (pkgs) fetchurl fetchgit fetchhg;
+  };
+
+  pythonCommunityOverrides = import ./pkgs/python-packages-overrides.nix {
+    inherit pkgs basePythonPackages;
+  };
 
   # Apply all overrides and fix the final package set
   myPythonPackagesUnfix = with pkgs.lib;
     (extends pythonExternalOverrides
     (extends pythonLocalOverrides
-    (extends pythonOverrides
+    (extends pythonCommunityOverrides
+    (extends pythonGeneratedPackages
+    basePythonPackagesUnfix))));
 
   myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
 
 in myPythonPackages.rhodecode-enterprise-ce
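With `default.nix` now taking `doCheck ? false` and reading optional source locations from `~/.nixpkgs/config.nix` (`rc.sources`), a plausible way to exercise it locally is the following sketch; it is not stated in the changeset itself, only inferred from the arguments above:

.. code:: shell

   # build the enterprise-ce derivation without running the test suite
   nix-build default.nix

   # build it with the py.test based checkPhase enabled
   nix-build default.nix --arg doCheck true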
@@ -47,7 +47,7 @@ Switch nix to the latest STABLE channel
 
 run::
 
-   nix-channel --add https://nixos.org/channels/nixos-1
+   nix-channel --add https://nixos.org/channels/nixos-18.03 nixpkgs
 
 Followed by::
 
@@ -1,6 +1,13 @@
 {
   "name": "rhodecode-enterprise",
-  "version": "0.0
+  "version": "1.0.0",
+  "private": true,
+  "description" : "RhodeCode JS packaged",
+  "license": "SEE LICENSE IN LICENSE.txt",
+  "repository" : {
+    "type" : "hg",
+    "url" : "https://code.rhodecode.com/rhodecode-enterprise-ce"
+  },
   "devDependencies": {
     "grunt": "^0.4.5",
     "grunt-contrib-copy": "^1.0.0",
@@ -14,7 +21,7 @@
     "grunt-vulcanize": "^1.0.0",
     "node2nix": "^1.0.0",
     "jshint": "^2.9.1-rc3",
-    "bower": "^1.
+    "bower": "^1.8.4",
     "jquery": "1.11.3",
     "favico.js": "^0.3.10",
     "clipboard": "^1.7.1",
@@ -1,33 +1,34 b'' | |||||
|
1 | # Generated by bower2nix v3.2.0 (https://github.com/rvl/bower2nix) | |||
1 | { fetchbower, buildEnv }: |
|
2 | { fetchbower, buildEnv }: | |
2 | buildEnv { name = "bower-env"; ignoreCollisions = true; paths = [ |
|
3 | buildEnv { name = "bower-env"; ignoreCollisions = true; paths = [ | |
3 |
(fetchbower "webcomponentsjs" "0.7.2 |
|
4 | (fetchbower "webcomponentsjs" "0.7.24" "^0.7.22" "0d6hfsd51n7qykzci9cw5vlsrajvffaf5ic3azlp2rfz76m651qj") | |
4 |
(fetchbower "polymer" "Polymer/polymer#1. |
|
5 | (fetchbower "polymer" "Polymer/polymer#1.11.3" "Polymer/polymer#^1.6.1" "0n11ag2pmczw5yv3m76bh0a7hvicqvcaiv7knixx1r704pw107s6") | |
5 |
(fetchbower "paper-button" "PolymerElements/paper-button#1.0.1 |
|
6 | (fetchbower "paper-button" "PolymerElements/paper-button#1.0.15" "PolymerElements/paper-button#^1.0.13" "0zabrp8p4s9md1hlwg0rqmbx0k87a41lsg9pzk747hcb349gblg0") | |
6 |
(fetchbower "paper-spinner" "PolymerElements/paper-spinner#1.2. |
|
7 | (fetchbower "paper-spinner" "PolymerElements/paper-spinner#1.2.1" "PolymerElements/paper-spinner#^1.2.0" "0d6xc9fd2ipcli7w77yrn1k0z9j373c9y1f16db2840cyb4rvii8") | |
7 |
(fetchbower "paper-tooltip" "PolymerElements/paper-tooltip#1.1. |
|
8 | (fetchbower "paper-tooltip" "PolymerElements/paper-tooltip#1.1.4" "PolymerElements/paper-tooltip#^1.1.2" "0j8s09dxqql8mgnvb7x382scq98xk2vjgylk06bsd1gphp3d3qzm") | |
8 |
(fetchbower "paper-toast" "PolymerElements/paper-toast#1.3. |
|
9 | (fetchbower "paper-toast" "PolymerElements/paper-toast#1.3.1" "PolymerElements/paper-toast#^1.3.0" "1s0csv8dwgdyg4psq1zrd6vivlpsgzi4sjqllwqmlwhfnxfl5ql4") | |
9 |
(fetchbower "paper-toggle-button" "PolymerElements/paper-toggle-button#1. |
|
10 | (fetchbower "paper-toggle-button" "PolymerElements/paper-toggle-button#1.3.0" "PolymerElements/paper-toggle-button#^1.2.0" "0hvv2y406lzlrkkcmv9nnd99bmcgcrhcx86q3axxv8k3580gqq97") | |
10 |
(fetchbower "iron-ajax" "PolymerElements/iron-ajax#1.4.4" "PolymerElements/iron-ajax#^1.4.4" "0jp |
|
11 | (fetchbower "iron-ajax" "PolymerElements/iron-ajax#1.4.4" "PolymerElements/iron-ajax#^1.4.4" "0vs4dqcw5y02kj11ivzs901s5nwn97fk01xz2jmpy2fgh6l9q5yr") | |
11 |
(fetchbower "iron-autogrow-textarea" "PolymerElements/iron-autogrow-textarea#1.0.1 |
|
12 | (fetchbower "iron-autogrow-textarea" "PolymerElements/iron-autogrow-textarea#1.0.15" "PolymerElements/iron-autogrow-textarea#^1.0.13" "1jw40ki5w21il0i9pwjywk4y6mk9lrj8fm57vfg9nlpbiqm2vswb") | |
12 |
(fetchbower "iron-a11y-keys" "PolymerElements/iron-a11y-keys#1.0. |
|
13 | (fetchbower "iron-a11y-keys" "PolymerElements/iron-a11y-keys#1.0.9" "PolymerElements/iron-a11y-keys#^1.0.6" "07c2wm1p9g52qidl67a43yb7pzd88ygycgghlwzjbh2vkwrs40kp") | |
13 |
(fetchbower "iron-flex-layout" "PolymerElements/iron-flex-layout#1.3. |
|
14 | (fetchbower "iron-flex-layout" "PolymerElements/iron-flex-layout#1.3.9" "PolymerElements/iron-flex-layout#^1.0.0" "1r54la4n8n0lq97vdxnlpdrarxsiwp2b3vfvby9il3j4y4s8vi4h") | |
14 |
(fetchbower "paper-behaviors" "PolymerElements/paper-behaviors#1.0.1 |
|
15 | (fetchbower "paper-behaviors" "PolymerElements/paper-behaviors#1.0.13" "PolymerElements/paper-behaviors#^1.0.0" "0yljykkdg9p67dinplmp6hc5ma6sp95ykah8kz6id5z8gjmsd05b") | |
15 |
(fetchbower "paper-material" "PolymerElements/paper-material#1.0. |
|
16 | (fetchbower "paper-material" "PolymerElements/paper-material#1.0.7" "PolymerElements/paper-material#^1.0.0" "1q9r3i5f61y6hmd18h3fcmn7y29yznraz83f9256z8cc0vglfjdb") | |
16 |
(fetchbower "paper-styles" "PolymerElements/paper-styles#1.1 |
|
17 | (fetchbower "paper-styles" "PolymerElements/paper-styles#1.3.1" "PolymerElements/paper-styles#^1.0.0" "11fcxp9kx6sqp2yq0883psn8xyw5d3i753mimqbx8aqa5abvrk4q") | |
17 |
(fetchbower "neon-animation" "PolymerElements/neon-animation#1.2. |
|
18 | (fetchbower "neon-animation" "PolymerElements/neon-animation#1.2.5" "PolymerElements/neon-animation#^1.0.0" "144sq9ijw1nnp2jagpa1ammrc018kp1y6nlmgq1v1iishv4ylsl5") | |
18 |
(fetchbower "iron-a11y-announcer" "PolymerElements/iron-a11y-announcer#1.0. |
|
19 | (fetchbower "iron-a11y-announcer" "PolymerElements/iron-a11y-announcer#1.0.6" "PolymerElements/iron-a11y-announcer#^1.0.0" "1az02v91s17v9bir868pifv0s2lwxchm0i4l20176f98366813zk") | |
19 |
(fetchbower "iron-overlay-behavior" "PolymerElements/iron-overlay-behavior#1. |
|
20 | (fetchbower "iron-overlay-behavior" "PolymerElements/iron-overlay-behavior#1.10.4" "PolymerElements/iron-overlay-behavior#^1.0.9" "0px6s756cgqzxzq53fgk1297j07gyfykqkhdzmj9fwyyrwiv1g8z") | |
20 |
(fetchbower "iron-fit-behavior" "PolymerElements/iron-fit-behavior#1.2. |
|
21 | (fetchbower "iron-fit-behavior" "PolymerElements/iron-fit-behavior#1.2.7" "PolymerElements/iron-fit-behavior#^1.1.0" "0b864x9cdxadvzkdcn1d3yvi32kqccfv8j467337rgcq193l60jb") | |
21 |
(fetchbower "iron-checked-element-behavior" "PolymerElements/iron-checked-element-behavior#1.0. |
|
22 | (fetchbower "iron-checked-element-behavior" "PolymerElements/iron-checked-element-behavior#1.0.6" "PolymerElements/iron-checked-element-behavior#^1.0.0" "165nh5vkdsr4d5vkq4sj1sz0igfy6vgx844ys164mqg8gkjncvxm") | |
22 |
(fetchbower "promise-polyfill" "polymerlabs/promise-polyfill#1.0.1" "polymerlabs/promise-polyfill#^1.0.0" "0 |
|
23 | (fetchbower "promise-polyfill" "polymerlabs/promise-polyfill#1.0.1" "polymerlabs/promise-polyfill#^1.0.0" "0warxr9fk2d3cvgiq81djxwiky73gxxr5s3xa97d3c83q1lidlzy") | |
23 |
(fetchbower "iron-behaviors" "PolymerElements/iron-behaviors#1.0.1 |
|
24 | (fetchbower "iron-behaviors" "PolymerElements/iron-behaviors#1.0.18" "PolymerElements/iron-behaviors#^1.0.0" "1icx212kh1cfdvk3v7pdqic9c48pdhnknq8ajx0dlps1l5sm69xh") | |
24 |
(fetchbower "iron-validatable-behavior" "PolymerElements/iron-validatable-behavior#1.1. |
|
25 | (fetchbower "iron-validatable-behavior" "PolymerElements/iron-validatable-behavior#1.1.2" "PolymerElements/iron-validatable-behavior#^1.0.0" "1gjjn08y5s43p7aizif24i2yvkd1sasy77dix62irkwzig3favqr") | |
25 |
(fetchbower "iron-form-element-behavior" "PolymerElements/iron-form-element-behavior#1.0. |
|
26 | (fetchbower "iron-form-element-behavior" "PolymerElements/iron-form-element-behavior#1.0.7" "PolymerElements/iron-form-element-behavior#^1.0.0" "02qbxqsqxjzy086cqbv6pgibmi0888q757p95ig6x3nc62xk81an") | |
26 |
(fetchbower "iron-a11y-keys-behavior" " |
|
27 | (fetchbower "iron-a11y-keys-behavior" "PolymerElements/iron-a11y-keys-behavior#1.1.9" "PolymerElements/iron-a11y-keys-behavior#^1.0.0" "0dy1ca1nb9v9y968q998vgs147fkmn4irnnrdfl40ln1bln45qx9") | |
27 |
(fetchbower "paper-ripple" "PolymerElements/paper-ripple#1.0. |
|
28 | (fetchbower "paper-ripple" "PolymerElements/paper-ripple#1.0.10" "PolymerElements/paper-ripple#^1.0.0" "0d5gjf7cw7hk520h6xnav1xrpd948qc8mzvqgqycqkad4j4vdck7") | |
28 |
(fetchbower "font-roboto" "PolymerElements/font-roboto#1.0 |
|
29 | (fetchbower "font-roboto" "PolymerElements/font-roboto#1.1.0" "PolymerElements/font-roboto#^1.0.1" "0z4msvaa5pnr84j2r957g313fmdbdbrknhdw1axy5g48845yv04s") | |
29 |
(fetchbower "iron-meta" "PolymerElements/iron-meta#1.1. |
|
30 | (fetchbower "iron-meta" "PolymerElements/iron-meta#1.1.3" "PolymerElements/iron-meta#^1.0.0" "13lsj648ibkyw3lha6g6r7afmk4yxvgdi63bkpy54wx63gfx7xir") | |
30 |
(fetchbower "iron-resizable-behavior" "PolymerElements/iron-resizable-behavior#1.0. |
|
31 | (fetchbower "iron-resizable-behavior" "PolymerElements/iron-resizable-behavior#1.0.6" "PolymerElements/iron-resizable-behavior#^1.0.0" "1r8qk670nigqpw50x1m5yvbx7p9sahiwlf0f9z71v508d63vrbi1") | |
31 |
(fetchbower "iron-selector" "PolymerElements/iron-selector#1.5. |
|
32 | (fetchbower "iron-selector" "PolymerElements/iron-selector#1.5.3" "PolymerElements/iron-selector#^1.0.0" "1362pq6vy113h4y6hn31hhp52hh8g269s5aj7vsq266v7y59igf6") | |
32 |
(fetchbower "web-animations-js" "web-animations/web-animations-js#2. |
|
33 | (fetchbower "web-animations-js" "web-animations/web-animations-js#2.3.1" "web-animations/web-animations-js#^2.2.0" "16haz886711qrcf9h9wrjwf5hrz2c69l4jxlq0iyzar823c51qkq") | |
33 | ]; } |
|
34 | ]; } |
@@ -1,12 +1,14 @@
-# This file has been generated by node2nix 1.
+# This file has been generated by node2nix 1.5.3. Do not edit!
 
 {pkgs ? import <nixpkgs> {
     inherit system;
-  }, system ? builtins.currentSystem}:
+  }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-6_x"}:
 
 let
   nodeEnv = import ./node-env.nix {
-    inherit (pkgs) stdenv python utillinux runCommand writeTextFile
+    inherit (pkgs) stdenv python2 utillinux runCommand writeTextFile;
+    inherit nodejs;
+    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
   };
 in
 import ./node-packages.nix {
@@ -1,24 +1,26 @@
 # This file originates from node2nix
 
-{stdenv,
+{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:
 
 let
+  python = if nodejs ? python then nodejs.python else python2;
+
   # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
   tarWrapper = runCommand "tarWrapper" {} ''
     mkdir -p $out/bin
 
     cat > $out/bin/tar <<EOF
     #! ${stdenv.shell} -e
     $(type -p tar) "\$@" --warning=no-unknown-keyword
     EOF
 
     chmod +x $out/bin/tar
   '';
 
   # Function that generates a TGZ file from a NPM project
   buildNodeSourceDist =
     { name, version, src, ... }:
 
     stdenv.mkDerivation {
       name = "node-tarball-${name}-${version}";
       inherit src;
@@ -42,151 +44,311 b' let' | |||||
42 | # Bundle the dependencies of the package |
|
44 | # Bundle the dependencies of the package | |
43 | mkdir -p node_modules |
|
45 | mkdir -p node_modules | |
44 | cd node_modules |
|
46 | cd node_modules | |
45 |
|
47 | |||
46 | # Only include dependencies if they don't exist. They may also be bundled in the package. |
|
48 | # Only include dependencies if they don't exist. They may also be bundled in the package. | |
47 | if [ ! -e "${dependency.name}" ] |
|
49 | if [ ! -e "${dependency.name}" ] | |
48 | then |
|
50 | then | |
49 | ${composePackage dependency} |
|
51 | ${composePackage dependency} | |
50 | fi |
|
52 | fi | |
51 |
|
53 | |||
52 | cd .. |
|
54 | cd .. | |
53 | '' |
|
55 | '' | |
54 | ) dependencies); |
|
56 | ) dependencies); | |
55 |
|
57 | |||
56 | # Recursively composes the dependencies of a package |
|
58 | # Recursively composes the dependencies of a package | |
57 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: |
|
59 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: | |
58 | let |
|
|||
59 | fixImpureDependencies = writeTextFile { |
|
|||
60 | name = "fixDependencies.js"; |
|
|||
61 | text = '' |
|
|||
62 | var fs = require('fs'); |
|
|||
63 | var url = require('url'); |
|
|||
64 |
|
||||
65 | /* |
|
|||
66 | * Replaces an impure version specification by * |
|
|||
67 | */ |
|
|||
68 | function replaceImpureVersionSpec(versionSpec) { |
|
|||
69 | var parsedUrl = url.parse(versionSpec); |
|
|||
70 |
|
||||
71 | if(versionSpec == "latest" || versionSpec == "unstable" || |
|
|||
72 | versionSpec.substr(0, 2) == ".." || dependency.substr(0, 2) == "./" || dependency.substr(0, 2) == "~/" || dependency.substr(0, 1) == '/') |
|
|||
73 | return '*'; |
|
|||
74 | else if(parsedUrl.protocol == "git:" || parsedUrl.protocol == "git+ssh:" || parsedUrl.protocol == "git+http:" || parsedUrl.protocol == "git+https:" || |
|
|||
75 | parsedUrl.protocol == "http:" || parsedUrl.protocol == "https:") |
|
|||
76 | return '*'; |
|
|||
77 | else |
|
|||
78 | return versionSpec; |
|
|||
79 | } |
|
|||
80 |
|
||||
81 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); |
|
|||
82 |
|
||||
83 | /* Replace dependencies */ |
|
|||
84 | if(packageObj.dependencies !== undefined) { |
|
|||
85 | for(var dependency in packageObj.dependencies) { |
|
|||
86 | var versionSpec = packageObj.dependencies[dependency]; |
|
|||
87 | packageObj.dependencies[dependency] = replaceImpureVersionSpec(versionSpec); |
|
|||
88 | } |
|
|||
89 | } |
|
|||
90 |
|
||||
91 | /* Replace development dependencies */ |
|
|||
92 | if(packageObj.devDependencies !== undefined) { |
|
|||
93 | for(var dependency in packageObj.devDependencies) { |
|
|||
94 | var versionSpec = packageObj.devDependencies[dependency]; |
|
|||
95 | packageObj.devDependencies[dependency] = replaceImpureVersionSpec(versionSpec); |
|
|||
96 | } |
|
|||
97 | } |
|
|||
98 |
|
||||
99 | /* Replace optional dependencies */ |
|
|||
100 | if(packageObj.optionalDependencies !== undefined) { |
|
|||
101 | for(var dependency in packageObj.optionalDependencies) { |
|
|||
102 | var versionSpec = packageObj.optionalDependencies[dependency]; |
|
|||
103 | packageObj.optionalDependencies[dependency] = replaceImpureVersionSpec(versionSpec); |
|
|||
104 | } |
|
|||
105 | } |
|
|||
106 |
|
||||
107 | /* Write the fixed JSON file */ |
|
|||
108 | fs.writeFileSync("package.json", JSON.stringify(packageObj)); |
|
|||
109 | ''; |
|
|||
110 | }; |
|
|||
111 | in |
|
|||
112 |
'' |
|
60 | '' | |
113 | DIR=$(pwd) |
|
61 | DIR=$(pwd) | |
114 | cd $TMPDIR |
|
62 | cd $TMPDIR | |
115 |
|
63 | |||
116 | unpackFile ${src} |
|
64 | unpackFile ${src} | |
117 |
|
65 | |||
118 | # Make the base dir in which the target dependency resides first |
|
66 | # Make the base dir in which the target dependency resides first | |
119 | mkdir -p "$(dirname "$DIR/${packageName}")" |
|
67 | mkdir -p "$(dirname "$DIR/${packageName}")" | |
120 |
|
68 | |||
121 | if [ -f "${src}" ] |
|
69 | if [ -f "${src}" ] | |
122 | then |
|
70 | then | |
123 | # Figure out what directory has been unpacked |
|
71 | # Figure out what directory has been unpacked | |
124 |
packageDir=$(find . - |
|
72 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" | |
125 |
|
73 | |||
126 | # Restore write permissions to make building work |
|
74 | # Restore write permissions to make building work | |
|
75 | find "$packageDir" -type d -print0 | xargs -0 chmod u+x | |||
127 | chmod -R u+w "$packageDir" |
|
76 | chmod -R u+w "$packageDir" | |
128 |
|
77 | |||
129 | # Move the extracted tarball into the output folder |
|
78 | # Move the extracted tarball into the output folder | |
130 | mv "$packageDir" "$DIR/${packageName}" |
|
79 | mv "$packageDir" "$DIR/${packageName}" | |
131 | elif [ -d "${src}" ] |
|
80 | elif [ -d "${src}" ] | |
132 | then |
|
81 | then | |
|
82 | # Get a stripped name (without hash) of the source directory. | |||
|
83 | # On old nixpkgs it's already set internally. | |||
|
84 | if [ -z "$strippedName" ] | |||
|
85 | then | |||
|
86 | strippedName="$(stripHash ${src})" | |||
|
87 | fi | |||
|
88 | ||||
133 | # Restore write permissions to make building work |
|
89 | # Restore write permissions to make building work | |
134 |
|
|
90 | chmod -R u+w "$strippedName" | |
135 |
|
91 | |||
136 |
|
|
92 | # Move the extracted directory into the output folder | |
137 |
|
|
93 | mv "$strippedName" "$DIR/${packageName}" | |
138 | fi |
|
94 | fi | |
139 |
|
95 | |||
140 | # Unset the stripped name to not confuse the next unpack step |
|
96 | # Unset the stripped name to not confuse the next unpack step | |
141 | unset strippedName |
|
97 | unset strippedName | |
142 |
|
98 | |||
143 | # Some version specifiers (latest, unstable, URLs, file paths) force NPM to make remote connections or consult paths outside the Nix store. |
|
99 | # Include the dependencies of the package | |
144 | # The following JavaScript replaces these by * to prevent that |
|
|||
145 | cd "$DIR/${packageName}" |
|
100 | cd "$DIR/${packageName}" | |
146 | node ${fixImpureDependencies} |
|
|||
147 |
|
||||
148 | # Include the dependencies of the package |
|
|||
149 | ${includeDependencies { inherit dependencies; }} |
|
101 | ${includeDependencies { inherit dependencies; }} | |
150 | cd .. |
|
102 | cd .. | |
151 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} |
|
103 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |
152 | ''; |
|
104 | ''; | |
153 |
|
105 | |||
|
106 | pinpointDependencies = {dependencies, production}: | |||
|
107 | let | |||
|
108 | pinpointDependenciesFromPackageJSON = writeTextFile { | |||
|
109 | name = "pinpointDependencies.js"; | |||
|
110 | text = '' | |||
|
111 | var fs = require('fs'); | |||
|
112 | var path = require('path'); | |||
|
113 | ||||
|
114 | function resolveDependencyVersion(location, name) { | |||
|
115 | if(location == process.env['NIX_STORE']) { | |||
|
116 | return null; | |||
|
117 | } else { | |||
|
118 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); | |||
|
119 | ||||
|
120 | if(fs.existsSync(dependencyPackageJSON)) { | |||
|
121 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); | |||
|
122 | ||||
|
123 | if(dependencyPackageObj.name == name) { | |||
|
124 | return dependencyPackageObj.version; | |||
|
125 | } | |||
|
126 | } else { | |||
|
127 | return resolveDependencyVersion(path.resolve(location, ".."), name); | |||
|
128 | } | |||
|
129 | } | |||
|
130 | } | |||
|
131 | ||||
|
132 | function replaceDependencies(dependencies) { | |||
|
133 | if(typeof dependencies == "object" && dependencies !== null) { | |||
|
134 | for(var dependency in dependencies) { | |||
|
135 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); | |||
|
136 | ||||
|
137 | if(resolvedVersion === null) { | |||
|
138 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); | |||
|
139 | } else { | |||
|
140 | dependencies[dependency] = resolvedVersion; | |||
|
141 | } | |||
|
142 | } | |||
|
143 | } | |||
|
144 | } | |||
|
145 | ||||
|
146 | /* Read the package.json configuration */ | |||
|
147 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); | |||
|
148 | ||||
|
149 | /* Pinpoint all dependencies */ | |||
|
150 | replaceDependencies(packageObj.dependencies); | |||
|
151 | if(process.argv[2] == "development") { | |||
|
152 | replaceDependencies(packageObj.devDependencies); | |||
|
153 | } | |||
|
154 | replaceDependencies(packageObj.optionalDependencies); | |||
|
155 | ||||
|
156 | /* Write the fixed package.json file */ | |||
|
157 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); | |||
|
158 | ''; | |||
|
159 | }; | |||
|
160 | in | |||
|
161 | '' | |||
|
162 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} | |||
|
163 | ||||
|
164 | ${stdenv.lib.optionalString (dependencies != []) | |||
|
165 | '' | |||
|
166 | if [ -d node_modules ] | |||
|
167 | then | |||
|
168 | cd node_modules | |||
|
169 | ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} | |||
|
170 | cd .. | |||
|
171 | fi | |||
|
172 | ''} | |||
|
173 | ''; | |||
|
174 | ||||
|
175 | # Recursively traverses all dependencies of a package and pinpoints all | |||
|
176 | # dependencies in the package.json file to the versions that are actually | |||
|
177 | # being used. | |||
|
178 | ||||
|
179 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: | |||
|
180 | '' | |||
|
181 | if [ -d "${packageName}" ] | |||
|
182 | then | |||
|
183 | cd "${packageName}" | |||
|
184 | ${pinpointDependencies { inherit dependencies production; }} | |||
|
185 | cd .. | |||
|
186 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} | |||
|
187 | fi | |||
|
188 | ''; | |||
|
189 | ||||
154 | # Extract the Node.js source code which is used to compile packages with |
|
190 | # Extract the Node.js source code which is used to compile packages with | |
155 | # native bindings |
|
191 | # native bindings | |
156 | nodeSources = runCommand "node-sources" {} '' |
|
192 | nodeSources = runCommand "node-sources" {} '' | |
157 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} |
|
193 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} | |
158 | mv node-* $out |
|
194 | mv node-* $out | |
159 | ''; |
|
195 | ''; | |
160 |
|
196 | |||
|
197 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) | |||
|
198 | addIntegrityFieldsScript = writeTextFile { | |||
|
199 | name = "addintegrityfields.js"; | |||
|
200 | text = '' | |||
|
201 | var fs = require('fs'); | |||
|
202 | var path = require('path'); | |||
|
203 | ||||
|
204 | function augmentDependencies(baseDir, dependencies) { | |||
|
205 | for(var dependencyName in dependencies) { | |||
|
206 | var dependency = dependencies[dependencyName]; | |||
|
207 | ||||
|
208 | // Open package.json and augment metadata fields | |||
|
209 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); | |||
|
210 | var packageJSONPath = path.join(packageJSONDir, "package.json"); | |||
|
211 | ||||
|
212 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored | |||
|
213 | console.log("Adding metadata fields to: "+packageJSONPath); | |||
|
214 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); | |||
|
215 | ||||
|
216 | if(dependency.integrity) { | |||
|
217 | packageObj["_integrity"] = dependency.integrity; | |||
|
218 | } else { | |||
|
219 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. | |||
|
220 | } | |||
|
221 | ||||
|
222 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. | |||
|
223 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); | |||
|
224 | } | |||
|
225 | ||||
|
226 | // Augment transitive dependencies | |||
|
227 | if(dependency.dependencies !== undefined) { | |||
|
228 | augmentDependencies(packageJSONDir, dependency.dependencies); | |||
|
229 | } | |||
|
230 | } | |||
|
231 | } | |||
|
232 | ||||
|
233 | if(fs.existsSync("./package-lock.json")) { | |||
|
234 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); | |||
|
235 | ||||
|
236 | if(packageLock.lockfileVersion !== 1) { | |||
|
237 | process.stderr.write("Sorry, I only understand lock file version 1!\n"); | |||
|
238 | process.exit(1); | |||
|
239 | } | |||
|
240 | ||||
|
241 | if(packageLock.dependencies !== undefined) { | |||
|
242 | augmentDependencies(".", packageLock.dependencies); | |||
|
243 | } | |||
|
244 | } | |||
|
245 | ''; | |||
|
246 | }; | |||
|
247 | ||||
|
248 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes | |||
|
249 | reconstructPackageLock = writeTextFile { | |||
|
250 | name = "addintegrityfields.js"; | |||
|
251 | text = '' | |||
|
252 | var fs = require('fs'); | |||
|
253 | var path = require('path'); | |||
|
254 | ||||
|
255 | var packageObj = JSON.parse(fs.readFileSync("package.json")); | |||
|
256 | ||||
|
257 | var lockObj = { | |||
|
258 | name: packageObj.name, | |||
|
259 | version: packageObj.version, | |||
|
260 | lockfileVersion: 1, | |||
|
261 | requires: true, | |||
|
262 | dependencies: {} | |||
|
263 | }; | |||
|
264 | ||||
|
265 | function augmentPackageJSON(filePath, dependencies) { | |||
|
266 | var packageJSON = path.join(filePath, "package.json"); | |||
|
267 | if(fs.existsSync(packageJSON)) { | |||
|
268 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); | |||
|
269 | dependencies[packageObj.name] = { | |||
|
270 | version: packageObj.version, | |||
|
271 | integrity: "sha1-000000000000000000000000000=", | |||
|
272 | dependencies: {} | |||
|
273 | }; | |||
|
274 | processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); | |||
|
275 | } | |||
|
276 | } | |||
|
277 | ||||
|
278 | function processDependencies(dir, dependencies) { | |||
|
279 | if(fs.existsSync(dir)) { | |||
|
280 | var files = fs.readdirSync(dir); | |||
|
281 | ||||
|
282 | files.forEach(function(entry) { | |||
|
283 | var filePath = path.join(dir, entry); | |||
|
284 | var stats = fs.statSync(filePath); | |||
|
285 | ||||
|
286 | if(stats.isDirectory()) { | |||
|
287 | if(entry.substr(0, 1) == "@") { | |||
|
288 | // When we encounter a namespace folder, augment all packages belonging to the scope | |||
|
289 | var pkgFiles = fs.readdirSync(filePath); | |||
|
290 | ||||
|
291 | pkgFiles.forEach(function(entry) { | |||
|
292 | if(stats.isDirectory()) { | |||
|
293 | var pkgFilePath = path.join(filePath, entry); | |||
|
294 | augmentPackageJSON(pkgFilePath, dependencies); | |||
|
295 | } | |||
|
296 | }); | |||
|
297 | } else { | |||
|
298 | augmentPackageJSON(filePath, dependencies); | |||
|
299 | } | |||
|
300 | } | |||
|
301 | }); | |||
|
302 | } | |||
|
303 | } | |||
|
304 | ||||
|
305 | processDependencies("node_modules", lockObj.dependencies); | |||
|
306 | ||||
|
307 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); | |||
|
308 | ''; | |||
|
309 | }; | |||
|
310 | ||||
161 | # Builds and composes an NPM package including all its dependencies |
|
311 | # Builds and composes an NPM package including all its dependencies | |
162 | buildNodePackage = { name, packageName, version, dependencies ? [], production ? true, npmFlags ? "", dontNpmInstall ? false, preRebuild ? "", ... }@args: |
|
312 | buildNodePackage = { name, packageName, version, dependencies ? [], production ? true, npmFlags ? "", dontNpmInstall ? false, bypassCache ? false, preRebuild ? "", ... }@args: | |
163 |
|
313 | |||
|
314 | let | |||
|
315 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; | |||
|
316 | in | |||
164 | stdenv.lib.makeOverridable stdenv.mkDerivation (builtins.removeAttrs args [ "dependencies" ] // { |
|
317 | stdenv.lib.makeOverridable stdenv.mkDerivation (builtins.removeAttrs args [ "dependencies" ] // { | |
165 | name = "node-${name}-${version}"; |
|
318 | name = "node-${name}-${version}"; | |
166 | buildInputs = [ tarWrapper python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ args.buildInputs or []; |
|
319 | buildInputs = [ tarWrapper python nodejs ] | |
|
320 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux | |||
|
321 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool | |||
|
322 | ++ args.buildInputs or []; | |||
167 | dontStrip = args.dontStrip or true; # Striping may fail a build for some package deployments |
|
323 | dontStrip = args.dontStrip or true; # Striping may fail a build for some package deployments | |
168 |
|
324 | |||
169 | inherit dontNpmInstall preRebuild; |
|
325 | inherit dontNpmInstall preRebuild; | |
170 |
|
326 | |||
171 | unpackPhase = args.unpackPhase or "true"; |
|
327 | unpackPhase = args.unpackPhase or "true"; | |
172 |
|
328 | |||
173 | buildPhase = args.buildPhase or "true"; |
|
329 | buildPhase = args.buildPhase or "true"; | |
174 |
|
330 | |||
175 | compositionScript = composePackage args; |
|
331 | compositionScript = composePackage args; | |
176 | passAsFile = [ "compositionScript" ]; |
|
332 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; | |
177 |
|
333 | |||
|
334 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; | |||
|
335 | ||||
178 | installPhase = args.installPhase or '' |
|
336 | installPhase = args.installPhase or '' | |
179 | # Create and enter a root node_modules/ folder |
|
337 | # Create and enter a root node_modules/ folder | |
180 | mkdir -p $out/lib/node_modules |
|
338 | mkdir -p $out/lib/node_modules | |
181 | cd $out/lib/node_modules |
|
339 | cd $out/lib/node_modules | |
182 |
|
340 | |||
183 | # Compose the package and all its dependencies |
|
341 | # Compose the package and all its dependencies | |
184 | source $compositionScriptPath |
|
342 | source $compositionScriptPath | |
185 |
|
343 | |||
|
344 | # Pinpoint the versions of all dependencies to the ones that are actually being used | |||
|
345 | echo "pinpointing versions of dependencies..." | |||
|
346 | source $pinpointDependenciesScriptPath | |||
|
347 | ||||
186 | # Patch the shebangs of the bundled modules to prevent them from |
|
348 | # Patch the shebangs of the bundled modules to prevent them from | |
187 | # calling executables outside the Nix store as much as possible |
|
   # calling executables outside the Nix store as much as possible
   patchShebangs .

   # Deploy the Node.js package by running npm install. Since the
   # dependencies have been provided already by ourselves, it should not
   # attempt to install them again, which is good, because we want to make
@@ -196,23 +358,37 @@ let
   #
   # The other responsibilities of NPM are kept -- version checks, build
   # steps, postprocessing etc.

   export HOME=$TMPDIR
   cd "${packageName}"
   runHook preRebuild
-  npm --registry http://www.example.com --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
+
+  ${stdenv.lib.optionalString bypassCache ''
+    if [ ! -f package-lock.json ]
+    then
+        echo "No package-lock.json file found, reconstructing..."
+        node ${reconstructPackageLock}
+    fi
+
+    node ${addIntegrityFieldsScript}
+  ''}
+
+  npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
+
   if [ "$dontNpmInstall" != "1" ]
   then
-      npm --registry http://www.example.com --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
+      # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
+      rm -f npm-shrinkwrap.json
+
+      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
   fi

   # Create symlink to the deployed executable folder, if applicable
   if [ -d "$out/lib/node_modules/.bin" ]
   then
       ln -s $out/lib/node_modules/.bin $out/bin
   fi

   # Create symlinks to the deployed manual page folders, if applicable
   if [ -d "$out/lib/node_modules/${packageName}/man" ]
   then
@@ -226,51 +402,86 @@ let
           done
         done
       fi
+
+      # Run post install hook, if provided
+      runHook postInstall
     '';
   });

   # Builds a development shell
-  buildNodeShell = { name, packageName, version, src, dependencies ? [], production ? true, npmFlags ? "", dontNpmInstall ? false, ... }@args:
+  buildNodeShell = { name, packageName, version, src, dependencies ? [], production ? true, npmFlags ? "", dontNpmInstall ? false, bypassCache ? false, ... }@args:
     let
+      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
+
       nodeDependencies = stdenv.mkDerivation {
        name = "node-dependencies-${name}-${version}";

-        buildInputs = [ tarWrapper python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ args.buildInputs or [];
+        buildInputs = [ tarWrapper python nodejs ]
+          ++ stdenv.lib.optional (stdenv.isLinux) utillinux
+          ++ stdenv.lib.optional (stdenv.isDarwin) libtool
+          ++ args.buildInputs or [];

        includeScript = includeDependencies { inherit dependencies; };
-        passAsFile = [ "includeScript" ];
+        pinpointDependenciesScript = pinpointDependenciesOfPackage args;
+
+        passAsFile = [ "includeScript" "pinpointDependenciesScript" ];

        buildCommand = ''
-          mkdir -p $out/
-          cd $out/
+          mkdir -p $out/${packageName}
+          cd $out/${packageName}
+
           source $includeScriptPath

           # Create fake package.json to make the npm commands work properly
-          c
-          {
-            "name": "${packageName}",
-            "version": "${version}"
-          }
-          EOF
-
+          cp ${src}/package.json .
+          chmod 644 package.json
+          ${stdenv.lib.optionalString bypassCache ''
+            if [ -f ${src}/package-lock.json ]
+            then
+              cp ${src}/package-lock.json .
+            fi
+          ''}
+
+          # Pinpoint the versions of all dependencies to the ones that are actually being used
+          echo "pinpointing versions of dependencies..."
+          cd ..
+          source $pinpointDependenciesScriptPath
+          cd ${packageName}
+
           # Patch the shebangs of the bundled modules to prevent them from
           # calling executables outside the Nix store as much as possible
           patchShebangs .

-          export HOME=$
-          npm --registry http://www.example.com --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
-
-          ${stdenv.lib.optionalString (!dontNpmInstall) ''
-            npm --registry http://www.example.com --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
-          ''}
-
+          export HOME=$PWD
+
+          ${stdenv.lib.optionalString bypassCache ''
+            if [ ! -f package-lock.json ]
+            then
+              echo "No package-lock.json file found, reconstructing..."
+              node ${reconstructPackageLock}
+            fi
+
+            node ${addIntegrityFieldsScript}
+          ''}
+
+          npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
+
+          ${stdenv.lib.optionalString (!dontNpmInstall) ''
+            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
+            rm -f npm-shrinkwrap.json
+
+            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
+          ''}
+
+          cd ..
+          mv ${packageName} lib
           ln -s $out/lib/node_modules/.bin $out/bin
       '';
     };
 in
 stdenv.lib.makeOverridable stdenv.mkDerivation {
   name = "node-shell-${name}-${version}";

   buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ args.buildInputs or [];
   buildCommand = ''
     mkdir -p $out/bin
@@ -281,7 +492,7 @@ let
     EOF
     chmod +x $out/bin/shell
   '';

   # Provide the dependencies in a development shell through the NODE_PATH environment variable
   inherit nodeDependencies;
   shellHook = stdenv.lib.optionalString (dependencies != []) ''
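
All of the hunks above serve one change in the node2nix-generated builder: instead of pointing npm at the fake registry http://www.example.com, the builders now accept a `bypassCache` flag, reconstruct `package-lock.json` and its integrity fields when needed, and then run npm with `--offline`. A minimal sketch of how the extended `buildNodeShell` could be invoked, using only the argument names visible in the new signature; `nodeEnv` is assumed to stand for the set defined by this file, and all concrete values are placeholders:

.. code:: nix

    # Sketch only: `nodeEnv` is the builder set defined above; values are illustrative.
    { nodeEnv }:

    nodeEnv.buildNodeShell {
      name = "example";            # hypothetical package
      packageName = "example";
      version = "0.0.1";
      src = ./.;                   # directory containing package.json
      dependencies = [];           # normally generated by node2nix
      production = true;
      npmFlags = "";
      dontNpmInstall = false;
      bypassCache = true;          # new flag: use --offline and the (reconstructed)
                                   # package-lock.json instead of the fake registry
    }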
NO CONTENT: modified file (the diff is too big and its content was truncated).
NO CONTENT: file renamed from pkgs/patch-beaker-lock-func-debug.diff to pkgs/patch_beaker/patch-beaker-lock-func-debug.diff
NO CONTENT: file renamed from pkgs/patch-beaker-metadata-reuse.diff to pkgs/patch_beaker/patch-beaker-metadata-reuse.diff
@@ -4,10 +4,13 @@
 # python-packages.nix. The main objective is to add needed dependencies of C
 # libraries and tweak the build instructions where needed.

-{ pkgs, basePythonPackages }:
+{ pkgs
+, basePythonPackages
+}:

 let
   sed = "sed -i";
+
   localLicenses = {
     repoze = {
       fullName = "Repoze License";
@@ -19,33 +22,33 @@ in

 self: super: {

-  appenlight-client = super.appenlight-client.override (attrs: {
+  "appenlight-client" = super."appenlight-client".override (attrs: {
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   });

-  beaker = super.beaker.override (attrs: {
+  "beaker" = super."beaker".override (attrs: {
     patches = [
-      ./patch-beaker-lock-func-debug.diff
-      ./patch-beaker-metadata-reuse.diff
+      ./patch_beaker/patch-beaker-lock-func-debug.diff
+      ./patch_beaker/patch-beaker-metadata-reuse.diff
     ];
   });

-  future = super.future.override (attrs: {
+  "future" = super."future".override (attrs: {
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   });

-  testpath = super.testpath.override (attrs: {
+  "testpath" = super."testpath".override (attrs: {
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   });

-  gnureadline = super.gnureadline.override (attrs: {
-    buildInputs =
+  "gnureadline" = super."gnureadline".override (attrs: {
+    buildInputs = [
       pkgs.ncurses
     ];
     patchPhase = ''
@@ -53,56 +56,50 @@ self: super: {
     '';
   });

-  gunicorn = super.gunicorn.override (attrs: {
-    propagatedBuildInputs =
+  "gunicorn" = super."gunicorn".override (attrs: {
+    propagatedBuildInputs = [
       # johbo: futures is needed as long as we are on Python 2, otherwise
       # gunicorn explodes if used with multiple threads per worker.
-      self.futures
+      self."futures"
     ];
   });

-  nbconvert = super.nbconvert.override (attrs: {
+  "nbconvert" = super."nbconvert".override (attrs: {
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
       # marcink: plug in jupyter-client for notebook rendering
-      self.jupyter-client
+      self."jupyter-client"
     ];
   });

-  ipython = super.ipython.override (attrs: {
+  "ipython" = super."ipython".override (attrs: {
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
-      self.gnureadline
+      self."gnureadline"
     ];
   });

-  lxml = super.lxml.override (attrs: {
-    # johbo: On 16.09 we need this to compile on darwin, otherwise compilation
-    # fails on Darwin.
-    hardeningDisable = if pkgs.stdenv.isDarwin then [ "format" ] else null;
-    buildInputs = with self; [
+  "lxml" = super."lxml".override (attrs: {
+    buildInputs = [
       pkgs.libxml2
       pkgs.libxslt
     ];
+    propagatedBuildInputs = [
+      # Needed, so that "setup.py bdist_wheel" does work
+      self."wheel"
+    ];
   });

-  mysql-python = super.mysql-python.override (attrs: {
-    buildInputs =
+  "mysql-python" = super."mysql-python".override (attrs: {
+    buildInputs = [
       pkgs.openssl
     ];
-    propagatedBuildInputs =
+    propagatedBuildInputs = [
       pkgs.libmysql
       pkgs.zlib
     ];
   });

-    buildInputs = attrs.buildInputs ++
-      pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.darwin.IOKit;
-  });
-
-  psycopg2 = super.psycopg2.override (attrs: {
-    buildInputs = attrs.buildInputs ++
-      pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.openssl;
-    propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
+  "psycopg2" = super."psycopg2".override (attrs: {
+    propagatedBuildInputs = [
       pkgs.postgresql
     ];
     meta = {
@@ -110,8 +107,8 @@ self: super: {
     };
   });

-  pycurl = super.pycurl.override (attrs: {
-    propagatedBuildInputs =
+  "pycurl" = super."pycurl".override (attrs: {
+    propagatedBuildInputs = [
       pkgs.curl
       pkgs.openssl
     ];
@@ -120,30 +117,23 @@ self: super: {
     export PYCURL_SSL_LIBRARY=openssl
     '';
     meta = {
-      # TODO: It is LGPL and MIT
       license = pkgs.lib.licenses.mit;
     };
   });

-  pyramid = super.pyramid.override (attrs: {
-    postFixup = ''
-      wrapPythonPrograms
-      # TODO: johbo: "wrapPython" adds this magic line which
-      # confuses pserve.
-      ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
-    '';
+  "pyramid" = super."pyramid".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  pyramid-debugtoolbar = super.pyramid-debugtoolbar.override (attrs: {
+  "pyramid-debugtoolbar" = super."pyramid-debugtoolbar".override (attrs: {
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal localLicenses.repoze ];
     };
   });

-  pysqlite = super.pysqlite.override (attrs: {
+  "pysqlite" = super."pysqlite".override (attrs: {
     propagatedBuildInputs = [
       pkgs.sqlite
     ];
@@ -152,41 +142,39 @@ self: super: {
     };
   });

-  pytest-runner = super.pytest-runner.override (attrs: {
+  "pytest-runner" = super."pytest-runner".override (attrs: {
     propagatedBuildInputs = [
-      self.setuptools-scm
+      self."setuptools-scm"
+    ];
+  });
+
+  "python-ldap" = super."python-ldap".override (attrs: {
+    propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
+      pkgs.openldap
+      pkgs.cyrus_sasl
+      pkgs.openssl
     ];
   });

-  python-
-    propagatedBuildInputs =
-      pkgs.
-      pkgs.openldap
-      pkgs.openssl
+  "python-pam" = super."python-pam".override (attrs: {
+    propagatedBuildInputs = [
+      pkgs.pam
     ];
-    # TODO: johbo: Remove the "or" once we drop 16.03 support.
-    NIX_CFLAGS_COMPILE = "-I${pkgs.cyrus_sasl.dev or pkgs.cyrus_sasl}/include/sasl";
+    # TODO: johbo: Check if this can be avoided, or transform into
+    # a real patch
+    patchPhase = ''
+      substituteInPlace pam.py \
+        --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
+    '';
+  });
+
+  "pyzmq" = super."pyzmq".override (attrs: {
+    buildInputs = [
+      pkgs.czmq
+    ];
   });

-    let
-      includeLibPam = pkgs.stdenv.isLinux;
-    in {
-      # TODO: johbo: Move the option up into the default.nix, we should
-      # include python-pam only on supported platforms.
-      propagatedBuildInputs = attrs.propagatedBuildInputs ++
-        pkgs.lib.optional includeLibPam [
-          pkgs.pam
-        ];
-      # TODO: johbo: Check if this can be avoided, or transform into
-      # a real patch
-      patchPhase = pkgs.lib.optionals includeLibPam ''
-        substituteInPlace pam.py \
-          --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
-      '';
-    });
-
-  urlobject = super.urlobject.override (attrs: {
+  "urlobject" = super."urlobject".override (attrs: {
     meta = {
       license = {
         spdxId = "Unlicense";
@@ -196,56 +184,56 @@ self: super: {
       };
     };
   });

-  docutils = super.docutils.override (attrs: {
+  "docutils" = super."docutils".override (attrs: {
     meta = {
       license = pkgs.lib.licenses.bsd2;
     };
   });

-  colander = super.colander.override (attrs: {
+  "colander" = super."colander".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  pyramid-beaker = super.pyramid-beaker.override (attrs: {
+  "pyramid-beaker" = super."pyramid-beaker".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  pyramid-mako = super.pyramid-mako.override (attrs: {
+  "pyramid-mako" = super."pyramid-mako".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  repoze.lru = super.repoze.lru.override (attrs: {
+  "repoze.lru" = super."repoze.lru".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  python-editor = super.python-editor.override (attrs: {
+  "python-editor" = super."python-editor".override (attrs: {
     meta = {
       license = pkgs.lib.licenses.asl20;
     };
   });

-  translationstring = super.translationstring.override (attrs: {
+  "translationstring" = super."translationstring".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  venusian = super.venusian.override (attrs: {
+  "venusian" = super."venusian".override (attrs: {
     meta = {
       license = localLicenses.repoze;
     };
   });

-  # Avoid that setuptools is replaced, this leads to trouble
-  setuptools = basePythonPackages.setuptools;
+  # Avoid that base packages screw up the build process
+  inherit (basePythonPackages)
+    setuptools;

 }
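
Every entry in this overrides file now follows the same shape: the attribute name is quoted, so packages whose names contain dots or dashes (repoze.lru, python-ldap, mysql-python) are addressed exactly like the rest, and each override only adds what the generated python-packages.nix cannot know on its own, typically C library inputs, patches, or license metadata. A further entry would look like the sketch below; the package name and libraries are illustrative and not part of the actual file:

.. code:: nix

    # Hypothetical extra entry, following the override pattern used above.
    "some-c-binding" = super."some-c-binding".override (attrs: {
      buildInputs = [
        pkgs.openssl                  # C library needed at build time
      ];
      propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
        pkgs.zlib                     # and one needed at run time
      ];
      meta = {
        license = [ pkgs.lib.licenses.mit ];
      };
    });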
NO CONTENT: modified file (the diff is too big and its content was truncated).
@@ -1,232 +1,16 @@
-#
-# About
-# =====
-#
-# This file defines jobs for our CI system and the attribute "build" is used
-# as the input for packaging.
-#
-#
-# CI details
-# ==========
-#
-# This file defines an attribute set of derivations. Each of these attributes is
-# then used in our CI system as one job to run. This way we keep the
-# configuration for the CI jobs as well under version control.
-#
-# Run CI jobs locally
-# -------------------
-#
-# Since it is all based on normal Nix derivations, the jobs can be tested
-# locally with a run of "nix-build" like the following example:
-#
-# nix-build release.nix -A test-api -I vcsserver=~/rhodecode-vcsserver
-#
-# Note: Replace "~/rhodecode-vcsserver" with a path where a clone of the
-# vcsserver resides.
+# This file defines how to "build" for packaging.

 { pkgs ? import <nixpkgs> {}
 , doCheck ? true
 }:

 let
-
-  inherit (pkgs)
-    stdenv
-    system;
-
-  testing = import <nixpkgs/nixos/lib/testing.nix> {
-    inherit system;
-  };
-
-  runInMachine = testing.runInMachine;
-
-  sphinx = import ./docs/default.nix {};
-
-  mkDocs = kind: stdenv.mkDerivation {
-    name = kind;
-    srcs = [
-      (./. + (builtins.toPath "/${kind}"))
-      (builtins.filterSource
-        (path: type: baseNameOf path == "VERSION")
-        ./rhodecode)
-    ];
-    sourceRoot = kind;
-    buildInputs = [ sphinx ];
-    configurePhase = null;
-    buildPhase = ''
-      make SPHINXBUILD=sphinx-build html
-    '';
-    installPhase = ''
-      mkdir -p $out
-      mv _build/html $out/
-
-      mkdir -p $out/nix-support
-      echo "doc manual $out/html index.html" >> \
-        "$out/nix-support/hydra-build-products"
-    '';
-  };
-
-  enterprise = import ./default.nix {
+  enterprise_ce = import ./default.nix {
     inherit
+      doCheck
       pkgs;
-
-    # TODO: for quick local testing
-    doCheck = false;
   };

-  test-cfg = stdenv.mkDerivation {
-    name = "test-cfg";
-    unpackPhase = "true";
-    buildInputs = [
-      enterprise.src
-    ];
-    installPhase = ''
-      mkdir -p $out/etc
-      cp ${enterprise.src}/test.ini $out/etc/enterprise.ini
-      # TODO: johbo: Needed, so that the login works, this causes
-      # probably some side effects
-      substituteInPlace $out/etc/enterprise.ini --replace "is_test = True" ""
-
-      # Gevent configuration
-      cp $out/etc/enterprise.ini $out/etc/enterprise-gevent.ini;
-      cat >> $out/etc/enterprise-gevent.ini <<EOF
-
-      [server:main]
-      use = egg:gunicorn#main
-      worker_class = gevent
-      EOF
-
-      cp ${enterprise.src}/vcsserver/test.ini $out/etc/vcsserver.ini
-    '';
-  };
-
-  ac-test-drv = import ./acceptance_tests {
-    withExternals = false;
-  };
-
-  # TODO: johbo: Currently abusing buildPythonPackage to make the
-  # needed environment for the ac-test tools.
-  mkAcTests = {
-    # Path to an INI file which will be used to run Enterprise.
-    #
-    # Intended usage is to provide different configuration files to
-    # run the tests against a different configuration.
-    enterpriseCfg ? "${test-cfg}/etc/enterprise.ini"
-
-    # Path to an INI file which will be used to run the VCSServer.
-  , vcsserverCfg ? "${test-cfg}/etc/vcsserver.ini"
-  }: pkgs.pythonPackages.buildPythonPackage {
-    name = "enterprise-ac-tests";
-    src = ./acceptance_tests;
-
-    buildInputs = with pkgs; [
-      curl
-      enterprise
-      ac-test-drv
-    ];
-
-    buildPhase = ''
-      cp ${enterpriseCfg} enterprise.ini
-
-      echo "Creating a fake home directory"
-      mkdir fake-home
-      export HOME=$PWD/fake-home
-
-      echo "Creating a repository directory"
-      mkdir repos
-
-      echo "Preparing the database"
-      rc-setup-app \
-        --user=admin \
-        --email=admin@example.com \
-        --password=secret \
-        --api-key=9999999999999999999999999999999999999999 \
-        --force-yes \
-        --repos=$PWD/repos \
-        enterprise.ini > /dev/null
-
-      echo "Starting rc-server"
-      vcsserver --config ${vcsserverCfg} >vcsserver.log 2>&1 &
-      rc-server enterprise.ini >rc-server.log 2>&1 &
-
-      while ! curl -f -s http://localhost:5000 > /dev/null
-      do
-        echo "Waiting for server to be ready..."
-        sleep 3
-      done
-      echo "Webserver is ready."
-
-      echo "Starting the test run"
-      py.test -c example.ini -vs --maxfail=5 tests
-
-      echo "Kill rc-server"
-      kill %2
-      kill %1
-    '';
-
-    # TODO: johbo: Use the install phase again once the normal mkDerivation
-    # can be used again.
-    postInstall = ''
-      mkdir -p $out
-      cp enterprise.ini $out
-      cp ${vcsserverCfg} $out/vcsserver.ini
-      cp rc-server.log $out
-      cp vcsserver.log $out
-
-      mkdir -p $out/nix-support
-      echo "report config $out enterprise.ini" >> $out/nix-support/hydra-build-products
-      echo "report config $out vcsserver.ini" >> $out/nix-support/hydra-build-products
-      echo "report rc-server $out rc-server.log" >> $out/nix-support/hydra-build-products
-      echo "report vcsserver $out vcsserver.log" >> $out/nix-support/hydra-build-products
-    '';
-  };
-
-  vcsserver = import <vcsserver> {
-    inherit pkgs;
-
-    # TODO: johbo: Think of a more elegant solution to this problem
-    pythonExternalOverrides = self: super: (enterprise.myPythonPackagesUnfix self);
-  };
-
-  runTests = optionString: (enterprise.override (attrs: {
-    doCheck = true;
-    name = "test-run";
-    buildInputs = attrs.buildInputs ++ [
-      vcsserver
-    ];
-    checkPhase = ''
-      py.test ${optionString} -vv -ra
-    '';
-    buildPhase = attrs.shellHook;
-    installPhase = ''
-      echo "Intentionally not installing anything"
-    '';
-    meta.description = "Enterprise test run ${optionString}";
-  }));
-
-  jobs = {
-
-    build = enterprise;
-
-    # johbo: Currently this is simply running the tests against the sources. Nicer
-    # would be to run xdist and against the installed application, so that we also
-    # cover the impact of installing the application.
-    test-api = runTests "rhodecode/api";
-    test-functional = runTests "rhodecode/tests/functional";
-    test-rest = runTests "rhodecode/tests --ignore=rhodecode/tests/functional";
-    test-full = runTests "rhodecode";
-
-    docs = mkDocs "docs";
-
-    aggregate = pkgs.releaseTools.aggregate {
-      name = "aggregated-jobs";
-      constituents = [
-        jobs.build
-        jobs.test-api
-        jobs.test-rest
-        jobs.docs
-      ];
-    };
-  };
-
-in jobs
+in {
+  build = enterprise_ce;
+}
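
The slimmed-down release.nix no longer defines CI jobs; it only wires default.nix into a single `build` attribute for packaging. Assuming standard Nix tooling, that attribute is what a packaging run would select, for example with `nix-build release.nix -A build`; expressed as a Nix value it is simply the sketch below (the `doCheck` value shown is only an example, its default is `true`):

.. code:: nix

    # Selecting the packaging build from the new release.nix.
    (import ./release.nix { doCheck = false; }).build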
@@ -1,7 +1,6 @@
-## core
-setuptools==39.2.0
-setuptools-scm==1.15.6
+## dependencies

+setuptools-scm==2.1.0
 amqp==2.3.1
 authomatic==0.1.0.post1
 babel==1.3
@@ -59,7 +58,7 @@ pyramid-mako==1.0.2
 pyramid==1.9.2
 pysqlite==2.8.3
 python-dateutil
-python-ldap==
+python-ldap==3.1.0
 python-memcached==1.59
 python-pam==1.8.2
 pytz==2018.4
@@ -310,11 +310,7 @@
   },
   "python2.7-requests-2.9.1": {
     "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
   },
-  "python2.7-setuptools-19.4": {
-    "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
-    "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
-  },
   "python2.7-setuptools-scm-1.15.6": {
     "MIT License": "http://spdx.org/licenses/MIT"
   },
@@ -1,6 +1,9 @@
+# This file contains the adjustments which are desired for a development
+# environment.
+
 { pkgs ? (import <nixpkgs> {})
 , pythonPackages ? "python27Packages"
-, doCheck ?
+, doCheck ? false
 , sourcesOverrides ? {}
 , doDevelopInstall ? true
 }:
@@ -10,7 +13,10 @@ let
   sources = (pkgs.config.rc.sources or {}) // sourcesOverrides;

   enterprise-ce = import ./default.nix {
-    inherit pkgs pythonPackages doCheck;
+    inherit
+      pkgs
+      pythonPackages
+      doCheck;
   };

   ce-pythonPackages = enterprise-ce.pythonPackages;
@@ -22,14 +28,17 @@ let
     let
       path = pkgs.lib.attrByPath [attributeName] null sources;
       doIt = doDevelopInstall && path != null;
+
     in
-      pkgs.lib.optionalString doIt (
-        builtins.trace "Develop install of ${attributeName} from ${path}" ''
-          echo "Develop install of '${attributeName}' from '${path}' [BEGIN]"
+      # do develop installation with empty hosts to skip any package duplicates to
+      # be replaced. This only pushes the package to be locally available
+      pkgs.lib.optionalString doIt (''
+        echo "[BEGIN] Develop install of '${attributeName}' from '${path}'"
         pushd ${path}
         python setup.py develop --prefix $tmp_path --allow-hosts ""
         popd
-        echo "Develop install of '${attributeName}' from '${path}'
+        echo "[DONE] Develop install of '${attributeName}' from '${path}'"
+        echo ""
       '');

   # This method looks up a path from `pkgs.config.rc.sources` and imports the
@@ -38,13 +47,16 @@ let
   optionalDevelopInstallBuildInputs = attributeName:
     let
       path = pkgs.lib.attrByPath [attributeName] null sources;
+      doIt = doDevelopInstall && path != null && pkgs.lib.pathExists "${nixFile}";
       nixFile = "${path}/default.nix";
-      doIt = doDevelopInstall && path != null && pkgs.lib.pathExists "${nixFile}";
+
       derivate = import "${nixFile}" {
         inherit doCheck pkgs pythonPackages;
       };
     in
-      pkgs.lib.lists.optionals doIt
+      pkgs.lib.lists.optionals doIt (
+        derivate.propagatedBuildInputs
+      );

   developInstalls = [ "rhodecode-vcsserver" ];

@@ -53,31 +65,52 @@ in enterprise-ce.override (attrs: {
   # make development a little bit more convenient.
   src = null;

+  # Add dependencies which are useful for the development environment.
   buildInputs =
     attrs.buildInputs ++
-    pkgs.lib.lists.concatMap optionalDevelopInstallBuildInputs developInstalls ++
     (with ce-pythonPackages; [
       bumpversion
       invoke
       ipdb
     ]);

-  # Somewhat snappier setup of the development environment
-  # TODO: think of supporting a stable path again, so that multiple shells
-  # can share it.
-  preShellHook = enterprise-ce.linkNodeAndBowerPackages + ''
+  # place to inject some required libs from develop installs
+  propagatedBuildInputs =
+    attrs.propagatedBuildInputs ++
+    pkgs.lib.lists.concatMap optionalDevelopInstallBuildInputs developInstalls;
+
+
+  # Make sure we execute both hooks
+  shellHook = ''
+    runHook preShellHook
+    runHook postShellHook
+  '';
+
+  preShellHook = ''
+    echo "Entering CE-Shell"
+
     # Custom prompt to distinguish from other dev envs.
     export PS1="\n\[\033[1;32m\][CE-shell:\w]$\[\033[0m\] "

+    echo "Building frontend assets"
+    ${enterprise-ce.linkNodeAndBowerPackages}
+
     # Setup a temporary directory.
     tmp_path=$(mktemp -d)
     export PATH="$tmp_path/bin:$PATH"
     export PYTHONPATH="$tmp_path/${ce-pythonPackages.python.sitePackages}:$PYTHONPATH"
     mkdir -p $tmp_path/${ce-pythonPackages.python.sitePackages}

     # Develop installation
+    echo "[BEGIN]: develop install of rhodecode-enterprise-ce"
     python setup.py develop --prefix $tmp_path --allow-hosts ""
-    echo "Additional develop installs"
-  '' + pkgs.lib.strings.concatMapStrings optionalDevelopInstall developInstalls;
+  '';
+
+  postShellHook = ''
+    echo "** Additional develop installs **"
+  '' +
+  pkgs.lib.strings.concatMapStrings optionalDevelopInstall developInstalls
+  + ''
+  '';

 })
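
The reworked shell.nix splits the hook into a preShellHook (prompt, frontend assets, develop install of enterprise-ce itself) and a postShellHook (develop installs of the entries in `developInstalls`), and uses propagatedBuildInputs to pull in the dependencies of those locally installed sources. A develop install of the vcsserver is picked up either from `pkgs.config.rc.sources` or from the `sourcesOverrides` argument; a sketch of the latter, with an illustrative local path:

.. code:: nix

    # Sketch: entering the development shell with a local vcsserver checkout.
    # The path is illustrative; the attribute name matches developInstalls above.
    import ./shell.nix {
      doDevelopInstall = true;
      sourcesOverrides = {
        "rhodecode-vcsserver" = "/home/user/rhodecode-vcsserver";
      };
    }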
NO CONTENT: file was removed
NO CONTENT: file was removed