release: Merge default into stable for release preparation
milka
r4622:a884bc27 merge stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,54 b''
|RCE| 4.24.0 |RNS|
------------------

Release Date
^^^^^^^^^^^^

- 2021-01-10


New Features
^^^^^^^^^^^^

- Artifacts: expose additional headers and a content-disposition header for artifact downloads, exposing the real file name.
- Token access: allow passing the auth token in request headers, not only in the GET/URL parameters.
- File-store: added a stream upload endpoint; it allows uploading gigabytes of data into the artifact store efficiently and can be used for backups etc.
- Pull requests: expose commit versions in the pull-request commit list.

General
^^^^^^^

- Deps: bumped redis to 3.5.3
- rcextensions: improved examples
- Setup: added optional parameters to apply a default license, or to skip re-creation of the database at install.
- Docs: updated headers for NGINX
- Beaker cache: removed the no-longer-used beaker cache init


Security
^^^^^^^^



Performance
^^^^^^^^^^^

- Core: speed up cache loading on application startup.
- Core: allow loading all auth plugins in one place for CE/EE code.
- Application: stop using config.scan(), and replace all @add_view decorators with explicit add_view calls for a faster application start.


Fixes
^^^^^

- Svn: don't print exceptions in case of safe calls.
- Vcsserver: use safer maxfd reporting; some Linux systems have problems with this.
- Hooks-daemon: fixed a problem with the hooks value being lost from the .ini file.
- Exceptions: fixed truncated exception text.


Upgrade notes
^^^^^^^^^^^^^

- Scheduled release 4.24.0
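The "Token access" item above means the auth token no longer has to travel in the URL. A minimal sketch of the difference against a placeholder server and repository follows; the ``auth_token`` GET parameter is the long-standing mechanism, while the header name used for the new header-based variant is only an assumption here, so check the 4.24.0 documentation for the exact header::

    # token passed as a GET/URL parameter (previously the only option)
    curl "https://rhodecode.example.com/myrepo/raw/tip/README.rst?auth_token=<TOKEN>"

    # token passed in a request header (new in this release; header name is assumed)
    curl -H "X-Rc-Auth-Token: <TOKEN>" "https://rhodecode.example.com/myrepo/raw/tip/README.rst"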
@@ -1,6 +1,5 b''
 [bumpversion]
-current_version = 4.23.2
+current_version = 4.24.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:rhodecode/VERSION]
-
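The version bump recorded in this hunk is driven by the ``[bumpversion]`` configuration together with the ``bumpversion`` CLI; the exact invocation used by the release tooling is not shown in this diff, but a rough sketch run from the repository root would look like this::

    # preview the change from 4.23.x to 4.24.0 without touching any files
    bumpversion --dry-run --verbose --new-version 4.24.0 minor

    # apply it; rhodecode/VERSION is rewritten as configured above
    bumpversion --new-version 4.24.0 minor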
@@ -1,33 +1,28 b''
 [DEFAULT]
 done = false
 
 [task:bump_version]
 done = true
 
 [task:rc_tools_pinned]
-done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [task:changelog_updated]
-done = true
 
 [task:generate_api_docs]
-done = true
+
+[task:updated_translation]
 
 [release]
-state = prepared
-version = 4.23.2
+state = in_progress
+version = 4.24.0
 
-[task:updated_translation]
-
 [task:generate_js_routes]
 
 [task:updated_trial_license]
 
 [task:generate_oss_licenses]
 
@@ -1,69 +1,98 b''
-
-.PHONY: clean docs docs-clean docs-cleanup test test-clean test-only test-only-postgres test-only-mysql web-build generate-pkgs pip-packages
+.DEFAULT_GOAL := help
+
+# set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
+OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
 
 NODE_PATH=./node_modules
 WEBPACK=./node_binaries/webpack
 GRUNT=./node_binaries/grunt
-# set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
-OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
 
-clean:
+.PHONY: clean
+clean: ## full clean
 make test-clean
 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
 
-test:
+
+.PHONY: test
+test: ## run test-clean and tests
 make test-clean
 make test-only
 
-test-clean:
+
+.PHONY:test-clean
+test-clean: ## run test-clean and tests
 rm -rf coverage.xml htmlcov junit.xml pylint.log result
 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
 
-test-only:
+
+.PHONY: test-only
+test-only: ## run tests
 PYTHONHASHSEED=random \
-py.test -x -vv -r xw -p no:sugar --cov=rhodecode \
---cov-report=term-missing --cov-report=html \
+py.test -x -vv -r xw -p no:sugar \
+--cov=rhodecode --cov-report=term-missing --cov-report=html \
 rhodecode
 
-test-only-mysql:
+
+.PHONY: test-only-mysql
+test-only-mysql: ## run tests against mysql
 PYTHONHASHSEED=random \
-py.test -x -vv -r xw -p no:sugar --cov=rhodecode \
---cov-report=term-missing --cov-report=html \
+py.test -x -vv -r xw -p no:sugar \
+--cov=rhodecode --cov-report=term-missing --cov-report=html \
 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
 rhodecode
 
-test-only-postgres:
+
+.PHONY: test-only-postgres
+test-only-postgres: ## run tests against postgres
 PYTHONHASHSEED=random \
-py.test -x -vv -r xw -p no:sugar --cov=rhodecode \
---cov-report=term-missing --cov-report=html \
+py.test -x -vv -r xw -p no:sugar \
+--cov=rhodecode --cov-report=term-missing --cov-report=html \
 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
 rhodecode
 
-
-docs:
+.PHONY: docs
+docs: ## build docs
 (cd docs; nix-build default.nix -o result; make clean html)
 
-docs-clean:
+
+.PHONY: docs-clean
+docs-clean: ## Cleanup docs
 (cd docs; make clean)
 
-docs-cleanup:
+
+.PHONY: docs-cleanup
+docs-cleanup: ## Cleanup docs
 (cd docs; make cleanup)
 
-web-build:
+
+.PHONY: web-build
+web-build: ## Build static/js
 NODE_PATH=$(NODE_PATH) $(GRUNT)
 
-generate-pkgs:
+
+.PHONY: generate-pkgs
+generate-pkgs: ## generate new python packages
 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
 
-pip-packages:
+
+.PHONY: pip-packages
+pip-packages: ## show outdated packages
 python ${OUTDATED_PACKAGES}
 
-generate-js-pkgs:
+
+.PHONY: generate-js-pkgs
+generate-js-pkgs: ## generate js packages
 rm -rf node_modules && \
 nix-shell pkgs/shell-generate.nix --command "node2nix --input package.json -o pkgs/node-packages.nix -e pkgs/node-env.nix -c pkgs/node-default.nix -d --flatten --nodejs-8" && \
 sed -i -e 's/http:\/\//https:\/\//g' pkgs/node-packages.nix
 
-generate-license-meta:
+
+.PHONY: generate-license-meta
+generate-license-meta: ## Generate license metadata
 nix-build pkgs/license-generate.nix -o result-license && \
-cat result-license/licenses.json | python -m json.tool > rhodecode/config/licenses.json
\ No newline at end of file
+cat result-license/licenses.json | python -m json.tool > rhodecode/config/licenses.json
+
+.PHONY: help
+help:
+@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-24s\033[0m %s\n", $$1, $$2}'
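With the ``## `` annotations now attached to each target, the new ``help`` target above makes the Makefile self-documenting: the ``grep``/``awk`` pipeline collects every ``target: ## description`` pair, sorts it, and prints a padded, colourised table. Running it should produce output roughly along these lines (abridged, colour codes and exact padding omitted)::

    $ make help
    clean                    full clean
    docs                     build docs
    docs-clean               Cleanup docs
    docs-cleanup             Cleanup docs
    generate-js-pkgs         generate js packages
    ...
    test                     run test-clean and tests
    web-build                Build static/js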
@@ -1,151 +1,152 b''
 .. _rhodecode-release-notes-ref:
 
 Release Notes
 =============
 
 |RCE| 4.x Versions
 ------------------
 
 .. toctree::
 :maxdepth: 1
 
+release-notes-4.24.0.rst
 release-notes-4.23.2.rst
 release-notes-4.23.1.rst
 release-notes-4.23.0.rst
 release-notes-4.22.0.rst
 release-notes-4.21.0.rst
 release-notes-4.20.1.rst
 release-notes-4.20.0.rst
 release-notes-4.19.3.rst
 release-notes-4.19.2.rst
 release-notes-4.19.1.rst
 release-notes-4.19.0.rst
 release-notes-4.18.3.rst
 release-notes-4.18.2.rst
 release-notes-4.18.1.rst
 release-notes-4.18.0.rst
 release-notes-4.17.4.rst
 release-notes-4.17.3.rst
 release-notes-4.17.2.rst
 release-notes-4.17.1.rst
 release-notes-4.17.0.rst
 release-notes-4.16.2.rst
 release-notes-4.16.1.rst
 release-notes-4.16.0.rst
 release-notes-4.15.2.rst
 release-notes-4.15.1.rst
 release-notes-4.15.0.rst
 release-notes-4.14.1.rst
 release-notes-4.14.0.rst
 release-notes-4.13.3.rst
 release-notes-4.13.2.rst
 release-notes-4.13.1.rst
 release-notes-4.13.0.rst
 release-notes-4.12.4.rst
 release-notes-4.12.3.rst
 release-notes-4.12.2.rst
 release-notes-4.12.1.rst
 release-notes-4.12.0.rst
 release-notes-4.11.6.rst
 release-notes-4.11.5.rst
 release-notes-4.11.4.rst
 release-notes-4.11.3.rst
 release-notes-4.11.2.rst
 release-notes-4.11.1.rst
 release-notes-4.11.0.rst
 release-notes-4.10.6.rst
 release-notes-4.10.5.rst
 release-notes-4.10.4.rst
 release-notes-4.10.3.rst
 release-notes-4.10.2.rst
 release-notes-4.10.1.rst
 release-notes-4.10.0.rst
 release-notes-4.9.1.rst
 release-notes-4.9.0.rst
 release-notes-4.8.0.rst
 release-notes-4.7.2.rst
 release-notes-4.7.1.rst
 release-notes-4.7.0.rst
 release-notes-4.6.1.rst
 release-notes-4.6.0.rst
 release-notes-4.5.2.rst
 release-notes-4.5.1.rst
 release-notes-4.5.0.rst
 release-notes-4.4.2.rst
 release-notes-4.4.1.rst
 release-notes-4.4.0.rst
 release-notes-4.3.1.rst
 release-notes-4.3.0.rst
 release-notes-4.2.1.rst
 release-notes-4.2.0.rst
 release-notes-4.1.2.rst
 release-notes-4.1.1.rst
 release-notes-4.1.0.rst
 release-notes-4.0.1.rst
 release-notes-4.0.0.rst
 
 |RCE| 3.x Versions
 ------------------
 
 .. toctree::
 :maxdepth: 1
 
 release-notes-3.8.4.rst
 release-notes-3.8.3.rst
 release-notes-3.8.2.rst
 release-notes-3.8.1.rst
 release-notes-3.8.0.rst
 release-notes-3.7.1.rst
 release-notes-3.7.0.rst
 release-notes-3.6.1.rst
 release-notes-3.6.0.rst
 release-notes-3.5.2.rst
 release-notes-3.5.1.rst
 release-notes-3.5.0.rst
 release-notes-3.4.1.rst
 release-notes-3.4.0.rst
 release-notes-3.3.4.rst
 release-notes-3.3.3.rst
 release-notes-3.3.2.rst
 release-notes-3.3.1.rst
 release-notes-3.3.0.rst
 release-notes-3.2.3.rst
 release-notes-3.2.2.rst
 release-notes-3.2.1.rst
 release-notes-3.2.0.rst
 release-notes-3.1.1.rst
 release-notes-3.1.0.rst
 release-notes-3.0.2.rst
 release-notes-3.0.1.rst
 release-notes-3.0.0.rst
 
 |RCE| 2.x Versions
 ------------------
 
 .. toctree::
 :maxdepth: 1
 
 release-notes-2.2.8.rst
 release-notes-2.2.7.rst
 release-notes-2.2.6.rst
 release-notes-2.2.5.rst
 release-notes-2.2.4.rst
 release-notes-2.2.3.rst
 release-notes-2.2.2.rst
 release-notes-2.2.1.rst
 release-notes-2.2.0.rst
 release-notes-2.1.0.rst
 release-notes-2.0.2.rst
 release-notes-2.0.1.rst
 release-notes-2.0.0.rst
 
 |RCE| 1.x Versions
 ------------------
 
 .. toctree::
 :maxdepth: 1
 
 release-notes-1.7.2.rst
 release-notes-1.7.1.rst
 release-notes-1.7.0.rst
 release-notes-1.6.0.rst
@@ -1,2509 +1,2509 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "alembic" = super.buildPythonPackage {
7 "alembic" = super.buildPythonPackage {
8 name = "alembic-1.4.2";
8 name = "alembic-1.4.2";
9 doCheck = false;
9 doCheck = false;
10 propagatedBuildInputs = [
10 propagatedBuildInputs = [
11 self."sqlalchemy"
11 self."sqlalchemy"
12 self."mako"
12 self."mako"
13 self."python-editor"
13 self."python-editor"
14 self."python-dateutil"
14 self."python-dateutil"
15 ];
15 ];
16 src = fetchurl {
16 src = fetchurl {
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
19 };
19 };
20 meta = {
20 meta = {
21 license = [ pkgs.lib.licenses.mit ];
21 license = [ pkgs.lib.licenses.mit ];
22 };
22 };
23 };
23 };
24 "amqp" = super.buildPythonPackage {
24 "amqp" = super.buildPythonPackage {
25 name = "amqp-2.5.2";
25 name = "amqp-2.5.2";
26 doCheck = false;
26 doCheck = false;
27 propagatedBuildInputs = [
27 propagatedBuildInputs = [
28 self."vine"
28 self."vine"
29 ];
29 ];
30 src = fetchurl {
30 src = fetchurl {
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
33 };
33 };
34 meta = {
34 meta = {
35 license = [ pkgs.lib.licenses.bsdOriginal ];
35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 };
36 };
37 };
37 };
38 "apispec" = super.buildPythonPackage {
38 "apispec" = super.buildPythonPackage {
39 name = "apispec-1.0.0";
39 name = "apispec-1.0.0";
40 doCheck = false;
40 doCheck = false;
41 propagatedBuildInputs = [
41 propagatedBuildInputs = [
42 self."PyYAML"
42 self."PyYAML"
43 ];
43 ];
44 src = fetchurl {
44 src = fetchurl {
45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
47 };
47 };
48 meta = {
48 meta = {
49 license = [ pkgs.lib.licenses.mit ];
49 license = [ pkgs.lib.licenses.mit ];
50 };
50 };
51 };
51 };
52 "appenlight-client" = super.buildPythonPackage {
52 "appenlight-client" = super.buildPythonPackage {
53 name = "appenlight-client-0.6.26";
53 name = "appenlight-client-0.6.26";
54 doCheck = false;
54 doCheck = false;
55 propagatedBuildInputs = [
55 propagatedBuildInputs = [
56 self."webob"
56 self."webob"
57 self."requests"
57 self."requests"
58 self."six"
58 self."six"
59 ];
59 ];
60 src = fetchurl {
60 src = fetchurl {
61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
63 };
63 };
64 meta = {
64 meta = {
65 license = [ pkgs.lib.licenses.bsdOriginal ];
65 license = [ pkgs.lib.licenses.bsdOriginal ];
66 };
66 };
67 };
67 };
68 "asn1crypto" = super.buildPythonPackage {
68 "asn1crypto" = super.buildPythonPackage {
69 name = "asn1crypto-0.24.0";
69 name = "asn1crypto-0.24.0";
70 doCheck = false;
70 doCheck = false;
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
77 };
77 };
78 };
78 };
79 "atomicwrites" = super.buildPythonPackage {
79 "atomicwrites" = super.buildPythonPackage {
80 name = "atomicwrites-1.3.0";
80 name = "atomicwrites-1.3.0";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.mit ];
87 license = [ pkgs.lib.licenses.mit ];
88 };
88 };
89 };
89 };
90 "attrs" = super.buildPythonPackage {
90 "attrs" = super.buildPythonPackage {
91 name = "attrs-19.3.0";
91 name = "attrs-19.3.0";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.mit ];
98 license = [ pkgs.lib.licenses.mit ];
99 };
99 };
100 };
100 };
101 "babel" = super.buildPythonPackage {
101 "babel" = super.buildPythonPackage {
102 name = "babel-1.3";
102 name = "babel-1.3";
103 doCheck = false;
103 doCheck = false;
104 propagatedBuildInputs = [
104 propagatedBuildInputs = [
105 self."pytz"
105 self."pytz"
106 ];
106 ];
107 src = fetchurl {
107 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
110 };
110 };
111 meta = {
111 meta = {
112 license = [ pkgs.lib.licenses.bsdOriginal ];
112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 };
113 };
114 };
114 };
115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
116 name = "backports.shutil-get-terminal-size-1.0.0";
116 name = "backports.shutil-get-terminal-size-1.0.0";
117 doCheck = false;
117 doCheck = false;
118 src = fetchurl {
118 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
121 };
121 };
122 meta = {
122 meta = {
123 license = [ pkgs.lib.licenses.mit ];
123 license = [ pkgs.lib.licenses.mit ];
124 };
124 };
125 };
125 };
126 "beaker" = super.buildPythonPackage {
126 "beaker" = super.buildPythonPackage {
127 name = "beaker-1.9.1";
127 name = "beaker-1.9.1";
128 doCheck = false;
128 doCheck = false;
129 propagatedBuildInputs = [
129 propagatedBuildInputs = [
130 self."funcsigs"
130 self."funcsigs"
131 ];
131 ];
132 src = fetchurl {
132 src = fetchurl {
133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
135 };
135 };
136 meta = {
136 meta = {
137 license = [ pkgs.lib.licenses.bsdOriginal ];
137 license = [ pkgs.lib.licenses.bsdOriginal ];
138 };
138 };
139 };
139 };
140 "beautifulsoup4" = super.buildPythonPackage {
140 "beautifulsoup4" = super.buildPythonPackage {
141 name = "beautifulsoup4-4.6.3";
141 name = "beautifulsoup4-4.6.3";
142 doCheck = false;
142 doCheck = false;
143 src = fetchurl {
143 src = fetchurl {
144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
146 };
146 };
147 meta = {
147 meta = {
148 license = [ pkgs.lib.licenses.mit ];
148 license = [ pkgs.lib.licenses.mit ];
149 };
149 };
150 };
150 };
151 "billiard" = super.buildPythonPackage {
151 "billiard" = super.buildPythonPackage {
152 name = "billiard-3.6.1.0";
152 name = "billiard-3.6.1.0";
153 doCheck = false;
153 doCheck = false;
154 src = fetchurl {
154 src = fetchurl {
155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
157 };
157 };
158 meta = {
158 meta = {
159 license = [ pkgs.lib.licenses.bsdOriginal ];
159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 };
160 };
161 };
161 };
162 "bleach" = super.buildPythonPackage {
162 "bleach" = super.buildPythonPackage {
163 name = "bleach-3.1.3";
163 name = "bleach-3.1.3";
164 doCheck = false;
164 doCheck = false;
165 propagatedBuildInputs = [
165 propagatedBuildInputs = [
166 self."six"
166 self."six"
167 self."webencodings"
167 self."webencodings"
168 ];
168 ];
169 src = fetchurl {
169 src = fetchurl {
170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
172 };
172 };
173 meta = {
173 meta = {
174 license = [ pkgs.lib.licenses.asl20 ];
174 license = [ pkgs.lib.licenses.asl20 ];
175 };
175 };
176 };
176 };
177 "bumpversion" = super.buildPythonPackage {
177 "bumpversion" = super.buildPythonPackage {
178 name = "bumpversion-0.5.3";
178 name = "bumpversion-0.5.3";
179 doCheck = false;
179 doCheck = false;
180 src = fetchurl {
180 src = fetchurl {
181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
183 };
183 };
184 meta = {
184 meta = {
185 license = [ pkgs.lib.licenses.mit ];
185 license = [ pkgs.lib.licenses.mit ];
186 };
186 };
187 };
187 };
188 "cachetools" = super.buildPythonPackage {
188 "cachetools" = super.buildPythonPackage {
189 name = "cachetools-3.1.1";
189 name = "cachetools-3.1.1";
190 doCheck = false;
190 doCheck = false;
191 src = fetchurl {
191 src = fetchurl {
192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
194 };
194 };
195 meta = {
195 meta = {
196 license = [ pkgs.lib.licenses.mit ];
196 license = [ pkgs.lib.licenses.mit ];
197 };
197 };
198 };
198 };
199 "celery" = super.buildPythonPackage {
199 "celery" = super.buildPythonPackage {
200 name = "celery-4.3.0";
200 name = "celery-4.3.0";
201 doCheck = false;
201 doCheck = false;
202 propagatedBuildInputs = [
202 propagatedBuildInputs = [
203 self."pytz"
203 self."pytz"
204 self."billiard"
204 self."billiard"
205 self."kombu"
205 self."kombu"
206 self."vine"
206 self."vine"
207 ];
207 ];
208 src = fetchurl {
208 src = fetchurl {
209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
211 };
211 };
212 meta = {
212 meta = {
213 license = [ pkgs.lib.licenses.bsdOriginal ];
213 license = [ pkgs.lib.licenses.bsdOriginal ];
214 };
214 };
215 };
215 };
216 "certifi" = super.buildPythonPackage {
216 "certifi" = super.buildPythonPackage {
217 name = "certifi-2020.4.5.1";
217 name = "certifi-2020.4.5.1";
218 doCheck = false;
218 doCheck = false;
219 src = fetchurl {
219 src = fetchurl {
220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
222 };
222 };
223 meta = {
223 meta = {
224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
225 };
225 };
226 };
226 };
227 "cffi" = super.buildPythonPackage {
227 "cffi" = super.buildPythonPackage {
228 name = "cffi-1.12.3";
228 name = "cffi-1.12.3";
229 doCheck = false;
229 doCheck = false;
230 propagatedBuildInputs = [
230 propagatedBuildInputs = [
231 self."pycparser"
231 self."pycparser"
232 ];
232 ];
233 src = fetchurl {
233 src = fetchurl {
234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
236 };
236 };
237 meta = {
237 meta = {
238 license = [ pkgs.lib.licenses.mit ];
238 license = [ pkgs.lib.licenses.mit ];
239 };
239 };
240 };
240 };
241 "chameleon" = super.buildPythonPackage {
241 "chameleon" = super.buildPythonPackage {
242 name = "chameleon-2.24";
242 name = "chameleon-2.24";
243 doCheck = false;
243 doCheck = false;
244 src = fetchurl {
244 src = fetchurl {
245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
247 };
247 };
248 meta = {
248 meta = {
249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
250 };
250 };
251 };
251 };
252 "channelstream" = super.buildPythonPackage {
252 "channelstream" = super.buildPythonPackage {
253 name = "channelstream-0.6.14";
253 name = "channelstream-0.6.14";
254 doCheck = false;
254 doCheck = false;
255 propagatedBuildInputs = [
255 propagatedBuildInputs = [
256 self."gevent"
256 self."gevent"
257 self."ws4py"
257 self."ws4py"
258 self."marshmallow"
258 self."marshmallow"
259 self."python-dateutil"
259 self."python-dateutil"
260 self."pyramid"
260 self."pyramid"
261 self."pyramid-jinja2"
261 self."pyramid-jinja2"
262 self."pyramid-apispec"
262 self."pyramid-apispec"
263 self."itsdangerous"
263 self."itsdangerous"
264 self."requests"
264 self."requests"
265 self."six"
265 self."six"
266 ];
266 ];
267 src = fetchurl {
267 src = fetchurl {
268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
270 };
270 };
271 meta = {
271 meta = {
272 license = [ pkgs.lib.licenses.bsdOriginal ];
272 license = [ pkgs.lib.licenses.bsdOriginal ];
273 };
273 };
274 };
274 };
275 "chardet" = super.buildPythonPackage {
275 "chardet" = super.buildPythonPackage {
276 name = "chardet-3.0.4";
276 name = "chardet-3.0.4";
277 doCheck = false;
277 doCheck = false;
278 src = fetchurl {
278 src = fetchurl {
279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
281 };
281 };
282 meta = {
282 meta = {
283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
284 };
284 };
285 };
285 };
286 "click" = super.buildPythonPackage {
286 "click" = super.buildPythonPackage {
287 name = "click-7.0";
287 name = "click-7.0";
288 doCheck = false;
288 doCheck = false;
289 src = fetchurl {
289 src = fetchurl {
290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
292 };
292 };
293 meta = {
293 meta = {
294 license = [ pkgs.lib.licenses.bsdOriginal ];
294 license = [ pkgs.lib.licenses.bsdOriginal ];
295 };
295 };
296 };
296 };
297 "colander" = super.buildPythonPackage {
297 "colander" = super.buildPythonPackage {
298 name = "colander-1.7.0";
298 name = "colander-1.7.0";
299 doCheck = false;
299 doCheck = false;
300 propagatedBuildInputs = [
300 propagatedBuildInputs = [
301 self."translationstring"
301 self."translationstring"
302 self."iso8601"
302 self."iso8601"
303 self."enum34"
303 self."enum34"
304 ];
304 ];
305 src = fetchurl {
305 src = fetchurl {
306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
308 };
308 };
309 meta = {
309 meta = {
310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
311 };
311 };
312 };
312 };
313 "configobj" = super.buildPythonPackage {
313 "configobj" = super.buildPythonPackage {
314 name = "configobj-5.0.6";
314 name = "configobj-5.0.6";
315 doCheck = false;
315 doCheck = false;
316 propagatedBuildInputs = [
316 propagatedBuildInputs = [
317 self."six"
317 self."six"
318 ];
318 ];
319 src = fetchurl {
319 src = fetchurl {
320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
322 };
322 };
323 meta = {
323 meta = {
324 license = [ pkgs.lib.licenses.bsdOriginal ];
324 license = [ pkgs.lib.licenses.bsdOriginal ];
325 };
325 };
326 };
326 };
327 "configparser" = super.buildPythonPackage {
327 "configparser" = super.buildPythonPackage {
328 name = "configparser-4.0.2";
328 name = "configparser-4.0.2";
329 doCheck = false;
329 doCheck = false;
330 src = fetchurl {
330 src = fetchurl {
331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
333 };
333 };
334 meta = {
334 meta = {
335 license = [ pkgs.lib.licenses.mit ];
335 license = [ pkgs.lib.licenses.mit ];
336 };
336 };
337 };
337 };
338 "contextlib2" = super.buildPythonPackage {
338 "contextlib2" = super.buildPythonPackage {
339 name = "contextlib2-0.6.0.post1";
339 name = "contextlib2-0.6.0.post1";
340 doCheck = false;
340 doCheck = false;
341 src = fetchurl {
341 src = fetchurl {
342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
344 };
344 };
345 meta = {
345 meta = {
346 license = [ pkgs.lib.licenses.psfl ];
346 license = [ pkgs.lib.licenses.psfl ];
347 };
347 };
348 };
348 };
349 "cov-core" = super.buildPythonPackage {
349 "cov-core" = super.buildPythonPackage {
350 name = "cov-core-1.15.0";
350 name = "cov-core-1.15.0";
351 doCheck = false;
351 doCheck = false;
352 propagatedBuildInputs = [
352 propagatedBuildInputs = [
353 self."coverage"
353 self."coverage"
354 ];
354 ];
355 src = fetchurl {
355 src = fetchurl {
356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
358 };
358 };
359 meta = {
359 meta = {
360 license = [ pkgs.lib.licenses.mit ];
360 license = [ pkgs.lib.licenses.mit ];
361 };
361 };
362 };
362 };
363 "coverage" = super.buildPythonPackage {
363 "coverage" = super.buildPythonPackage {
364 name = "coverage-4.5.4";
364 name = "coverage-4.5.4";
365 doCheck = false;
365 doCheck = false;
366 src = fetchurl {
366 src = fetchurl {
367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
369 };
369 };
370 meta = {
370 meta = {
371 license = [ pkgs.lib.licenses.asl20 ];
371 license = [ pkgs.lib.licenses.asl20 ];
372 };
372 };
373 };
373 };
374 "cryptography" = super.buildPythonPackage {
374 "cryptography" = super.buildPythonPackage {
375 name = "cryptography-2.6.1";
375 name = "cryptography-2.6.1";
376 doCheck = false;
376 doCheck = false;
377 propagatedBuildInputs = [
377 propagatedBuildInputs = [
378 self."asn1crypto"
378 self."asn1crypto"
379 self."six"
379 self."six"
380 self."cffi"
380 self."cffi"
381 self."enum34"
381 self."enum34"
382 self."ipaddress"
382 self."ipaddress"
383 ];
383 ];
384 src = fetchurl {
384 src = fetchurl {
385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
387 };
387 };
388 meta = {
388 meta = {
389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
390 };
390 };
391 };
391 };
392 "cssselect" = super.buildPythonPackage {
392 "cssselect" = super.buildPythonPackage {
393 name = "cssselect-1.0.3";
393 name = "cssselect-1.0.3";
394 doCheck = false;
394 doCheck = false;
395 src = fetchurl {
395 src = fetchurl {
396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
398 };
398 };
399 meta = {
399 meta = {
400 license = [ pkgs.lib.licenses.bsdOriginal ];
400 license = [ pkgs.lib.licenses.bsdOriginal ];
401 };
401 };
402 };
402 };
403 "cssutils" = super.buildPythonPackage {
403 "cssutils" = super.buildPythonPackage {
404 name = "cssutils-1.0.2";
404 name = "cssutils-1.0.2";
405 doCheck = false;
405 doCheck = false;
406 src = fetchurl {
406 src = fetchurl {
407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
409 };
409 };
410 meta = {
410 meta = {
411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
412 };
412 };
413 };
413 };
414 "decorator" = super.buildPythonPackage {
414 "decorator" = super.buildPythonPackage {
415 name = "decorator-4.1.2";
415 name = "decorator-4.1.2";
416 doCheck = false;
416 doCheck = false;
417 src = fetchurl {
417 src = fetchurl {
418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
420 };
420 };
421 meta = {
421 meta = {
422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
423 };
423 };
424 };
424 };
425 "deform" = super.buildPythonPackage {
425 "deform" = super.buildPythonPackage {
426 name = "deform-2.0.8";
426 name = "deform-2.0.8";
427 doCheck = false;
427 doCheck = false;
428 propagatedBuildInputs = [
428 propagatedBuildInputs = [
429 self."chameleon"
429 self."chameleon"
430 self."colander"
430 self."colander"
431 self."iso8601"
431 self."iso8601"
432 self."peppercorn"
432 self."peppercorn"
433 self."translationstring"
433 self."translationstring"
434 self."zope.deprecation"
434 self."zope.deprecation"
435 ];
435 ];
436 src = fetchurl {
436 src = fetchurl {
437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
439 };
439 };
440 meta = {
440 meta = {
441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
442 };
442 };
443 };
443 };
444 "defusedxml" = super.buildPythonPackage {
444 "defusedxml" = super.buildPythonPackage {
445 name = "defusedxml-0.6.0";
445 name = "defusedxml-0.6.0";
446 doCheck = false;
446 doCheck = false;
447 src = fetchurl {
447 src = fetchurl {
448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
450 };
450 };
451 meta = {
451 meta = {
452 license = [ pkgs.lib.licenses.psfl ];
452 license = [ pkgs.lib.licenses.psfl ];
453 };
453 };
454 };
454 };
455 "dm.xmlsec.binding" = super.buildPythonPackage {
455 "dm.xmlsec.binding" = super.buildPythonPackage {
456 name = "dm.xmlsec.binding-1.3.7";
456 name = "dm.xmlsec.binding-1.3.7";
457 doCheck = false;
457 doCheck = false;
458 propagatedBuildInputs = [
458 propagatedBuildInputs = [
459 self."setuptools"
459 self."setuptools"
460 self."lxml"
460 self."lxml"
461 ];
461 ];
462 src = fetchurl {
462 src = fetchurl {
463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
465 };
465 };
466 meta = {
466 meta = {
467 license = [ pkgs.lib.licenses.bsdOriginal ];
467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 };
468 };
469 };
469 };
470 "docutils" = super.buildPythonPackage {
470 "docutils" = super.buildPythonPackage {
471 name = "docutils-0.16";
471 name = "docutils-0.16";
472 doCheck = false;
472 doCheck = false;
473 src = fetchurl {
473 src = fetchurl {
474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
476 };
476 };
477 meta = {
477 meta = {
478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
479 };
479 };
480 };
480 };
481 "dogpile.cache" = super.buildPythonPackage {
481 "dogpile.cache" = super.buildPythonPackage {
482 name = "dogpile.cache-0.9.0";
482 name = "dogpile.cache-0.9.0";
483 doCheck = false;
483 doCheck = false;
484 propagatedBuildInputs = [
484 propagatedBuildInputs = [
485 self."decorator"
485 self."decorator"
486 ];
486 ];
487 src = fetchurl {
487 src = fetchurl {
488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
490 };
490 };
491 meta = {
491 meta = {
492 license = [ pkgs.lib.licenses.bsdOriginal ];
492 license = [ pkgs.lib.licenses.bsdOriginal ];
493 };
493 };
494 };
494 };
495 "dogpile.core" = super.buildPythonPackage {
495 "dogpile.core" = super.buildPythonPackage {
496 name = "dogpile.core-0.4.1";
496 name = "dogpile.core-0.4.1";
497 doCheck = false;
497 doCheck = false;
498 src = fetchurl {
498 src = fetchurl {
499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
501 };
501 };
502 meta = {
502 meta = {
503 license = [ pkgs.lib.licenses.bsdOriginal ];
503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 };
504 };
505 };
505 };
506 "ecdsa" = super.buildPythonPackage {
506 "ecdsa" = super.buildPythonPackage {
507 name = "ecdsa-0.13.2";
507 name = "ecdsa-0.13.2";
508 doCheck = false;
508 doCheck = false;
509 src = fetchurl {
509 src = fetchurl {
510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
512 };
512 };
513 meta = {
513 meta = {
514 license = [ pkgs.lib.licenses.mit ];
514 license = [ pkgs.lib.licenses.mit ];
515 };
515 };
516 };
516 };
517 "elasticsearch" = super.buildPythonPackage {
517 "elasticsearch" = super.buildPythonPackage {
518 name = "elasticsearch-6.3.1";
518 name = "elasticsearch-6.3.1";
519 doCheck = false;
519 doCheck = false;
520 propagatedBuildInputs = [
520 propagatedBuildInputs = [
521 self."urllib3"
521 self."urllib3"
522 ];
522 ];
523 src = fetchurl {
523 src = fetchurl {
524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
526 };
526 };
527 meta = {
527 meta = {
528 license = [ pkgs.lib.licenses.asl20 ];
528 license = [ pkgs.lib.licenses.asl20 ];
529 };
529 };
530 };
530 };
531 "elasticsearch-dsl" = super.buildPythonPackage {
531 "elasticsearch-dsl" = super.buildPythonPackage {
532 name = "elasticsearch-dsl-6.3.1";
532 name = "elasticsearch-dsl-6.3.1";
533 doCheck = false;
533 doCheck = false;
534 propagatedBuildInputs = [
534 propagatedBuildInputs = [
535 self."six"
535 self."six"
536 self."python-dateutil"
536 self."python-dateutil"
537 self."elasticsearch"
537 self."elasticsearch"
538 self."ipaddress"
538 self."ipaddress"
539 ];
539 ];
540 src = fetchurl {
540 src = fetchurl {
541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
543 };
543 };
544 meta = {
544 meta = {
545 license = [ pkgs.lib.licenses.asl20 ];
545 license = [ pkgs.lib.licenses.asl20 ];
546 };
546 };
547 };
547 };
548 "elasticsearch1" = super.buildPythonPackage {
548 "elasticsearch1" = super.buildPythonPackage {
549 name = "elasticsearch1-1.10.0";
549 name = "elasticsearch1-1.10.0";
550 doCheck = false;
550 doCheck = false;
551 propagatedBuildInputs = [
551 propagatedBuildInputs = [
552 self."urllib3"
552 self."urllib3"
553 ];
553 ];
554 src = fetchurl {
554 src = fetchurl {
555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
557 };
557 };
558 meta = {
558 meta = {
559 license = [ pkgs.lib.licenses.asl20 ];
559 license = [ pkgs.lib.licenses.asl20 ];
560 };
560 };
561 };
561 };
562 "elasticsearch1-dsl" = super.buildPythonPackage {
562 "elasticsearch1-dsl" = super.buildPythonPackage {
563 name = "elasticsearch1-dsl-0.0.12";
563 name = "elasticsearch1-dsl-0.0.12";
564 doCheck = false;
564 doCheck = false;
565 propagatedBuildInputs = [
565 propagatedBuildInputs = [
566 self."six"
566 self."six"
567 self."python-dateutil"
567 self."python-dateutil"
568 self."elasticsearch1"
568 self."elasticsearch1"
569 ];
569 ];
570 src = fetchurl {
570 src = fetchurl {
571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
573 };
573 };
574 meta = {
574 meta = {
575 license = [ pkgs.lib.licenses.asl20 ];
575 license = [ pkgs.lib.licenses.asl20 ];
576 };
576 };
577 };
577 };
578 "elasticsearch2" = super.buildPythonPackage {
578 "elasticsearch2" = super.buildPythonPackage {
579 name = "elasticsearch2-2.5.1";
579 name = "elasticsearch2-2.5.1";
580 doCheck = false;
580 doCheck = false;
581 propagatedBuildInputs = [
581 propagatedBuildInputs = [
582 self."urllib3"
582 self."urllib3"
583 ];
583 ];
584 src = fetchurl {
584 src = fetchurl {
585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
587 };
587 };
588 meta = {
588 meta = {
589 license = [ pkgs.lib.licenses.asl20 ];
589 license = [ pkgs.lib.licenses.asl20 ];
590 };
590 };
591 };
591 };
592 "entrypoints" = super.buildPythonPackage {
592 "entrypoints" = super.buildPythonPackage {
593 name = "entrypoints-0.2.2";
593 name = "entrypoints-0.2.2";
594 doCheck = false;
594 doCheck = false;
595 propagatedBuildInputs = [
595 propagatedBuildInputs = [
596 self."configparser"
596 self."configparser"
597 ];
597 ];
598 src = fetchurl {
598 src = fetchurl {
599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
601 };
601 };
602 meta = {
602 meta = {
603 license = [ pkgs.lib.licenses.mit ];
603 license = [ pkgs.lib.licenses.mit ];
604 };
604 };
605 };
605 };
606 "enum34" = super.buildPythonPackage {
606 "enum34" = super.buildPythonPackage {
607 name = "enum34-1.1.10";
607 name = "enum34-1.1.10";
608 doCheck = false;
608 doCheck = false;
609 src = fetchurl {
609 src = fetchurl {
610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
612 };
612 };
613 meta = {
613 meta = {
614 license = [ pkgs.lib.licenses.bsdOriginal ];
614 license = [ pkgs.lib.licenses.bsdOriginal ];
615 };
615 };
616 };
616 };
617 "formencode" = super.buildPythonPackage {
617 "formencode" = super.buildPythonPackage {
618 name = "formencode-1.2.4";
618 name = "formencode-1.2.4";
619 doCheck = false;
619 doCheck = false;
620 src = fetchurl {
620 src = fetchurl {
621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
623 };
623 };
624 meta = {
624 meta = {
625 license = [ pkgs.lib.licenses.psfl ];
625 license = [ pkgs.lib.licenses.psfl ];
626 };
626 };
627 };
627 };
628 "funcsigs" = super.buildPythonPackage {
628 "funcsigs" = super.buildPythonPackage {
629 name = "funcsigs-1.0.2";
629 name = "funcsigs-1.0.2";
630 doCheck = false;
630 doCheck = false;
631 src = fetchurl {
631 src = fetchurl {
632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
634 };
634 };
635 meta = {
635 meta = {
636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
637 };
637 };
638 };
638 };
639 "functools32" = super.buildPythonPackage {
639 "functools32" = super.buildPythonPackage {
640 name = "functools32-3.2.3.post2";
640 name = "functools32-3.2.3.post2";
641 doCheck = false;
641 doCheck = false;
642 src = fetchurl {
642 src = fetchurl {
643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
645 };
645 };
646 meta = {
646 meta = {
647 license = [ pkgs.lib.licenses.psfl ];
647 license = [ pkgs.lib.licenses.psfl ];
648 };
648 };
649 };
649 };
650 "future" = super.buildPythonPackage {
650 "future" = super.buildPythonPackage {
651 name = "future-0.14.3";
651 name = "future-0.14.3";
652 doCheck = false;
652 doCheck = false;
653 src = fetchurl {
653 src = fetchurl {
654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
656 };
656 };
657 meta = {
657 meta = {
658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
659 };
659 };
660 };
660 };
661 "futures" = super.buildPythonPackage {
661 "futures" = super.buildPythonPackage {
662 name = "futures-3.0.2";
662 name = "futures-3.0.2";
663 doCheck = false;
663 doCheck = false;
664 src = fetchurl {
664 src = fetchurl {
665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
667 };
667 };
668 meta = {
668 meta = {
669 license = [ pkgs.lib.licenses.bsdOriginal ];
669 license = [ pkgs.lib.licenses.bsdOriginal ];
670 };
670 };
671 };
671 };
672 "gevent" = super.buildPythonPackage {
672 "gevent" = super.buildPythonPackage {
673 name = "gevent-1.5.0";
673 name = "gevent-1.5.0";
674 doCheck = false;
674 doCheck = false;
675 propagatedBuildInputs = [
675 propagatedBuildInputs = [
676 self."greenlet"
676 self."greenlet"
677 ];
677 ];
678 src = fetchurl {
678 src = fetchurl {
679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
681 };
681 };
682 meta = {
682 meta = {
683 license = [ pkgs.lib.licenses.mit ];
683 license = [ pkgs.lib.licenses.mit ];
684 };
684 };
685 };
685 };
686 "gnureadline" = super.buildPythonPackage {
686 "gnureadline" = super.buildPythonPackage {
687 name = "gnureadline-6.3.8";
687 name = "gnureadline-6.3.8";
688 doCheck = false;
688 doCheck = false;
689 src = fetchurl {
689 src = fetchurl {
690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
692 };
692 };
693 meta = {
693 meta = {
694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
695 };
695 };
696 };
696 };
697 "gprof2dot" = super.buildPythonPackage {
697 "gprof2dot" = super.buildPythonPackage {
698 name = "gprof2dot-2017.9.19";
698 name = "gprof2dot-2017.9.19";
699 doCheck = false;
699 doCheck = false;
700 src = fetchurl {
700 src = fetchurl {
701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
703 };
703 };
704 meta = {
704 meta = {
705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
706 };
706 };
707 };
707 };
708 "greenlet" = super.buildPythonPackage {
708 "greenlet" = super.buildPythonPackage {
709 name = "greenlet-0.4.15";
709 name = "greenlet-0.4.15";
710 doCheck = false;
710 doCheck = false;
711 src = fetchurl {
711 src = fetchurl {
712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
714 };
714 };
715 meta = {
715 meta = {
716 license = [ pkgs.lib.licenses.mit ];
716 license = [ pkgs.lib.licenses.mit ];
717 };
717 };
718 };
718 };
719 "gunicorn" = super.buildPythonPackage {
719 "gunicorn" = super.buildPythonPackage {
720 name = "gunicorn-19.9.0";
720 name = "gunicorn-19.9.0";
721 doCheck = false;
721 doCheck = false;
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
728 };
728 };
729 };
729 };
730 "hupper" = super.buildPythonPackage {
730 "hupper" = super.buildPythonPackage {
731 name = "hupper-1.10.2";
731 name = "hupper-1.10.2";
732 doCheck = false;
732 doCheck = false;
733 src = fetchurl {
733 src = fetchurl {
734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
736 };
736 };
737 meta = {
737 meta = {
738 license = [ pkgs.lib.licenses.mit ];
738 license = [ pkgs.lib.licenses.mit ];
739 };
739 };
740 };
740 };
741 "idna" = super.buildPythonPackage {
741 "idna" = super.buildPythonPackage {
742 name = "idna-2.8";
742 name = "idna-2.8";
743 doCheck = false;
743 doCheck = false;
744 src = fetchurl {
744 src = fetchurl {
745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
747 };
747 };
748 meta = {
748 meta = {
749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
750 };
750 };
751 };
751 };
752 "importlib-metadata" = super.buildPythonPackage {
752 "importlib-metadata" = super.buildPythonPackage {
753 name = "importlib-metadata-1.6.0";
753 name = "importlib-metadata-1.6.0";
754 doCheck = false;
754 doCheck = false;
755 propagatedBuildInputs = [
755 propagatedBuildInputs = [
756 self."zipp"
756 self."zipp"
757 self."pathlib2"
757 self."pathlib2"
758 self."contextlib2"
758 self."contextlib2"
759 self."configparser"
759 self."configparser"
760 ];
760 ];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.asl20 ];
766 license = [ pkgs.lib.licenses.asl20 ];
767 };
767 };
768 };
768 };
769 "infrae.cache" = super.buildPythonPackage {
769 "infrae.cache" = super.buildPythonPackage {
770 name = "infrae.cache-1.0.1";
770 name = "infrae.cache-1.0.1";
771 doCheck = false;
771 doCheck = false;
772 propagatedBuildInputs = [
772 propagatedBuildInputs = [
773 self."beaker"
773 self."beaker"
774 self."repoze.lru"
774 self."repoze.lru"
775 ];
775 ];
776 src = fetchurl {
776 src = fetchurl {
777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
779 };
779 };
780 meta = {
780 meta = {
781 license = [ pkgs.lib.licenses.zpl21 ];
781 license = [ pkgs.lib.licenses.zpl21 ];
782 };
782 };
783 };
783 };
784 "invoke" = super.buildPythonPackage {
784 "invoke" = super.buildPythonPackage {
785 name = "invoke-0.13.0";
785 name = "invoke-0.13.0";
786 doCheck = false;
786 doCheck = false;
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ pkgs.lib.licenses.bsdOriginal ];
792 license = [ pkgs.lib.licenses.bsdOriginal ];
793 };
793 };
794 };
794 };
795 "ipaddress" = super.buildPythonPackage {
795 "ipaddress" = super.buildPythonPackage {
796 name = "ipaddress-1.0.23";
796 name = "ipaddress-1.0.23";
797 doCheck = false;
797 doCheck = false;
798 src = fetchurl {
798 src = fetchurl {
799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
801 };
801 };
802 meta = {
802 meta = {
803 license = [ pkgs.lib.licenses.psfl ];
803 license = [ pkgs.lib.licenses.psfl ];
804 };
804 };
805 };
805 };
806 "ipdb" = super.buildPythonPackage {
806 "ipdb" = super.buildPythonPackage {
807 name = "ipdb-0.13.2";
807 name = "ipdb-0.13.2";
808 doCheck = false;
808 doCheck = false;
809 propagatedBuildInputs = [
809 propagatedBuildInputs = [
810 self."setuptools"
810 self."setuptools"
811 self."ipython"
811 self."ipython"
812 ];
812 ];
813 src = fetchurl {
813 src = fetchurl {
814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
816 };
816 };
817 meta = {
817 meta = {
818 license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 };
819 };
820 };
820 };
821 "ipython" = super.buildPythonPackage {
821 "ipython" = super.buildPythonPackage {
822 name = "ipython-5.1.0";
822 name = "ipython-5.1.0";
823 doCheck = false;
823 doCheck = false;
824 propagatedBuildInputs = [
824 propagatedBuildInputs = [
825 self."setuptools"
825 self."setuptools"
826 self."decorator"
826 self."decorator"
827 self."pickleshare"
827 self."pickleshare"
828 self."simplegeneric"
828 self."simplegeneric"
829 self."traitlets"
829 self."traitlets"
830 self."prompt-toolkit"
830 self."prompt-toolkit"
831 self."pygments"
831 self."pygments"
832 self."pexpect"
832 self."pexpect"
833 self."backports.shutil-get-terminal-size"
833 self."backports.shutil-get-terminal-size"
834 self."pathlib2"
834 self."pathlib2"
835 self."pexpect"
835 self."pexpect"
836 ];
836 ];
837 src = fetchurl {
837 src = fetchurl {
838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
840 };
840 };
841 meta = {
841 meta = {
842 license = [ pkgs.lib.licenses.bsdOriginal ];
842 license = [ pkgs.lib.licenses.bsdOriginal ];
843 };
843 };
844 };
844 };
845 "ipython-genutils" = super.buildPythonPackage {
845 "ipython-genutils" = super.buildPythonPackage {
846 name = "ipython-genutils-0.2.0";
846 name = "ipython-genutils-0.2.0";
847 doCheck = false;
847 doCheck = false;
848 src = fetchurl {
848 src = fetchurl {
849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
851 };
851 };
852 meta = {
852 meta = {
853 license = [ pkgs.lib.licenses.bsdOriginal ];
853 license = [ pkgs.lib.licenses.bsdOriginal ];
854 };
854 };
855 };
855 };
856 "iso8601" = super.buildPythonPackage {
856 "iso8601" = super.buildPythonPackage {
857 name = "iso8601-0.1.12";
857 name = "iso8601-0.1.12";
858 doCheck = false;
858 doCheck = false;
859 src = fetchurl {
859 src = fetchurl {
860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
862 };
862 };
863 meta = {
863 meta = {
864 license = [ pkgs.lib.licenses.mit ];
864 license = [ pkgs.lib.licenses.mit ];
865 };
865 };
866 };
866 };
867 "isodate" = super.buildPythonPackage {
867 "isodate" = super.buildPythonPackage {
868 name = "isodate-0.6.0";
868 name = "isodate-0.6.0";
869 doCheck = false;
869 doCheck = false;
870 propagatedBuildInputs = [
870 propagatedBuildInputs = [
871 self."six"
871 self."six"
872 ];
872 ];
873 src = fetchurl {
873 src = fetchurl {
874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
876 };
876 };
877 meta = {
877 meta = {
878 license = [ pkgs.lib.licenses.bsdOriginal ];
878 license = [ pkgs.lib.licenses.bsdOriginal ];
879 };
879 };
880 };
880 };
881 "itsdangerous" = super.buildPythonPackage {
881 "itsdangerous" = super.buildPythonPackage {
882 name = "itsdangerous-1.1.0";
882 name = "itsdangerous-1.1.0";
883 doCheck = false;
883 doCheck = false;
884 src = fetchurl {
884 src = fetchurl {
885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
887 };
887 };
888 meta = {
888 meta = {
889 license = [ pkgs.lib.licenses.bsdOriginal ];
889 license = [ pkgs.lib.licenses.bsdOriginal ];
890 };
890 };
891 };
891 };
892 "jinja2" = super.buildPythonPackage {
892 "jinja2" = super.buildPythonPackage {
893 name = "jinja2-2.9.6";
893 name = "jinja2-2.9.6";
894 doCheck = false;
894 doCheck = false;
895 propagatedBuildInputs = [
895 propagatedBuildInputs = [
896 self."markupsafe"
896 self."markupsafe"
897 ];
897 ];
898 src = fetchurl {
898 src = fetchurl {
899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
901 };
901 };
902 meta = {
902 meta = {
903 license = [ pkgs.lib.licenses.bsdOriginal ];
903 license = [ pkgs.lib.licenses.bsdOriginal ];
904 };
904 };
905 };
905 };
906 "jsonschema" = super.buildPythonPackage {
906 "jsonschema" = super.buildPythonPackage {
907 name = "jsonschema-2.6.0";
907 name = "jsonschema-2.6.0";
908 doCheck = false;
908 doCheck = false;
909 propagatedBuildInputs = [
909 propagatedBuildInputs = [
910 self."functools32"
910 self."functools32"
911 ];
911 ];
912 src = fetchurl {
912 src = fetchurl {
913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
915 };
915 };
916 meta = {
916 meta = {
917 license = [ pkgs.lib.licenses.mit ];
917 license = [ pkgs.lib.licenses.mit ];
918 };
918 };
919 };
919 };
920 "jupyter-client" = super.buildPythonPackage {
920 "jupyter-client" = super.buildPythonPackage {
921 name = "jupyter-client-5.0.0";
921 name = "jupyter-client-5.0.0";
922 doCheck = false;
922 doCheck = false;
923 propagatedBuildInputs = [
923 propagatedBuildInputs = [
924 self."traitlets"
924 self."traitlets"
925 self."jupyter-core"
925 self."jupyter-core"
926 self."pyzmq"
926 self."pyzmq"
927 self."python-dateutil"
927 self."python-dateutil"
928 ];
928 ];
929 src = fetchurl {
929 src = fetchurl {
930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
932 };
932 };
933 meta = {
933 meta = {
934 license = [ pkgs.lib.licenses.bsdOriginal ];
934 license = [ pkgs.lib.licenses.bsdOriginal ];
935 };
935 };
936 };
936 };
937 "jupyter-core" = super.buildPythonPackage {
937 "jupyter-core" = super.buildPythonPackage {
938 name = "jupyter-core-4.5.0";
938 name = "jupyter-core-4.5.0";
939 doCheck = false;
939 doCheck = false;
940 propagatedBuildInputs = [
940 propagatedBuildInputs = [
941 self."traitlets"
941 self."traitlets"
942 ];
942 ];
943 src = fetchurl {
943 src = fetchurl {
944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
946 };
946 };
947 meta = {
947 meta = {
948 license = [ pkgs.lib.licenses.bsdOriginal ];
948 license = [ pkgs.lib.licenses.bsdOriginal ];
949 };
949 };
950 };
950 };
951 "kombu" = super.buildPythonPackage {
951 "kombu" = super.buildPythonPackage {
952 name = "kombu-4.6.6";
952 name = "kombu-4.6.6";
953 doCheck = false;
953 doCheck = false;
954 propagatedBuildInputs = [
954 propagatedBuildInputs = [
955 self."amqp"
955 self."amqp"
956 self."importlib-metadata"
956 self."importlib-metadata"
957 ];
957 ];
958 src = fetchurl {
958 src = fetchurl {
959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
961 };
961 };
962 meta = {
962 meta = {
963 license = [ pkgs.lib.licenses.bsdOriginal ];
963 license = [ pkgs.lib.licenses.bsdOriginal ];
964 };
964 };
965 };
965 };
966 "lxml" = super.buildPythonPackage {
966 "lxml" = super.buildPythonPackage {
967 name = "lxml-4.2.5";
967 name = "lxml-4.2.5";
968 doCheck = false;
968 doCheck = false;
969 src = fetchurl {
969 src = fetchurl {
970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
972 };
972 };
973 meta = {
973 meta = {
974 license = [ pkgs.lib.licenses.bsdOriginal ];
974 license = [ pkgs.lib.licenses.bsdOriginal ];
975 };
975 };
976 };
976 };
977 "mako" = super.buildPythonPackage {
977 "mako" = super.buildPythonPackage {
978 name = "mako-1.1.0";
978 name = "mako-1.1.0";
979 doCheck = false;
979 doCheck = false;
980 propagatedBuildInputs = [
980 propagatedBuildInputs = [
981 self."markupsafe"
981 self."markupsafe"
982 ];
982 ];
983 src = fetchurl {
983 src = fetchurl {
984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
986 };
986 };
987 meta = {
987 meta = {
988 license = [ pkgs.lib.licenses.mit ];
988 license = [ pkgs.lib.licenses.mit ];
989 };
989 };
990 };
990 };
991 "markdown" = super.buildPythonPackage {
991 "markdown" = super.buildPythonPackage {
992 name = "markdown-2.6.11";
992 name = "markdown-2.6.11";
993 doCheck = false;
993 doCheck = false;
994 src = fetchurl {
994 src = fetchurl {
995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
997 };
997 };
998 meta = {
998 meta = {
999 license = [ pkgs.lib.licenses.bsdOriginal ];
999 license = [ pkgs.lib.licenses.bsdOriginal ];
1000 };
1000 };
1001 };
1001 };
1002 "markupsafe" = super.buildPythonPackage {
1002 "markupsafe" = super.buildPythonPackage {
1003 name = "markupsafe-1.1.1";
1003 name = "markupsafe-1.1.1";
1004 doCheck = false;
1004 doCheck = false;
1005 src = fetchurl {
1005 src = fetchurl {
1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1008 };
1008 };
1009 meta = {
1009 meta = {
1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1011 };
1011 };
1012 };
1012 };
1013 "marshmallow" = super.buildPythonPackage {
1013 "marshmallow" = super.buildPythonPackage {
1014 name = "marshmallow-2.18.0";
1014 name = "marshmallow-2.18.0";
1015 doCheck = false;
1015 doCheck = false;
1016 src = fetchurl {
1016 src = fetchurl {
1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1019 };
1019 };
1020 meta = {
1020 meta = {
1021 license = [ pkgs.lib.licenses.mit ];
1021 license = [ pkgs.lib.licenses.mit ];
1022 };
1022 };
1023 };
1023 };
1024 "mistune" = super.buildPythonPackage {
1024 "mistune" = super.buildPythonPackage {
1025 name = "mistune-0.8.4";
1025 name = "mistune-0.8.4";
1026 doCheck = false;
1026 doCheck = false;
1027 src = fetchurl {
1027 src = fetchurl {
1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1030 };
1030 };
1031 meta = {
1031 meta = {
1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1033 };
1033 };
1034 };
1034 };
1035 "mock" = super.buildPythonPackage {
1035 "mock" = super.buildPythonPackage {
1036 name = "mock-3.0.5";
1036 name = "mock-3.0.5";
1037 doCheck = false;
1037 doCheck = false;
1038 propagatedBuildInputs = [
1038 propagatedBuildInputs = [
1039 self."six"
1039 self."six"
1040 self."funcsigs"
1040 self."funcsigs"
1041 ];
1041 ];
1042 src = fetchurl {
1042 src = fetchurl {
1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1045 };
1045 };
1046 meta = {
1046 meta = {
1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1048 };
1048 };
1049 };
1049 };
1050 "more-itertools" = super.buildPythonPackage {
1050 "more-itertools" = super.buildPythonPackage {
1051 name = "more-itertools-5.0.0";
1051 name = "more-itertools-5.0.0";
1052 doCheck = false;
1052 doCheck = false;
1053 propagatedBuildInputs = [
1053 propagatedBuildInputs = [
1054 self."six"
1054 self."six"
1055 ];
1055 ];
1056 src = fetchurl {
1056 src = fetchurl {
1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1059 };
1059 };
1060 meta = {
1060 meta = {
1061 license = [ pkgs.lib.licenses.mit ];
1061 license = [ pkgs.lib.licenses.mit ];
1062 };
1062 };
1063 };
1063 };
1064 "msgpack-python" = super.buildPythonPackage {
1064 "msgpack-python" = super.buildPythonPackage {
1065 name = "msgpack-python-0.5.6";
1065 name = "msgpack-python-0.5.6";
1066 doCheck = false;
1066 doCheck = false;
1067 src = fetchurl {
1067 src = fetchurl {
1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1070 };
1070 };
1071 meta = {
1071 meta = {
1072 license = [ pkgs.lib.licenses.asl20 ];
1072 license = [ pkgs.lib.licenses.asl20 ];
1073 };
1073 };
1074 };
1074 };
1075 "mysql-python" = super.buildPythonPackage {
1075 "mysql-python" = super.buildPythonPackage {
1076 name = "mysql-python-1.2.5";
1076 name = "mysql-python-1.2.5";
1077 doCheck = false;
1077 doCheck = false;
1078 src = fetchurl {
1078 src = fetchurl {
1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1081 };
1081 };
1082 meta = {
1082 meta = {
1083 license = [ pkgs.lib.licenses.gpl1 ];
1083 license = [ pkgs.lib.licenses.gpl1 ];
1084 };
1084 };
1085 };
1085 };
1086 "nbconvert" = super.buildPythonPackage {
1086 "nbconvert" = super.buildPythonPackage {
1087 name = "nbconvert-5.3.1";
1087 name = "nbconvert-5.3.1";
1088 doCheck = false;
1088 doCheck = false;
1089 propagatedBuildInputs = [
1089 propagatedBuildInputs = [
1090 self."mistune"
1090 self."mistune"
1091 self."jinja2"
1091 self."jinja2"
1092 self."pygments"
1092 self."pygments"
1093 self."traitlets"
1093 self."traitlets"
1094 self."jupyter-core"
1094 self."jupyter-core"
1095 self."nbformat"
1095 self."nbformat"
1096 self."entrypoints"
1096 self."entrypoints"
1097 self."bleach"
1097 self."bleach"
1098 self."pandocfilters"
1098 self."pandocfilters"
1099 self."testpath"
1099 self."testpath"
1100 ];
1100 ];
1101 src = fetchurl {
1101 src = fetchurl {
1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1104 };
1104 };
1105 meta = {
1105 meta = {
1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 };
1107 };
1108 };
1108 };
1109 "nbformat" = super.buildPythonPackage {
1109 "nbformat" = super.buildPythonPackage {
1110 name = "nbformat-4.4.0";
1110 name = "nbformat-4.4.0";
1111 doCheck = false;
1111 doCheck = false;
1112 propagatedBuildInputs = [
1112 propagatedBuildInputs = [
1113 self."ipython-genutils"
1113 self."ipython-genutils"
1114 self."traitlets"
1114 self."traitlets"
1115 self."jsonschema"
1115 self."jsonschema"
1116 self."jupyter-core"
1116 self."jupyter-core"
1117 ];
1117 ];
1118 src = fetchurl {
1118 src = fetchurl {
1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1121 };
1121 };
1122 meta = {
1122 meta = {
1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1124 };
1124 };
1125 };
1125 };
1126 "packaging" = super.buildPythonPackage {
1126 "packaging" = super.buildPythonPackage {
1127 name = "packaging-20.3";
1127 name = "packaging-20.3";
1128 doCheck = false;
1128 doCheck = false;
1129 propagatedBuildInputs = [
1129 propagatedBuildInputs = [
1130 self."pyparsing"
1130 self."pyparsing"
1131 self."six"
1131 self."six"
1132 ];
1132 ];
1133 src = fetchurl {
1133 src = fetchurl {
1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1136 };
1136 };
1137 meta = {
1137 meta = {
1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1139 };
1139 };
1140 };
1140 };
1141 "pandocfilters" = super.buildPythonPackage {
1141 "pandocfilters" = super.buildPythonPackage {
1142 name = "pandocfilters-1.4.2";
1142 name = "pandocfilters-1.4.2";
1143 doCheck = false;
1143 doCheck = false;
1144 src = fetchurl {
1144 src = fetchurl {
1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1147 };
1147 };
1148 meta = {
1148 meta = {
1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1150 };
1150 };
1151 };
1151 };
1152 "paste" = super.buildPythonPackage {
1152 "paste" = super.buildPythonPackage {
1153 name = "paste-3.4.0";
1153 name = "paste-3.4.0";
1154 doCheck = false;
1154 doCheck = false;
1155 propagatedBuildInputs = [
1155 propagatedBuildInputs = [
1156 self."six"
1156 self."six"
1157 ];
1157 ];
1158 src = fetchurl {
1158 src = fetchurl {
1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1161 };
1161 };
1162 meta = {
1162 meta = {
1163 license = [ pkgs.lib.licenses.mit ];
1163 license = [ pkgs.lib.licenses.mit ];
1164 };
1164 };
1165 };
1165 };
1166 "pastedeploy" = super.buildPythonPackage {
1166 "pastedeploy" = super.buildPythonPackage {
1167 name = "pastedeploy-2.1.0";
1167 name = "pastedeploy-2.1.0";
1168 doCheck = false;
1168 doCheck = false;
1169 src = fetchurl {
1169 src = fetchurl {
1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1172 };
1172 };
1173 meta = {
1173 meta = {
1174 license = [ pkgs.lib.licenses.mit ];
1174 license = [ pkgs.lib.licenses.mit ];
1175 };
1175 };
1176 };
1176 };
1177 "pastescript" = super.buildPythonPackage {
1177 "pastescript" = super.buildPythonPackage {
1178 name = "pastescript-3.2.0";
1178 name = "pastescript-3.2.0";
1179 doCheck = false;
1179 doCheck = false;
1180 propagatedBuildInputs = [
1180 propagatedBuildInputs = [
1181 self."paste"
1181 self."paste"
1182 self."pastedeploy"
1182 self."pastedeploy"
1183 self."six"
1183 self."six"
1184 ];
1184 ];
1185 src = fetchurl {
1185 src = fetchurl {
1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1188 };
1188 };
1189 meta = {
1189 meta = {
1190 license = [ pkgs.lib.licenses.mit ];
1190 license = [ pkgs.lib.licenses.mit ];
1191 };
1191 };
1192 };
1192 };
1193 "pathlib2" = super.buildPythonPackage {
1193 "pathlib2" = super.buildPythonPackage {
1194 name = "pathlib2-2.3.5";
1194 name = "pathlib2-2.3.5";
1195 doCheck = false;
1195 doCheck = false;
1196 propagatedBuildInputs = [
1196 propagatedBuildInputs = [
1197 self."six"
1197 self."six"
1198 self."scandir"
1198 self."scandir"
1199 ];
1199 ];
1200 src = fetchurl {
1200 src = fetchurl {
1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1203 };
1203 };
1204 meta = {
1204 meta = {
1205 license = [ pkgs.lib.licenses.mit ];
1205 license = [ pkgs.lib.licenses.mit ];
1206 };
1206 };
1207 };
1207 };
1208 "peppercorn" = super.buildPythonPackage {
1208 "peppercorn" = super.buildPythonPackage {
1209 name = "peppercorn-0.6";
1209 name = "peppercorn-0.6";
1210 doCheck = false;
1210 doCheck = false;
1211 src = fetchurl {
1211 src = fetchurl {
1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1214 };
1214 };
1215 meta = {
1215 meta = {
1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1217 };
1217 };
1218 };
1218 };
1219 "pexpect" = super.buildPythonPackage {
1219 "pexpect" = super.buildPythonPackage {
1220 name = "pexpect-4.8.0";
1220 name = "pexpect-4.8.0";
1221 doCheck = false;
1221 doCheck = false;
1222 propagatedBuildInputs = [
1222 propagatedBuildInputs = [
1223 self."ptyprocess"
1223 self."ptyprocess"
1224 ];
1224 ];
1225 src = fetchurl {
1225 src = fetchurl {
1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1228 };
1228 };
1229 meta = {
1229 meta = {
1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1231 };
1231 };
1232 };
1232 };
1233 "pickleshare" = super.buildPythonPackage {
1233 "pickleshare" = super.buildPythonPackage {
1234 name = "pickleshare-0.7.5";
1234 name = "pickleshare-0.7.5";
1235 doCheck = false;
1235 doCheck = false;
1236 propagatedBuildInputs = [
1236 propagatedBuildInputs = [
1237 self."pathlib2"
1237 self."pathlib2"
1238 ];
1238 ];
1239 src = fetchurl {
1239 src = fetchurl {
1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1242 };
1242 };
1243 meta = {
1243 meta = {
1244 license = [ pkgs.lib.licenses.mit ];
1244 license = [ pkgs.lib.licenses.mit ];
1245 };
1245 };
1246 };
1246 };
1247 "plaster" = super.buildPythonPackage {
1247 "plaster" = super.buildPythonPackage {
1248 name = "plaster-1.0";
1248 name = "plaster-1.0";
1249 doCheck = false;
1249 doCheck = false;
1250 propagatedBuildInputs = [
1250 propagatedBuildInputs = [
1251 self."setuptools"
1251 self."setuptools"
1252 ];
1252 ];
1253 src = fetchurl {
1253 src = fetchurl {
1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1256 };
1256 };
1257 meta = {
1257 meta = {
1258 license = [ pkgs.lib.licenses.mit ];
1258 license = [ pkgs.lib.licenses.mit ];
1259 };
1259 };
1260 };
1260 };
1261 "plaster-pastedeploy" = super.buildPythonPackage {
1261 "plaster-pastedeploy" = super.buildPythonPackage {
1262 name = "plaster-pastedeploy-0.7";
1262 name = "plaster-pastedeploy-0.7";
1263 doCheck = false;
1263 doCheck = false;
1264 propagatedBuildInputs = [
1264 propagatedBuildInputs = [
1265 self."pastedeploy"
1265 self."pastedeploy"
1266 self."plaster"
1266 self."plaster"
1267 ];
1267 ];
1268 src = fetchurl {
1268 src = fetchurl {
1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1271 };
1271 };
1272 meta = {
1272 meta = {
1273 license = [ pkgs.lib.licenses.mit ];
1273 license = [ pkgs.lib.licenses.mit ];
1274 };
1274 };
1275 };
1275 };
1276 "pluggy" = super.buildPythonPackage {
1276 "pluggy" = super.buildPythonPackage {
1277 name = "pluggy-0.13.1";
1277 name = "pluggy-0.13.1";
1278 doCheck = false;
1278 doCheck = false;
1279 propagatedBuildInputs = [
1279 propagatedBuildInputs = [
1280 self."importlib-metadata"
1280 self."importlib-metadata"
1281 ];
1281 ];
1282 src = fetchurl {
1282 src = fetchurl {
1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1285 };
1285 };
1286 meta = {
1286 meta = {
1287 license = [ pkgs.lib.licenses.mit ];
1287 license = [ pkgs.lib.licenses.mit ];
1288 };
1288 };
1289 };
1289 };
1290 "premailer" = super.buildPythonPackage {
1290 "premailer" = super.buildPythonPackage {
1291 name = "premailer-3.6.1";
1291 name = "premailer-3.6.1";
1292 doCheck = false;
1292 doCheck = false;
1293 propagatedBuildInputs = [
1293 propagatedBuildInputs = [
1294 self."lxml"
1294 self."lxml"
1295 self."cssselect"
1295 self."cssselect"
1296 self."cssutils"
1296 self."cssutils"
1297 self."requests"
1297 self."requests"
1298 self."cachetools"
1298 self."cachetools"
1299 ];
1299 ];
1300 src = fetchurl {
1300 src = fetchurl {
1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1303 };
1303 };
1304 meta = {
1304 meta = {
1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1306 };
1306 };
1307 };
1307 };
1308 "prompt-toolkit" = super.buildPythonPackage {
1308 "prompt-toolkit" = super.buildPythonPackage {
1309 name = "prompt-toolkit-1.0.18";
1309 name = "prompt-toolkit-1.0.18";
1310 doCheck = false;
1310 doCheck = false;
1311 propagatedBuildInputs = [
1311 propagatedBuildInputs = [
1312 self."six"
1312 self."six"
1313 self."wcwidth"
1313 self."wcwidth"
1314 ];
1314 ];
1315 src = fetchurl {
1315 src = fetchurl {
1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1318 };
1318 };
1319 meta = {
1319 meta = {
1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1321 };
1321 };
1322 };
1322 };
1323 "psutil" = super.buildPythonPackage {
1323 "psutil" = super.buildPythonPackage {
1324 name = "psutil-5.7.0";
1324 name = "psutil-5.7.0";
1325 doCheck = false;
1325 doCheck = false;
1326 src = fetchurl {
1326 src = fetchurl {
1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1329 };
1329 };
1330 meta = {
1330 meta = {
1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1332 };
1332 };
1333 };
1333 };
1334 "psycopg2" = super.buildPythonPackage {
1334 "psycopg2" = super.buildPythonPackage {
1335 name = "psycopg2-2.8.4";
1335 name = "psycopg2-2.8.4";
1336 doCheck = false;
1336 doCheck = false;
1337 src = fetchurl {
1337 src = fetchurl {
1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1340 };
1340 };
1341 meta = {
1341 meta = {
1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1343 };
1343 };
1344 };
1344 };
1345 "ptyprocess" = super.buildPythonPackage {
1345 "ptyprocess" = super.buildPythonPackage {
1346 name = "ptyprocess-0.6.0";
1346 name = "ptyprocess-0.6.0";
1347 doCheck = false;
1347 doCheck = false;
1348 src = fetchurl {
1348 src = fetchurl {
1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1351 };
1351 };
1352 meta = {
1352 meta = {
1353 license = [ ];
1353 license = [ ];
1354 };
1354 };
1355 };
1355 };
1356 "py" = super.buildPythonPackage {
1356 "py" = super.buildPythonPackage {
1357 name = "py-1.8.0";
1357 name = "py-1.8.0";
1358 doCheck = false;
1358 doCheck = false;
1359 src = fetchurl {
1359 src = fetchurl {
1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1362 };
1362 };
1363 meta = {
1363 meta = {
1364 license = [ pkgs.lib.licenses.mit ];
1364 license = [ pkgs.lib.licenses.mit ];
1365 };
1365 };
1366 };
1366 };
1367 "py-bcrypt" = super.buildPythonPackage {
1367 "py-bcrypt" = super.buildPythonPackage {
1368 name = "py-bcrypt-0.4";
1368 name = "py-bcrypt-0.4";
1369 doCheck = false;
1369 doCheck = false;
1370 src = fetchurl {
1370 src = fetchurl {
1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1373 };
1373 };
1374 meta = {
1374 meta = {
1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1376 };
1376 };
1377 };
1377 };
1378 "py-gfm" = super.buildPythonPackage {
1378 "py-gfm" = super.buildPythonPackage {
1379 name = "py-gfm-0.1.4";
1379 name = "py-gfm-0.1.4";
1380 doCheck = false;
1380 doCheck = false;
1381 propagatedBuildInputs = [
1381 propagatedBuildInputs = [
1382 self."setuptools"
1382 self."setuptools"
1383 self."markdown"
1383 self."markdown"
1384 ];
1384 ];
1385 src = fetchurl {
1385 src = fetchurl {
1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1388 };
1388 };
1389 meta = {
1389 meta = {
1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1391 };
1391 };
1392 };
1392 };
1393 "pyasn1" = super.buildPythonPackage {
1393 "pyasn1" = super.buildPythonPackage {
1394 name = "pyasn1-0.4.8";
1394 name = "pyasn1-0.4.8";
1395 doCheck = false;
1395 doCheck = false;
1396 src = fetchurl {
1396 src = fetchurl {
1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1399 };
1399 };
1400 meta = {
1400 meta = {
1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1402 };
1402 };
1403 };
1403 };
1404 "pyasn1-modules" = super.buildPythonPackage {
1404 "pyasn1-modules" = super.buildPythonPackage {
1405 name = "pyasn1-modules-0.2.6";
1405 name = "pyasn1-modules-0.2.6";
1406 doCheck = false;
1406 doCheck = false;
1407 propagatedBuildInputs = [
1407 propagatedBuildInputs = [
1408 self."pyasn1"
1408 self."pyasn1"
1409 ];
1409 ];
1410 src = fetchurl {
1410 src = fetchurl {
1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1413 };
1413 };
1414 meta = {
1414 meta = {
1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1416 };
1416 };
1417 };
1417 };
1418 "pycparser" = super.buildPythonPackage {
1418 "pycparser" = super.buildPythonPackage {
1419 name = "pycparser-2.20";
1419 name = "pycparser-2.20";
1420 doCheck = false;
1420 doCheck = false;
1421 src = fetchurl {
1421 src = fetchurl {
1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1424 };
1424 };
1425 meta = {
1425 meta = {
1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1427 };
1427 };
1428 };
1428 };
1429 "pycrypto" = super.buildPythonPackage {
1429 "pycrypto" = super.buildPythonPackage {
1430 name = "pycrypto-2.6.1";
1430 name = "pycrypto-2.6.1";
1431 doCheck = false;
1431 doCheck = false;
1432 src = fetchurl {
1432 src = fetchurl {
1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1435 };
1435 };
1436 meta = {
1436 meta = {
1437 license = [ pkgs.lib.licenses.publicDomain ];
1437 license = [ pkgs.lib.licenses.publicDomain ];
1438 };
1438 };
1439 };
1439 };
1440 "pycurl" = super.buildPythonPackage {
1440 "pycurl" = super.buildPythonPackage {
1441 name = "pycurl-7.43.0.3";
1441 name = "pycurl-7.43.0.3";
1442 doCheck = false;
1442 doCheck = false;
1443 src = fetchurl {
1443 src = fetchurl {
1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1446 };
1446 };
1447 meta = {
1447 meta = {
1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1449 };
1449 };
1450 };
1450 };
1451 "pygments" = super.buildPythonPackage {
1451 "pygments" = super.buildPythonPackage {
1452 name = "pygments-2.4.2";
1452 name = "pygments-2.4.2";
1453 doCheck = false;
1453 doCheck = false;
1454 src = fetchurl {
1454 src = fetchurl {
1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1457 };
1457 };
1458 meta = {
1458 meta = {
1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1460 };
1460 };
1461 };
1461 };
1462 "pymysql" = super.buildPythonPackage {
1462 "pymysql" = super.buildPythonPackage {
1463 name = "pymysql-0.8.1";
1463 name = "pymysql-0.8.1";
1464 doCheck = false;
1464 doCheck = false;
1465 src = fetchurl {
1465 src = fetchurl {
1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1468 };
1468 };
1469 meta = {
1469 meta = {
1470 license = [ pkgs.lib.licenses.mit ];
1470 license = [ pkgs.lib.licenses.mit ];
1471 };
1471 };
1472 };
1472 };
1473 "pyotp" = super.buildPythonPackage {
1473 "pyotp" = super.buildPythonPackage {
1474 name = "pyotp-2.3.0";
1474 name = "pyotp-2.3.0";
1475 doCheck = false;
1475 doCheck = false;
1476 src = fetchurl {
1476 src = fetchurl {
1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1479 };
1479 };
1480 meta = {
1480 meta = {
1481 license = [ pkgs.lib.licenses.mit ];
1481 license = [ pkgs.lib.licenses.mit ];
1482 };
1482 };
1483 };
1483 };
1484 "pyparsing" = super.buildPythonPackage {
1484 "pyparsing" = super.buildPythonPackage {
1485 name = "pyparsing-2.4.7";
1485 name = "pyparsing-2.4.7";
1486 doCheck = false;
1486 doCheck = false;
1487 src = fetchurl {
1487 src = fetchurl {
1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1490 };
1490 };
1491 meta = {
1491 meta = {
1492 license = [ pkgs.lib.licenses.mit ];
1492 license = [ pkgs.lib.licenses.mit ];
1493 };
1493 };
1494 };
1494 };
1495 "pyramid" = super.buildPythonPackage {
1495 "pyramid" = super.buildPythonPackage {
1496 name = "pyramid-1.10.4";
1496 name = "pyramid-1.10.4";
1497 doCheck = false;
1497 doCheck = false;
1498 propagatedBuildInputs = [
1498 propagatedBuildInputs = [
1499 self."hupper"
1499 self."hupper"
1500 self."plaster"
1500 self."plaster"
1501 self."plaster-pastedeploy"
1501 self."plaster-pastedeploy"
1502 self."setuptools"
1502 self."setuptools"
1503 self."translationstring"
1503 self."translationstring"
1504 self."venusian"
1504 self."venusian"
1505 self."webob"
1505 self."webob"
1506 self."zope.deprecation"
1506 self."zope.deprecation"
1507 self."zope.interface"
1507 self."zope.interface"
1508 self."repoze.lru"
1508 self."repoze.lru"
1509 ];
1509 ];
1510 src = fetchurl {
1510 src = fetchurl {
1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1513 };
1513 };
1514 meta = {
1514 meta = {
1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1516 };
1516 };
1517 };
1517 };
1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1519 name = "pyramid-debugtoolbar-4.6.1";
1519 name = "pyramid-debugtoolbar-4.6.1";
1520 doCheck = false;
1520 doCheck = false;
1521 propagatedBuildInputs = [
1521 propagatedBuildInputs = [
1522 self."pyramid"
1522 self."pyramid"
1523 self."pyramid-mako"
1523 self."pyramid-mako"
1524 self."repoze.lru"
1524 self."repoze.lru"
1525 self."pygments"
1525 self."pygments"
1526 self."ipaddress"
1526 self."ipaddress"
1527 ];
1527 ];
1528 src = fetchurl {
1528 src = fetchurl {
1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1531 };
1531 };
1532 meta = {
1532 meta = {
1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1534 };
1534 };
1535 };
1535 };
1536 "pyramid-jinja2" = super.buildPythonPackage {
1536 "pyramid-jinja2" = super.buildPythonPackage {
1537 name = "pyramid-jinja2-2.7";
1537 name = "pyramid-jinja2-2.7";
1538 doCheck = false;
1538 doCheck = false;
1539 propagatedBuildInputs = [
1539 propagatedBuildInputs = [
1540 self."pyramid"
1540 self."pyramid"
1541 self."zope.deprecation"
1541 self."zope.deprecation"
1542 self."jinja2"
1542 self."jinja2"
1543 self."markupsafe"
1543 self."markupsafe"
1544 ];
1544 ];
1545 src = fetchurl {
1545 src = fetchurl {
1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1548 };
1548 };
1549 meta = {
1549 meta = {
1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1551 };
1551 };
1552 };
1552 };
1553 "pyramid-apispec" = super.buildPythonPackage {
1553 "pyramid-apispec" = super.buildPythonPackage {
1554 name = "pyramid-apispec-0.3.2";
1554 name = "pyramid-apispec-0.3.2";
1555 doCheck = false;
1555 doCheck = false;
1556 propagatedBuildInputs = [
1556 propagatedBuildInputs = [
1557 self."apispec"
1557 self."apispec"
1558 ];
1558 ];
1559 src = fetchurl {
1559 src = fetchurl {
1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 };
1562 };
1563 meta = {
1563 meta = {
1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 };
1565 };
1566 };
1566 };
1567 "pyramid-mailer" = super.buildPythonPackage {
1567 "pyramid-mailer" = super.buildPythonPackage {
1568 name = "pyramid-mailer-0.15.1";
1568 name = "pyramid-mailer-0.15.1";
1569 doCheck = false;
1569 doCheck = false;
1570 propagatedBuildInputs = [
1570 propagatedBuildInputs = [
1571 self."pyramid"
1571 self."pyramid"
1572 self."repoze.sendmail"
1572 self."repoze.sendmail"
1573 self."transaction"
1573 self."transaction"
1574 ];
1574 ];
1575 src = fetchurl {
1575 src = fetchurl {
1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1578 };
1578 };
1579 meta = {
1579 meta = {
1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1581 };
1581 };
1582 };
1582 };
1583 "pyramid-mako" = super.buildPythonPackage {
1583 "pyramid-mako" = super.buildPythonPackage {
1584 name = "pyramid-mako-1.1.0";
1584 name = "pyramid-mako-1.1.0";
1585 doCheck = false;
1585 doCheck = false;
1586 propagatedBuildInputs = [
1586 propagatedBuildInputs = [
1587 self."pyramid"
1587 self."pyramid"
1588 self."mako"
1588 self."mako"
1589 ];
1589 ];
1590 src = fetchurl {
1590 src = fetchurl {
1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1593 };
1593 };
1594 meta = {
1594 meta = {
1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1596 };
1596 };
1597 };
1597 };
1598 "pysqlite" = super.buildPythonPackage {
1598 "pysqlite" = super.buildPythonPackage {
1599 name = "pysqlite-2.8.3";
1599 name = "pysqlite-2.8.3";
1600 doCheck = false;
1600 doCheck = false;
1601 src = fetchurl {
1601 src = fetchurl {
1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1604 };
1604 };
1605 meta = {
1605 meta = {
1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1607 };
1607 };
1608 };
1608 };
1609 "pytest" = super.buildPythonPackage {
1609 "pytest" = super.buildPythonPackage {
1610 name = "pytest-4.6.5";
1610 name = "pytest-4.6.5";
1611 doCheck = false;
1611 doCheck = false;
1612 propagatedBuildInputs = [
1612 propagatedBuildInputs = [
1613 self."py"
1613 self."py"
1614 self."six"
1614 self."six"
1615 self."packaging"
1615 self."packaging"
1616 self."attrs"
1616 self."attrs"
1617 self."atomicwrites"
1617 self."atomicwrites"
1618 self."pluggy"
1618 self."pluggy"
1619 self."importlib-metadata"
1619 self."importlib-metadata"
1620 self."wcwidth"
1620 self."wcwidth"
1621 self."funcsigs"
1621 self."funcsigs"
1622 self."pathlib2"
1622 self."pathlib2"
1623 self."more-itertools"
1623 self."more-itertools"
1624 ];
1624 ];
1625 src = fetchurl {
1625 src = fetchurl {
1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1628 };
1628 };
1629 meta = {
1629 meta = {
1630 license = [ pkgs.lib.licenses.mit ];
1630 license = [ pkgs.lib.licenses.mit ];
1631 };
1631 };
1632 };
1632 };
1633 "pytest-cov" = super.buildPythonPackage {
1633 "pytest-cov" = super.buildPythonPackage {
1634 name = "pytest-cov-2.7.1";
1634 name = "pytest-cov-2.7.1";
1635 doCheck = false;
1635 doCheck = false;
1636 propagatedBuildInputs = [
1636 propagatedBuildInputs = [
1637 self."pytest"
1637 self."pytest"
1638 self."coverage"
1638 self."coverage"
1639 ];
1639 ];
1640 src = fetchurl {
1640 src = fetchurl {
1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1643 };
1643 };
1644 meta = {
1644 meta = {
1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1646 };
1646 };
1647 };
1647 };
1648 "pytest-profiling" = super.buildPythonPackage {
1648 "pytest-profiling" = super.buildPythonPackage {
1649 name = "pytest-profiling-1.7.0";
1649 name = "pytest-profiling-1.7.0";
1650 doCheck = false;
1650 doCheck = false;
1651 propagatedBuildInputs = [
1651 propagatedBuildInputs = [
1652 self."six"
1652 self."six"
1653 self."pytest"
1653 self."pytest"
1654 self."gprof2dot"
1654 self."gprof2dot"
1655 ];
1655 ];
1656 src = fetchurl {
1656 src = fetchurl {
1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1659 };
1659 };
1660 meta = {
1660 meta = {
1661 license = [ pkgs.lib.licenses.mit ];
1661 license = [ pkgs.lib.licenses.mit ];
1662 };
1662 };
1663 };
1663 };
1664 "pytest-runner" = super.buildPythonPackage {
1664 "pytest-runner" = super.buildPythonPackage {
1665 name = "pytest-runner-5.1";
1665 name = "pytest-runner-5.1";
1666 doCheck = false;
1666 doCheck = false;
1667 src = fetchurl {
1667 src = fetchurl {
1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1670 };
1670 };
1671 meta = {
1671 meta = {
1672 license = [ pkgs.lib.licenses.mit ];
1672 license = [ pkgs.lib.licenses.mit ];
1673 };
1673 };
1674 };
1674 };
1675 "pytest-sugar" = super.buildPythonPackage {
1675 "pytest-sugar" = super.buildPythonPackage {
1676 name = "pytest-sugar-0.9.2";
1676 name = "pytest-sugar-0.9.2";
1677 doCheck = false;
1677 doCheck = false;
1678 propagatedBuildInputs = [
1678 propagatedBuildInputs = [
1679 self."pytest"
1679 self."pytest"
1680 self."termcolor"
1680 self."termcolor"
1681 self."packaging"
1681 self."packaging"
1682 ];
1682 ];
1683 src = fetchurl {
1683 src = fetchurl {
1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1686 };
1686 };
1687 meta = {
1687 meta = {
1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1689 };
1689 };
1690 };
1690 };
1691 "pytest-timeout" = super.buildPythonPackage {
1691 "pytest-timeout" = super.buildPythonPackage {
1692 name = "pytest-timeout-1.3.3";
1692 name = "pytest-timeout-1.3.3";
1693 doCheck = false;
1693 doCheck = false;
1694 propagatedBuildInputs = [
1694 propagatedBuildInputs = [
1695 self."pytest"
1695 self."pytest"
1696 ];
1696 ];
1697 src = fetchurl {
1697 src = fetchurl {
1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1700 };
1700 };
1701 meta = {
1701 meta = {
1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1703 };
1703 };
1704 };
1704 };
1705 "python-dateutil" = super.buildPythonPackage {
1705 "python-dateutil" = super.buildPythonPackage {
1706 name = "python-dateutil-2.8.1";
1706 name = "python-dateutil-2.8.1";
1707 doCheck = false;
1707 doCheck = false;
1708 propagatedBuildInputs = [
1708 propagatedBuildInputs = [
1709 self."six"
1709 self."six"
1710 ];
1710 ];
1711 src = fetchurl {
1711 src = fetchurl {
1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1714 };
1714 };
1715 meta = {
1715 meta = {
1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1717 };
1717 };
1718 };
1718 };
1719 "python-editor" = super.buildPythonPackage {
1719 "python-editor" = super.buildPythonPackage {
1720 name = "python-editor-1.0.4";
1720 name = "python-editor-1.0.4";
1721 doCheck = false;
1721 doCheck = false;
1722 src = fetchurl {
1722 src = fetchurl {
1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1725 };
1725 };
1726 meta = {
1726 meta = {
1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1728 };
1728 };
1729 };
1729 };
1730 "python-ldap" = super.buildPythonPackage {
1730 "python-ldap" = super.buildPythonPackage {
1731 name = "python-ldap-3.2.0";
1731 name = "python-ldap-3.2.0";
1732 doCheck = false;
1732 doCheck = false;
1733 propagatedBuildInputs = [
1733 propagatedBuildInputs = [
1734 self."pyasn1"
1734 self."pyasn1"
1735 self."pyasn1-modules"
1735 self."pyasn1-modules"
1736 ];
1736 ];
1737 src = fetchurl {
1737 src = fetchurl {
1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1740 };
1740 };
1741 meta = {
1741 meta = {
1742 license = [ pkgs.lib.licenses.psfl ];
1742 license = [ pkgs.lib.licenses.psfl ];
1743 };
1743 };
1744 };
1744 };
1745 "python-memcached" = super.buildPythonPackage {
1745 "python-memcached" = super.buildPythonPackage {
1746 name = "python-memcached-1.59";
1746 name = "python-memcached-1.59";
1747 doCheck = false;
1747 doCheck = false;
1748 propagatedBuildInputs = [
1748 propagatedBuildInputs = [
1749 self."six"
1749 self."six"
1750 ];
1750 ];
1751 src = fetchurl {
1751 src = fetchurl {
1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1754 };
1754 };
1755 meta = {
1755 meta = {
1756 license = [ pkgs.lib.licenses.psfl ];
1756 license = [ pkgs.lib.licenses.psfl ];
1757 };
1757 };
1758 };
1758 };
1759 "python-pam" = super.buildPythonPackage {
1759 "python-pam" = super.buildPythonPackage {
1760 name = "python-pam-1.8.4";
1760 name = "python-pam-1.8.4";
1761 doCheck = false;
1761 doCheck = false;
1762 src = fetchurl {
1762 src = fetchurl {
1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1765 };
1765 };
1766 meta = {
1766 meta = {
1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1768 };
1768 };
1769 };
1769 };
1770 "python-saml" = super.buildPythonPackage {
1770 "python-saml" = super.buildPythonPackage {
1771 name = "python-saml-2.4.2";
1771 name = "python-saml-2.4.2";
1772 doCheck = false;
1772 doCheck = false;
1773 propagatedBuildInputs = [
1773 propagatedBuildInputs = [
1774 self."dm.xmlsec.binding"
1774 self."dm.xmlsec.binding"
1775 self."isodate"
1775 self."isodate"
1776 self."defusedxml"
1776 self."defusedxml"
1777 ];
1777 ];
1778 src = fetchurl {
1778 src = fetchurl {
1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1781 };
1781 };
1782 meta = {
1782 meta = {
1783 license = [ pkgs.lib.licenses.mit ];
1783 license = [ pkgs.lib.licenses.mit ];
1784 };
1784 };
1785 };
1785 };
1786 "pytz" = super.buildPythonPackage {
1786 "pytz" = super.buildPythonPackage {
1787 name = "pytz-2019.3";
1787 name = "pytz-2019.3";
1788 doCheck = false;
1788 doCheck = false;
1789 src = fetchurl {
1789 src = fetchurl {
1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1792 };
1792 };
1793 meta = {
1793 meta = {
1794 license = [ pkgs.lib.licenses.mit ];
1794 license = [ pkgs.lib.licenses.mit ];
1795 };
1795 };
1796 };
1796 };
1797 "pyzmq" = super.buildPythonPackage {
1797 "pyzmq" = super.buildPythonPackage {
1798 name = "pyzmq-14.6.0";
1798 name = "pyzmq-14.6.0";
1799 doCheck = false;
1799 doCheck = false;
1800 src = fetchurl {
1800 src = fetchurl {
1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1803 };
1803 };
1804 meta = {
1804 meta = {
1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1806 };
1806 };
1807 };
1807 };
1808 "PyYAML" = super.buildPythonPackage {
1808 "PyYAML" = super.buildPythonPackage {
1809 name = "PyYAML-5.3.1";
1809 name = "PyYAML-5.3.1";
1810 doCheck = false;
1810 doCheck = false;
1811 src = fetchurl {
1811 src = fetchurl {
1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 };
1814 };
1815 meta = {
1815 meta = {
1816 license = [ pkgs.lib.licenses.mit ];
1816 license = [ pkgs.lib.licenses.mit ];
1817 };
1817 };
1818 };
1818 };
1819 "regex" = super.buildPythonPackage {
1819 "regex" = super.buildPythonPackage {
1820 name = "regex-2020.9.27";
1820 name = "regex-2020.9.27";
1821 doCheck = false;
1821 doCheck = false;
1822 src = fetchurl {
1822 src = fetchurl {
1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1825 };
1825 };
1826 meta = {
1826 meta = {
1827 license = [ pkgs.lib.licenses.psfl ];
1827 license = [ pkgs.lib.licenses.psfl ];
1828 };
1828 };
1829 };
1829 };
1830 "redis" = super.buildPythonPackage {
1830 "redis" = super.buildPythonPackage {
1831 name = "redis-3.4.1";
1831 name = "redis-3.5.3";
1832 doCheck = false;
1832 doCheck = false;
1833 src = fetchurl {
1833 src = fetchurl {
1834 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1835 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1836 };
1836 };
1837 meta = {
1837 meta = {
1838 license = [ pkgs.lib.licenses.mit ];
1838 license = [ pkgs.lib.licenses.mit ];
1839 };
1839 };
1840 };
1840 };
1841 "repoze.lru" = super.buildPythonPackage {
1841 "repoze.lru" = super.buildPythonPackage {
1842 name = "repoze.lru-0.7";
1842 name = "repoze.lru-0.7";
1843 doCheck = false;
1843 doCheck = false;
1844 src = fetchurl {
1844 src = fetchurl {
1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1847 };
1847 };
1848 meta = {
1848 meta = {
1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1850 };
1850 };
1851 };
1851 };
1852 "repoze.sendmail" = super.buildPythonPackage {
1852 "repoze.sendmail" = super.buildPythonPackage {
1853 name = "repoze.sendmail-4.4.1";
1853 name = "repoze.sendmail-4.4.1";
1854 doCheck = false;
1854 doCheck = false;
1855 propagatedBuildInputs = [
1855 propagatedBuildInputs = [
1856 self."setuptools"
1856 self."setuptools"
1857 self."zope.interface"
1857 self."zope.interface"
1858 self."transaction"
1858 self."transaction"
1859 ];
1859 ];
1860 src = fetchurl {
1860 src = fetchurl {
1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1863 };
1863 };
1864 meta = {
1864 meta = {
1865 license = [ pkgs.lib.licenses.zpl21 ];
1865 license = [ pkgs.lib.licenses.zpl21 ];
1866 };
1866 };
1867 };
1867 };
1868 "requests" = super.buildPythonPackage {
1868 "requests" = super.buildPythonPackage {
1869 name = "requests-2.22.0";
1869 name = "requests-2.22.0";
1870 doCheck = false;
1870 doCheck = false;
1871 propagatedBuildInputs = [
1871 propagatedBuildInputs = [
1872 self."chardet"
1872 self."chardet"
1873 self."idna"
1873 self."idna"
1874 self."urllib3"
1874 self."urllib3"
1875 self."certifi"
1875 self."certifi"
1876 ];
1876 ];
1877 src = fetchurl {
1877 src = fetchurl {
1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1880 };
1880 };
1881 meta = {
1881 meta = {
1882 license = [ pkgs.lib.licenses.asl20 ];
1882 license = [ pkgs.lib.licenses.asl20 ];
1883 };
1883 };
1884 };
1884 };
1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1886 name = "rhodecode-enterprise-ce-4.23.2";
1886 name = "rhodecode-enterprise-ce-4.24.0";
1887 buildInputs = [
1887 buildInputs = [
1888 self."pytest"
1888 self."pytest"
1889 self."py"
1889 self."py"
1890 self."pytest-cov"
1890 self."pytest-cov"
1891 self."pytest-sugar"
1891 self."pytest-sugar"
1892 self."pytest-runner"
1892 self."pytest-runner"
1893 self."pytest-profiling"
1893 self."pytest-profiling"
1894 self."pytest-timeout"
1894 self."pytest-timeout"
1895 self."gprof2dot"
1895 self."gprof2dot"
1896 self."mock"
1896 self."mock"
1897 self."cov-core"
1897 self."cov-core"
1898 self."coverage"
1898 self."coverage"
1899 self."webtest"
1899 self."webtest"
1900 self."beautifulsoup4"
1900 self."beautifulsoup4"
1901 self."configobj"
1901 self."configobj"
1902 ];
1902 ];
1903 doCheck = true;
1903 doCheck = true;
1904 propagatedBuildInputs = [
1904 propagatedBuildInputs = [
1905 self."amqp"
1905 self."amqp"
1906 self."babel"
1906 self."babel"
1907 self."beaker"
1907 self."beaker"
1908 self."bleach"
1908 self."bleach"
1909 self."celery"
1909 self."celery"
1910 self."channelstream"
1910 self."channelstream"
1911 self."click"
1911 self."click"
1912 self."colander"
1912 self."colander"
1913 self."configobj"
1913 self."configobj"
1914 self."cssselect"
1914 self."cssselect"
1915 self."cryptography"
1915 self."cryptography"
1916 self."decorator"
1916 self."decorator"
1917 self."deform"
1917 self."deform"
1918 self."docutils"
1918 self."docutils"
1919 self."dogpile.cache"
1919 self."dogpile.cache"
1920 self."dogpile.core"
1920 self."dogpile.core"
1921 self."formencode"
1921 self."formencode"
1922 self."future"
1922 self."future"
1923 self."futures"
1923 self."futures"
1924 self."infrae.cache"
1924 self."infrae.cache"
1925 self."iso8601"
1925 self."iso8601"
1926 self."itsdangerous"
1926 self."itsdangerous"
1927 self."kombu"
1927 self."kombu"
1928 self."lxml"
1928 self."lxml"
1929 self."mako"
1929 self."mako"
1930 self."markdown"
1930 self."markdown"
1931 self."markupsafe"
1931 self."markupsafe"
1932 self."msgpack-python"
1932 self."msgpack-python"
1933 self."pyotp"
1933 self."pyotp"
1934 self."packaging"
1934 self."packaging"
1935 self."pathlib2"
1935 self."pathlib2"
1936 self."paste"
1936 self."paste"
1937 self."pastedeploy"
1937 self."pastedeploy"
1938 self."pastescript"
1938 self."pastescript"
1939 self."peppercorn"
1939 self."peppercorn"
1940 self."premailer"
1940 self."premailer"
1941 self."psutil"
1941 self."psutil"
1942 self."py-bcrypt"
1942 self."py-bcrypt"
1943 self."pycurl"
1943 self."pycurl"
1944 self."pycrypto"
1944 self."pycrypto"
1945 self."pygments"
1945 self."pygments"
1946 self."pyparsing"
1946 self."pyparsing"
1947 self."pyramid-debugtoolbar"
1947 self."pyramid-debugtoolbar"
1948 self."pyramid-mako"
1948 self."pyramid-mako"
1949 self."pyramid"
1949 self."pyramid"
1950 self."pyramid-mailer"
1950 self."pyramid-mailer"
1951 self."python-dateutil"
1951 self."python-dateutil"
1952 self."python-ldap"
1952 self."python-ldap"
1953 self."python-memcached"
1953 self."python-memcached"
1954 self."python-pam"
1954 self."python-pam"
1955 self."python-saml"
1955 self."python-saml"
1956 self."pytz"
1956 self."pytz"
1957 self."tzlocal"
1957 self."tzlocal"
1958 self."pyzmq"
1958 self."pyzmq"
1959 self."py-gfm"
1959 self."py-gfm"
1960 self."regex"
1960 self."regex"
1961 self."redis"
1961 self."redis"
1962 self."repoze.lru"
1962 self."repoze.lru"
1963 self."requests"
1963 self."requests"
1964 self."routes"
1964 self."routes"
1965 self."simplejson"
1965 self."simplejson"
1966 self."six"
1966 self."six"
1967 self."sqlalchemy"
1967 self."sqlalchemy"
1968 self."sshpubkeys"
1968 self."sshpubkeys"
1969 self."subprocess32"
1969 self."subprocess32"
1970 self."supervisor"
1970 self."supervisor"
1971 self."translationstring"
1971 self."translationstring"
1972 self."urllib3"
1972 self."urllib3"
1973 self."urlobject"
1973 self."urlobject"
1974 self."venusian"
1974 self."venusian"
1975 self."weberror"
1975 self."weberror"
1976 self."webhelpers2"
1976 self."webhelpers2"
1977 self."webob"
1977 self."webob"
1978 self."whoosh"
1978 self."whoosh"
1979 self."wsgiref"
1979 self."wsgiref"
1980 self."zope.cachedescriptors"
1980 self."zope.cachedescriptors"
1981 self."zope.deprecation"
1981 self."zope.deprecation"
1982 self."zope.event"
1982 self."zope.event"
1983 self."zope.interface"
1983 self."zope.interface"
1984 self."mysql-python"
1984 self."mysql-python"
1985 self."pymysql"
1985 self."pymysql"
1986 self."pysqlite"
1986 self."pysqlite"
1987 self."psycopg2"
1987 self."psycopg2"
1988 self."nbconvert"
1988 self."nbconvert"
1989 self."nbformat"
1989 self."nbformat"
1990 self."jupyter-client"
1990 self."jupyter-client"
1991 self."jupyter-core"
1991 self."jupyter-core"
1992 self."alembic"
1992 self."alembic"
1993 self."invoke"
1993 self."invoke"
1994 self."bumpversion"
1994 self."bumpversion"
1995 self."gevent"
1995 self."gevent"
1996 self."greenlet"
1996 self."greenlet"
1997 self."gunicorn"
1997 self."gunicorn"
1998 self."waitress"
1998 self."waitress"
1999 self."ipdb"
1999 self."ipdb"
2000 self."ipython"
2000 self."ipython"
2001 self."rhodecode-tools"
2001 self."rhodecode-tools"
2002 self."appenlight-client"
2002 self."appenlight-client"
2003 self."pytest"
2003 self."pytest"
2004 self."py"
2004 self."py"
2005 self."pytest-cov"
2005 self."pytest-cov"
2006 self."pytest-sugar"
2006 self."pytest-sugar"
2007 self."pytest-runner"
2007 self."pytest-runner"
2008 self."pytest-profiling"
2008 self."pytest-profiling"
2009 self."pytest-timeout"
2009 self."pytest-timeout"
2010 self."gprof2dot"
2010 self."gprof2dot"
2011 self."mock"
2011 self."mock"
2012 self."cov-core"
2012 self."cov-core"
2013 self."coverage"
2013 self."coverage"
2014 self."webtest"
2014 self."webtest"
2015 self."beautifulsoup4"
2015 self."beautifulsoup4"
2016 ];
2016 ];
2017 src = ./.;
2017 src = ./.;
2018 meta = {
2018 meta = {
2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2020 };
2020 };
2021 };
2021 };
2022 "rhodecode-tools" = super.buildPythonPackage {
2022 "rhodecode-tools" = super.buildPythonPackage {
2023 name = "rhodecode-tools-1.4.0";
2023 name = "rhodecode-tools-1.4.0";
2024 doCheck = false;
2024 doCheck = false;
2025 propagatedBuildInputs = [
2025 propagatedBuildInputs = [
2026 self."click"
2026 self."click"
2027 self."future"
2027 self."future"
2028 self."six"
2028 self."six"
2029 self."mako"
2029 self."mako"
2030 self."markupsafe"
2030 self."markupsafe"
2031 self."requests"
2031 self."requests"
2032 self."urllib3"
2032 self."urllib3"
2033 self."whoosh"
2033 self."whoosh"
2034 self."elasticsearch"
2034 self."elasticsearch"
2035 self."elasticsearch-dsl"
2035 self."elasticsearch-dsl"
2036 self."elasticsearch2"
2036 self."elasticsearch2"
2037 self."elasticsearch1-dsl"
2037 self."elasticsearch1-dsl"
2038 ];
2038 ];
2039 src = fetchurl {
2039 src = fetchurl {
2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2042 };
2042 };
2043 meta = {
2043 meta = {
2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2045 };
2045 };
2046 };
2046 };
2047 "routes" = super.buildPythonPackage {
2047 "routes" = super.buildPythonPackage {
2048 name = "routes-2.4.1";
2048 name = "routes-2.4.1";
2049 doCheck = false;
2049 doCheck = false;
2050 propagatedBuildInputs = [
2050 propagatedBuildInputs = [
2051 self."six"
2051 self."six"
2052 self."repoze.lru"
2052 self."repoze.lru"
2053 ];
2053 ];
2054 src = fetchurl {
2054 src = fetchurl {
2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2057 };
2057 };
2058 meta = {
2058 meta = {
2059 license = [ pkgs.lib.licenses.mit ];
2059 license = [ pkgs.lib.licenses.mit ];
2060 };
2060 };
2061 };
2061 };
2062 "scandir" = super.buildPythonPackage {
2062 "scandir" = super.buildPythonPackage {
2063 name = "scandir-1.10.0";
2063 name = "scandir-1.10.0";
2064 doCheck = false;
2064 doCheck = false;
2065 src = fetchurl {
2065 src = fetchurl {
2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2068 };
2068 };
2069 meta = {
2069 meta = {
2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2071 };
2071 };
2072 };
2072 };
2073 "setproctitle" = super.buildPythonPackage {
2073 "setproctitle" = super.buildPythonPackage {
2074 name = "setproctitle-1.1.10";
2074 name = "setproctitle-1.1.10";
2075 doCheck = false;
2075 doCheck = false;
2076 src = fetchurl {
2076 src = fetchurl {
2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2079 };
2079 };
2080 meta = {
2080 meta = {
2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2082 };
2082 };
2083 };
2083 };
2084 "setuptools" = super.buildPythonPackage {
2084 "setuptools" = super.buildPythonPackage {
2085 name = "setuptools-44.1.0";
2085 name = "setuptools-44.1.0";
2086 doCheck = false;
2086 doCheck = false;
2087 src = fetchurl {
2087 src = fetchurl {
2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2090 };
2090 };
2091 meta = {
2091 meta = {
2092 license = [ pkgs.lib.licenses.mit ];
2092 license = [ pkgs.lib.licenses.mit ];
2093 };
2093 };
2094 };
2094 };
2095 "simplegeneric" = super.buildPythonPackage {
2095 "simplegeneric" = super.buildPythonPackage {
2096 name = "simplegeneric-0.8.1";
2096 name = "simplegeneric-0.8.1";
2097 doCheck = false;
2097 doCheck = false;
2098 src = fetchurl {
2098 src = fetchurl {
2099 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2099 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2100 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2100 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2101 };
2101 };
2102 meta = {
2102 meta = {
2103 license = [ pkgs.lib.licenses.zpl21 ];
2103 license = [ pkgs.lib.licenses.zpl21 ];
2104 };
2104 };
2105 };
2105 };
2106 "simplejson" = super.buildPythonPackage {
2106 "simplejson" = super.buildPythonPackage {
2107 name = "simplejson-3.16.0";
2107 name = "simplejson-3.16.0";
2108 doCheck = false;
2108 doCheck = false;
2109 src = fetchurl {
2109 src = fetchurl {
2110 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2110 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2111 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2111 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2112 };
2112 };
2113 meta = {
2113 meta = {
2114 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2114 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2115 };
2115 };
2116 };
2116 };
2117 "six" = super.buildPythonPackage {
2117 "six" = super.buildPythonPackage {
2118 name = "six-1.11.0";
2118 name = "six-1.11.0";
2119 doCheck = false;
2119 doCheck = false;
2120 src = fetchurl {
2120 src = fetchurl {
2121 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2121 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2122 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2122 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2123 };
2123 };
2124 meta = {
2124 meta = {
2125 license = [ pkgs.lib.licenses.mit ];
2125 license = [ pkgs.lib.licenses.mit ];
2126 };
2126 };
2127 };
2127 };
2128 "sqlalchemy" = super.buildPythonPackage {
2128 "sqlalchemy" = super.buildPythonPackage {
2129 name = "sqlalchemy-1.3.15";
2129 name = "sqlalchemy-1.3.15";
2130 doCheck = false;
2130 doCheck = false;
2131 src = fetchurl {
2131 src = fetchurl {
2132 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2132 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2133 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2133 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2134 };
2134 };
2135 meta = {
2135 meta = {
2136 license = [ pkgs.lib.licenses.mit ];
2136 license = [ pkgs.lib.licenses.mit ];
2137 };
2137 };
2138 };
2138 };
2139 "sshpubkeys" = super.buildPythonPackage {
2139 "sshpubkeys" = super.buildPythonPackage {
2140 name = "sshpubkeys-3.1.0";
2140 name = "sshpubkeys-3.1.0";
2141 doCheck = false;
2141 doCheck = false;
2142 propagatedBuildInputs = [
2142 propagatedBuildInputs = [
2143 self."cryptography"
2143 self."cryptography"
2144 self."ecdsa"
2144 self."ecdsa"
2145 ];
2145 ];
2146 src = fetchurl {
2146 src = fetchurl {
2147 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2147 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2148 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2148 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2149 };
2149 };
2150 meta = {
2150 meta = {
2151 license = [ pkgs.lib.licenses.bsdOriginal ];
2151 license = [ pkgs.lib.licenses.bsdOriginal ];
2152 };
2152 };
2153 };
2153 };
2154 "subprocess32" = super.buildPythonPackage {
2154 "subprocess32" = super.buildPythonPackage {
2155 name = "subprocess32-3.5.4";
2155 name = "subprocess32-3.5.4";
2156 doCheck = false;
2156 doCheck = false;
2157 src = fetchurl {
2157 src = fetchurl {
2158 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2158 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2159 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2159 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2160 };
2160 };
2161 meta = {
2161 meta = {
2162 license = [ pkgs.lib.licenses.psfl ];
2162 license = [ pkgs.lib.licenses.psfl ];
2163 };
2163 };
2164 };
2164 };
2165 "supervisor" = super.buildPythonPackage {
2165 "supervisor" = super.buildPythonPackage {
2166 name = "supervisor-4.1.0";
2166 name = "supervisor-4.1.0";
2167 doCheck = false;
2167 doCheck = false;
2168 src = fetchurl {
2168 src = fetchurl {
2169 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2169 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2170 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2170 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2171 };
2171 };
2172 meta = {
2172 meta = {
2173 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2173 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2174 };
2174 };
2175 };
2175 };
2176 "tempita" = super.buildPythonPackage {
2176 "tempita" = super.buildPythonPackage {
2177 name = "tempita-0.5.2";
2177 name = "tempita-0.5.2";
2178 doCheck = false;
2178 doCheck = false;
2179 src = fetchurl {
2179 src = fetchurl {
2180 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2180 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2181 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2181 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2182 };
2182 };
2183 meta = {
2183 meta = {
2184 license = [ pkgs.lib.licenses.mit ];
2184 license = [ pkgs.lib.licenses.mit ];
2185 };
2185 };
2186 };
2186 };
2187 "termcolor" = super.buildPythonPackage {
2187 "termcolor" = super.buildPythonPackage {
2188 name = "termcolor-1.1.0";
2188 name = "termcolor-1.1.0";
2189 doCheck = false;
2189 doCheck = false;
2190 src = fetchurl {
2190 src = fetchurl {
2191 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2191 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2192 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2192 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2193 };
2193 };
2194 meta = {
2194 meta = {
2195 license = [ pkgs.lib.licenses.mit ];
2195 license = [ pkgs.lib.licenses.mit ];
2196 };
2196 };
2197 };
2197 };
2198 "testpath" = super.buildPythonPackage {
2198 "testpath" = super.buildPythonPackage {
2199 name = "testpath-0.4.4";
2199 name = "testpath-0.4.4";
2200 doCheck = false;
2200 doCheck = false;
2201 src = fetchurl {
2201 src = fetchurl {
2202 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2202 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2203 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2203 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2204 };
2204 };
2205 meta = {
2205 meta = {
2206 license = [ ];
2206 license = [ ];
2207 };
2207 };
2208 };
2208 };
2209 "traitlets" = super.buildPythonPackage {
2209 "traitlets" = super.buildPythonPackage {
2210 name = "traitlets-4.3.3";
2210 name = "traitlets-4.3.3";
2211 doCheck = false;
2211 doCheck = false;
2212 propagatedBuildInputs = [
2212 propagatedBuildInputs = [
2213 self."ipython-genutils"
2213 self."ipython-genutils"
2214 self."six"
2214 self."six"
2215 self."decorator"
2215 self."decorator"
2216 self."enum34"
2216 self."enum34"
2217 ];
2217 ];
2218 src = fetchurl {
2218 src = fetchurl {
2219 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2219 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2220 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2220 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2221 };
2221 };
2222 meta = {
2222 meta = {
2223 license = [ pkgs.lib.licenses.bsdOriginal ];
2223 license = [ pkgs.lib.licenses.bsdOriginal ];
2224 };
2224 };
2225 };
2225 };
2226 "transaction" = super.buildPythonPackage {
2226 "transaction" = super.buildPythonPackage {
2227 name = "transaction-2.4.0";
2227 name = "transaction-2.4.0";
2228 doCheck = false;
2228 doCheck = false;
2229 propagatedBuildInputs = [
2229 propagatedBuildInputs = [
2230 self."zope.interface"
2230 self."zope.interface"
2231 ];
2231 ];
2232 src = fetchurl {
2232 src = fetchurl {
2233 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2233 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2234 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2234 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2235 };
2235 };
2236 meta = {
2236 meta = {
2237 license = [ pkgs.lib.licenses.zpl21 ];
2237 license = [ pkgs.lib.licenses.zpl21 ];
2238 };
2238 };
2239 };
2239 };
2240 "translationstring" = super.buildPythonPackage {
2240 "translationstring" = super.buildPythonPackage {
2241 name = "translationstring-1.3";
2241 name = "translationstring-1.3";
2242 doCheck = false;
2242 doCheck = false;
2243 src = fetchurl {
2243 src = fetchurl {
2244 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2244 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2245 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2245 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2246 };
2246 };
2247 meta = {
2247 meta = {
2248 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2248 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2249 };
2249 };
2250 };
2250 };
2251 "tzlocal" = super.buildPythonPackage {
2251 "tzlocal" = super.buildPythonPackage {
2252 name = "tzlocal-1.5.1";
2252 name = "tzlocal-1.5.1";
2253 doCheck = false;
2253 doCheck = false;
2254 propagatedBuildInputs = [
2254 propagatedBuildInputs = [
2255 self."pytz"
2255 self."pytz"
2256 ];
2256 ];
2257 src = fetchurl {
2257 src = fetchurl {
2258 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2258 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2259 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2259 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2260 };
2260 };
2261 meta = {
2261 meta = {
2262 license = [ pkgs.lib.licenses.mit ];
2262 license = [ pkgs.lib.licenses.mit ];
2263 };
2263 };
2264 };
2264 };
2265 "urllib3" = super.buildPythonPackage {
2265 "urllib3" = super.buildPythonPackage {
2266 name = "urllib3-1.25.2";
2266 name = "urllib3-1.25.2";
2267 doCheck = false;
2267 doCheck = false;
2268 src = fetchurl {
2268 src = fetchurl {
2269 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2269 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2270 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2270 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2271 };
2271 };
2272 meta = {
2272 meta = {
2273 license = [ pkgs.lib.licenses.mit ];
2273 license = [ pkgs.lib.licenses.mit ];
2274 };
2274 };
2275 };
2275 };
2276 "urlobject" = super.buildPythonPackage {
2276 "urlobject" = super.buildPythonPackage {
2277 name = "urlobject-2.4.3";
2277 name = "urlobject-2.4.3";
2278 doCheck = false;
2278 doCheck = false;
2279 src = fetchurl {
2279 src = fetchurl {
2280 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2280 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2281 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2281 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2282 };
2282 };
2283 meta = {
2283 meta = {
2284 license = [ pkgs.lib.licenses.publicDomain ];
2284 license = [ pkgs.lib.licenses.publicDomain ];
2285 };
2285 };
2286 };
2286 };
2287 "venusian" = super.buildPythonPackage {
2287 "venusian" = super.buildPythonPackage {
2288 name = "venusian-1.2.0";
2288 name = "venusian-1.2.0";
2289 doCheck = false;
2289 doCheck = false;
2290 src = fetchurl {
2290 src = fetchurl {
2291 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2291 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2292 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2292 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2293 };
2293 };
2294 meta = {
2294 meta = {
2295 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2295 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2296 };
2296 };
2297 };
2297 };
2298 "vine" = super.buildPythonPackage {
2298 "vine" = super.buildPythonPackage {
2299 name = "vine-1.3.0";
2299 name = "vine-1.3.0";
2300 doCheck = false;
2300 doCheck = false;
2301 src = fetchurl {
2301 src = fetchurl {
2302 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2302 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2303 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2303 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2304 };
2304 };
2305 meta = {
2305 meta = {
2306 license = [ pkgs.lib.licenses.bsdOriginal ];
2306 license = [ pkgs.lib.licenses.bsdOriginal ];
2307 };
2307 };
2308 };
2308 };
2309 "waitress" = super.buildPythonPackage {
2309 "waitress" = super.buildPythonPackage {
2310 name = "waitress-1.3.1";
2310 name = "waitress-1.3.1";
2311 doCheck = false;
2311 doCheck = false;
2312 src = fetchurl {
2312 src = fetchurl {
2313 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2313 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2314 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2314 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2315 };
2315 };
2316 meta = {
2316 meta = {
2317 license = [ pkgs.lib.licenses.zpl21 ];
2317 license = [ pkgs.lib.licenses.zpl21 ];
2318 };
2318 };
2319 };
2319 };
2320 "wcwidth" = super.buildPythonPackage {
2320 "wcwidth" = super.buildPythonPackage {
2321 name = "wcwidth-0.1.9";
2321 name = "wcwidth-0.1.9";
2322 doCheck = false;
2322 doCheck = false;
2323 src = fetchurl {
2323 src = fetchurl {
2324 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2324 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2325 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2325 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2326 };
2326 };
2327 meta = {
2327 meta = {
2328 license = [ pkgs.lib.licenses.mit ];
2328 license = [ pkgs.lib.licenses.mit ];
2329 };
2329 };
2330 };
2330 };
2331 "webencodings" = super.buildPythonPackage {
2331 "webencodings" = super.buildPythonPackage {
2332 name = "webencodings-0.5.1";
2332 name = "webencodings-0.5.1";
2333 doCheck = false;
2333 doCheck = false;
2334 src = fetchurl {
2334 src = fetchurl {
2335 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2335 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2336 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2336 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2337 };
2337 };
2338 meta = {
2338 meta = {
2339 license = [ pkgs.lib.licenses.bsdOriginal ];
2339 license = [ pkgs.lib.licenses.bsdOriginal ];
2340 };
2340 };
2341 };
2341 };
2342 "weberror" = super.buildPythonPackage {
2342 "weberror" = super.buildPythonPackage {
2343 name = "weberror-0.13.1";
2343 name = "weberror-0.13.1";
2344 doCheck = false;
2344 doCheck = false;
2345 propagatedBuildInputs = [
2345 propagatedBuildInputs = [
2346 self."webob"
2346 self."webob"
2347 self."tempita"
2347 self."tempita"
2348 self."pygments"
2348 self."pygments"
2349 self."paste"
2349 self."paste"
2350 ];
2350 ];
2351 src = fetchurl {
2351 src = fetchurl {
2352 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2352 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2353 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2353 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2354 };
2354 };
2355 meta = {
2355 meta = {
2356 license = [ pkgs.lib.licenses.mit ];
2356 license = [ pkgs.lib.licenses.mit ];
2357 };
2357 };
2358 };
2358 };
2359 "webhelpers2" = super.buildPythonPackage {
2359 "webhelpers2" = super.buildPythonPackage {
2360 name = "webhelpers2-2.0";
2360 name = "webhelpers2-2.0";
2361 doCheck = false;
2361 doCheck = false;
2362 propagatedBuildInputs = [
2362 propagatedBuildInputs = [
2363 self."markupsafe"
2363 self."markupsafe"
2364 self."six"
2364 self."six"
2365 ];
2365 ];
2366 src = fetchurl {
2366 src = fetchurl {
2367 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2367 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2368 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2368 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2369 };
2369 };
2370 meta = {
2370 meta = {
2371 license = [ pkgs.lib.licenses.mit ];
2371 license = [ pkgs.lib.licenses.mit ];
2372 };
2372 };
2373 };
2373 };
2374 "webob" = super.buildPythonPackage {
2374 "webob" = super.buildPythonPackage {
2375 name = "webob-1.8.5";
2375 name = "webob-1.8.5";
2376 doCheck = false;
2376 doCheck = false;
2377 src = fetchurl {
2377 src = fetchurl {
2378 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2378 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2379 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2379 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2380 };
2380 };
2381 meta = {
2381 meta = {
2382 license = [ pkgs.lib.licenses.mit ];
2382 license = [ pkgs.lib.licenses.mit ];
2383 };
2383 };
2384 };
2384 };
2385 "webtest" = super.buildPythonPackage {
2385 "webtest" = super.buildPythonPackage {
2386 name = "webtest-2.0.34";
2386 name = "webtest-2.0.34";
2387 doCheck = false;
2387 doCheck = false;
2388 propagatedBuildInputs = [
2388 propagatedBuildInputs = [
2389 self."six"
2389 self."six"
2390 self."webob"
2390 self."webob"
2391 self."waitress"
2391 self."waitress"
2392 self."beautifulsoup4"
2392 self."beautifulsoup4"
2393 ];
2393 ];
2394 src = fetchurl {
2394 src = fetchurl {
2395 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2395 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2396 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2396 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2397 };
2397 };
2398 meta = {
2398 meta = {
2399 license = [ pkgs.lib.licenses.mit ];
2399 license = [ pkgs.lib.licenses.mit ];
2400 };
2400 };
2401 };
2401 };
2402 "whoosh" = super.buildPythonPackage {
2402 "whoosh" = super.buildPythonPackage {
2403 name = "whoosh-2.7.4";
2403 name = "whoosh-2.7.4";
2404 doCheck = false;
2404 doCheck = false;
2405 src = fetchurl {
2405 src = fetchurl {
2406 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2406 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2407 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2407 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2408 };
2408 };
2409 meta = {
2409 meta = {
2410 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2410 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2411 };
2411 };
2412 };
2412 };
2413 "ws4py" = super.buildPythonPackage {
2413 "ws4py" = super.buildPythonPackage {
2414 name = "ws4py-0.5.1";
2414 name = "ws4py-0.5.1";
2415 doCheck = false;
2415 doCheck = false;
2416 src = fetchurl {
2416 src = fetchurl {
2417 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2417 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2418 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2418 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2419 };
2419 };
2420 meta = {
2420 meta = {
2421 license = [ pkgs.lib.licenses.bsdOriginal ];
2421 license = [ pkgs.lib.licenses.bsdOriginal ];
2422 };
2422 };
2423 };
2423 };
2424 "wsgiref" = super.buildPythonPackage {
2424 "wsgiref" = super.buildPythonPackage {
2425 name = "wsgiref-0.1.2";
2425 name = "wsgiref-0.1.2";
2426 doCheck = false;
2426 doCheck = false;
2427 src = fetchurl {
2427 src = fetchurl {
2428 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2428 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2429 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2429 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2430 };
2430 };
2431 meta = {
2431 meta = {
2432 license = [ { fullName = "PSF or ZPL"; } ];
2432 license = [ { fullName = "PSF or ZPL"; } ];
2433 };
2433 };
2434 };
2434 };
2435 "zipp" = super.buildPythonPackage {
2435 "zipp" = super.buildPythonPackage {
2436 name = "zipp-1.2.0";
2436 name = "zipp-1.2.0";
2437 doCheck = false;
2437 doCheck = false;
2438 propagatedBuildInputs = [
2438 propagatedBuildInputs = [
2439 self."contextlib2"
2439 self."contextlib2"
2440 ];
2440 ];
2441 src = fetchurl {
2441 src = fetchurl {
2442 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2442 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2443 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2443 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2444 };
2444 };
2445 meta = {
2445 meta = {
2446 license = [ pkgs.lib.licenses.mit ];
2446 license = [ pkgs.lib.licenses.mit ];
2447 };
2447 };
2448 };
2448 };
2449 "zope.cachedescriptors" = super.buildPythonPackage {
2449 "zope.cachedescriptors" = super.buildPythonPackage {
2450 name = "zope.cachedescriptors-4.3.1";
2450 name = "zope.cachedescriptors-4.3.1";
2451 doCheck = false;
2451 doCheck = false;
2452 propagatedBuildInputs = [
2452 propagatedBuildInputs = [
2453 self."setuptools"
2453 self."setuptools"
2454 ];
2454 ];
2455 src = fetchurl {
2455 src = fetchurl {
2456 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2456 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2457 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2457 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2458 };
2458 };
2459 meta = {
2459 meta = {
2460 license = [ pkgs.lib.licenses.zpl21 ];
2460 license = [ pkgs.lib.licenses.zpl21 ];
2461 };
2461 };
2462 };
2462 };
2463 "zope.deprecation" = super.buildPythonPackage {
2463 "zope.deprecation" = super.buildPythonPackage {
2464 name = "zope.deprecation-4.4.0";
2464 name = "zope.deprecation-4.4.0";
2465 doCheck = false;
2465 doCheck = false;
2466 propagatedBuildInputs = [
2466 propagatedBuildInputs = [
2467 self."setuptools"
2467 self."setuptools"
2468 ];
2468 ];
2469 src = fetchurl {
2469 src = fetchurl {
2470 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2470 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2471 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2471 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2472 };
2472 };
2473 meta = {
2473 meta = {
2474 license = [ pkgs.lib.licenses.zpl21 ];
2474 license = [ pkgs.lib.licenses.zpl21 ];
2475 };
2475 };
2476 };
2476 };
2477 "zope.event" = super.buildPythonPackage {
2477 "zope.event" = super.buildPythonPackage {
2478 name = "zope.event-4.4";
2478 name = "zope.event-4.4";
2479 doCheck = false;
2479 doCheck = false;
2480 propagatedBuildInputs = [
2480 propagatedBuildInputs = [
2481 self."setuptools"
2481 self."setuptools"
2482 ];
2482 ];
2483 src = fetchurl {
2483 src = fetchurl {
2484 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2484 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2485 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2485 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2486 };
2486 };
2487 meta = {
2487 meta = {
2488 license = [ pkgs.lib.licenses.zpl21 ];
2488 license = [ pkgs.lib.licenses.zpl21 ];
2489 };
2489 };
2490 };
2490 };
2491 "zope.interface" = super.buildPythonPackage {
2491 "zope.interface" = super.buildPythonPackage {
2492 name = "zope.interface-4.6.0";
2492 name = "zope.interface-4.6.0";
2493 doCheck = false;
2493 doCheck = false;
2494 propagatedBuildInputs = [
2494 propagatedBuildInputs = [
2495 self."setuptools"
2495 self."setuptools"
2496 ];
2496 ];
2497 src = fetchurl {
2497 src = fetchurl {
2498 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2498 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2499 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2499 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2500 };
2500 };
2501 meta = {
2501 meta = {
2502 license = [ pkgs.lib.licenses.zpl21 ];
2502 license = [ pkgs.lib.licenses.zpl21 ];
2503 };
2503 };
2504 };
2504 };
2505
2505
2506 ### Test requirements
2506 ### Test requirements
2507
2507
2508
2508
2509 }
2509 }
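The generated override set ends here; every entry above pins a Python dependency to an exact source tarball URL plus a sha256 hash for fetchurl, which is what keeps the build reproducible. As a loose illustration only (not part of this repository, written in Python rather than Nix, and comparing a hex digest whereas Nix stores its hashes in a base32 encoding), the same kind of integrity check on a downloaded sdist looks roughly like this:

# Illustration only: verify a downloaded tarball against an expected
# SHA-256 digest, analogous to the sha256 pins used by fetchurl above.
import hashlib
import sys

def sha256_of(path, chunk_size=1 << 20):
    digest = hashlib.sha256()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

if __name__ == "__main__":
    # usage: python check_sdist.py <tarball> <expected-hex-sha256>
    tarball, expected = sys.argv[1], sys.argv[2]
    actual = sha256_of(tarball)
    if actual != expected:
        raise SystemExit("checksum mismatch: %s != %s" % (actual, expected))
    print("OK: %s" % tarball)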
@@ -1,124 +1,124 b''
1 ## dependencies
1 ## dependencies
2
2
3 amqp==2.5.2
3 amqp==2.5.2
4 babel==1.3
4 babel==1.3
5 beaker==1.9.1
5 beaker==1.9.1
6 bleach==3.1.3
6 bleach==3.1.3
7 celery==4.3.0
7 celery==4.3.0
8 channelstream==0.6.14
8 channelstream==0.6.14
9 click==7.0
9 click==7.0
10 colander==1.7.0
10 colander==1.7.0
11 # our custom configobj
11 # our custom configobj
12 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
12 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
13 cssselect==1.0.3
13 cssselect==1.0.3
14 cryptography==2.6.1
14 cryptography==2.6.1
15 decorator==4.1.2
15 decorator==4.1.2
16 deform==2.0.8
16 deform==2.0.8
17 docutils==0.16.0
17 docutils==0.16.0
18 dogpile.cache==0.9.0
18 dogpile.cache==0.9.0
19 dogpile.core==0.4.1
19 dogpile.core==0.4.1
20 formencode==1.2.4
20 formencode==1.2.4
21 future==0.14.3
21 future==0.14.3
22 futures==3.0.2
22 futures==3.0.2
23 infrae.cache==1.0.1
23 infrae.cache==1.0.1
24 iso8601==0.1.12
24 iso8601==0.1.12
25 itsdangerous==1.1.0
25 itsdangerous==1.1.0
26 kombu==4.6.6
26 kombu==4.6.6
27 lxml==4.2.5
27 lxml==4.2.5
28 mako==1.1.0
28 mako==1.1.0
29 markdown==2.6.11
29 markdown==2.6.11
30 markupsafe==1.1.1
30 markupsafe==1.1.1
31 msgpack-python==0.5.6
31 msgpack-python==0.5.6
32 pyotp==2.3.0
32 pyotp==2.3.0
33 packaging==20.3
33 packaging==20.3
34 pathlib2==2.3.5
34 pathlib2==2.3.5
35 paste==3.4.0
35 paste==3.4.0
36 pastedeploy==2.1.0
36 pastedeploy==2.1.0
37 pastescript==3.2.0
37 pastescript==3.2.0
38 peppercorn==0.6
38 peppercorn==0.6
39 premailer==3.6.1
39 premailer==3.6.1
40 psutil==5.7.0
40 psutil==5.7.0
41 py-bcrypt==0.4
41 py-bcrypt==0.4
42 pycurl==7.43.0.3
42 pycurl==7.43.0.3
43 pycrypto==2.6.1
43 pycrypto==2.6.1
44 pygments==2.4.2
44 pygments==2.4.2
45 pyparsing==2.4.7
45 pyparsing==2.4.7
46 pyramid-debugtoolbar==4.6.1
46 pyramid-debugtoolbar==4.6.1
47 pyramid-mako==1.1.0
47 pyramid-mako==1.1.0
48 pyramid==1.10.4
48 pyramid==1.10.4
49 pyramid_mailer==0.15.1
49 pyramid_mailer==0.15.1
50 python-dateutil==2.8.1
50 python-dateutil==2.8.1
51 python-ldap==3.2.0
51 python-ldap==3.2.0
52 python-memcached==1.59
52 python-memcached==1.59
53 python-pam==1.8.4
53 python-pam==1.8.4
54 python-saml==2.4.2
54 python-saml==2.4.2
55 pytz==2019.3
55 pytz==2019.3
56 tzlocal==1.5.1
56 tzlocal==1.5.1
57 pyzmq==14.6.0
57 pyzmq==14.6.0
58 py-gfm==0.1.4
58 py-gfm==0.1.4
59 regex==2020.9.27
59 regex==2020.9.27
60 redis==3.4.1
60 redis==3.5.3
61 repoze.lru==0.7
61 repoze.lru==0.7
62 requests==2.22.0
62 requests==2.22.0
63 routes==2.4.1
63 routes==2.4.1
64 simplejson==3.16.0
64 simplejson==3.16.0
65 six==1.11.0
65 six==1.11.0
66 sqlalchemy==1.3.15
66 sqlalchemy==1.3.15
67 sshpubkeys==3.1.0
67 sshpubkeys==3.1.0
68 subprocess32==3.5.4
68 subprocess32==3.5.4
69 supervisor==4.1.0
69 supervisor==4.1.0
70 translationstring==1.3
70 translationstring==1.3
71 urllib3==1.25.2
71 urllib3==1.25.2
72 urlobject==2.4.3
72 urlobject==2.4.3
73 venusian==1.2.0
73 venusian==1.2.0
74 weberror==0.13.1
74 weberror==0.13.1
75 webhelpers2==2.0
75 webhelpers2==2.0
76 webob==1.8.5
76 webob==1.8.5
77 whoosh==2.7.4
77 whoosh==2.7.4
78 wsgiref==0.1.2
78 wsgiref==0.1.2
79 zope.cachedescriptors==4.3.1
79 zope.cachedescriptors==4.3.1
80 zope.deprecation==4.4.0
80 zope.deprecation==4.4.0
81 zope.event==4.4.0
81 zope.event==4.4.0
82 zope.interface==4.6.0
82 zope.interface==4.6.0
83
83
84 # DB drivers
84 # DB drivers
85 mysql-python==1.2.5
85 mysql-python==1.2.5
86 pymysql==0.8.1
86 pymysql==0.8.1
87 pysqlite==2.8.3
87 pysqlite==2.8.3
88 psycopg2==2.8.4
88 psycopg2==2.8.4
89
89
90 # IPYTHON RENDERING
90 # IPYTHON RENDERING
91 # entrypoints backport, pypi version doesn't support egg installs
91 # entrypoints backport, pypi version doesn't support egg installs
92 https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1
92 https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1
93 nbconvert==5.3.1
93 nbconvert==5.3.1
94 nbformat==4.4.0
94 nbformat==4.4.0
95 jupyter-client==5.0.0
95 jupyter-client==5.0.0
96 jupyter-core==4.5.0
96 jupyter-core==4.5.0
97
97
98 ## cli tools
98 ## cli tools
99 alembic==1.4.2
99 alembic==1.4.2
100 invoke==0.13.0
100 invoke==0.13.0
101 bumpversion==0.5.3
101 bumpversion==0.5.3
102
102
103 ## http servers
103 ## http servers
104 gevent==1.5.0
104 gevent==1.5.0
105 greenlet==0.4.15
105 greenlet==0.4.15
106 gunicorn==19.9.0
106 gunicorn==19.9.0
107 waitress==1.3.1
107 waitress==1.3.1
108
108
109 ## debug
109 ## debug
110 ipdb==0.13.2
110 ipdb==0.13.2
111 ipython==5.1.0
111 ipython==5.1.0
112
112
113 ## rhodecode-tools, special case: use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z to test a local version
113 ## rhodecode-tools, special case: use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z to test a local version
114 https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0
114 https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0
115
115
116
116
117 ## appenlight
117 ## appenlight
118 appenlight-client==0.6.26
118 appenlight-client==0.6.26
119
119
120 ## test related requirements
120 ## test related requirements
121 -r requirements_test.txt
121 -r requirements_test.txt
122
122
123 ## uncomment to add the debug libraries
123 ## uncomment to add the debug libraries
124 #-r requirements_debug.txt
124 #-r requirements_debug.txt
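The only change visible in this pinned requirements hunk is the redis client moving from 3.4.1 to 3.5.3; everything else is unchanged context. Purely as an illustration (not part of the repository, and assuming a Python 3.8+ interpreter where importlib.metadata is available, while the project itself still targets Python 2 judging by the futures and subprocess32 pins), a quick sanity check that an environment honours such a pin could look like:

# Illustration only: confirm the installed redis client matches the pin above.
from importlib.metadata import version, PackageNotFoundError

PINNED = "3.5.3"  # value pinned in the requirements list above

try:
    installed = version("redis")
except PackageNotFoundError:
    raise SystemExit("redis is not installed in this environment")

if installed != PINNED:
    raise SystemExit("redis %s installed, but %s is pinned" % (installed, PINNED))
print("redis %s matches the pin" % installed)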
@@ -1,1 +1,1 b''
1 4.23.2 No newline at end of file
1 4.24.0 No newline at end of file
@@ -1,111 +1,111 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.apps._base import ADMIN_PREFIX
23 from rhodecode.apps._base import ADMIN_PREFIX
24 from rhodecode.model.db import User
24 from rhodecode.model.db import User
25 from rhodecode.tests import (
25 from rhodecode.tests import (
26 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
26 TestController, route_path_generator, assert_session_flash)
27 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, assert_session_flash)
28 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.utils import AssertResponse
28 from rhodecode.tests.utils import AssertResponse
30
29
31 fixture = Fixture()
30 fixture = Fixture()
32
31
33
32
34 def route_path(name, **kwargs):
33 def route_path(name, params=None, **kwargs):
35 return {
34 url_defs = {
36 'my_account_auth_tokens':
35 'my_account_auth_tokens':
37 ADMIN_PREFIX + '/my_account/auth_tokens',
36 ADMIN_PREFIX + '/my_account/auth_tokens',
38 'my_account_auth_tokens_add':
37 'my_account_auth_tokens_add':
39 ADMIN_PREFIX + '/my_account/auth_tokens/new',
38 ADMIN_PREFIX + '/my_account/auth_tokens/new',
40 'my_account_auth_tokens_delete':
39 'my_account_auth_tokens_delete':
41 ADMIN_PREFIX + '/my_account/auth_tokens/delete',
40 ADMIN_PREFIX + '/my_account/auth_tokens/delete',
42 }[name].format(**kwargs)
41 }
42 return route_path_generator(url_defs, name=name, params=params, **kwargs)
43
43
44
44
45 class TestMyAccountAuthTokens(TestController):
45 class TestMyAccountAuthTokens(TestController):
46
46
47 def test_my_account_auth_tokens(self):
47 def test_my_account_auth_tokens(self):
48 usr = self.log_user('test_regular2', 'test12')
48 usr = self.log_user('test_regular2', 'test12')
49 user = User.get(usr['user_id'])
49 user = User.get(usr['user_id'])
50 response = self.app.get(route_path('my_account_auth_tokens'))
50 response = self.app.get(route_path('my_account_auth_tokens'))
51 for token in user.auth_tokens:
51 for token in user.auth_tokens:
52 response.mustcontain(token[:4])
52 response.mustcontain(token[:4])
53 response.mustcontain('never')
53 response.mustcontain('never')
54
54
55 def test_my_account_add_auth_tokens_wrong_csrf(self, user_util):
55 def test_my_account_add_auth_tokens_wrong_csrf(self, user_util):
56 user = user_util.create_user(password='qweqwe')
56 user = user_util.create_user(password='qweqwe')
57 self.log_user(user.username, 'qweqwe')
57 self.log_user(user.username, 'qweqwe')
58
58
59 self.app.post(
59 self.app.post(
60 route_path('my_account_auth_tokens_add'),
60 route_path('my_account_auth_tokens_add'),
61 {'description': 'desc', 'lifetime': -1}, status=403)
61 {'description': 'desc', 'lifetime': -1}, status=403)
62
62
63 @pytest.mark.parametrize("desc, lifetime", [
63 @pytest.mark.parametrize("desc, lifetime", [
64 ('forever', -1),
64 ('forever', -1),
65 ('5mins', 60*5),
65 ('5mins', 60*5),
66 ('30days', 60*60*24*30),
66 ('30days', 60*60*24*30),
67 ])
67 ])
68 def test_my_account_add_auth_tokens(self, desc, lifetime, user_util):
68 def test_my_account_add_auth_tokens(self, desc, lifetime, user_util):
69 user = user_util.create_user(password='qweqwe')
69 user = user_util.create_user(password='qweqwe')
70 user_id = user.user_id
70 user_id = user.user_id
71 self.log_user(user.username, 'qweqwe')
71 self.log_user(user.username, 'qweqwe')
72
72
73 response = self.app.post(
73 response = self.app.post(
74 route_path('my_account_auth_tokens_add'),
74 route_path('my_account_auth_tokens_add'),
75 {'description': desc, 'lifetime': lifetime,
75 {'description': desc, 'lifetime': lifetime,
76 'csrf_token': self.csrf_token})
76 'csrf_token': self.csrf_token})
77 assert_session_flash(response, 'Auth token successfully created')
77 assert_session_flash(response, 'Auth token successfully created')
78
78
79 response = response.follow()
79 response = response.follow()
80 user = User.get(user_id)
80 user = User.get(user_id)
81 for auth_token in user.auth_tokens:
81 for auth_token in user.auth_tokens:
82 response.mustcontain(auth_token[:4])
82 response.mustcontain(auth_token[:4])
83
83
84 def test_my_account_delete_auth_token(self, user_util):
84 def test_my_account_delete_auth_token(self, user_util):
85 user = user_util.create_user(password='qweqwe')
85 user = user_util.create_user(password='qweqwe')
86 user_id = user.user_id
86 user_id = user.user_id
87 self.log_user(user.username, 'qweqwe')
87 self.log_user(user.username, 'qweqwe')
88
88
89 user = User.get(user_id)
89 user = User.get(user_id)
90 keys = user.get_auth_tokens()
90 keys = user.get_auth_tokens()
91 assert 2 == len(keys)
91 assert 2 == len(keys)
92
92
93 response = self.app.post(
93 response = self.app.post(
94 route_path('my_account_auth_tokens_add'),
94 route_path('my_account_auth_tokens_add'),
95 {'description': 'desc', 'lifetime': -1,
95 {'description': 'desc', 'lifetime': -1,
96 'csrf_token': self.csrf_token})
96 'csrf_token': self.csrf_token})
97 assert_session_flash(response, 'Auth token successfully created')
97 assert_session_flash(response, 'Auth token successfully created')
98 response.follow()
98 response.follow()
99
99
100 user = User.get(user_id)
100 user = User.get(user_id)
101 keys = user.get_auth_tokens()
101 keys = user.get_auth_tokens()
102 assert 3 == len(keys)
102 assert 3 == len(keys)
103
103
104 response = self.app.post(
104 response = self.app.post(
105 route_path('my_account_auth_tokens_delete'),
105 route_path('my_account_auth_tokens_delete'),
106 {'del_auth_token': keys[0].user_api_key_id, 'csrf_token': self.csrf_token})
106 {'del_auth_token': keys[0].user_api_key_id, 'csrf_token': self.csrf_token})
107 assert_session_flash(response, 'Auth token successfully deleted')
107 assert_session_flash(response, 'Auth token successfully deleted')
108
108
109 user = User.get(user_id)
109 user = User.get(user_id)
110 keys = user.auth_tokens
110 keys = user.auth_tokens
111 assert 2 == len(keys)
111 assert 2 == len(keys)
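The test module above replaces its local route_path() table lookup with a call to a shared route_path_generator helper imported from rhodecode.tests; the helper itself is not part of this diff. The sketch below is only an assumption of the pattern its call site implies (resolve a URL template by name, fill in path arguments, then append optional query parameters), written in Python 3 syntax for brevity even though the surrounding code base is still Python 2, and it is not the project's actual implementation:

# Hypothetical sketch of a route_path_generator-style helper; the real one
# lives in rhodecode.tests and may differ.
from urllib.parse import urlencode

def route_path_generator(url_defs, name=None, params=None, **kwargs):
    # resolve the URL template registered under `name` and substitute any
    # path arguments passed as keyword arguments
    base_url = url_defs[name].format(**kwargs)
    # append optional query-string parameters, if any were supplied
    if params:
        base_url = '%s?%s' % (base_url, urlencode(params))
    return base_url

With the url_defs dictionary shown in the test, route_path('my_account_auth_tokens') would then resolve to ADMIN_PREFIX + '/my_account/auth_tokens' with no query string appended.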
@@ -1,1854 +1,1857 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29
29
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 from rhodecode.lib.vcs.backends.base import (
43 from rhodecode.lib.vcs.backends.base import (
44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.comment import CommentsModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 func, false, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
50 func, false, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
51 PullRequestReviewers)
51 PullRequestReviewers)
52 from rhodecode.model.forms import PullRequestForm
52 from rhodecode.model.forms import PullRequestForm
53 from rhodecode.model.meta import Session
53 from rhodecode.model.meta import Session
54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
55 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.scm import ScmModel
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
61
61
62 def load_default_context(self):
62 def load_default_context(self):
63 c = self._get_local_tmpl_context(include_app_defaults=True)
63 c = self._get_local_tmpl_context(include_app_defaults=True)
64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
66 # backward compat.: we use a plain renderer for OLD PRs
66 # backward compat.: we use a plain renderer for OLD PRs
67 c.renderer = 'plain'
67 c.renderer = 'plain'
68 return c
68 return c
69
69
70 def _get_pull_requests_list(
70 def _get_pull_requests_list(
71 self, repo_name, source, filter_type, opened_by, statuses):
71 self, repo_name, source, filter_type, opened_by, statuses):
72
72
73 draw, start, limit = self._extract_chunk(self.request)
73 draw, start, limit = self._extract_chunk(self.request)
74 search_q, order_by, order_dir = self._extract_ordering(self.request)
74 search_q, order_by, order_dir = self._extract_ordering(self.request)
75 _render = self.request.get_partial_renderer(
75 _render = self.request.get_partial_renderer(
76 'rhodecode:templates/data_table/_dt_elements.mako')
76 'rhodecode:templates/data_table/_dt_elements.mako')
77
77
78 # pagination
78 # pagination
79
79
80 if filter_type == 'awaiting_review':
80 if filter_type == 'awaiting_review':
81 pull_requests = PullRequestModel().get_awaiting_review(
81 pull_requests = PullRequestModel().get_awaiting_review(
82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
83 statuses=statuses, offset=start, length=limit,
83 statuses=statuses, offset=start, length=limit,
84 order_by=order_by, order_dir=order_dir)
84 order_by=order_by, order_dir=order_dir)
85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
86 repo_name, search_q=search_q, source=source, statuses=statuses,
86 repo_name, search_q=search_q, source=source, statuses=statuses,
87 opened_by=opened_by)
87 opened_by=opened_by)
88 elif filter_type == 'awaiting_my_review':
88 elif filter_type == 'awaiting_my_review':
89 pull_requests = PullRequestModel().get_awaiting_my_review(
89 pull_requests = PullRequestModel().get_awaiting_my_review(
90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
91 user_id=self._rhodecode_user.user_id, statuses=statuses,
91 user_id=self._rhodecode_user.user_id, statuses=statuses,
92 offset=start, length=limit, order_by=order_by,
92 offset=start, length=limit, order_by=order_by,
93 order_dir=order_dir)
93 order_dir=order_dir)
94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
96 statuses=statuses, opened_by=opened_by)
96 statuses=statuses, opened_by=opened_by)
97 else:
97 else:
98 pull_requests = PullRequestModel().get_all(
98 pull_requests = PullRequestModel().get_all(
99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
100 statuses=statuses, offset=start, length=limit,
100 statuses=statuses, offset=start, length=limit,
101 order_by=order_by, order_dir=order_dir)
101 order_by=order_by, order_dir=order_dir)
102 pull_requests_total_count = PullRequestModel().count_all(
102 pull_requests_total_count = PullRequestModel().count_all(
103 repo_name, search_q=search_q, source=source, statuses=statuses,
103 repo_name, search_q=search_q, source=source, statuses=statuses,
104 opened_by=opened_by)
104 opened_by=opened_by)
105
105
106 data = []
106 data = []
107 comments_model = CommentsModel()
107 comments_model = CommentsModel()
108 for pr in pull_requests:
108 for pr in pull_requests:
109 comments_count = comments_model.get_all_comments(
109 comments_count = comments_model.get_all_comments(
110 self.db_repo.repo_id, pull_request=pr,
110 self.db_repo.repo_id, pull_request=pr,
111 include_drafts=False, count_only=True)
111 include_drafts=False, count_only=True)
112
112
113 data.append({
113 data.append({
114 'name': _render('pullrequest_name',
114 'name': _render('pullrequest_name',
115 pr.pull_request_id, pr.pull_request_state,
115 pr.pull_request_id, pr.pull_request_state,
116 pr.work_in_progress, pr.target_repo.repo_name,
116 pr.work_in_progress, pr.target_repo.repo_name,
117 short=True),
117 short=True),
118 'name_raw': pr.pull_request_id,
118 'name_raw': pr.pull_request_id,
119 'status': _render('pullrequest_status',
119 'status': _render('pullrequest_status',
120 pr.calculated_review_status()),
120 pr.calculated_review_status()),
121 'title': _render('pullrequest_title', pr.title, pr.description),
121 'title': _render('pullrequest_title', pr.title, pr.description),
122 'description': h.escape(pr.description),
122 'description': h.escape(pr.description),
123 'updated_on': _render('pullrequest_updated_on',
123 'updated_on': _render('pullrequest_updated_on',
124 h.datetime_to_time(pr.updated_on),
124 h.datetime_to_time(pr.updated_on),
125 pr.versions_count),
125 pr.versions_count),
126 'updated_on_raw': h.datetime_to_time(pr.updated_on),
126 'updated_on_raw': h.datetime_to_time(pr.updated_on),
127 'created_on': _render('pullrequest_updated_on',
127 'created_on': _render('pullrequest_updated_on',
128 h.datetime_to_time(pr.created_on)),
128 h.datetime_to_time(pr.created_on)),
129 'created_on_raw': h.datetime_to_time(pr.created_on),
129 'created_on_raw': h.datetime_to_time(pr.created_on),
130 'state': pr.pull_request_state,
130 'state': pr.pull_request_state,
131 'author': _render('pullrequest_author',
131 'author': _render('pullrequest_author',
132 pr.author.full_contact, ),
132 pr.author.full_contact, ),
133 'author_raw': pr.author.full_name,
133 'author_raw': pr.author.full_name,
134 'comments': _render('pullrequest_comments', comments_count),
134 'comments': _render('pullrequest_comments', comments_count),
135 'comments_raw': comments_count,
135 'comments_raw': comments_count,
136 'closed': pr.is_closed(),
136 'closed': pr.is_closed(),
137 })
137 })
138
138
139 data = ({
139 data = ({
140 'draw': draw,
140 'draw': draw,
141 'data': data,
141 'data': data,
142 'recordsTotal': pull_requests_total_count,
142 'recordsTotal': pull_requests_total_count,
143 'recordsFiltered': pull_requests_total_count,
143 'recordsFiltered': pull_requests_total_count,
144 })
144 })
145 return data
145 return data
146
146
147 @LoginRequired()
147 @LoginRequired()
148 @HasRepoPermissionAnyDecorator(
148 @HasRepoPermissionAnyDecorator(
149 'repository.read', 'repository.write', 'repository.admin')
149 'repository.read', 'repository.write', 'repository.admin')
150 def pull_request_list(self):
150 def pull_request_list(self):
151 c = self.load_default_context()
151 c = self.load_default_context()
152
152
153 req_get = self.request.GET
153 req_get = self.request.GET
154 c.source = str2bool(req_get.get('source'))
154 c.source = str2bool(req_get.get('source'))
155 c.closed = str2bool(req_get.get('closed'))
155 c.closed = str2bool(req_get.get('closed'))
156 c.my = str2bool(req_get.get('my'))
156 c.my = str2bool(req_get.get('my'))
157 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
157 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
158 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159
159
160 c.active = 'open'
160 c.active = 'open'
161 if c.my:
161 if c.my:
162 c.active = 'my'
162 c.active = 'my'
163 if c.closed:
163 if c.closed:
164 c.active = 'closed'
164 c.active = 'closed'
165 if c.awaiting_review and not c.source:
165 if c.awaiting_review and not c.source:
166 c.active = 'awaiting'
166 c.active = 'awaiting'
167 if c.source and not c.awaiting_review:
167 if c.source and not c.awaiting_review:
168 c.active = 'source'
168 c.active = 'source'
169 if c.awaiting_my_review:
169 if c.awaiting_my_review:
170 c.active = 'awaiting_my'
170 c.active = 'awaiting_my'
171
171
172 return self._get_template_context(c)
172 return self._get_template_context(c)
173
173
174 @LoginRequired()
174 @LoginRequired()
175 @HasRepoPermissionAnyDecorator(
175 @HasRepoPermissionAnyDecorator(
176 'repository.read', 'repository.write', 'repository.admin')
176 'repository.read', 'repository.write', 'repository.admin')
177 def pull_request_list_data(self):
177 def pull_request_list_data(self):
178 self.load_default_context()
178 self.load_default_context()
179
179
180 # additional filters
180 # additional filters
181 req_get = self.request.GET
181 req_get = self.request.GET
182 source = str2bool(req_get.get('source'))
182 source = str2bool(req_get.get('source'))
183 closed = str2bool(req_get.get('closed'))
183 closed = str2bool(req_get.get('closed'))
184 my = str2bool(req_get.get('my'))
184 my = str2bool(req_get.get('my'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187
187
188 filter_type = 'awaiting_review' if awaiting_review \
188 filter_type = 'awaiting_review' if awaiting_review \
189 else 'awaiting_my_review' if awaiting_my_review \
189 else 'awaiting_my_review' if awaiting_my_review \
190 else None
190 else None
191
191
192 opened_by = None
192 opened_by = None
193 if my:
193 if my:
194 opened_by = [self._rhodecode_user.user_id]
194 opened_by = [self._rhodecode_user.user_id]
195
195
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 if closed:
197 if closed:
198 statuses = [PullRequest.STATUS_CLOSED]
198 statuses = [PullRequest.STATUS_CLOSED]
199
199
200 data = self._get_pull_requests_list(
200 data = self._get_pull_requests_list(
201 repo_name=self.db_repo_name, source=source,
201 repo_name=self.db_repo_name, source=source,
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203
203
204 return data
204 return data
205
205
206 def _is_diff_cache_enabled(self, target_repo):
206 def _is_diff_cache_enabled(self, target_repo):
207 caching_enabled = self._get_general_setting(
207 caching_enabled = self._get_general_setting(
208 target_repo, 'rhodecode_diff_cache')
208 target_repo, 'rhodecode_diff_cache')
209 log.debug('Diff caching enabled: %s', caching_enabled)
209 log.debug('Diff caching enabled: %s', caching_enabled)
210 return caching_enabled
210 return caching_enabled
211
211
212 def _get_diffset(self, source_repo_name, source_repo,
212 def _get_diffset(self, source_repo_name, source_repo,
213 ancestor_commit,
213 ancestor_commit,
214 source_ref_id, target_ref_id,
214 source_ref_id, target_ref_id,
215 target_commit, source_commit, diff_limit, file_limit,
215 target_commit, source_commit, diff_limit, file_limit,
216 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
216 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
217
217
218 target_commit_final = target_commit
218 target_commit_final = target_commit
219 source_commit_final = source_commit
219 source_commit_final = source_commit
220
220
221 if use_ancestor:
221 if use_ancestor:
222 # we might not want to use it for versions
222 # we might not want to use it for versions
223 target_ref_id = ancestor_commit.raw_id
223 target_ref_id = ancestor_commit.raw_id
224 target_commit_final = ancestor_commit
224 target_commit_final = ancestor_commit
225
225
226 vcs_diff = PullRequestModel().get_diff(
226 vcs_diff = PullRequestModel().get_diff(
227 source_repo, source_ref_id, target_ref_id,
227 source_repo, source_ref_id, target_ref_id,
228 hide_whitespace_changes, diff_context)
228 hide_whitespace_changes, diff_context)
229
229
230 diff_processor = diffs.DiffProcessor(
230 diff_processor = diffs.DiffProcessor(
231 vcs_diff, format='newdiff', diff_limit=diff_limit,
231 vcs_diff, format='newdiff', diff_limit=diff_limit,
232 file_limit=file_limit, show_full_diff=fulldiff)
232 file_limit=file_limit, show_full_diff=fulldiff)
233
233
234 _parsed = diff_processor.prepare()
234 _parsed = diff_processor.prepare()
235
235
236 diffset = codeblocks.DiffSet(
236 diffset = codeblocks.DiffSet(
237 repo_name=self.db_repo_name,
237 repo_name=self.db_repo_name,
238 source_repo_name=source_repo_name,
238 source_repo_name=source_repo_name,
239 source_node_getter=codeblocks.diffset_node_getter(target_commit_final),
239 source_node_getter=codeblocks.diffset_node_getter(target_commit_final),
240 target_node_getter=codeblocks.diffset_node_getter(source_commit_final),
240 target_node_getter=codeblocks.diffset_node_getter(source_commit_final),
241 )
241 )
242 diffset = self.path_filter.render_patchset_filtered(
242 diffset = self.path_filter.render_patchset_filtered(
243 diffset, _parsed, target_ref_id, source_ref_id)
243 diffset, _parsed, target_ref_id, source_ref_id)
244
244
245 return diffset
245 return diffset
246
246
247 def _get_range_diffset(self, source_scm, source_repo,
247 def _get_range_diffset(self, source_scm, source_repo,
248 commit1, commit2, diff_limit, file_limit,
248 commit1, commit2, diff_limit, file_limit,
249 fulldiff, hide_whitespace_changes, diff_context):
249 fulldiff, hide_whitespace_changes, diff_context):
250 vcs_diff = source_scm.get_diff(
250 vcs_diff = source_scm.get_diff(
251 commit1, commit2,
251 commit1, commit2,
252 ignore_whitespace=hide_whitespace_changes,
252 ignore_whitespace=hide_whitespace_changes,
253 context=diff_context)
253 context=diff_context)
254
254
255 diff_processor = diffs.DiffProcessor(
255 diff_processor = diffs.DiffProcessor(
256 vcs_diff, format='newdiff', diff_limit=diff_limit,
256 vcs_diff, format='newdiff', diff_limit=diff_limit,
257 file_limit=file_limit, show_full_diff=fulldiff)
257 file_limit=file_limit, show_full_diff=fulldiff)
258
258
259 _parsed = diff_processor.prepare()
259 _parsed = diff_processor.prepare()
260
260
261 diffset = codeblocks.DiffSet(
261 diffset = codeblocks.DiffSet(
262 repo_name=source_repo.repo_name,
262 repo_name=source_repo.repo_name,
263 source_node_getter=codeblocks.diffset_node_getter(commit1),
263 source_node_getter=codeblocks.diffset_node_getter(commit1),
264 target_node_getter=codeblocks.diffset_node_getter(commit2))
264 target_node_getter=codeblocks.diffset_node_getter(commit2))
265
265
266 diffset = self.path_filter.render_patchset_filtered(
266 diffset = self.path_filter.render_patchset_filtered(
267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
268
268
269 return diffset
269 return diffset
270
270
271 def register_comments_vars(self, c, pull_request, versions, include_drafts=True):
271 def register_comments_vars(self, c, pull_request, versions, include_drafts=True):
272 comments_model = CommentsModel()
272 comments_model = CommentsModel()
273
273
274 # GENERAL COMMENTS with versions #
274 # GENERAL COMMENTS with versions #
275 q = comments_model._all_general_comments_of_pull_request(pull_request)
275 q = comments_model._all_general_comments_of_pull_request(pull_request)
276 q = q.order_by(ChangesetComment.comment_id.asc())
276 q = q.order_by(ChangesetComment.comment_id.asc())
277 if not include_drafts:
277 if not include_drafts:
278 q = q.filter(ChangesetComment.draft == false())
278 q = q.filter(ChangesetComment.draft == false())
279 general_comments = q
279 general_comments = q
280
280
281 # pick comments we want to render at current version
281 # pick comments we want to render at current version
282 c.comment_versions = comments_model.aggregate_comments(
282 c.comment_versions = comments_model.aggregate_comments(
283 general_comments, versions, c.at_version_num)
283 general_comments, versions, c.at_version_num)
284
284
285 # INLINE COMMENTS with versions #
285 # INLINE COMMENTS with versions #
286 q = comments_model._all_inline_comments_of_pull_request(pull_request)
286 q = comments_model._all_inline_comments_of_pull_request(pull_request)
287 q = q.order_by(ChangesetComment.comment_id.asc())
287 q = q.order_by(ChangesetComment.comment_id.asc())
288 if not include_drafts:
288 if not include_drafts:
289 q = q.filter(ChangesetComment.draft == false())
289 q = q.filter(ChangesetComment.draft == false())
290 inline_comments = q
290 inline_comments = q
291
291
292 c.inline_versions = comments_model.aggregate_comments(
292 c.inline_versions = comments_model.aggregate_comments(
293 inline_comments, versions, c.at_version_num, inline=True)
293 inline_comments, versions, c.at_version_num, inline=True)
294
294
295 # Comments inline+general
295 # Comments inline+general
296 if c.at_version:
296 if c.at_version:
297 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
297 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
298 c.comments = c.comment_versions[c.at_version_num]['display']
298 c.comments = c.comment_versions[c.at_version_num]['display']
299 else:
299 else:
300 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
300 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
301 c.comments = c.comment_versions[c.at_version_num]['until']
301 c.comments = c.comment_versions[c.at_version_num]['until']
302
302
303 return general_comments, inline_comments
303 return general_comments, inline_comments
304
304
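# A minimal standalone sketch of the bucket selection above: when a version is
# set, the comments aggregated exactly at that version are shown ('display');
# otherwise the comments accumulated so far are shown ('until'). The aggregate
# structure and comment ids below are hypothetical.
comment_versions = {
    None: {'display': [1, 2, 3], 'until': [1, 2, 3]},
    2:    {'display': [2],       'until': [1, 2]},
}
at_version, at_version_num = 2, 2
bucket = 'display' if at_version else 'until'
visible = comment_versions[at_version_num][bucket]         # -> [2]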
305 @LoginRequired()
305 @LoginRequired()
306 @HasRepoPermissionAnyDecorator(
306 @HasRepoPermissionAnyDecorator(
307 'repository.read', 'repository.write', 'repository.admin')
307 'repository.read', 'repository.write', 'repository.admin')
308 def pull_request_show(self):
308 def pull_request_show(self):
309 _ = self.request.translate
309 _ = self.request.translate
310 c = self.load_default_context()
310 c = self.load_default_context()
311
311
312 pull_request = PullRequest.get_or_404(
312 pull_request = PullRequest.get_or_404(
313 self.request.matchdict['pull_request_id'])
313 self.request.matchdict['pull_request_id'])
314 pull_request_id = pull_request.pull_request_id
314 pull_request_id = pull_request.pull_request_id
315
315
316 c.state_progressing = pull_request.is_state_changing()
316 c.state_progressing = pull_request.is_state_changing()
317 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
317 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
318
318
319 _new_state = {
319 _new_state = {
320 'created': PullRequest.STATE_CREATED,
320 'created': PullRequest.STATE_CREATED,
321 }.get(self.request.GET.get('force_state'))
321 }.get(self.request.GET.get('force_state'))
322
322
323 if c.is_super_admin and _new_state:
323 if c.is_super_admin and _new_state:
324 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
324 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
325 h.flash(
325 h.flash(
326 _('Pull Request state was force changed to `{}`').format(_new_state),
326 _('Pull Request state was force changed to `{}`').format(_new_state),
327 category='success')
327 category='success')
328 Session().commit()
328 Session().commit()
329
329
330 raise HTTPFound(h.route_path(
330 raise HTTPFound(h.route_path(
331 'pullrequest_show', repo_name=self.db_repo_name,
331 'pullrequest_show', repo_name=self.db_repo_name,
332 pull_request_id=pull_request_id))
332 pull_request_id=pull_request_id))
333
333
334 version = self.request.GET.get('version')
334 version = self.request.GET.get('version')
335 from_version = self.request.GET.get('from_version') or version
335 from_version = self.request.GET.get('from_version') or version
336 merge_checks = self.request.GET.get('merge_checks')
336 merge_checks = self.request.GET.get('merge_checks')
337 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
337 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
338 force_refresh = str2bool(self.request.GET.get('force_refresh'))
338 force_refresh = str2bool(self.request.GET.get('force_refresh'))
339 c.range_diff_on = self.request.GET.get('range-diff') == "1"
339 c.range_diff_on = self.request.GET.get('range-diff') == "1"
340
340
341 # fetch global flags of ignore ws or context lines
341 # fetch global flags of ignore ws or context lines
342 diff_context = diffs.get_diff_context(self.request)
342 diff_context = diffs.get_diff_context(self.request)
343 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
343 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
344
344
345 (pull_request_latest,
345 (pull_request_latest,
346 pull_request_at_ver,
346 pull_request_at_ver,
347 pull_request_display_obj,
347 pull_request_display_obj,
348 at_version) = PullRequestModel().get_pr_version(
348 at_version) = PullRequestModel().get_pr_version(
349 pull_request_id, version=version)
349 pull_request_id, version=version)
350
350
351 pr_closed = pull_request_latest.is_closed()
351 pr_closed = pull_request_latest.is_closed()
352
352
353 if pr_closed and (version or from_version):
353 if pr_closed and (version or from_version):
354 # do not allow browsing versions for a closed PR
354 # do not allow browsing versions for a closed PR
355 raise HTTPFound(h.route_path(
355 raise HTTPFound(h.route_path(
356 'pullrequest_show', repo_name=self.db_repo_name,
356 'pullrequest_show', repo_name=self.db_repo_name,
357 pull_request_id=pull_request_id))
357 pull_request_id=pull_request_id))
358
358
359 versions = pull_request_display_obj.versions()
359 versions = pull_request_display_obj.versions()
360
361 c.commit_versions = PullRequestModel().pr_commits_versions(versions)
362
360 # used to store per-commit range diffs
363 # used to store per-commit range diffs
361 c.changes = collections.OrderedDict()
364 c.changes = collections.OrderedDict()
362
365
363 c.at_version = at_version
366 c.at_version = at_version
364 c.at_version_num = (at_version
367 c.at_version_num = (at_version
365 if at_version and at_version != PullRequest.LATEST_VER
368 if at_version and at_version != PullRequest.LATEST_VER
366 else None)
369 else None)
367
370
368 c.at_version_index = ChangesetComment.get_index_from_version(
371 c.at_version_index = ChangesetComment.get_index_from_version(
369 c.at_version_num, versions)
372 c.at_version_num, versions)
370
373
371 (prev_pull_request_latest,
374 (prev_pull_request_latest,
372 prev_pull_request_at_ver,
375 prev_pull_request_at_ver,
373 prev_pull_request_display_obj,
376 prev_pull_request_display_obj,
374 prev_at_version) = PullRequestModel().get_pr_version(
377 prev_at_version) = PullRequestModel().get_pr_version(
375 pull_request_id, version=from_version)
378 pull_request_id, version=from_version)
376
379
377 c.from_version = prev_at_version
380 c.from_version = prev_at_version
378 c.from_version_num = (prev_at_version
381 c.from_version_num = (prev_at_version
379 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
382 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
380 else None)
383 else None)
381 c.from_version_index = ChangesetComment.get_index_from_version(
384 c.from_version_index = ChangesetComment.get_index_from_version(
382 c.from_version_num, versions)
385 c.from_version_num, versions)
383
386
384 # define if we're in COMPARE mode or VIEW at version mode
387 # define if we're in COMPARE mode or VIEW at version mode
385 compare = at_version != prev_at_version
388 compare = at_version != prev_at_version
386
389
387 # the repo_name this pull request was opened against,
390 # the repo_name this pull request was opened against,
388 # i.e. the target_repo must match
391 # i.e. the target_repo must match
389 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
392 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
390 log.warning('Mismatch between the current repo: %s, and target %s',
393 log.warning('Mismatch between the current repo: %s, and target %s',
391 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
394 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
392 raise HTTPNotFound()
395 raise HTTPNotFound()
393
396
394 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
397 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
395
398
396 c.pull_request = pull_request_display_obj
399 c.pull_request = pull_request_display_obj
397 c.renderer = pull_request_at_ver.description_renderer or c.renderer
400 c.renderer = pull_request_at_ver.description_renderer or c.renderer
398 c.pull_request_latest = pull_request_latest
401 c.pull_request_latest = pull_request_latest
399
402
400 # inject latest version
403 # inject latest version
401 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
404 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
402 c.versions = versions + [latest_ver]
405 c.versions = versions + [latest_ver]
403
406
404 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
407 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
405 c.allowed_to_change_status = False
408 c.allowed_to_change_status = False
406 c.allowed_to_update = False
409 c.allowed_to_update = False
407 c.allowed_to_merge = False
410 c.allowed_to_merge = False
408 c.allowed_to_delete = False
411 c.allowed_to_delete = False
409 c.allowed_to_comment = False
412 c.allowed_to_comment = False
410 c.allowed_to_close = False
413 c.allowed_to_close = False
411 else:
414 else:
412 can_change_status = PullRequestModel().check_user_change_status(
415 can_change_status = PullRequestModel().check_user_change_status(
413 pull_request_at_ver, self._rhodecode_user)
416 pull_request_at_ver, self._rhodecode_user)
414 c.allowed_to_change_status = can_change_status and not pr_closed
417 c.allowed_to_change_status = can_change_status and not pr_closed
415
418
416 c.allowed_to_update = PullRequestModel().check_user_update(
419 c.allowed_to_update = PullRequestModel().check_user_update(
417 pull_request_latest, self._rhodecode_user) and not pr_closed
420 pull_request_latest, self._rhodecode_user) and not pr_closed
418 c.allowed_to_merge = PullRequestModel().check_user_merge(
421 c.allowed_to_merge = PullRequestModel().check_user_merge(
419 pull_request_latest, self._rhodecode_user) and not pr_closed
422 pull_request_latest, self._rhodecode_user) and not pr_closed
420 c.allowed_to_delete = PullRequestModel().check_user_delete(
423 c.allowed_to_delete = PullRequestModel().check_user_delete(
421 pull_request_latest, self._rhodecode_user) and not pr_closed
424 pull_request_latest, self._rhodecode_user) and not pr_closed
422 c.allowed_to_comment = not pr_closed
425 c.allowed_to_comment = not pr_closed
423 c.allowed_to_close = c.allowed_to_merge and not pr_closed
426 c.allowed_to_close = c.allowed_to_merge and not pr_closed
424
427
425 c.forbid_adding_reviewers = False
428 c.forbid_adding_reviewers = False
426
429
427 if pull_request_latest.reviewer_data and \
430 if pull_request_latest.reviewer_data and \
428 'rules' in pull_request_latest.reviewer_data:
431 'rules' in pull_request_latest.reviewer_data:
429 rules = pull_request_latest.reviewer_data['rules'] or {}
432 rules = pull_request_latest.reviewer_data['rules'] or {}
430 try:
433 try:
431 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
434 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
432 except Exception:
435 except Exception:
433 pass
436 pass
434
437
435 # check merge capabilities
438 # check merge capabilities
436 _merge_check = MergeCheck.validate(
439 _merge_check = MergeCheck.validate(
437 pull_request_latest, auth_user=self._rhodecode_user,
440 pull_request_latest, auth_user=self._rhodecode_user,
438 translator=self.request.translate,
441 translator=self.request.translate,
439 force_shadow_repo_refresh=force_refresh)
442 force_shadow_repo_refresh=force_refresh)
440
443
441 c.pr_merge_errors = _merge_check.error_details
444 c.pr_merge_errors = _merge_check.error_details
442 c.pr_merge_possible = not _merge_check.failed
445 c.pr_merge_possible = not _merge_check.failed
443 c.pr_merge_message = _merge_check.merge_msg
446 c.pr_merge_message = _merge_check.merge_msg
444 c.pr_merge_source_commit = _merge_check.source_commit
447 c.pr_merge_source_commit = _merge_check.source_commit
445 c.pr_merge_target_commit = _merge_check.target_commit
448 c.pr_merge_target_commit = _merge_check.target_commit
446
449
447 c.pr_merge_info = MergeCheck.get_merge_conditions(
450 c.pr_merge_info = MergeCheck.get_merge_conditions(
448 pull_request_latest, translator=self.request.translate)
451 pull_request_latest, translator=self.request.translate)
449
452
450 c.pull_request_review_status = _merge_check.review_status
453 c.pull_request_review_status = _merge_check.review_status
451 if merge_checks:
454 if merge_checks:
452 self.request.override_renderer = \
455 self.request.override_renderer = \
453 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
456 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
454 return self._get_template_context(c)
457 return self._get_template_context(c)
455
458
456 c.reviewers_count = pull_request.reviewers_count
459 c.reviewers_count = pull_request.reviewers_count
457 c.observers_count = pull_request.observers_count
460 c.observers_count = pull_request.observers_count
458
461
459 # reviewers and statuses
462 # reviewers and statuses
460 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
463 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
461 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
464 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
462 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
465 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
463
466
464 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
467 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
465 member_reviewer = h.reviewer_as_json(
468 member_reviewer = h.reviewer_as_json(
466 member, reasons=reasons, mandatory=mandatory,
469 member, reasons=reasons, mandatory=mandatory,
467 role=review_obj.role,
470 role=review_obj.role,
468 user_group=review_obj.rule_user_group_data()
471 user_group=review_obj.rule_user_group_data()
469 )
472 )
470
473
471 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
474 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
472 member_reviewer['review_status'] = current_review_status
475 member_reviewer['review_status'] = current_review_status
473 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
476 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
474 member_reviewer['allowed_to_update'] = c.allowed_to_update
477 member_reviewer['allowed_to_update'] = c.allowed_to_update
475 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
478 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
476
479
477 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
480 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
478
481
479 for observer_obj, member in pull_request_at_ver.observers():
482 for observer_obj, member in pull_request_at_ver.observers():
480 member_observer = h.reviewer_as_json(
483 member_observer = h.reviewer_as_json(
481 member, reasons=[], mandatory=False,
484 member, reasons=[], mandatory=False,
482 role=observer_obj.role,
485 role=observer_obj.role,
483 user_group=observer_obj.rule_user_group_data()
486 user_group=observer_obj.rule_user_group_data()
484 )
487 )
485 member_observer['allowed_to_update'] = c.allowed_to_update
488 member_observer['allowed_to_update'] = c.allowed_to_update
486 c.pull_request_set_observers_data_json['observers'].append(member_observer)
489 c.pull_request_set_observers_data_json['observers'].append(member_observer)
487
490
488 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
491 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
489
492
490 general_comments, inline_comments = \
493 general_comments, inline_comments = \
491 self.register_comments_vars(c, pull_request_latest, versions)
494 self.register_comments_vars(c, pull_request_latest, versions)
492
495
493 # TODOs
496 # TODOs
494 c.unresolved_comments = CommentsModel() \
497 c.unresolved_comments = CommentsModel() \
495 .get_pull_request_unresolved_todos(pull_request_latest)
498 .get_pull_request_unresolved_todos(pull_request_latest)
496 c.resolved_comments = CommentsModel() \
499 c.resolved_comments = CommentsModel() \
497 .get_pull_request_resolved_todos(pull_request_latest)
500 .get_pull_request_resolved_todos(pull_request_latest)
498
501
499 # Drafts
502 # Drafts
500 c.draft_comments = CommentsModel().get_pull_request_drafts(
503 c.draft_comments = CommentsModel().get_pull_request_drafts(
501 self._rhodecode_db_user.user_id,
504 self._rhodecode_db_user.user_id,
502 pull_request_latest)
505 pull_request_latest)
503
506
504 # if a specific version is used, do not show comments made
507 # if a specific version is used, do not show comments made
505 # later than that version
508 # later than that version
506 display_inline_comments = collections.defaultdict(
509 display_inline_comments = collections.defaultdict(
507 lambda: collections.defaultdict(list))
510 lambda: collections.defaultdict(list))
508 for co in inline_comments:
511 for co in inline_comments:
509 if c.at_version_num:
512 if c.at_version_num:
510 # pick comments created up to the given version, so we
513 # pick comments created up to the given version, so we
511 # don't render comments from a higher version
514 # don't render comments from a higher version
512 should_render = co.pull_request_version_id and \
515 should_render = co.pull_request_version_id and \
513 co.pull_request_version_id <= c.at_version_num
516 co.pull_request_version_id <= c.at_version_num
514 else:
517 else:
515 # showing all, for 'latest'
518 # showing all, for 'latest'
516 should_render = True
519 should_render = True
517
520
518 if should_render:
521 if should_render:
519 display_inline_comments[co.f_path][co.line_no].append(co)
522 display_inline_comments[co.f_path][co.line_no].append(co)
520
523
521 # load diff data into the template context; in compare mode the
524 # load diff data into the template context; in compare mode the
522 # diff is calculated from the changes between versions of the PR
525 # diff is calculated from the changes between versions of the PR
523
526
524 source_repo = pull_request_at_ver.source_repo
527 source_repo = pull_request_at_ver.source_repo
525 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
528 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
526
529
527 target_repo = pull_request_at_ver.target_repo
530 target_repo = pull_request_at_ver.target_repo
528 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
531 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
529
532
530 if compare:
533 if compare:
531 # in compare mode, switch the diff base to the latest commit of the previous version
534 # in compare mode, switch the diff base to the latest commit of the previous version
532 target_ref_id = prev_pull_request_display_obj.revisions[0]
535 target_ref_id = prev_pull_request_display_obj.revisions[0]
533
536
534 # even when the PR was opened against a bookmark/branch/tag, we always
537 # even when the PR was opened against a bookmark/branch/tag, we always
535 # convert the ref to a rev, so later bookmark or branch changes have no effect
538 # convert the ref to a rev, so later bookmark or branch changes have no effect
536 c.source_ref_type = 'rev'
539 c.source_ref_type = 'rev'
537 c.source_ref = source_ref_id
540 c.source_ref = source_ref_id
538
541
539 c.target_ref_type = 'rev'
542 c.target_ref_type = 'rev'
540 c.target_ref = target_ref_id
543 c.target_ref = target_ref_id
541
544
542 c.source_repo = source_repo
545 c.source_repo = source_repo
543 c.target_repo = target_repo
546 c.target_repo = target_repo
544
547
545 c.commit_ranges = []
548 c.commit_ranges = []
546 source_commit = EmptyCommit()
549 source_commit = EmptyCommit()
547 target_commit = EmptyCommit()
550 target_commit = EmptyCommit()
548 c.missing_requirements = False
551 c.missing_requirements = False
549
552
550 source_scm = source_repo.scm_instance()
553 source_scm = source_repo.scm_instance()
551 target_scm = target_repo.scm_instance()
554 target_scm = target_repo.scm_instance()
552
555
553 shadow_scm = None
556 shadow_scm = None
554 try:
557 try:
555 shadow_scm = pull_request_latest.get_shadow_repo()
558 shadow_scm = pull_request_latest.get_shadow_repo()
556 except Exception:
559 except Exception:
557 log.debug('Failed to get shadow repo', exc_info=True)
560 log.debug('Failed to get shadow repo', exc_info=True)
558 # start with the existing source_repo, and prefer the shadow
561 # start with the existing source_repo, and prefer the shadow
559 # repo if we can obtain one
562 # repo if we can obtain one
560 commits_source_repo = source_scm
563 commits_source_repo = source_scm
561 if shadow_scm:
564 if shadow_scm:
562 commits_source_repo = shadow_scm
565 commits_source_repo = shadow_scm
563
566
564 c.commits_source_repo = commits_source_repo
567 c.commits_source_repo = commits_source_repo
565 c.ancestor = None # set it to None, to hide it from PR view
568 c.ancestor = None # set it to None, to hide it from PR view
566
569
567 # empty version means latest, so we keep this to prevent
570 # empty version means latest, so we keep this to prevent
568 # double caching
571 # double caching
569 version_normalized = version or PullRequest.LATEST_VER
572 version_normalized = version or PullRequest.LATEST_VER
570 from_version_normalized = from_version or PullRequest.LATEST_VER
573 from_version_normalized = from_version or PullRequest.LATEST_VER
571
574
572 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
575 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
573 cache_file_path = diff_cache_exist(
576 cache_file_path = diff_cache_exist(
574 cache_path, 'pull_request', pull_request_id, version_normalized,
577 cache_path, 'pull_request', pull_request_id, version_normalized,
575 from_version_normalized, source_ref_id, target_ref_id,
578 from_version_normalized, source_ref_id, target_ref_id,
576 hide_whitespace_changes, diff_context, c.fulldiff)
579 hide_whitespace_changes, diff_context, c.fulldiff)
577
580
578 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
581 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
579 force_recache = self.get_recache_flag()
582 force_recache = self.get_recache_flag()
580
583
581 cached_diff = None
584 cached_diff = None
582 if caching_enabled:
585 if caching_enabled:
583 cached_diff = load_cached_diff(cache_file_path)
586 cached_diff = load_cached_diff(cache_file_path)
584
587
585 has_proper_commit_cache = (
588 has_proper_commit_cache = (
586 cached_diff and cached_diff.get('commits')
589 cached_diff and cached_diff.get('commits')
587 and len(cached_diff.get('commits', [])) == 5
590 and len(cached_diff.get('commits', [])) == 5
588 and cached_diff.get('commits')[0]
591 and cached_diff.get('commits')[0]
589 and cached_diff.get('commits')[3])
592 and cached_diff.get('commits')[3])
590
593
591 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
594 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
592 diff_commit_cache = \
595 diff_commit_cache = \
593 (ancestor_commit, commit_cache, missing_requirements,
596 (ancestor_commit, commit_cache, missing_requirements,
594 source_commit, target_commit) = cached_diff['commits']
597 source_commit, target_commit) = cached_diff['commits']
595 else:
598 else:
596 # NOTE(marcink): we may need to reach potentially unreachable commits when a PR has
599 # NOTE(marcink): we may need to reach potentially unreachable commits when a PR has
597 # merge errors, which result in hidden commits in the shadow repo.
600 # merge errors, which result in hidden commits in the shadow repo.
598 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
601 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
599 and _merge_check.merge_response
602 and _merge_check.merge_response
600 maybe_unreachable = maybe_unreachable \
603 maybe_unreachable = maybe_unreachable \
601 and _merge_check.merge_response.metadata.get('unresolved_files')
604 and _merge_check.merge_response.metadata.get('unresolved_files')
602 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
605 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
603 diff_commit_cache = \
606 diff_commit_cache = \
604 (ancestor_commit, commit_cache, missing_requirements,
607 (ancestor_commit, commit_cache, missing_requirements,
605 source_commit, target_commit) = self.get_commits(
608 source_commit, target_commit) = self.get_commits(
606 commits_source_repo,
609 commits_source_repo,
607 pull_request_at_ver,
610 pull_request_at_ver,
608 source_commit,
611 source_commit,
609 source_ref_id,
612 source_ref_id,
610 source_scm,
613 source_scm,
611 target_commit,
614 target_commit,
612 target_ref_id,
615 target_ref_id,
613 target_scm,
616 target_scm,
614 maybe_unreachable=maybe_unreachable)
617 maybe_unreachable=maybe_unreachable)
615
618
616 # register our commit range
619 # register our commit range
617 for comm in commit_cache.values():
620 for comm in commit_cache.values():
618 c.commit_ranges.append(comm)
621 c.commit_ranges.append(comm)
619
622
620 c.missing_requirements = missing_requirements
623 c.missing_requirements = missing_requirements
621 c.ancestor_commit = ancestor_commit
624 c.ancestor_commit = ancestor_commit
622 c.statuses = source_repo.statuses(
625 c.statuses = source_repo.statuses(
623 [x.raw_id for x in c.commit_ranges])
626 [x.raw_id for x in c.commit_ranges])
624
627
625 # auto collapse if we have more commits than the limit
628 # auto collapse if we have more commits than the limit
626 collapse_limit = diffs.DiffProcessor._collapse_commits_over
629 collapse_limit = diffs.DiffProcessor._collapse_commits_over
627 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
630 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
628 c.compare_mode = compare
631 c.compare_mode = compare
629
632
630 # diff_limit is the old behavior: if that limit is applied, the whole diff
633 # diff_limit is the old behavior: if that limit is applied, the whole diff
631 # is cut off, otherwise big files are just hidden
634 # is cut off, otherwise big files are just hidden
632 # from the front-end
635 # from the front-end
633 diff_limit = c.visual.cut_off_limit_diff
636 diff_limit = c.visual.cut_off_limit_diff
634 file_limit = c.visual.cut_off_limit_file
637 file_limit = c.visual.cut_off_limit_file
635
638
636 c.missing_commits = False
639 c.missing_commits = False
637 if (c.missing_requirements
640 if (c.missing_requirements
638 or isinstance(source_commit, EmptyCommit)
641 or isinstance(source_commit, EmptyCommit)
639 or source_commit == target_commit):
642 or source_commit == target_commit):
640
643
641 c.missing_commits = True
644 c.missing_commits = True
642 else:
645 else:
643 c.inline_comments = display_inline_comments
646 c.inline_comments = display_inline_comments
644
647
645 use_ancestor = True
648 use_ancestor = True
646 if from_version_normalized != version_normalized:
649 if from_version_normalized != version_normalized:
647 use_ancestor = False
650 use_ancestor = False
648
651
649 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
652 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
650 if not force_recache and has_proper_diff_cache:
653 if not force_recache and has_proper_diff_cache:
651 c.diffset = cached_diff['diff']
654 c.diffset = cached_diff['diff']
652 else:
655 else:
653 try:
656 try:
654 c.diffset = self._get_diffset(
657 c.diffset = self._get_diffset(
655 c.source_repo.repo_name, commits_source_repo,
658 c.source_repo.repo_name, commits_source_repo,
656 c.ancestor_commit,
659 c.ancestor_commit,
657 source_ref_id, target_ref_id,
660 source_ref_id, target_ref_id,
658 target_commit, source_commit,
661 target_commit, source_commit,
659 diff_limit, file_limit, c.fulldiff,
662 diff_limit, file_limit, c.fulldiff,
660 hide_whitespace_changes, diff_context,
663 hide_whitespace_changes, diff_context,
661 use_ancestor=use_ancestor
664 use_ancestor=use_ancestor
662 )
665 )
663
666
664 # save cached diff
667 # save cached diff
665 if caching_enabled:
668 if caching_enabled:
666 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
669 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
667 except CommitDoesNotExistError:
670 except CommitDoesNotExistError:
668 log.exception('Failed to generate diffset')
671 log.exception('Failed to generate diffset')
669 c.missing_commits = True
672 c.missing_commits = True
670
673
671 if not c.missing_commits:
674 if not c.missing_commits:
672
675
673 c.limited_diff = c.diffset.limited_diff
676 c.limited_diff = c.diffset.limited_diff
674
677
675 # calculate removed files that are bound to comments
678 # calculate removed files that are bound to comments
676 comment_deleted_files = [
679 comment_deleted_files = [
677 fname for fname in display_inline_comments
680 fname for fname in display_inline_comments
678 if fname not in c.diffset.file_stats]
681 if fname not in c.diffset.file_stats]
679
682
680 c.deleted_files_comments = collections.defaultdict(dict)
683 c.deleted_files_comments = collections.defaultdict(dict)
681 for fname, per_line_comments in display_inline_comments.items():
684 for fname, per_line_comments in display_inline_comments.items():
682 if fname in comment_deleted_files:
685 if fname in comment_deleted_files:
683 c.deleted_files_comments[fname]['stats'] = 0
686 c.deleted_files_comments[fname]['stats'] = 0
684 c.deleted_files_comments[fname]['comments'] = list()
687 c.deleted_files_comments[fname]['comments'] = list()
685 for lno, comments in per_line_comments.items():
688 for lno, comments in per_line_comments.items():
686 c.deleted_files_comments[fname]['comments'].extend(comments)
689 c.deleted_files_comments[fname]['comments'].extend(comments)
687
690
688 # maybe calculate the range diff
691 # maybe calculate the range diff
689 if c.range_diff_on:
692 if c.range_diff_on:
690 # TODO(marcink): set whitespace/context
693 # TODO(marcink): set whitespace/context
691 context_lcl = 3
694 context_lcl = 3
692 ign_whitespace_lcl = False
695 ign_whitespace_lcl = False
693
696
694 for commit in c.commit_ranges:
697 for commit in c.commit_ranges:
695 commit2 = commit
698 commit2 = commit
696 commit1 = commit.first_parent
699 commit1 = commit.first_parent
697
700
698 range_diff_cache_file_path = diff_cache_exist(
701 range_diff_cache_file_path = diff_cache_exist(
699 cache_path, 'diff', commit.raw_id,
702 cache_path, 'diff', commit.raw_id,
700 ign_whitespace_lcl, context_lcl, c.fulldiff)
703 ign_whitespace_lcl, context_lcl, c.fulldiff)
701
704
702 cached_diff = None
705 cached_diff = None
703 if caching_enabled:
706 if caching_enabled:
704 cached_diff = load_cached_diff(range_diff_cache_file_path)
707 cached_diff = load_cached_diff(range_diff_cache_file_path)
705
708
706 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
709 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
707 if not force_recache and has_proper_diff_cache:
710 if not force_recache and has_proper_diff_cache:
708 diffset = cached_diff['diff']
711 diffset = cached_diff['diff']
709 else:
712 else:
710 diffset = self._get_range_diffset(
713 diffset = self._get_range_diffset(
711 commits_source_repo, source_repo,
714 commits_source_repo, source_repo,
712 commit1, commit2, diff_limit, file_limit,
715 commit1, commit2, diff_limit, file_limit,
713 c.fulldiff, ign_whitespace_lcl, context_lcl
716 c.fulldiff, ign_whitespace_lcl, context_lcl
714 )
717 )
715
718
716 # save cached diff
719 # save cached diff
717 if caching_enabled:
720 if caching_enabled:
718 cache_diff(range_diff_cache_file_path, diffset, None)
721 cache_diff(range_diff_cache_file_path, diffset, None)
719
722
720 c.changes[commit.raw_id] = diffset
723 c.changes[commit.raw_id] = diffset
721
724
722 # this is a hack to display links properly: when creating a PR, the
725 # this is a hack to display links properly: when creating a PR, the
723 # compare view and others use a different notation, and
726 # compare view and others use a different notation, and
724 # compare_commits.mako renders links based on the target_repo.
727 # compare_commits.mako renders links based on the target_repo.
725 # We need to swap that here to generate it properly on the html side
728 # We need to swap that here to generate it properly on the html side
726 c.target_repo = c.source_repo
729 c.target_repo = c.source_repo
727
730
728 c.commit_statuses = ChangesetStatus.STATUSES
731 c.commit_statuses = ChangesetStatus.STATUSES
729
732
730 c.show_version_changes = not pr_closed
733 c.show_version_changes = not pr_closed
731 if c.show_version_changes:
734 if c.show_version_changes:
732 cur_obj = pull_request_at_ver
735 cur_obj = pull_request_at_ver
733 prev_obj = prev_pull_request_at_ver
736 prev_obj = prev_pull_request_at_ver
734
737
735 old_commit_ids = prev_obj.revisions
738 old_commit_ids = prev_obj.revisions
736 new_commit_ids = cur_obj.revisions
739 new_commit_ids = cur_obj.revisions
737 commit_changes = PullRequestModel()._calculate_commit_id_changes(
740 commit_changes = PullRequestModel()._calculate_commit_id_changes(
738 old_commit_ids, new_commit_ids)
741 old_commit_ids, new_commit_ids)
739 c.commit_changes_summary = commit_changes
742 c.commit_changes_summary = commit_changes
740
743
741 # calculate the diff for commits between versions
744 # calculate the diff for commits between versions
742 c.commit_changes = []
745 c.commit_changes = []
743
746
744 def mark(cs, fw):
747 def mark(cs, fw):
745 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
748 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
746
749
747 for c_type, raw_id in mark(commit_changes.added, 'a') \
750 for c_type, raw_id in mark(commit_changes.added, 'a') \
748 + mark(commit_changes.removed, 'r') \
751 + mark(commit_changes.removed, 'r') \
749 + mark(commit_changes.common, 'c'):
752 + mark(commit_changes.common, 'c'):
750
753
751 if raw_id in commit_cache:
754 if raw_id in commit_cache:
752 commit = commit_cache[raw_id]
755 commit = commit_cache[raw_id]
753 else:
756 else:
754 try:
757 try:
755 commit = commits_source_repo.get_commit(raw_id)
758 commit = commits_source_repo.get_commit(raw_id)
756 except CommitDoesNotExistError:
759 except CommitDoesNotExistError:
757 # in case extraction fails, still use a "dummy" commit
760 # in case extraction fails, still use a "dummy" commit
758 # for display in the commit diff
761 # for display in the commit diff
759 commit = h.AttributeDict(
762 commit = h.AttributeDict(
760 {'raw_id': raw_id,
763 {'raw_id': raw_id,
761 'message': 'EMPTY or MISSING COMMIT'})
764 'message': 'EMPTY or MISSING COMMIT'})
762 c.commit_changes.append([c_type, commit])
765 c.commit_changes.append([c_type, commit])
763
766
764 # current user review statuses for each version
767 # current user review statuses for each version
765 c.review_versions = {}
768 c.review_versions = {}
766 is_reviewer = PullRequestModel().is_user_reviewer(
769 is_reviewer = PullRequestModel().is_user_reviewer(
767 pull_request, self._rhodecode_user)
770 pull_request, self._rhodecode_user)
768 if is_reviewer:
771 if is_reviewer:
769 for co in general_comments:
772 for co in general_comments:
770 if co.author.user_id == self._rhodecode_user.user_id:
773 if co.author.user_id == self._rhodecode_user.user_id:
771 status = co.status_change
774 status = co.status_change
772 if status:
775 if status:
773 _ver_pr = status[0].comment.pull_request_version_id
776 _ver_pr = status[0].comment.pull_request_version_id
774 c.review_versions[_ver_pr] = status[0]
777 c.review_versions[_ver_pr] = status[0]
775
778
776 return self._get_template_context(c)
779 return self._get_template_context(c)
777
780
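# A standalone sketch of the mark() helper defined inside pull_request_show()
# above: zipping an empty list against the commit ids pads every entry with the
# flag, tagging each id as added ('a'), removed ('r') or common ('c'). The
# original goes through the Python 2 izip_longest via h.itertools; zip_longest
# behaves the same way.
from itertools import zip_longest

def mark(commit_ids, flag):
    return list(zip_longest([], commit_ids, fillvalue=flag))

# mark(['deadbeef', 'cafebabe'], 'a') -> [('a', 'deadbeef'), ('a', 'cafebabe')]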
778 def get_commits(
781 def get_commits(
779 self, commits_source_repo, pull_request_at_ver, source_commit,
782 self, commits_source_repo, pull_request_at_ver, source_commit,
780 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
783 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
781 maybe_unreachable=False):
784 maybe_unreachable=False):
782
785
783 commit_cache = collections.OrderedDict()
786 commit_cache = collections.OrderedDict()
784 missing_requirements = False
787 missing_requirements = False
785
788
786 try:
789 try:
787 pre_load = ["author", "date", "message", "branch", "parents"]
790 pre_load = ["author", "date", "message", "branch", "parents"]
788
791
789 pull_request_commits = pull_request_at_ver.revisions
792 pull_request_commits = pull_request_at_ver.revisions
790 log.debug('Loading %s commits from %s',
793 log.debug('Loading %s commits from %s',
791 len(pull_request_commits), commits_source_repo)
794 len(pull_request_commits), commits_source_repo)
792
795
793 for rev in pull_request_commits:
796 for rev in pull_request_commits:
794 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
797 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
795 maybe_unreachable=maybe_unreachable)
798 maybe_unreachable=maybe_unreachable)
796 commit_cache[comm.raw_id] = comm
799 commit_cache[comm.raw_id] = comm
797
800
798 # Order here matters: we first need to get the target, and then
801 # Order here matters: we first need to get the target, and then
799 # the source
802 # the source
800 target_commit = commits_source_repo.get_commit(
803 target_commit = commits_source_repo.get_commit(
801 commit_id=safe_str(target_ref_id))
804 commit_id=safe_str(target_ref_id))
802
805
803 source_commit = commits_source_repo.get_commit(
806 source_commit = commits_source_repo.get_commit(
804 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
807 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
805 except CommitDoesNotExistError:
808 except CommitDoesNotExistError:
806 log.warning('Failed to get commit from `{}` repo'.format(
809 log.warning('Failed to get commit from `{}` repo'.format(
807 commits_source_repo), exc_info=True)
810 commits_source_repo), exc_info=True)
808 except RepositoryRequirementError:
811 except RepositoryRequirementError:
809 log.warning('Failed to get all required data from repo', exc_info=True)
812 log.warning('Failed to get all required data from repo', exc_info=True)
810 missing_requirements = True
813 missing_requirements = True
811
814
812 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
815 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
813
816
814 try:
817 try:
815 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
818 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
816 except Exception:
819 except Exception:
817 ancestor_commit = None
820 ancestor_commit = None
818
821
819 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
822 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
820
823
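# A standalone sketch of the check pull_request_show() above applies before
# trusting a cached copy of this 5-tuple: the tuple must be complete, and both
# the ancestor commit (index 0) and the source commit (index 3) must be
# present. The cached_diff layout below is hypothetical.
def commit_cache_usable(cached_diff):
    commits = (cached_diff or {}).get('commits') or []
    return len(commits) == 5 and bool(commits[0]) and bool(commits[3])

# commit_cache_usable({'commits': [None, {}, False, None, None]}) -> False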
821 def assure_not_empty_repo(self):
824 def assure_not_empty_repo(self):
822 _ = self.request.translate
825 _ = self.request.translate
823
826
824 try:
827 try:
825 self.db_repo.scm_instance().get_commit()
828 self.db_repo.scm_instance().get_commit()
826 except EmptyRepositoryError:
829 except EmptyRepositoryError:
827 h.flash(h.literal(_('There are no commits yet')),
830 h.flash(h.literal(_('There are no commits yet')),
828 category='warning')
831 category='warning')
829 raise HTTPFound(
832 raise HTTPFound(
830 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
833 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
831
834
832 @LoginRequired()
835 @LoginRequired()
833 @NotAnonymous()
836 @NotAnonymous()
834 @HasRepoPermissionAnyDecorator(
837 @HasRepoPermissionAnyDecorator(
835 'repository.read', 'repository.write', 'repository.admin')
838 'repository.read', 'repository.write', 'repository.admin')
836 def pull_request_new(self):
839 def pull_request_new(self):
837 _ = self.request.translate
840 _ = self.request.translate
838 c = self.load_default_context()
841 c = self.load_default_context()
839
842
840 self.assure_not_empty_repo()
843 self.assure_not_empty_repo()
841 source_repo = self.db_repo
844 source_repo = self.db_repo
842
845
843 commit_id = self.request.GET.get('commit')
846 commit_id = self.request.GET.get('commit')
844 branch_ref = self.request.GET.get('branch')
847 branch_ref = self.request.GET.get('branch')
845 bookmark_ref = self.request.GET.get('bookmark')
848 bookmark_ref = self.request.GET.get('bookmark')
846
849
847 try:
850 try:
848 source_repo_data = PullRequestModel().generate_repo_data(
851 source_repo_data = PullRequestModel().generate_repo_data(
849 source_repo, commit_id=commit_id,
852 source_repo, commit_id=commit_id,
850 branch=branch_ref, bookmark=bookmark_ref,
853 branch=branch_ref, bookmark=bookmark_ref,
851 translator=self.request.translate)
854 translator=self.request.translate)
852 except CommitDoesNotExistError as e:
855 except CommitDoesNotExistError as e:
853 log.exception(e)
856 log.exception(e)
854 h.flash(_('Commit does not exist'), 'error')
857 h.flash(_('Commit does not exist'), 'error')
855 raise HTTPFound(
858 raise HTTPFound(
856 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
859 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
857
860
858 default_target_repo = source_repo
861 default_target_repo = source_repo
859
862
860 if source_repo.parent and c.has_origin_repo_read_perm:
863 if source_repo.parent and c.has_origin_repo_read_perm:
861 parent_vcs_obj = source_repo.parent.scm_instance()
864 parent_vcs_obj = source_repo.parent.scm_instance()
862 if parent_vcs_obj and not parent_vcs_obj.is_empty():
865 if parent_vcs_obj and not parent_vcs_obj.is_empty():
863 # change default if we have a parent repo
866 # change default if we have a parent repo
864 default_target_repo = source_repo.parent
867 default_target_repo = source_repo.parent
865
868
866 target_repo_data = PullRequestModel().generate_repo_data(
869 target_repo_data = PullRequestModel().generate_repo_data(
867 default_target_repo, translator=self.request.translate)
870 default_target_repo, translator=self.request.translate)
868
871
869 selected_source_ref = source_repo_data['refs']['selected_ref']
872 selected_source_ref = source_repo_data['refs']['selected_ref']
870 title_source_ref = ''
873 title_source_ref = ''
871 if selected_source_ref:
874 if selected_source_ref:
872 title_source_ref = selected_source_ref.split(':', 2)[1]
875 title_source_ref = selected_source_ref.split(':', 2)[1]
873 c.default_title = PullRequestModel().generate_pullrequest_title(
876 c.default_title = PullRequestModel().generate_pullrequest_title(
874 source=source_repo.repo_name,
877 source=source_repo.repo_name,
875 source_ref=title_source_ref,
878 source_ref=title_source_ref,
876 target=default_target_repo.repo_name
879 target=default_target_repo.repo_name
877 )
880 )
878
881
879 c.default_repo_data = {
882 c.default_repo_data = {
880 'source_repo_name': source_repo.repo_name,
883 'source_repo_name': source_repo.repo_name,
881 'source_refs_json': json.dumps(source_repo_data),
884 'source_refs_json': json.dumps(source_repo_data),
882 'target_repo_name': default_target_repo.repo_name,
885 'target_repo_name': default_target_repo.repo_name,
883 'target_refs_json': json.dumps(target_repo_data),
886 'target_refs_json': json.dumps(target_repo_data),
884 }
887 }
885 c.default_source_ref = selected_source_ref
888 c.default_source_ref = selected_source_ref
886
889
887 return self._get_template_context(c)
890 return self._get_template_context(c)
888
891
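# A minimal sketch of how the title source ref is derived in
# pull_request_new() above, assuming the selected ref string has the form
# 'type:name:commit_id' (values below are hypothetical):
selected_source_ref = 'branch:feature-x:a1b2c3d4'
title_source_ref = selected_source_ref.split(':', 2)[1]    # -> 'feature-x'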
889 @LoginRequired()
892 @LoginRequired()
890 @NotAnonymous()
893 @NotAnonymous()
891 @HasRepoPermissionAnyDecorator(
894 @HasRepoPermissionAnyDecorator(
892 'repository.read', 'repository.write', 'repository.admin')
895 'repository.read', 'repository.write', 'repository.admin')
893 def pull_request_repo_refs(self):
896 def pull_request_repo_refs(self):
894 self.load_default_context()
897 self.load_default_context()
895 target_repo_name = self.request.matchdict['target_repo_name']
898 target_repo_name = self.request.matchdict['target_repo_name']
896 repo = Repository.get_by_repo_name(target_repo_name)
899 repo = Repository.get_by_repo_name(target_repo_name)
897 if not repo:
900 if not repo:
898 raise HTTPNotFound()
901 raise HTTPNotFound()
899
902
900 target_perm = HasRepoPermissionAny(
903 target_perm = HasRepoPermissionAny(
901 'repository.read', 'repository.write', 'repository.admin')(
904 'repository.read', 'repository.write', 'repository.admin')(
902 target_repo_name)
905 target_repo_name)
903 if not target_perm:
906 if not target_perm:
904 raise HTTPNotFound()
907 raise HTTPNotFound()
905
908
906 return PullRequestModel().generate_repo_data(
909 return PullRequestModel().generate_repo_data(
907 repo, translator=self.request.translate)
910 repo, translator=self.request.translate)
908
911
909 @LoginRequired()
912 @LoginRequired()
910 @NotAnonymous()
913 @NotAnonymous()
911 @HasRepoPermissionAnyDecorator(
914 @HasRepoPermissionAnyDecorator(
912 'repository.read', 'repository.write', 'repository.admin')
915 'repository.read', 'repository.write', 'repository.admin')
913 def pullrequest_repo_targets(self):
916 def pullrequest_repo_targets(self):
914 _ = self.request.translate
917 _ = self.request.translate
915 filter_query = self.request.GET.get('query')
918 filter_query = self.request.GET.get('query')
916
919
917 # get the parents
920 # get the parents
918 parent_target_repos = []
921 parent_target_repos = []
919 if self.db_repo.parent:
922 if self.db_repo.parent:
920 parents_query = Repository.query() \
923 parents_query = Repository.query() \
921 .order_by(func.length(Repository.repo_name)) \
924 .order_by(func.length(Repository.repo_name)) \
922 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
925 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
923
926
924 if filter_query:
927 if filter_query:
925 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
928 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
926 parents_query = parents_query.filter(
929 parents_query = parents_query.filter(
927 Repository.repo_name.ilike(ilike_expression))
930 Repository.repo_name.ilike(ilike_expression))
928 parents = parents_query.limit(20).all()
931 parents = parents_query.limit(20).all()
929
932
930 for parent in parents:
933 for parent in parents:
931 parent_vcs_obj = parent.scm_instance()
934 parent_vcs_obj = parent.scm_instance()
932 if parent_vcs_obj and not parent_vcs_obj.is_empty():
935 if parent_vcs_obj and not parent_vcs_obj.is_empty():
933 parent_target_repos.append(parent)
936 parent_target_repos.append(parent)
934
937
935 # get other forks, and repo itself
938 # get other forks, and repo itself
936 query = Repository.query() \
939 query = Repository.query() \
937 .order_by(func.length(Repository.repo_name)) \
940 .order_by(func.length(Repository.repo_name)) \
938 .filter(
941 .filter(
939 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
942 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
940 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
943 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
941 ) \
944 ) \
942 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
945 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
943
946
944 if filter_query:
947 if filter_query:
945 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
948 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
946 query = query.filter(Repository.repo_name.ilike(ilike_expression))
949 query = query.filter(Repository.repo_name.ilike(ilike_expression))
947
950
948 limit = max(20 - len(parent_target_repos), 5) # not less than 5
951 limit = max(20 - len(parent_target_repos), 5) # not less than 5
949 target_repos = query.limit(limit).all()
952 target_repos = query.limit(limit).all()
950
953
951 all_target_repos = target_repos + parent_target_repos
954 all_target_repos = target_repos + parent_target_repos
952
955
953 repos = []
956 repos = []
954 # This checks permissions on the repositories
957 # This checks permissions on the repositories
955 for obj in ScmModel().get_repos(all_target_repos):
958 for obj in ScmModel().get_repos(all_target_repos):
956 repos.append({
959 repos.append({
957 'id': obj['name'],
960 'id': obj['name'],
958 'text': obj['name'],
961 'text': obj['name'],
959 'type': 'repo',
962 'type': 'repo',
960 'repo_id': obj['dbrepo']['repo_id'],
963 'repo_id': obj['dbrepo']['repo_id'],
961 'repo_type': obj['dbrepo']['repo_type'],
964 'repo_type': obj['dbrepo']['repo_type'],
962 'private': obj['dbrepo']['private'],
965 'private': obj['dbrepo']['private'],
963
966
964 })
967 })
965
968
966 data = {
969 data = {
967 'more': False,
970 'more': False,
968 'results': [{
971 'results': [{
969 'text': _('Repositories'),
972 'text': _('Repositories'),
970 'children': repos
973 'children': repos
971 }] if repos else []
974 }] if repos else []
972 }
975 }
973 return data
976 return data
974
977
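# A sketch of the payload shape pullrequest_repo_targets() returns; the
# repository values below are hypothetical.
example_payload = {
    'more': False,
    'results': [{
        'text': 'Repositories',
        'children': [
            {'id': 'acme/backend', 'text': 'acme/backend', 'type': 'repo',
             'repo_id': 42, 'repo_type': 'git', 'private': False},
        ],
    }],
}
# 'results' is an empty list when no target repositories are found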
975 @classmethod
978 @classmethod
976 def get_comment_ids(cls, post_data):
979 def get_comment_ids(cls, post_data):
977 return filter(lambda e: e > 0, map(safe_int, aslist(post_data.get('comments'), ',')))
980 return filter(lambda e: e > 0, map(safe_int, aslist(post_data.get('comments'), ',')))
978
981
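# A standalone sketch of what get_comment_ids() does with the POSTed
# 'comments' field: split on commas, convert each entry to an int, and keep
# only positive ids (assuming safe_int maps non-numeric entries to something
# falsy or non-positive).
def parse_comment_ids(raw):
    ids = []
    for part in (raw or '').split(','):
        part = part.strip()
        if part.isdigit() and int(part) > 0:
            ids.append(int(part))
    return ids

# parse_comment_ids('3, 7, abc, -1') -> [3, 7]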
979 @LoginRequired()
982 @LoginRequired()
980 @NotAnonymous()
983 @NotAnonymous()
981 @HasRepoPermissionAnyDecorator(
984 @HasRepoPermissionAnyDecorator(
982 'repository.read', 'repository.write', 'repository.admin')
985 'repository.read', 'repository.write', 'repository.admin')
983 def pullrequest_comments(self):
986 def pullrequest_comments(self):
984 self.load_default_context()
987 self.load_default_context()
985
988
986 pull_request = PullRequest.get_or_404(
989 pull_request = PullRequest.get_or_404(
987 self.request.matchdict['pull_request_id'])
990 self.request.matchdict['pull_request_id'])
988 pull_request_id = pull_request.pull_request_id
991 pull_request_id = pull_request.pull_request_id
989 version = self.request.GET.get('version')
992 version = self.request.GET.get('version')
990
993
991 _render = self.request.get_partial_renderer(
994 _render = self.request.get_partial_renderer(
992 'rhodecode:templates/base/sidebar.mako')
995 'rhodecode:templates/base/sidebar.mako')
993 c = _render.get_call_context()
996 c = _render.get_call_context()
994
997
995 (pull_request_latest,
998 (pull_request_latest,
996 pull_request_at_ver,
999 pull_request_at_ver,
997 pull_request_display_obj,
1000 pull_request_display_obj,
998 at_version) = PullRequestModel().get_pr_version(
1001 at_version) = PullRequestModel().get_pr_version(
999 pull_request_id, version=version)
1002 pull_request_id, version=version)
1000 versions = pull_request_display_obj.versions()
1003 versions = pull_request_display_obj.versions()
1001 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1004 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1002 c.versions = versions + [latest_ver]
1005 c.versions = versions + [latest_ver]
1003
1006
1004 c.at_version = at_version
1007 c.at_version = at_version
1005 c.at_version_num = (at_version
1008 c.at_version_num = (at_version
1006 if at_version and at_version != PullRequest.LATEST_VER
1009 if at_version and at_version != PullRequest.LATEST_VER
1007 else None)
1010 else None)
1008
1011
1009 self.register_comments_vars(c, pull_request_latest, versions, include_drafts=False)
1012 self.register_comments_vars(c, pull_request_latest, versions, include_drafts=False)
1010 all_comments = c.inline_comments_flat + c.comments
1013 all_comments = c.inline_comments_flat + c.comments
1011
1014
1012 existing_ids = self.get_comment_ids(self.request.POST)
1015 existing_ids = self.get_comment_ids(self.request.POST)
1013 return _render('comments_table', all_comments, len(all_comments),
1016 return _render('comments_table', all_comments, len(all_comments),
1014 existing_ids=existing_ids)
1017 existing_ids=existing_ids)
1015
1018
1016 @LoginRequired()
1019 @LoginRequired()
1017 @NotAnonymous()
1020 @NotAnonymous()
1018 @HasRepoPermissionAnyDecorator(
1021 @HasRepoPermissionAnyDecorator(
1019 'repository.read', 'repository.write', 'repository.admin')
1022 'repository.read', 'repository.write', 'repository.admin')
1020 def pullrequest_todos(self):
1023 def pullrequest_todos(self):
1021 self.load_default_context()
1024 self.load_default_context()
1022
1025
1023 pull_request = PullRequest.get_or_404(
1026 pull_request = PullRequest.get_or_404(
1024 self.request.matchdict['pull_request_id'])
1027 self.request.matchdict['pull_request_id'])
1025 pull_request_id = pull_request.pull_request_id
1028 pull_request_id = pull_request.pull_request_id
1026 version = self.request.GET.get('version')
1029 version = self.request.GET.get('version')
1027
1030
1028 _render = self.request.get_partial_renderer(
1031 _render = self.request.get_partial_renderer(
1029 'rhodecode:templates/base/sidebar.mako')
1032 'rhodecode:templates/base/sidebar.mako')
1030 c = _render.get_call_context()
1033 c = _render.get_call_context()
1031 (pull_request_latest,
1034 (pull_request_latest,
1032 pull_request_at_ver,
1035 pull_request_at_ver,
1033 pull_request_display_obj,
1036 pull_request_display_obj,
1034 at_version) = PullRequestModel().get_pr_version(
1037 at_version) = PullRequestModel().get_pr_version(
1035 pull_request_id, version=version)
1038 pull_request_id, version=version)
1036 versions = pull_request_display_obj.versions()
1039 versions = pull_request_display_obj.versions()
1037 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1040 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1038 c.versions = versions + [latest_ver]
1041 c.versions = versions + [latest_ver]
1039
1042
1040 c.at_version = at_version
1043 c.at_version = at_version
1041 c.at_version_num = (at_version
1044 c.at_version_num = (at_version
1042 if at_version and at_version != PullRequest.LATEST_VER
1045 if at_version and at_version != PullRequest.LATEST_VER
1043 else None)
1046 else None)
1044
1047
1045 c.unresolved_comments = CommentsModel() \
1048 c.unresolved_comments = CommentsModel() \
1046 .get_pull_request_unresolved_todos(pull_request, include_drafts=False)
1049 .get_pull_request_unresolved_todos(pull_request, include_drafts=False)
1047 c.resolved_comments = CommentsModel() \
1050 c.resolved_comments = CommentsModel() \
1048 .get_pull_request_resolved_todos(pull_request, include_drafts=False)
1051 .get_pull_request_resolved_todos(pull_request, include_drafts=False)
1049
1052
1050 all_comments = c.unresolved_comments + c.resolved_comments
1053 all_comments = c.unresolved_comments + c.resolved_comments
1051 existing_ids = self.get_comment_ids(self.request.POST)
1054 existing_ids = self.get_comment_ids(self.request.POST)
1052 return _render('comments_table', all_comments, len(c.unresolved_comments),
1055 return _render('comments_table', all_comments, len(c.unresolved_comments),
1053 todo_comments=True, existing_ids=existing_ids)
1056 todo_comments=True, existing_ids=existing_ids)
1054
1057
1055 @LoginRequired()
1058 @LoginRequired()
1056 @NotAnonymous()
1059 @NotAnonymous()
1057 @HasRepoPermissionAnyDecorator(
1060 @HasRepoPermissionAnyDecorator(
1058 'repository.read', 'repository.write', 'repository.admin')
1061 'repository.read', 'repository.write', 'repository.admin')
1059 def pullrequest_drafts(self):
1062 def pullrequest_drafts(self):
1060 self.load_default_context()
1063 self.load_default_context()
1061
1064
1062 pull_request = PullRequest.get_or_404(
1065 pull_request = PullRequest.get_or_404(
1063 self.request.matchdict['pull_request_id'])
1066 self.request.matchdict['pull_request_id'])
1064 pull_request_id = pull_request.pull_request_id
1067 pull_request_id = pull_request.pull_request_id
1065 version = self.request.GET.get('version')
1068 version = self.request.GET.get('version')
1066
1069
1067 _render = self.request.get_partial_renderer(
1070 _render = self.request.get_partial_renderer(
1068 'rhodecode:templates/base/sidebar.mako')
1071 'rhodecode:templates/base/sidebar.mako')
1069 c = _render.get_call_context()
1072 c = _render.get_call_context()
1070
1073
1071 (pull_request_latest,
1074 (pull_request_latest,
1072 pull_request_at_ver,
1075 pull_request_at_ver,
1073 pull_request_display_obj,
1076 pull_request_display_obj,
1074 at_version) = PullRequestModel().get_pr_version(
1077 at_version) = PullRequestModel().get_pr_version(
1075 pull_request_id, version=version)
1078 pull_request_id, version=version)
1076 versions = pull_request_display_obj.versions()
1079 versions = pull_request_display_obj.versions()
1077 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1080 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1078 c.versions = versions + [latest_ver]
1081 c.versions = versions + [latest_ver]
1079
1082
1080 c.at_version = at_version
1083 c.at_version = at_version
1081 c.at_version_num = (at_version
1084 c.at_version_num = (at_version
1082 if at_version and at_version != PullRequest.LATEST_VER
1085 if at_version and at_version != PullRequest.LATEST_VER
1083 else None)
1086 else None)
1084
1087
1085 c.draft_comments = CommentsModel() \
1088 c.draft_comments = CommentsModel() \
1086 .get_pull_request_drafts(self._rhodecode_db_user.user_id, pull_request)
1089 .get_pull_request_drafts(self._rhodecode_db_user.user_id, pull_request)
1087
1090
1088 all_comments = c.draft_comments
1091 all_comments = c.draft_comments
1089
1092
1090 existing_ids = self.get_comment_ids(self.request.POST)
1093 existing_ids = self.get_comment_ids(self.request.POST)
1091 return _render('comments_table', all_comments, len(all_comments),
1094 return _render('comments_table', all_comments, len(all_comments),
1092 existing_ids=existing_ids, draft_comments=True)
1095 existing_ids=existing_ids, draft_comments=True)
1093
1096
1094 @LoginRequired()
1097 @LoginRequired()
1095 @NotAnonymous()
1098 @NotAnonymous()
1096 @HasRepoPermissionAnyDecorator(
1099 @HasRepoPermissionAnyDecorator(
1097 'repository.read', 'repository.write', 'repository.admin')
1100 'repository.read', 'repository.write', 'repository.admin')
1098 @CSRFRequired()
1101 @CSRFRequired()
1099 def pull_request_create(self):
1102 def pull_request_create(self):
1100 _ = self.request.translate
1103 _ = self.request.translate
1101 self.assure_not_empty_repo()
1104 self.assure_not_empty_repo()
1102 self.load_default_context()
1105 self.load_default_context()
1103
1106
1104 controls = peppercorn.parse(self.request.POST.items())
1107 controls = peppercorn.parse(self.request.POST.items())
1105
1108
1106 try:
1109 try:
1107 form = PullRequestForm(
1110 form = PullRequestForm(
1108 self.request.translate, self.db_repo.repo_id)()
1111 self.request.translate, self.db_repo.repo_id)()
1109 _form = form.to_python(controls)
1112 _form = form.to_python(controls)
1110 except formencode.Invalid as errors:
1113 except formencode.Invalid as errors:
1111 if errors.error_dict.get('revisions'):
1114 if errors.error_dict.get('revisions'):
1112 msg = 'Revisions: %s' % errors.error_dict['revisions']
1115 msg = 'Revisions: %s' % errors.error_dict['revisions']
1113 elif errors.error_dict.get('pullrequest_title'):
1116 elif errors.error_dict.get('pullrequest_title'):
1114 msg = errors.error_dict.get('pullrequest_title')
1117 msg = errors.error_dict.get('pullrequest_title')
1115 else:
1118 else:
1116 msg = _('Error creating pull request: {}').format(errors)
1119 msg = _('Error creating pull request: {}').format(errors)
1117 log.exception(msg)
1120 log.exception(msg)
1118 h.flash(msg, 'error')
1121 h.flash(msg, 'error')
1119
1122
1120 # would rather just go back to form ...
1123 # would rather just go back to form ...
1121 raise HTTPFound(
1124 raise HTTPFound(
1122 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1125 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1123
1126
1124 source_repo = _form['source_repo']
1127 source_repo = _form['source_repo']
1125 source_ref = _form['source_ref']
1128 source_ref = _form['source_ref']
1126 target_repo = _form['target_repo']
1129 target_repo = _form['target_repo']
1127 target_ref = _form['target_ref']
1130 target_ref = _form['target_ref']
1128 commit_ids = _form['revisions'][::-1]
1131 commit_ids = _form['revisions'][::-1]
1129 common_ancestor_id = _form['common_ancestor']
1132 common_ancestor_id = _form['common_ancestor']
1130
1133
1131 # find the ancestor for this pr
1134 # find the ancestor for this pr
1132 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1135 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1133 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1136 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1134
1137
1135 if not source_db_repo or not target_db_repo:
1138 if not source_db_repo or not target_db_repo:
1136 h.flash(_('Source repo or target repo not found.'), category='error')
1139 h.flash(_('Source repo or target repo not found.'), category='error')
1137 raise HTTPFound(
1140 raise HTTPFound(
1138 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1141 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1139
1142
1140 # re-check permissions here
1143 # re-check permissions here
1141 # for the source repo we must have read permissions
1144 # for the source repo we must have read permissions
1142
1145
1143 source_perm = HasRepoPermissionAny(
1146 source_perm = HasRepoPermissionAny(
1144 'repository.read', 'repository.write', 'repository.admin')(
1147 'repository.read', 'repository.write', 'repository.admin')(
1145 source_db_repo.repo_name)
1148 source_db_repo.repo_name)
1146 if not source_perm:
1149 if not source_perm:
1147 msg = _('Not enough permissions for source repo `{}`.'.format(
1150 msg = _('Not enough permissions for source repo `{}`.'.format(
1148 source_db_repo.repo_name))
1151 source_db_repo.repo_name))
1149 h.flash(msg, category='error')
1152 h.flash(msg, category='error')
1150 # copy the args back to redirect
1153 # copy the args back to redirect
1151 org_query = self.request.GET.mixed()
1154 org_query = self.request.GET.mixed()
1152 raise HTTPFound(
1155 raise HTTPFound(
1153 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1156 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1154 _query=org_query))
1157 _query=org_query))
1155
1158
1156 # for the target repo we must have read permissions, and later on
1159 # for the target repo we must have read permissions, and later on
1157 # we also want to check branch permissions here
1160 # we also want to check branch permissions here
1158 target_perm = HasRepoPermissionAny(
1161 target_perm = HasRepoPermissionAny(
1159 'repository.read', 'repository.write', 'repository.admin')(
1162 'repository.read', 'repository.write', 'repository.admin')(
1160 target_db_repo.repo_name)
1163 target_db_repo.repo_name)
1161 if not target_perm:
1164 if not target_perm:
1162 msg = _('Not enough permissions for target repo `{}`.'.format(
1165 msg = _('Not enough permissions for target repo `{}`.'.format(
1163 target_db_repo.repo_name))
1166 target_db_repo.repo_name))
1164 h.flash(msg, category='error')
1167 h.flash(msg, category='error')
1165 # copy the args back to redirect
1168 # copy the args back to redirect
1166 org_query = self.request.GET.mixed()
1169 org_query = self.request.GET.mixed()
1167 raise HTTPFound(
1170 raise HTTPFound(
1168 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1171 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1169 _query=org_query))
1172 _query=org_query))
1170
1173
1171 source_scm = source_db_repo.scm_instance()
1174 source_scm = source_db_repo.scm_instance()
1172 target_scm = target_db_repo.scm_instance()
1175 target_scm = target_db_repo.scm_instance()
1173
1176
1174 source_ref_obj = unicode_to_reference(source_ref)
1177 source_ref_obj = unicode_to_reference(source_ref)
1175 target_ref_obj = unicode_to_reference(target_ref)
1178 target_ref_obj = unicode_to_reference(target_ref)
1176
1179
1177 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1180 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1178 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1181 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1179
1182
1180 ancestor = source_scm.get_common_ancestor(
1183 ancestor = source_scm.get_common_ancestor(
1181 source_commit.raw_id, target_commit.raw_id, target_scm)
1184 source_commit.raw_id, target_commit.raw_id, target_scm)
1182
1185
1183 # recalculate target ref based on ancestor
1186 # recalculate target ref based on ancestor
1184 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1187 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
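# the joined value has the form "<type>:<name>:<commit_id>",
# e.g. "branch:default:<ancestor sha>" (illustrative values, not taken from this diff)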
1185
1188
1186 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1189 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1187 PullRequestModel().get_reviewer_functions()
1190 PullRequestModel().get_reviewer_functions()
1188
1191
1189 # recalculate reviewers logic, to make sure we can validate this
1192 # recalculate reviewers logic, to make sure we can validate this
1190 reviewer_rules = get_default_reviewers_data(
1193 reviewer_rules = get_default_reviewers_data(
1191 self._rhodecode_db_user,
1194 self._rhodecode_db_user,
1192 source_db_repo,
1195 source_db_repo,
1193 source_ref_obj,
1196 source_ref_obj,
1194 target_db_repo,
1197 target_db_repo,
1195 target_ref_obj,
1198 target_ref_obj,
1196 include_diff_info=False)
1199 include_diff_info=False)
1197
1200
1198 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1201 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1199 observers = validate_observers(_form['observer_members'], reviewer_rules)
1202 observers = validate_observers(_form['observer_members'], reviewer_rules)
1200
1203
1201 pullrequest_title = _form['pullrequest_title']
1204 pullrequest_title = _form['pullrequest_title']
1202 title_source_ref = source_ref_obj.name
1205 title_source_ref = source_ref_obj.name
1203 if not pullrequest_title:
1206 if not pullrequest_title:
1204 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1207 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1205 source=source_repo,
1208 source=source_repo,
1206 source_ref=title_source_ref,
1209 source_ref=title_source_ref,
1207 target=target_repo
1210 target=target_repo
1208 )
1211 )
1209
1212
1210 description = _form['pullrequest_desc']
1213 description = _form['pullrequest_desc']
1211 description_renderer = _form['description_renderer']
1214 description_renderer = _form['description_renderer']
1212
1215
1213 try:
1216 try:
1214 pull_request = PullRequestModel().create(
1217 pull_request = PullRequestModel().create(
1215 created_by=self._rhodecode_user.user_id,
1218 created_by=self._rhodecode_user.user_id,
1216 source_repo=source_repo,
1219 source_repo=source_repo,
1217 source_ref=source_ref,
1220 source_ref=source_ref,
1218 target_repo=target_repo,
1221 target_repo=target_repo,
1219 target_ref=target_ref,
1222 target_ref=target_ref,
1220 revisions=commit_ids,
1223 revisions=commit_ids,
1221 common_ancestor_id=common_ancestor_id,
1224 common_ancestor_id=common_ancestor_id,
1222 reviewers=reviewers,
1225 reviewers=reviewers,
1223 observers=observers,
1226 observers=observers,
1224 title=pullrequest_title,
1227 title=pullrequest_title,
1225 description=description,
1228 description=description,
1226 description_renderer=description_renderer,
1229 description_renderer=description_renderer,
1227 reviewer_data=reviewer_rules,
1230 reviewer_data=reviewer_rules,
1228 auth_user=self._rhodecode_user
1231 auth_user=self._rhodecode_user
1229 )
1232 )
1230 Session().commit()
1233 Session().commit()
1231
1234
1232 h.flash(_('Successfully opened new pull request'),
1235 h.flash(_('Successfully opened new pull request'),
1233 category='success')
1236 category='success')
1234 except Exception:
1237 except Exception:
1235 msg = _('Error occurred during creation of this pull request.')
1238 msg = _('Error occurred during creation of this pull request.')
1236 log.exception(msg)
1239 log.exception(msg)
1237 h.flash(msg, category='error')
1240 h.flash(msg, category='error')
1238
1241
1239 # copy the args back to redirect
1242 # copy the args back to redirect
1240 org_query = self.request.GET.mixed()
1243 org_query = self.request.GET.mixed()
1241 raise HTTPFound(
1244 raise HTTPFound(
1242 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1245 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1243 _query=org_query))
1246 _query=org_query))
1244
1247
1245 raise HTTPFound(
1248 raise HTTPFound(
1246 h.route_path('pullrequest_show', repo_name=target_repo,
1249 h.route_path('pullrequest_show', repo_name=target_repo,
1247 pull_request_id=pull_request.pull_request_id))
1250 pull_request_id=pull_request.pull_request_id))
1248
1251
1249 @LoginRequired()
1252 @LoginRequired()
1250 @NotAnonymous()
1253 @NotAnonymous()
1251 @HasRepoPermissionAnyDecorator(
1254 @HasRepoPermissionAnyDecorator(
1252 'repository.read', 'repository.write', 'repository.admin')
1255 'repository.read', 'repository.write', 'repository.admin')
1253 @CSRFRequired()
1256 @CSRFRequired()
1254 def pull_request_update(self):
1257 def pull_request_update(self):
1255 pull_request = PullRequest.get_or_404(
1258 pull_request = PullRequest.get_or_404(
1256 self.request.matchdict['pull_request_id'])
1259 self.request.matchdict['pull_request_id'])
1257 _ = self.request.translate
1260 _ = self.request.translate
1258
1261
1259 c = self.load_default_context()
1262 c = self.load_default_context()
1260 redirect_url = None
1263 redirect_url = None
1261
1264
1262 if pull_request.is_closed():
1265 if pull_request.is_closed():
1263 log.debug('update: forbidden because pull request is closed')
1266 log.debug('update: forbidden because pull request is closed')
1264 msg = _(u'Cannot update closed pull requests.')
1267 msg = _(u'Cannot update closed pull requests.')
1265 h.flash(msg, category='error')
1268 h.flash(msg, category='error')
1266 return {'response': True,
1269 return {'response': True,
1267 'redirect_url': redirect_url}
1270 'redirect_url': redirect_url}
1268
1271
1269 is_state_changing = pull_request.is_state_changing()
1272 is_state_changing = pull_request.is_state_changing()
1270 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1273 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1271
1274
1272 # only owner or admin can update it
1275 # only owner or admin can update it
1273 allowed_to_update = PullRequestModel().check_user_update(
1276 allowed_to_update = PullRequestModel().check_user_update(
1274 pull_request, self._rhodecode_user)
1277 pull_request, self._rhodecode_user)
1275
1278
1276 if allowed_to_update:
1279 if allowed_to_update:
1277 controls = peppercorn.parse(self.request.POST.items())
1280 controls = peppercorn.parse(self.request.POST.items())
1278 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1281 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1279
1282
1280 if 'review_members' in controls:
1283 if 'review_members' in controls:
1281 self._update_reviewers(
1284 self._update_reviewers(
1282 c,
1285 c,
1283 pull_request, controls['review_members'],
1286 pull_request, controls['review_members'],
1284 pull_request.reviewer_data,
1287 pull_request.reviewer_data,
1285 PullRequestReviewers.ROLE_REVIEWER)
1288 PullRequestReviewers.ROLE_REVIEWER)
1286 elif 'observer_members' in controls:
1289 elif 'observer_members' in controls:
1287 self._update_reviewers(
1290 self._update_reviewers(
1288 c,
1291 c,
1289 pull_request, controls['observer_members'],
1292 pull_request, controls['observer_members'],
1290 pull_request.reviewer_data,
1293 pull_request.reviewer_data,
1291 PullRequestReviewers.ROLE_OBSERVER)
1294 PullRequestReviewers.ROLE_OBSERVER)
1292 elif str2bool(self.request.POST.get('update_commits', 'false')):
1295 elif str2bool(self.request.POST.get('update_commits', 'false')):
1293 if is_state_changing:
1296 if is_state_changing:
1294 log.debug('commits update: forbidden because pull request is in state %s',
1297 log.debug('commits update: forbidden because pull request is in state %s',
1295 pull_request.pull_request_state)
1298 pull_request.pull_request_state)
1296 msg = _(u'Cannot update pull request commits in state other than `{}`. '
1299 msg = _(u'Cannot update pull request commits in state other than `{}`. '
1297 u'Current state is: `{}`').format(
1300 u'Current state is: `{}`').format(
1298 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1301 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1299 h.flash(msg, category='error')
1302 h.flash(msg, category='error')
1300 return {'response': True,
1303 return {'response': True,
1301 'redirect_url': redirect_url}
1304 'redirect_url': redirect_url}
1302
1305
1303 self._update_commits(c, pull_request)
1306 self._update_commits(c, pull_request)
1304 if force_refresh:
1307 if force_refresh:
1305 redirect_url = h.route_path(
1308 redirect_url = h.route_path(
1306 'pullrequest_show', repo_name=self.db_repo_name,
1309 'pullrequest_show', repo_name=self.db_repo_name,
1307 pull_request_id=pull_request.pull_request_id,
1310 pull_request_id=pull_request.pull_request_id,
1308 _query={"force_refresh": 1})
1311 _query={"force_refresh": 1})
1309 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1312 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1310 self._edit_pull_request(pull_request)
1313 self._edit_pull_request(pull_request)
1311 else:
1314 else:
1312 log.error('Unhandled update data.')
1315 log.error('Unhandled update data.')
1313 raise HTTPBadRequest()
1316 raise HTTPBadRequest()
1314
1317
1315 return {'response': True,
1318 return {'response': True,
1316 'redirect_url': redirect_url}
1319 'redirect_url': redirect_url}
1317 raise HTTPForbidden()
1320 raise HTTPForbidden()
1318
1321
1319 def _edit_pull_request(self, pull_request):
1322 def _edit_pull_request(self, pull_request):
1320 """
1323 """
1321 Edit title and description
1324 Edit title and description
1322 """
1325 """
1323 _ = self.request.translate
1326 _ = self.request.translate
1324
1327
1325 try:
1328 try:
1326 PullRequestModel().edit(
1329 PullRequestModel().edit(
1327 pull_request,
1330 pull_request,
1328 self.request.POST.get('title'),
1331 self.request.POST.get('title'),
1329 self.request.POST.get('description'),
1332 self.request.POST.get('description'),
1330 self.request.POST.get('description_renderer'),
1333 self.request.POST.get('description_renderer'),
1331 self._rhodecode_user)
1334 self._rhodecode_user)
1332 except ValueError:
1335 except ValueError:
1333 msg = _(u'Cannot update closed pull requests.')
1336 msg = _(u'Cannot update closed pull requests.')
1334 h.flash(msg, category='error')
1337 h.flash(msg, category='error')
1335 return
1338 return
1336 else:
1339 else:
1337 Session().commit()
1340 Session().commit()
1338
1341
1339 msg = _(u'Pull request title & description updated.')
1342 msg = _(u'Pull request title & description updated.')
1340 h.flash(msg, category='success')
1343 h.flash(msg, category='success')
1341 return
1344 return
1342
1345
1343 def _update_commits(self, c, pull_request):
1346 def _update_commits(self, c, pull_request):
1344 _ = self.request.translate
1347 _ = self.request.translate
1345
1348
1346 with pull_request.set_state(PullRequest.STATE_UPDATING):
1349 with pull_request.set_state(PullRequest.STATE_UPDATING):
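# set_state() is used as a context manager here: the pull request is held in the
# "updating" state while the commits are recalculated and is presumably switched
# back to its previous state when the block exits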
1347 resp = PullRequestModel().update_commits(
1350 resp = PullRequestModel().update_commits(
1348 pull_request, self._rhodecode_db_user)
1351 pull_request, self._rhodecode_db_user)
1349
1352
1350 if resp.executed:
1353 if resp.executed:
1351
1354
1352 if resp.target_changed and resp.source_changed:
1355 if resp.target_changed and resp.source_changed:
1353 changed = 'target and source repositories'
1356 changed = 'target and source repositories'
1354 elif resp.target_changed and not resp.source_changed:
1357 elif resp.target_changed and not resp.source_changed:
1355 changed = 'target repository'
1358 changed = 'target repository'
1356 elif not resp.target_changed and resp.source_changed:
1359 elif not resp.target_changed and resp.source_changed:
1357 changed = 'source repository'
1360 changed = 'source repository'
1358 else:
1361 else:
1359 changed = 'nothing'
1362 changed = 'nothing'
1360
1363
1361 msg = _(u'Pull request updated to "{source_commit_id}" with '
1364 msg = _(u'Pull request updated to "{source_commit_id}" with '
1362 u'{count_added} added, {count_removed} removed commits. '
1365 u'{count_added} added, {count_removed} removed commits. '
1363 u'Source of changes: {change_source}.')
1366 u'Source of changes: {change_source}.')
1364 msg = msg.format(
1367 msg = msg.format(
1365 source_commit_id=pull_request.source_ref_parts.commit_id,
1368 source_commit_id=pull_request.source_ref_parts.commit_id,
1366 count_added=len(resp.changes.added),
1369 count_added=len(resp.changes.added),
1367 count_removed=len(resp.changes.removed),
1370 count_removed=len(resp.changes.removed),
1368 change_source=changed)
1371 change_source=changed)
1369 h.flash(msg, category='success')
1372 h.flash(msg, category='success')
1370 channelstream.pr_update_channelstream_push(
1373 channelstream.pr_update_channelstream_push(
1371 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1374 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1372 else:
1375 else:
1373 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1376 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1374 warning_reasons = [
1377 warning_reasons = [
1375 UpdateFailureReason.NO_CHANGE,
1378 UpdateFailureReason.NO_CHANGE,
1376 UpdateFailureReason.WRONG_REF_TYPE,
1379 UpdateFailureReason.WRONG_REF_TYPE,
1377 ]
1380 ]
1378 category = 'warning' if resp.reason in warning_reasons else 'error'
1381 category = 'warning' if resp.reason in warning_reasons else 'error'
1379 h.flash(msg, category=category)
1382 h.flash(msg, category=category)
1380
1383
1381 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1384 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1382 _ = self.request.translate
1385 _ = self.request.translate
1383
1386
1384 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1387 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1385 PullRequestModel().get_reviewer_functions()
1388 PullRequestModel().get_reviewer_functions()
1386
1389
1387 if role == PullRequestReviewers.ROLE_REVIEWER:
1390 if role == PullRequestReviewers.ROLE_REVIEWER:
1388 try:
1391 try:
1389 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1392 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1390 except ValueError as e:
1393 except ValueError as e:
1391 log.error('Reviewers Validation: {}'.format(e))
1394 log.error('Reviewers Validation: {}'.format(e))
1392 h.flash(e, category='error')
1395 h.flash(e, category='error')
1393 return
1396 return
1394
1397
1395 old_calculated_status = pull_request.calculated_review_status()
1398 old_calculated_status = pull_request.calculated_review_status()
1396 PullRequestModel().update_reviewers(
1399 PullRequestModel().update_reviewers(
1397 pull_request, reviewers, self._rhodecode_db_user)
1400 pull_request, reviewers, self._rhodecode_db_user)
1398
1401
1399 Session().commit()
1402 Session().commit()
1400
1403
1401 msg = _('Pull request reviewers updated.')
1404 msg = _('Pull request reviewers updated.')
1402 h.flash(msg, category='success')
1405 h.flash(msg, category='success')
1403 channelstream.pr_update_channelstream_push(
1406 channelstream.pr_update_channelstream_push(
1404 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1407 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1405
1408
1406 # trigger status changed if change in reviewers changes the status
1409 # trigger status changed if change in reviewers changes the status
1407 calculated_status = pull_request.calculated_review_status()
1410 calculated_status = pull_request.calculated_review_status()
1408 if old_calculated_status != calculated_status:
1411 if old_calculated_status != calculated_status:
1409 PullRequestModel().trigger_pull_request_hook(
1412 PullRequestModel().trigger_pull_request_hook(
1410 pull_request, self._rhodecode_user, 'review_status_change',
1413 pull_request, self._rhodecode_user, 'review_status_change',
1411 data={'status': calculated_status})
1414 data={'status': calculated_status})
1412
1415
1413 elif role == PullRequestReviewers.ROLE_OBSERVER:
1416 elif role == PullRequestReviewers.ROLE_OBSERVER:
1414 try:
1417 try:
1415 observers = validate_observers(review_members, reviewer_rules)
1418 observers = validate_observers(review_members, reviewer_rules)
1416 except ValueError as e:
1419 except ValueError as e:
1417 log.error('Observers Validation: {}'.format(e))
1420 log.error('Observers Validation: {}'.format(e))
1418 h.flash(e, category='error')
1421 h.flash(e, category='error')
1419 return
1422 return
1420
1423
1421 PullRequestModel().update_observers(
1424 PullRequestModel().update_observers(
1422 pull_request, observers, self._rhodecode_db_user)
1425 pull_request, observers, self._rhodecode_db_user)
1423
1426
1424 Session().commit()
1427 Session().commit()
1425 msg = _('Pull request observers updated.')
1428 msg = _('Pull request observers updated.')
1426 h.flash(msg, category='success')
1429 h.flash(msg, category='success')
1427 channelstream.pr_update_channelstream_push(
1430 channelstream.pr_update_channelstream_push(
1428 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1431 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1429
1432
1430 @LoginRequired()
1433 @LoginRequired()
1431 @NotAnonymous()
1434 @NotAnonymous()
1432 @HasRepoPermissionAnyDecorator(
1435 @HasRepoPermissionAnyDecorator(
1433 'repository.read', 'repository.write', 'repository.admin')
1436 'repository.read', 'repository.write', 'repository.admin')
1434 @CSRFRequired()
1437 @CSRFRequired()
1435 def pull_request_merge(self):
1438 def pull_request_merge(self):
1436 """
1439 """
1437 Merge will perform a server-side merge of the specified
1440 Merge will perform a server-side merge of the specified
1438 pull request, if the pull request is approved and mergeable.
1441 pull request, if the pull request is approved and mergeable.
1439 After successful merging, the pull request is automatically
1442 After successful merging, the pull request is automatically
1440 closed, with a relevant comment.
1443 closed, with a relevant comment.
1441 """
1444 """
1442 pull_request = PullRequest.get_or_404(
1445 pull_request = PullRequest.get_or_404(
1443 self.request.matchdict['pull_request_id'])
1446 self.request.matchdict['pull_request_id'])
1444 _ = self.request.translate
1447 _ = self.request.translate
1445
1448
1446 if pull_request.is_state_changing():
1449 if pull_request.is_state_changing():
1447 log.debug('show: forbidden because pull request is in state %s',
1450 log.debug('show: forbidden because pull request is in state %s',
1448 pull_request.pull_request_state)
1451 pull_request.pull_request_state)
1449 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1452 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1450 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1453 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1451 pull_request.pull_request_state)
1454 pull_request.pull_request_state)
1452 h.flash(msg, category='error')
1455 h.flash(msg, category='error')
1453 raise HTTPFound(
1456 raise HTTPFound(
1454 h.route_path('pullrequest_show',
1457 h.route_path('pullrequest_show',
1455 repo_name=pull_request.target_repo.repo_name,
1458 repo_name=pull_request.target_repo.repo_name,
1456 pull_request_id=pull_request.pull_request_id))
1459 pull_request_id=pull_request.pull_request_id))
1457
1460
1458 self.load_default_context()
1461 self.load_default_context()
1459
1462
1460 with pull_request.set_state(PullRequest.STATE_UPDATING):
1463 with pull_request.set_state(PullRequest.STATE_UPDATING):
1461 check = MergeCheck.validate(
1464 check = MergeCheck.validate(
1462 pull_request, auth_user=self._rhodecode_user,
1465 pull_request, auth_user=self._rhodecode_user,
1463 translator=self.request.translate)
1466 translator=self.request.translate)
1464 merge_possible = not check.failed
1467 merge_possible = not check.failed
1465
1468
1466 for err_type, error_msg in check.errors:
1469 for err_type, error_msg in check.errors:
1467 h.flash(error_msg, category=err_type)
1470 h.flash(error_msg, category=err_type)
1468
1471
1469 if merge_possible:
1472 if merge_possible:
1470 log.debug("Pre-conditions checked, trying to merge.")
1473 log.debug("Pre-conditions checked, trying to merge.")
1471 extras = vcs_operation_context(
1474 extras = vcs_operation_context(
1472 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1475 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1473 username=self._rhodecode_db_user.username, action='push',
1476 username=self._rhodecode_db_user.username, action='push',
1474 scm=pull_request.target_repo.repo_type)
1477 scm=pull_request.target_repo.repo_type)
1475 with pull_request.set_state(PullRequest.STATE_UPDATING):
1478 with pull_request.set_state(PullRequest.STATE_UPDATING):
1476 self._merge_pull_request(
1479 self._merge_pull_request(
1477 pull_request, self._rhodecode_db_user, extras)
1480 pull_request, self._rhodecode_db_user, extras)
1478 else:
1481 else:
1479 log.debug("Pre-conditions failed, NOT merging.")
1482 log.debug("Pre-conditions failed, NOT merging.")
1480
1483
1481 raise HTTPFound(
1484 raise HTTPFound(
1482 h.route_path('pullrequest_show',
1485 h.route_path('pullrequest_show',
1483 repo_name=pull_request.target_repo.repo_name,
1486 repo_name=pull_request.target_repo.repo_name,
1484 pull_request_id=pull_request.pull_request_id))
1487 pull_request_id=pull_request.pull_request_id))
1485
1488
1486 def _merge_pull_request(self, pull_request, user, extras):
1489 def _merge_pull_request(self, pull_request, user, extras):
1487 _ = self.request.translate
1490 _ = self.request.translate
1488 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1491 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1489
1492
1490 if merge_resp.executed:
1493 if merge_resp.executed:
1491 log.debug("The merge was successful, closing the pull request.")
1494 log.debug("The merge was successful, closing the pull request.")
1492 PullRequestModel().close_pull_request(
1495 PullRequestModel().close_pull_request(
1493 pull_request.pull_request_id, user)
1496 pull_request.pull_request_id, user)
1494 Session().commit()
1497 Session().commit()
1495 msg = _('Pull request was successfully merged and closed.')
1498 msg = _('Pull request was successfully merged and closed.')
1496 h.flash(msg, category='success')
1499 h.flash(msg, category='success')
1497 else:
1500 else:
1498 log.debug(
1501 log.debug(
1499 "The merge was not successful. Merge response: %s", merge_resp)
1502 "The merge was not successful. Merge response: %s", merge_resp)
1500 msg = merge_resp.merge_status_message
1503 msg = merge_resp.merge_status_message
1501 h.flash(msg, category='error')
1504 h.flash(msg, category='error')
1502
1505
1503 @LoginRequired()
1506 @LoginRequired()
1504 @NotAnonymous()
1507 @NotAnonymous()
1505 @HasRepoPermissionAnyDecorator(
1508 @HasRepoPermissionAnyDecorator(
1506 'repository.read', 'repository.write', 'repository.admin')
1509 'repository.read', 'repository.write', 'repository.admin')
1507 @CSRFRequired()
1510 @CSRFRequired()
1508 def pull_request_delete(self):
1511 def pull_request_delete(self):
1509 _ = self.request.translate
1512 _ = self.request.translate
1510
1513
1511 pull_request = PullRequest.get_or_404(
1514 pull_request = PullRequest.get_or_404(
1512 self.request.matchdict['pull_request_id'])
1515 self.request.matchdict['pull_request_id'])
1513 self.load_default_context()
1516 self.load_default_context()
1514
1517
1515 pr_closed = pull_request.is_closed()
1518 pr_closed = pull_request.is_closed()
1516 allowed_to_delete = PullRequestModel().check_user_delete(
1519 allowed_to_delete = PullRequestModel().check_user_delete(
1517 pull_request, self._rhodecode_user) and not pr_closed
1520 pull_request, self._rhodecode_user) and not pr_closed
1518
1521
1519 # only owner can delete it!
1522 # only owner can delete it!
1520 if allowed_to_delete:
1523 if allowed_to_delete:
1521 PullRequestModel().delete(pull_request, self._rhodecode_user)
1524 PullRequestModel().delete(pull_request, self._rhodecode_user)
1522 Session().commit()
1525 Session().commit()
1523 h.flash(_('Successfully deleted pull request'),
1526 h.flash(_('Successfully deleted pull request'),
1524 category='success')
1527 category='success')
1525 raise HTTPFound(h.route_path('pullrequest_show_all',
1528 raise HTTPFound(h.route_path('pullrequest_show_all',
1526 repo_name=self.db_repo_name))
1529 repo_name=self.db_repo_name))
1527
1530
1528 log.warning('user %s tried to delete pull request without access',
1531 log.warning('user %s tried to delete pull request without access',
1529 self._rhodecode_user)
1532 self._rhodecode_user)
1530 raise HTTPNotFound()
1533 raise HTTPNotFound()
1531
1534
1532 def _pull_request_comments_create(self, pull_request, comments):
1535 def _pull_request_comments_create(self, pull_request, comments):
1533 _ = self.request.translate
1536 _ = self.request.translate
1534 data = {}
1537 data = {}
1535 if not comments:
1538 if not comments:
1536 return
1539 return
1537 pull_request_id = pull_request.pull_request_id
1540 pull_request_id = pull_request.pull_request_id
1538
1541
1539 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
1542 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
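# all_drafts is True only when every submitted comment is a draft; it is used
# further down to skip the channelstream broadcast for draft-only submissions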
1540
1543
1541 for entry in comments:
1544 for entry in comments:
1542 c = self.load_default_context()
1545 c = self.load_default_context()
1543 comment_type = entry['comment_type']
1546 comment_type = entry['comment_type']
1544 text = entry['text']
1547 text = entry['text']
1545 status = entry['status']
1548 status = entry['status']
1546 is_draft = str2bool(entry['is_draft'])
1549 is_draft = str2bool(entry['is_draft'])
1547 resolves_comment_id = entry['resolves_comment_id']
1550 resolves_comment_id = entry['resolves_comment_id']
1548 close_pull_request = entry['close_pull_request']
1551 close_pull_request = entry['close_pull_request']
1549 f_path = entry['f_path']
1552 f_path = entry['f_path']
1550 line_no = entry['line']
1553 line_no = entry['line']
1551 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
1554 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
1552
1555
1553 # The logic here works as follows: if a close-pull-request comment is
1556 # The logic here works as follows: if a close-pull-request comment is
1554 # submitted, use the `close_pull_request_with_comment` function;
1557 # submitted, use the `close_pull_request_with_comment` function;
1555 # otherwise handle the regular comment logic.
1558 # otherwise handle the regular comment logic.
1556
1559
1557 if close_pull_request:
1560 if close_pull_request:
1558 # only owner or admin or person with write permissions
1561 # only owner or admin or person with write permissions
1559 allowed_to_close = PullRequestModel().check_user_update(
1562 allowed_to_close = PullRequestModel().check_user_update(
1560 pull_request, self._rhodecode_user)
1563 pull_request, self._rhodecode_user)
1561 if not allowed_to_close:
1564 if not allowed_to_close:
1562 log.debug('comment: forbidden because not allowed to close '
1565 log.debug('comment: forbidden because not allowed to close '
1563 'pull request %s', pull_request_id)
1566 'pull request %s', pull_request_id)
1564 raise HTTPForbidden()
1567 raise HTTPForbidden()
1565
1568
1566 # This also triggers `review_status_change`
1569 # This also triggers `review_status_change`
1567 comment, status = PullRequestModel().close_pull_request_with_comment(
1570 comment, status = PullRequestModel().close_pull_request_with_comment(
1568 pull_request, self._rhodecode_user, self.db_repo, message=text,
1571 pull_request, self._rhodecode_user, self.db_repo, message=text,
1569 auth_user=self._rhodecode_user)
1572 auth_user=self._rhodecode_user)
1570 Session().flush()
1573 Session().flush()
1571 is_inline = comment.is_inline
1574 is_inline = comment.is_inline
1572
1575
1573 PullRequestModel().trigger_pull_request_hook(
1576 PullRequestModel().trigger_pull_request_hook(
1574 pull_request, self._rhodecode_user, 'comment',
1577 pull_request, self._rhodecode_user, 'comment',
1575 data={'comment': comment})
1578 data={'comment': comment})
1576
1579
1577 else:
1580 else:
1578 # regular comment case, could be inline, or one with status.
1581 # regular comment case, could be inline, or one with status.
1579 # for that one we check also permissions
1582 # for that one we check also permissions
1580 # Additionally ENSURE if somehow draft is sent we're then unable to change status
1583 # Additionally ENSURE if somehow draft is sent we're then unable to change status
1581 allowed_to_change_status = PullRequestModel().check_user_change_status(
1584 allowed_to_change_status = PullRequestModel().check_user_change_status(
1582 pull_request, self._rhodecode_user) and not is_draft
1585 pull_request, self._rhodecode_user) and not is_draft
1583
1586
1584 if status and allowed_to_change_status:
1587 if status and allowed_to_change_status:
1585 message = (_('Status change %(transition_icon)s %(status)s')
1588 message = (_('Status change %(transition_icon)s %(status)s')
1586 % {'transition_icon': '>',
1589 % {'transition_icon': '>',
1587 'status': ChangesetStatus.get_status_lbl(status)})
1590 'status': ChangesetStatus.get_status_lbl(status)})
1588 text = text or message
1591 text = text or message
1589
1592
1590 comment = CommentsModel().create(
1593 comment = CommentsModel().create(
1591 text=text,
1594 text=text,
1592 repo=self.db_repo.repo_id,
1595 repo=self.db_repo.repo_id,
1593 user=self._rhodecode_user.user_id,
1596 user=self._rhodecode_user.user_id,
1594 pull_request=pull_request,
1597 pull_request=pull_request,
1595 f_path=f_path,
1598 f_path=f_path,
1596 line_no=line_no,
1599 line_no=line_no,
1597 status_change=(ChangesetStatus.get_status_lbl(status)
1600 status_change=(ChangesetStatus.get_status_lbl(status)
1598 if status and allowed_to_change_status else None),
1601 if status and allowed_to_change_status else None),
1599 status_change_type=(status
1602 status_change_type=(status
1600 if status and allowed_to_change_status else None),
1603 if status and allowed_to_change_status else None),
1601 comment_type=comment_type,
1604 comment_type=comment_type,
1602 is_draft=is_draft,
1605 is_draft=is_draft,
1603 resolves_comment_id=resolves_comment_id,
1606 resolves_comment_id=resolves_comment_id,
1604 auth_user=self._rhodecode_user,
1607 auth_user=self._rhodecode_user,
1605 send_email=not is_draft, # skip notification for draft comments
1608 send_email=not is_draft, # skip notification for draft comments
1606 )
1609 )
1607 is_inline = comment.is_inline
1610 is_inline = comment.is_inline
1608
1611
1609 if allowed_to_change_status:
1612 if allowed_to_change_status:
1610 # calculate old status before we change it
1613 # calculate old status before we change it
1611 old_calculated_status = pull_request.calculated_review_status()
1614 old_calculated_status = pull_request.calculated_review_status()
1612
1615
1613 # get status if set!
1616 # get status if set!
1614 if status:
1617 if status:
1615 ChangesetStatusModel().set_status(
1618 ChangesetStatusModel().set_status(
1616 self.db_repo.repo_id,
1619 self.db_repo.repo_id,
1617 status,
1620 status,
1618 self._rhodecode_user.user_id,
1621 self._rhodecode_user.user_id,
1619 comment,
1622 comment,
1620 pull_request=pull_request
1623 pull_request=pull_request
1621 )
1624 )
1622
1625
1623 Session().flush()
1626 Session().flush()
1624 # this is somehow required to get access to some relationship
1627 # this is somehow required to get access to some relationship
1625 # loaded on comment
1628 # loaded on comment
1626 Session().refresh(comment)
1629 Session().refresh(comment)
1627
1630
1628 # skip notifications for drafts
1631 # skip notifications for drafts
1629 if not is_draft:
1632 if not is_draft:
1630 PullRequestModel().trigger_pull_request_hook(
1633 PullRequestModel().trigger_pull_request_hook(
1631 pull_request, self._rhodecode_user, 'comment',
1634 pull_request, self._rhodecode_user, 'comment',
1632 data={'comment': comment})
1635 data={'comment': comment})
1633
1636
1634 # we now calculate the status of the pull request, and based on that
1637 # we now calculate the status of the pull request, and based on that
1635 # calculation we set the commit status
1638 # calculation we set the commit status
1636 calculated_status = pull_request.calculated_review_status()
1639 calculated_status = pull_request.calculated_review_status()
1637 if old_calculated_status != calculated_status:
1640 if old_calculated_status != calculated_status:
1638 PullRequestModel().trigger_pull_request_hook(
1641 PullRequestModel().trigger_pull_request_hook(
1639 pull_request, self._rhodecode_user, 'review_status_change',
1642 pull_request, self._rhodecode_user, 'review_status_change',
1640 data={'status': calculated_status})
1643 data={'status': calculated_status})
1641
1644
1642 comment_id = comment.comment_id
1645 comment_id = comment.comment_id
1643 data[comment_id] = {
1646 data[comment_id] = {
1644 'target_id': target_elem_id
1647 'target_id': target_elem_id
1645 }
1648 }
1646 Session().flush()
1649 Session().flush()
1647
1650
1648 c.co = comment
1651 c.co = comment
1649 c.at_version_num = None
1652 c.at_version_num = None
1650 c.is_new = True
1653 c.is_new = True
1651 rendered_comment = render(
1654 rendered_comment = render(
1652 'rhodecode:templates/changeset/changeset_comment_block.mako',
1655 'rhodecode:templates/changeset/changeset_comment_block.mako',
1653 self._get_template_context(c), self.request)
1656 self._get_template_context(c), self.request)
1654
1657
1655 data[comment_id].update(comment.get_dict())
1658 data[comment_id].update(comment.get_dict())
1656 data[comment_id].update({'rendered_text': rendered_comment})
1659 data[comment_id].update({'rendered_text': rendered_comment})
1657
1660
1658 Session().commit()
1661 Session().commit()
1659
1662
1660 # skip channelstream for draft comments
1663 # skip channelstream for draft comments
1661 if not all_drafts:
1664 if not all_drafts:
1662 comment_broadcast_channel = channelstream.comment_channel(
1665 comment_broadcast_channel = channelstream.comment_channel(
1663 self.db_repo_name, pull_request_obj=pull_request)
1666 self.db_repo_name, pull_request_obj=pull_request)
1664
1667
1665 comment_data = data
1668 comment_data = data
1666 posted_comment_type = 'inline' if is_inline else 'general'
1669 posted_comment_type = 'inline' if is_inline else 'general'
1667 if len(data) == 1:
1670 if len(data) == 1:
1668 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
1671 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
1669 else:
1672 else:
1670 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
1673 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
1671
1674
1672 channelstream.comment_channelstream_push(
1675 channelstream.comment_channelstream_push(
1673 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
1676 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
1674 comment_data=comment_data)
1677 comment_data=comment_data)
1675
1678
1676 return data
1679 return data
1677
1680
1678 @LoginRequired()
1681 @LoginRequired()
1679 @NotAnonymous()
1682 @NotAnonymous()
1680 @HasRepoPermissionAnyDecorator(
1683 @HasRepoPermissionAnyDecorator(
1681 'repository.read', 'repository.write', 'repository.admin')
1684 'repository.read', 'repository.write', 'repository.admin')
1682 @CSRFRequired()
1685 @CSRFRequired()
1683 def pull_request_comment_create(self):
1686 def pull_request_comment_create(self):
1684 _ = self.request.translate
1687 _ = self.request.translate
1685
1688
1686 pull_request = PullRequest.get_or_404(self.request.matchdict['pull_request_id'])
1689 pull_request = PullRequest.get_or_404(self.request.matchdict['pull_request_id'])
1687
1690
1688 if pull_request.is_closed():
1691 if pull_request.is_closed():
1689 log.debug('comment: forbidden because pull request is closed')
1692 log.debug('comment: forbidden because pull request is closed')
1690 raise HTTPForbidden()
1693 raise HTTPForbidden()
1691
1694
1692 allowed_to_comment = PullRequestModel().check_user_comment(
1695 allowed_to_comment = PullRequestModel().check_user_comment(
1693 pull_request, self._rhodecode_user)
1696 pull_request, self._rhodecode_user)
1694 if not allowed_to_comment:
1697 if not allowed_to_comment:
1695 log.debug('comment: forbidden because pull request is from forbidden repo')
1698 log.debug('comment: forbidden because pull request is from forbidden repo')
1696 raise HTTPForbidden()
1699 raise HTTPForbidden()
1697
1700
1698 comment_data = {
1701 comment_data = {
1699 'comment_type': self.request.POST.get('comment_type'),
1702 'comment_type': self.request.POST.get('comment_type'),
1700 'text': self.request.POST.get('text'),
1703 'text': self.request.POST.get('text'),
1701 'status': self.request.POST.get('changeset_status', None),
1704 'status': self.request.POST.get('changeset_status', None),
1702 'is_draft': self.request.POST.get('draft'),
1705 'is_draft': self.request.POST.get('draft'),
1703 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
1706 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
1704 'close_pull_request': self.request.POST.get('close_pull_request'),
1707 'close_pull_request': self.request.POST.get('close_pull_request'),
1705 'f_path': self.request.POST.get('f_path'),
1708 'f_path': self.request.POST.get('f_path'),
1706 'line': self.request.POST.get('line'),
1709 'line': self.request.POST.get('line'),
1707 }
1710 }
1708 data = self._pull_request_comments_create(pull_request, [comment_data])
1711 data = self._pull_request_comments_create(pull_request, [comment_data])
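# the single comment is wrapped in a list so the shared multi-comment path in
# _pull_request_comments_create() above is reused by this endpoint as well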
1709
1712
1710 return data
1713 return data
1711
1714
1712 @LoginRequired()
1715 @LoginRequired()
1713 @NotAnonymous()
1716 @NotAnonymous()
1714 @HasRepoPermissionAnyDecorator(
1717 @HasRepoPermissionAnyDecorator(
1715 'repository.read', 'repository.write', 'repository.admin')
1718 'repository.read', 'repository.write', 'repository.admin')
1716 @CSRFRequired()
1719 @CSRFRequired()
1717 def pull_request_comment_delete(self):
1720 def pull_request_comment_delete(self):
1718 pull_request = PullRequest.get_or_404(
1721 pull_request = PullRequest.get_or_404(
1719 self.request.matchdict['pull_request_id'])
1722 self.request.matchdict['pull_request_id'])
1720
1723
1721 comment = ChangesetComment.get_or_404(
1724 comment = ChangesetComment.get_or_404(
1722 self.request.matchdict['comment_id'])
1725 self.request.matchdict['comment_id'])
1723 comment_id = comment.comment_id
1726 comment_id = comment.comment_id
1724
1727
1725 if comment.immutable:
1728 if comment.immutable:
1726 # don't allow deleting comments that are immutable
1729 # don't allow deleting comments that are immutable
1727 raise HTTPForbidden()
1730 raise HTTPForbidden()
1728
1731
1729 if pull_request.is_closed():
1732 if pull_request.is_closed():
1730 log.debug('comment: forbidden because pull request is closed')
1733 log.debug('comment: forbidden because pull request is closed')
1731 raise HTTPForbidden()
1734 raise HTTPForbidden()
1732
1735
1733 if not comment:
1736 if not comment:
1734 log.debug('Comment with id:%s not found, skipping', comment_id)
1737 log.debug('Comment with id:%s not found, skipping', comment_id)
1735 # comment already deleted in another call probably
1738 # comment already deleted in another call probably
1736 return True
1739 return True
1737
1740
1738 if comment.pull_request.is_closed():
1741 if comment.pull_request.is_closed():
1739 # don't allow deleting comments on closed pull request
1742 # don't allow deleting comments on closed pull request
1740 raise HTTPForbidden()
1743 raise HTTPForbidden()
1741
1744
1742 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1745 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1743 super_admin = h.HasPermissionAny('hg.admin')()
1746 super_admin = h.HasPermissionAny('hg.admin')()
1744 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1747 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1745 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1748 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1746 comment_repo_admin = is_repo_admin and is_repo_comment
1749 comment_repo_admin = is_repo_admin and is_repo_comment
1747
1750
1748 if super_admin or comment_owner or comment_repo_admin:
1751 if super_admin or comment_owner or comment_repo_admin:
1749 old_calculated_status = comment.pull_request.calculated_review_status()
1752 old_calculated_status = comment.pull_request.calculated_review_status()
1750 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1753 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1751 Session().commit()
1754 Session().commit()
1752 calculated_status = comment.pull_request.calculated_review_status()
1755 calculated_status = comment.pull_request.calculated_review_status()
1753 if old_calculated_status != calculated_status:
1756 if old_calculated_status != calculated_status:
1754 PullRequestModel().trigger_pull_request_hook(
1757 PullRequestModel().trigger_pull_request_hook(
1755 comment.pull_request, self._rhodecode_user, 'review_status_change',
1758 comment.pull_request, self._rhodecode_user, 'review_status_change',
1756 data={'status': calculated_status})
1759 data={'status': calculated_status})
1757 return True
1760 return True
1758 else:
1761 else:
1759 log.warning('No permissions for user %s to delete comment_id: %s',
1762 log.warning('No permissions for user %s to delete comment_id: %s',
1760 self._rhodecode_db_user, comment_id)
1763 self._rhodecode_db_user, comment_id)
1761 raise HTTPNotFound()
1764 raise HTTPNotFound()
1762
1765
1763 @LoginRequired()
1766 @LoginRequired()
1764 @NotAnonymous()
1767 @NotAnonymous()
1765 @HasRepoPermissionAnyDecorator(
1768 @HasRepoPermissionAnyDecorator(
1766 'repository.read', 'repository.write', 'repository.admin')
1769 'repository.read', 'repository.write', 'repository.admin')
1767 @CSRFRequired()
1770 @CSRFRequired()
1768 def pull_request_comment_edit(self):
1771 def pull_request_comment_edit(self):
1769 self.load_default_context()
1772 self.load_default_context()
1770
1773
1771 pull_request = PullRequest.get_or_404(
1774 pull_request = PullRequest.get_or_404(
1772 self.request.matchdict['pull_request_id']
1775 self.request.matchdict['pull_request_id']
1773 )
1776 )
1774 comment = ChangesetComment.get_or_404(
1777 comment = ChangesetComment.get_or_404(
1775 self.request.matchdict['comment_id']
1778 self.request.matchdict['comment_id']
1776 )
1779 )
1777 comment_id = comment.comment_id
1780 comment_id = comment.comment_id
1778
1781
1779 if comment.immutable:
1782 if comment.immutable:
1780 # don't allow editing comments that are immutable
1783 # don't allow editing comments that are immutable
1781 raise HTTPForbidden()
1784 raise HTTPForbidden()
1782
1785
1783 if pull_request.is_closed():
1786 if pull_request.is_closed():
1784 log.debug('comment: forbidden because pull request is closed')
1787 log.debug('comment: forbidden because pull request is closed')
1785 raise HTTPForbidden()
1788 raise HTTPForbidden()
1786
1789
1787 if comment.pull_request.is_closed():
1790 if comment.pull_request.is_closed():
1788 # don't allow deleting comments on closed pull request
1791 # don't allow deleting comments on closed pull request
1789 raise HTTPForbidden()
1792 raise HTTPForbidden()
1790
1793
1791 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1794 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1792 super_admin = h.HasPermissionAny('hg.admin')()
1795 super_admin = h.HasPermissionAny('hg.admin')()
1793 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1796 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1794 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1797 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1795 comment_repo_admin = is_repo_admin and is_repo_comment
1798 comment_repo_admin = is_repo_admin and is_repo_comment
1796
1799
1797 if super_admin or comment_owner or comment_repo_admin:
1800 if super_admin or comment_owner or comment_repo_admin:
1798 text = self.request.POST.get('text')
1801 text = self.request.POST.get('text')
1799 version = self.request.POST.get('version')
1802 version = self.request.POST.get('version')
1800 if text == comment.text:
1803 if text == comment.text:
1801 log.warning(
1804 log.warning(
1802 'Comment(PR): '
1805 'Comment(PR): '
1803 'Trying to create a new version of comment {} '
1806 'Trying to create a new version of comment {} '
1804 'with the same comment body'.format(
1807 'with the same comment body'.format(
1805 comment_id,
1808 comment_id,
1806 )
1809 )
1807 )
1810 )
1808 raise HTTPNotFound()
1811 raise HTTPNotFound()
1809
1812
1810 if version.isdigit():
1813 if version.isdigit():
1811 version = int(version)
1814 version = int(version)
1812 else:
1815 else:
1813 log.warning(
1816 log.warning(
1814 'Comment(PR): Wrong version type {} {} '
1817 'Comment(PR): Wrong version type {} {} '
1815 'for comment {}'.format(
1818 'for comment {}'.format(
1816 version,
1819 version,
1817 type(version),
1820 type(version),
1818 comment_id,
1821 comment_id,
1819 )
1822 )
1820 )
1823 )
1821 raise HTTPNotFound()
1824 raise HTTPNotFound()
1822
1825
1823 try:
1826 try:
1824 comment_history = CommentsModel().edit(
1827 comment_history = CommentsModel().edit(
1825 comment_id=comment_id,
1828 comment_id=comment_id,
1826 text=text,
1829 text=text,
1827 auth_user=self._rhodecode_user,
1830 auth_user=self._rhodecode_user,
1828 version=version,
1831 version=version,
1829 )
1832 )
1830 except CommentVersionMismatch:
1833 except CommentVersionMismatch:
1831 raise HTTPConflict()
1834 raise HTTPConflict()
1832
1835
1833 if not comment_history:
1836 if not comment_history:
1834 raise HTTPNotFound()
1837 raise HTTPNotFound()
1835
1838
1836 Session().commit()
1839 Session().commit()
1837 if not comment.draft:
1840 if not comment.draft:
1838 PullRequestModel().trigger_pull_request_hook(
1841 PullRequestModel().trigger_pull_request_hook(
1839 pull_request, self._rhodecode_user, 'comment_edit',
1842 pull_request, self._rhodecode_user, 'comment_edit',
1840 data={'comment': comment})
1843 data={'comment': comment})
1841
1844
1842 return {
1845 return {
1843 'comment_history_id': comment_history.comment_history_id,
1846 'comment_history_id': comment_history.comment_history_id,
1844 'comment_id': comment.comment_id,
1847 'comment_id': comment.comment_id,
1845 'comment_version': comment_history.version,
1848 'comment_version': comment_history.version,
1846 'comment_author_username': comment_history.author.username,
1849 'comment_author_username': comment_history.author.username,
1847 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1850 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1848 'comment_created_on': h.age_component(comment_history.created_on,
1851 'comment_created_on': h.age_component(comment_history.created_on,
1849 time_is_local=True),
1852 time_is_local=True),
1850 }
1853 }
1851 else:
1854 else:
1852 log.warning('No permissions for user %s to edit comment_id: %s',
1855 log.warning('No permissions for user %s to edit comment_id: %s',
1853 self._rhodecode_db_user, comment_id)
1856 self._rhodecode_db_user, comment_id)
1854 raise HTTPNotFound()
1857 raise HTTPNotFound()
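The edit handler above uses an optimistic version check: the client submits the comment version it last rendered, and CommentsModel().edit() raises CommentVersionMismatch (surfaced as HTTP 409 Conflict) when the stored version has moved on in the meantime. A minimal sketch of that pattern, using an in-memory store and hypothetical names purely for illustration, not the actual model code:

    class CommentVersionMismatch(Exception):
        """Raised when the submitted version is older than the stored one."""

    def edit_comment(comments, comment_id, new_text, expected_version):
        # 'comments' is a hypothetical dict: comment_id -> {'text': ..., 'version': ...}
        comment = comments[comment_id]
        if comment['version'] != expected_version:
            # someone saved a newer version after the client loaded the edit form
            raise CommentVersionMismatch()
        comment['text'] = new_text
        comment['version'] += 1  # every successful edit bumps the version counter
        return comment['version']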
@@ -1,202 +1,202 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25 from whoosh import query
25 from whoosh import query
26
26
27 from rhodecode.tests import (
27 from rhodecode.tests import (
28 TestController, HG_REPO,
28 TestController, route_path_generator, HG_REPO,
29 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
29 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
30 from rhodecode.tests.utils import AssertResponse
30 from rhodecode.tests.utils import AssertResponse
31
31
32
32
33 def route_path(name, **kwargs):
33 def route_path(name, params=None, **kwargs):
34 from rhodecode.apps._base import ADMIN_PREFIX
34 from rhodecode.apps._base import ADMIN_PREFIX
35 return {
35 url_defs = {
36 'search':
36 'search':
37 ADMIN_PREFIX + '/search',
37 ADMIN_PREFIX + '/search',
38 'search_repo':
38 'search_repo':
39 '/{repo_name}/search',
39 '/{repo_name}/search',
40
40 }
41 }[name].format(**kwargs)
41 return route_path_generator(url_defs, name=name, params=params, **kwargs)
42
42
43
43
44 class TestSearchController(TestController):
44 class TestSearchController(TestController):
45
45
46 def test_index(self):
46 def test_index(self):
47 self.log_user()
47 self.log_user()
48 response = self.app.get(route_path('search'))
48 response = self.app.get(route_path('search'))
49 assert_response = response.assert_response()
49 assert_response = response.assert_response()
50 assert_response.one_element_exists('input#q')
50 assert_response.one_element_exists('input#q')
51
51
52 def test_search_files_empty_search(self):
52 def test_search_files_empty_search(self):
53 if os.path.isdir(self.index_location):
53 if os.path.isdir(self.index_location):
54 pytest.skip('skipped due to existing index')
54 pytest.skip('skipped due to existing index')
55 else:
55 else:
56 self.log_user()
56 self.log_user()
57 response = self.app.get(route_path('search'),
57 response = self.app.get(route_path('search'),
58 {'q': HG_REPO})
58 {'q': HG_REPO})
59 response.mustcontain('There is no index to search in. '
59 response.mustcontain('There is no index to search in. '
60 'Please run whoosh indexer')
60 'Please run whoosh indexer')
61
61
62 def test_search_validation(self):
62 def test_search_validation(self):
63 self.log_user()
63 self.log_user()
64 response = self.app.get(route_path('search'),
64 response = self.app.get(route_path('search'),
65 {'q': query, 'type': 'content', 'page_limit': 1000})
65 {'q': query, 'type': 'content', 'page_limit': 1000})
66
66
67 response.mustcontain(
67 response.mustcontain(
68 'page_limit - 1000 is greater than maximum value 500')
68 'page_limit - 1000 is greater than maximum value 500')
69
69
70 @pytest.mark.parametrize("query, expected_hits, expected_paths", [
70 @pytest.mark.parametrize("query, expected_hits, expected_paths", [
71 ('todo', 23, [
71 ('todo', 23, [
72 'vcs/backends/hg/inmemory.py',
72 'vcs/backends/hg/inmemory.py',
73 'vcs/tests/test_git.py']),
73 'vcs/tests/test_git.py']),
74 ('extension:rst installation', 6, [
74 ('extension:rst installation', 6, [
75 'docs/index.rst',
75 'docs/index.rst',
76 'docs/installation.rst']),
76 'docs/installation.rst']),
77 ('def repo', 87, [
77 ('def repo', 87, [
78 'vcs/tests/test_git.py',
78 'vcs/tests/test_git.py',
79 'vcs/tests/test_changesets.py']),
79 'vcs/tests/test_changesets.py']),
80 ('repository:%s def test' % HG_REPO, 18, [
80 ('repository:%s def test' % HG_REPO, 18, [
81 'vcs/tests/test_git.py',
81 'vcs/tests/test_git.py',
82 'vcs/tests/test_changesets.py']),
82 'vcs/tests/test_changesets.py']),
83 ('"def main"', 9, [
83 ('"def main"', 9, [
84 'vcs/__init__.py',
84 'vcs/__init__.py',
85 'vcs/tests/__init__.py',
85 'vcs/tests/__init__.py',
86 'vcs/utils/progressbar.py']),
86 'vcs/utils/progressbar.py']),
87 ('owner:test_admin', 358, [
87 ('owner:test_admin', 358, [
88 'vcs/tests/base.py',
88 'vcs/tests/base.py',
89 'MANIFEST.in',
89 'MANIFEST.in',
90 'vcs/utils/termcolors.py',
90 'vcs/utils/termcolors.py',
91 'docs/theme/ADC/static/documentation.png']),
91 'docs/theme/ADC/static/documentation.png']),
92 ('owner:test_admin def main', 72, [
92 ('owner:test_admin def main', 72, [
93 'vcs/__init__.py',
93 'vcs/__init__.py',
94 'vcs/tests/test_utils_filesize.py',
94 'vcs/tests/test_utils_filesize.py',
95 'vcs/tests/test_cli.py']),
95 'vcs/tests/test_cli.py']),
96 ('owner:michał test', 0, []),
96 ('owner:michał test', 0, []),
97 ])
97 ])
98 def test_search_files(self, query, expected_hits, expected_paths):
98 def test_search_files(self, query, expected_hits, expected_paths):
99 self.log_user()
99 self.log_user()
100 response = self.app.get(route_path('search'),
100 response = self.app.get(route_path('search'),
101 {'q': query, 'type': 'content', 'page_limit': 500})
101 {'q': query, 'type': 'content', 'page_limit': 500})
102
102
103 response.mustcontain('%s results' % expected_hits)
103 response.mustcontain('%s results' % expected_hits)
104 for path in expected_paths:
104 for path in expected_paths:
105 response.mustcontain(path)
105 response.mustcontain(path)
106
106
107 @pytest.mark.parametrize("query, expected_hits, expected_commits", [
107 @pytest.mark.parametrize("query, expected_hits, expected_commits", [
108 ('bother to ask where to fetch repo during tests', 3, [
108 ('bother to ask where to fetch repo during tests', 3, [
109 ('hg', 'a00c1b6f5d7a6ae678fd553a8b81d92367f7ecf1'),
109 ('hg', 'a00c1b6f5d7a6ae678fd553a8b81d92367f7ecf1'),
110 ('git', 'c6eb379775c578a95dad8ddab53f963b80894850'),
110 ('git', 'c6eb379775c578a95dad8ddab53f963b80894850'),
111 ('svn', '98')]),
111 ('svn', '98')]),
112 ('michał', 0, []),
112 ('michał', 0, []),
113 ('changed:tests/utils.py', 36, [
113 ('changed:tests/utils.py', 36, [
114 ('hg', 'a00c1b6f5d7a6ae678fd553a8b81d92367f7ecf1')]),
114 ('hg', 'a00c1b6f5d7a6ae678fd553a8b81d92367f7ecf1')]),
115 ('changed:vcs/utils/archivers.py', 11, [
115 ('changed:vcs/utils/archivers.py', 11, [
116 ('hg', '25213a5fbb048dff8ba65d21e466a835536e5b70'),
116 ('hg', '25213a5fbb048dff8ba65d21e466a835536e5b70'),
117 ('hg', '47aedd538bf616eedcb0e7d630ea476df0e159c7'),
117 ('hg', '47aedd538bf616eedcb0e7d630ea476df0e159c7'),
118 ('hg', 'f5d23247fad4856a1dabd5838afade1e0eed24fb'),
118 ('hg', 'f5d23247fad4856a1dabd5838afade1e0eed24fb'),
119 ('hg', '04ad456aefd6461aea24f90b63954b6b1ce07b3e'),
119 ('hg', '04ad456aefd6461aea24f90b63954b6b1ce07b3e'),
120 ('git', 'c994f0de03b2a0aa848a04fc2c0d7e737dba31fc'),
120 ('git', 'c994f0de03b2a0aa848a04fc2c0d7e737dba31fc'),
121 ('git', 'd1f898326327e20524fe22417c22d71064fe54a1'),
121 ('git', 'd1f898326327e20524fe22417c22d71064fe54a1'),
122 ('git', 'fe568b4081755c12abf6ba673ba777fc02a415f3'),
122 ('git', 'fe568b4081755c12abf6ba673ba777fc02a415f3'),
123 ('git', 'bafe786f0d8c2ff7da5c1dcfcfa577de0b5e92f1')]),
123 ('git', 'bafe786f0d8c2ff7da5c1dcfcfa577de0b5e92f1')]),
124 ('added:README.rst', 3, [
124 ('added:README.rst', 3, [
125 ('hg', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'),
125 ('hg', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'),
126 ('git', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
126 ('git', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
127 ('svn', '8')]),
127 ('svn', '8')]),
128 ('changed:lazy.py', 15, [
128 ('changed:lazy.py', 15, [
129 ('hg', 'eaa291c5e6ae6126a203059de9854ccf7b5baa12'),
129 ('hg', 'eaa291c5e6ae6126a203059de9854ccf7b5baa12'),
130 ('git', '17438a11f72b93f56d0e08e7d1fa79a378578a82'),
130 ('git', '17438a11f72b93f56d0e08e7d1fa79a378578a82'),
131 ('svn', '82'),
131 ('svn', '82'),
132 ('svn', '262'),
132 ('svn', '262'),
133 ('hg', 'f5d23247fad4856a1dabd5838afade1e0eed24fb'),
133 ('hg', 'f5d23247fad4856a1dabd5838afade1e0eed24fb'),
134 ('git', '33fa3223355104431402a888fa77a4e9956feb3e')
134 ('git', '33fa3223355104431402a888fa77a4e9956feb3e')
135 ]),
135 ]),
136 ('author:marcin@python-blog.com '
136 ('author:marcin@python-blog.com '
137 'commit_id:b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [
137 'commit_id:b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [
138 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
138 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
139 ('b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [
139 ('b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [
140 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
140 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
141 ('b986218b', 1, [
141 ('b986218b', 1, [
142 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
142 ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]),
143 ])
143 ])
144 def test_search_commit_messages(
144 def test_search_commit_messages(
145 self, query, expected_hits, expected_commits, enabled_backends):
145 self, query, expected_hits, expected_commits, enabled_backends):
146 self.log_user()
146 self.log_user()
147 response = self.app.get(route_path('search'),
147 response = self.app.get(route_path('search'),
148 {'q': query, 'type': 'commit', 'page_limit': 500})
148 {'q': query, 'type': 'commit', 'page_limit': 500})
149
149
150 response.mustcontain('%s results' % expected_hits)
150 response.mustcontain('%s results' % expected_hits)
151 for backend, commit_id in expected_commits:
151 for backend, commit_id in expected_commits:
152 if backend in enabled_backends:
152 if backend in enabled_backends:
153 response.mustcontain(commit_id)
153 response.mustcontain(commit_id)
154
154
155 @pytest.mark.parametrize("query, expected_hits, expected_paths", [
155 @pytest.mark.parametrize("query, expected_hits, expected_paths", [
156 ('readme.rst', 3, []),
156 ('readme.rst', 3, []),
157 ('test*', 75, []),
157 ('test*', 75, []),
158 ('*model*', 1, []),
158 ('*model*', 1, []),
159 ('extension:rst', 48, []),
159 ('extension:rst', 48, []),
160 ('extension:rst api', 24, []),
160 ('extension:rst api', 24, []),
161 ])
161 ])
162 def test_search_file_paths(self, query, expected_hits, expected_paths):
162 def test_search_file_paths(self, query, expected_hits, expected_paths):
163 self.log_user()
163 self.log_user()
164 response = self.app.get(route_path('search'),
164 response = self.app.get(route_path('search'),
165 {'q': query, 'type': 'path', 'page_limit': 500})
165 {'q': query, 'type': 'path', 'page_limit': 500})
166
166
167 response.mustcontain('%s results' % expected_hits)
167 response.mustcontain('%s results' % expected_hits)
168 for path in expected_paths:
168 for path in expected_paths:
169 response.mustcontain(path)
169 response.mustcontain(path)
170
170
171 def test_search_commit_message_specific_repo(self, backend):
171 def test_search_commit_message_specific_repo(self, backend):
172 self.log_user()
172 self.log_user()
173 response = self.app.get(
173 response = self.app.get(
174 route_path('search_repo', repo_name=backend.repo_name),
174 route_path('search_repo', repo_name=backend.repo_name),
175 {'q': 'bother to ask where to fetch repo during tests',
175 {'q': 'bother to ask where to fetch repo during tests',
176 'type': 'commit'})
176 'type': 'commit'})
177
177
178 response.mustcontain('1 results')
178 response.mustcontain('1 results')
179
179
180 def test_filters_are_not_applied_for_admin_user(self):
180 def test_filters_are_not_applied_for_admin_user(self):
181 self.log_user()
181 self.log_user()
182 with mock.patch('whoosh.searching.Searcher.search') as search_mock:
182 with mock.patch('whoosh.searching.Searcher.search') as search_mock:
183 self.app.get(route_path('search'),
183 self.app.get(route_path('search'),
184 {'q': 'test query', 'type': 'commit'})
184 {'q': 'test query', 'type': 'commit'})
185 assert search_mock.call_count == 1
185 assert search_mock.call_count == 1
186 _, kwargs = search_mock.call_args
186 _, kwargs = search_mock.call_args
187 assert kwargs['filter'] is None
187 assert kwargs['filter'] is None
188
188
189 def test_filters_are_applied_for_normal_user(self, enabled_backends):
189 def test_filters_are_applied_for_normal_user(self, enabled_backends):
190 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
190 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
191 with mock.patch('whoosh.searching.Searcher.search') as search_mock:
191 with mock.patch('whoosh.searching.Searcher.search') as search_mock:
192 self.app.get(route_path('search'),
192 self.app.get(route_path('search'),
193 {'q': 'test query', 'type': 'commit'})
193 {'q': 'test query', 'type': 'commit'})
194 assert search_mock.call_count == 1
194 assert search_mock.call_count == 1
195 _, kwargs = search_mock.call_args
195 _, kwargs = search_mock.call_args
196 assert isinstance(kwargs['filter'], query.Or)
196 assert isinstance(kwargs['filter'], query.Or)
197 expected_repositories = [
197 expected_repositories = [
198 'vcs_test_{}'.format(b) for b in enabled_backends]
198 'vcs_test_{}'.format(b) for b in enabled_backends]
199 queried_repositories = [
199 queried_repositories = [
200 name for type_, name in kwargs['filter'].all_terms()]
200 name for type_, name in kwargs['filter'].all_terms()]
201 for repository in expected_repositories:
201 for repository in expected_repositories:
202 assert repository in queried_repositories
202 assert repository in queried_repositories
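The functional change in this test module is that route_path() now delegates to a shared route_path_generator helper (also added to the imports) and accepts a params argument for query strings. The helper itself is defined outside this hunk; the sketch below is only a rough guess at its shape (query values are not URL-encoded here), not the actual rhodecode.tests implementation:

    def route_path_generator(url_defs, name=None, params=None, **kwargs):
        # Resolve the URL template by route name and fill in the path placeholders.
        url = url_defs[name].format(**kwargs)
        if params:
            # Append query parameters in a stable order; encoding is left out to
            # keep this sketch independent of Python 2/3 urllib differences.
            query = '&'.join('%s=%s' % (k, v) for k, v in sorted(params.items()))
            url = '%s?%s' % (url, query)
        return url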
@@ -1,778 +1,778 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import tempfile
25 import tempfile
26 import time
26 import time
27
27
28 from paste.gzipper import make_gzip_middleware
28 from paste.gzipper import make_gzip_middleware
29 import pyramid.events
29 import pyramid.events
30 from pyramid.wsgi import wsgiapp
30 from pyramid.wsgi import wsgiapp
31 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid.authorization import ACLAuthorizationPolicy
32 from pyramid.config import Configurator
32 from pyramid.config import Configurator
33 from pyramid.settings import asbool, aslist
33 from pyramid.settings import asbool, aslist
34 from pyramid.httpexceptions import (
34 from pyramid.httpexceptions import (
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
36 from pyramid.renderers import render_to_response
36 from pyramid.renderers import render_to_response
37
37
38 from rhodecode.model import meta
38 from rhodecode.model import meta
39 from rhodecode.config import patches
39 from rhodecode.config import patches
40 from rhodecode.config import utils as config_utils
40 from rhodecode.config import utils as config_utils
41 from rhodecode.config.environment import load_pyramid_environment
41 from rhodecode.config.environment import load_pyramid_environment
42
42
43 import rhodecode.events
43 import rhodecode.events
44 from rhodecode.lib.middleware.vcs import VCSMiddleware
44 from rhodecode.lib.middleware.vcs import VCSMiddleware
45 from rhodecode.lib.request import Request
45 from rhodecode.lib.request import Request
46 from rhodecode.lib.vcs import VCSCommunicationError
46 from rhodecode.lib.vcs import VCSCommunicationError
47 from rhodecode.lib.exceptions import VCSServerUnavailable
47 from rhodecode.lib.exceptions import VCSServerUnavailable
48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
49 from rhodecode.lib.middleware.https_fixup import HttpsFixup
49 from rhodecode.lib.middleware.https_fixup import HttpsFixup
50 from rhodecode.lib.celerylib.loader import configure_celery
51 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
52 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
53 from rhodecode.lib.exc_tracking import store_exception
52 from rhodecode.lib.exc_tracking import store_exception
54 from rhodecode.subscribers import (
53 from rhodecode.subscribers import (
55 scan_repositories_if_enabled, write_js_routes_if_enabled,
54 scan_repositories_if_enabled, write_js_routes_if_enabled,
56 write_metadata_if_needed, write_usage_data)
55 write_metadata_if_needed, write_usage_data)
57
56
58
57
59 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
60
59
61
60
62 def is_http_error(response):
61 def is_http_error(response):
63 # error which should have traceback
62 # error which should have traceback
64 return response.status_code > 499
63 return response.status_code > 499
65
64
66
65
67 def should_load_all():
66 def should_load_all():
68 """
67 """
69 Returns whether all application components should be loaded. In some cases
68 Returns whether all application components should be loaded. In some cases
70 it is desirable to skip loading apps for faster shell script execution.
69 it is desirable to skip loading apps for faster shell script execution.
71 """
70 """
72 ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
71 ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
73 if ssh_cmd:
72 if ssh_cmd:
74 return False
73 return False
75
74
76 return True
75 return True
77
76
78
77
79 def make_pyramid_app(global_config, **settings):
78 def make_pyramid_app(global_config, **settings):
80 """
79 """
81 Constructs the WSGI application based on Pyramid.
80 Constructs the WSGI application based on Pyramid.
82
81
83 Specials:
82 Specials:
84
83
85 * The application can also be integrated like a plugin via the call to
84 * The application can also be integrated like a plugin via the call to
86 `includeme`. This is accompanied by the other utility functions which
85 `includeme`. This is accompanied by the other utility functions which
87 are called. Changing this should be done with great care so as not to
86 are called. Changing this should be done with great care so as not to
88 break cases where these fragments are assembled from another place.
87 break cases where these fragments are assembled from another place.
89
88
90 """
89 """
91
90
92 # Allows format-style "{ENV_NAME}" placeholders in the configuration; each one
91 # Allows format-style "{ENV_NAME}" placeholders in the configuration; each one
93 # is replaced by the value of the environment variable "NAME".
92 # is replaced by the value of the environment variable "NAME".
94 start_time = time.time()
93 start_time = time.time()
95 log.info('Pyramid app config starting')
94 log.info('Pyramid app config starting')
96
95
97 debug = asbool(global_config.get('debug'))
96 debug = asbool(global_config.get('debug'))
98 if debug:
97 if debug:
99 enable_debug()
98 enable_debug()
100
99
101 environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}
100 environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}
102
101
103 global_config = _substitute_values(global_config, environ)
102 global_config = _substitute_values(global_config, environ)
104 settings = _substitute_values(settings, environ)
103 settings = _substitute_values(settings, environ)
105
104
106 sanitize_settings_and_apply_defaults(global_config, settings)
105 sanitize_settings_and_apply_defaults(global_config, settings)
107
106
108 config = Configurator(settings=settings)
107 config = Configurator(settings=settings)
109
108
110 # Apply compatibility patches
109 # Apply compatibility patches
111 patches.inspect_getargspec()
110 patches.inspect_getargspec()
112
111
113 load_pyramid_environment(global_config, settings)
112 load_pyramid_environment(global_config, settings)
114
113
115 # Static file view comes first
114 # Static file view comes first
116 includeme_first(config)
115 includeme_first(config)
117
116
118 includeme(config)
117 includeme(config)
119
118
120 pyramid_app = config.make_wsgi_app()
119 pyramid_app = config.make_wsgi_app()
121 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
120 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
122 pyramid_app.config = config
121 pyramid_app.config = config
123
122
124 config.configure_celery(global_config['__file__'])
123 config.configure_celery(global_config['__file__'])
125
124
126 # creating the app uses a connection - return it after we are done
125 # creating the app uses a connection - return it after we are done
127 meta.Session.remove()
126 meta.Session.remove()
128 total_time = time.time() - start_time
127 total_time = time.time() - start_time
129 log.info('Pyramid app `%s` created and configured in %.2fs',
128 log.info('Pyramid app `%s` created and configured in %.2fs',
130 pyramid_app.func_name, total_time)
129 pyramid_app.func_name, total_time)
131
130
132 return pyramid_app
131 return pyramid_app
133
132
134
133
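make_pyramid_app() builds an ENV_<NAME> mapping from os.environ and passes it to _substitute_values(), so an ini value written as "{ENV_DB_URL}" is expanded from the DB_URL environment variable. _substitute_values() is defined outside this hunk; the sketch below only illustrates the substitution idea and simplifies the type handling:

    import os

    def substitute_values(mapping, substitutions):
        # Expand "{ENV_NAME}"-style placeholders in every string value, using the
        # ENV_* dictionary the caller built from os.environ.
        result = {}
        for key, value in mapping.items():
            if isinstance(value, str):
                # raises KeyError if a referenced environment variable is missing
                value = value.format(**substitutions)
            result[key] = value
        return result

    os.environ.setdefault('DB_URL', 'sqlite:///rhodecode.db')
    environ = {'ENV_{}'.format(k): v for k, v in os.environ.items()}
    print(substitute_values({'db_url': '{ENV_DB_URL}'}, environ))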
135 def not_found_view(request):
134 def not_found_view(request):
136 """
135 """
137 This creates the view which should be registered as not-found-view to
136 This creates the view which should be registered as not-found-view to
138 pyramid.
137 pyramid.
139 """
138 """
140
139
141 if not getattr(request, 'vcs_call', None):
140 if not getattr(request, 'vcs_call', None):
142 # handle like regular case with our error_handler
141 # handle like regular case with our error_handler
143 return error_handler(HTTPNotFound(), request)
142 return error_handler(HTTPNotFound(), request)
144
143
145 # handle not found view as a vcs call
144 # handle not found view as a vcs call
146 settings = request.registry.settings
145 settings = request.registry.settings
147 ae_client = getattr(request, 'ae_client', None)
146 ae_client = getattr(request, 'ae_client', None)
148 vcs_app = VCSMiddleware(
147 vcs_app = VCSMiddleware(
149 HTTPNotFound(), request.registry, settings,
148 HTTPNotFound(), request.registry, settings,
150 appenlight_client=ae_client)
149 appenlight_client=ae_client)
151
150
152 return wsgiapp(vcs_app)(None, request)
151 return wsgiapp(vcs_app)(None, request)
153
152
154
153
155 def error_handler(exception, request):
154 def error_handler(exception, request):
156 import rhodecode
155 import rhodecode
157 from rhodecode.lib import helpers
156 from rhodecode.lib import helpers
158
157
159 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
158 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
160
159
161 base_response = HTTPInternalServerError()
160 base_response = HTTPInternalServerError()
162 # prefer original exception for the response since it may have headers set
161 # prefer original exception for the response since it may have headers set
163 if isinstance(exception, HTTPException):
162 if isinstance(exception, HTTPException):
164 base_response = exception
163 base_response = exception
165 elif isinstance(exception, VCSCommunicationError):
164 elif isinstance(exception, VCSCommunicationError):
166 base_response = VCSServerUnavailable()
165 base_response = VCSServerUnavailable()
167
166
168 if is_http_error(base_response):
167 if is_http_error(base_response):
169 log.exception(
168 log.exception(
170 'error occurred handling this request for path: %s', request.path)
169 'error occurred handling this request for path: %s', request.path)
171
170
172 error_explanation = base_response.explanation or str(base_response)
171 error_explanation = base_response.explanation or str(base_response)
173 if base_response.status_code == 404:
172 if base_response.status_code == 404:
174 error_explanation += " Alternatively, you may not have permission to access this page."
173 error_explanation += " Alternatively, you may not have permission to access this page."
175 c = AttributeDict()
174 c = AttributeDict()
176 c.error_message = base_response.status
175 c.error_message = base_response.status
177 c.error_explanation = error_explanation
176 c.error_explanation = error_explanation
178 c.visual = AttributeDict()
177 c.visual = AttributeDict()
179
178
180 c.visual.rhodecode_support_url = (
179 c.visual.rhodecode_support_url = (
181 request.registry.settings.get('rhodecode_support_url') or
180 request.registry.settings.get('rhodecode_support_url') or
182 request.route_url('rhodecode_support')
181 request.route_url('rhodecode_support')
183 )
182 )
184 c.redirect_time = 0
183 c.redirect_time = 0
185 c.rhodecode_name = rhodecode_title
184 c.rhodecode_name = rhodecode_title
186 if not c.rhodecode_name:
185 if not c.rhodecode_name:
187 c.rhodecode_name = 'Rhodecode'
186 c.rhodecode_name = 'Rhodecode'
188
187
189 c.causes = []
188 c.causes = []
190 if is_http_error(base_response):
189 if is_http_error(base_response):
191 c.causes.append('Server is overloaded.')
190 c.causes.append('Server is overloaded.')
192 c.causes.append('Server database connection is lost.')
191 c.causes.append('Server database connection is lost.')
193 c.causes.append('Server experienced an unhandled error.')
192 c.causes.append('Server experienced an unhandled error.')
194
193
195 if hasattr(base_response, 'causes'):
194 if hasattr(base_response, 'causes'):
196 c.causes = base_response.causes
195 c.causes = base_response.causes
197
196
198 c.messages = helpers.flash.pop_messages(request=request)
197 c.messages = helpers.flash.pop_messages(request=request)
199
198
200 exc_info = sys.exc_info()
199 exc_info = sys.exc_info()
201 c.exception_id = id(exc_info)
200 c.exception_id = id(exc_info)
202 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
201 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
203 or base_response.status_code > 499
202 or base_response.status_code > 499
204 c.exception_id_url = request.route_url(
203 c.exception_id_url = request.route_url(
205 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
204 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
206
205
207 if c.show_exception_id:
206 if c.show_exception_id:
208 store_exception(c.exception_id, exc_info)
207 store_exception(c.exception_id, exc_info)
209
208
210 response = render_to_response(
209 response = render_to_response(
211 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
210 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
212 response=base_response)
211 response=base_response)
213
212
214 return response
213 return response
215
214
216
215
217 def includeme_first(config):
216 def includeme_first(config):
218 # redirect automatic browser favicon.ico requests to correct place
217 # redirect automatic browser favicon.ico requests to correct place
219 def favicon_redirect(context, request):
218 def favicon_redirect(context, request):
220 return HTTPFound(
219 return HTTPFound(
221 request.static_path('rhodecode:public/images/favicon.ico'))
220 request.static_path('rhodecode:public/images/favicon.ico'))
222
221
223 config.add_view(favicon_redirect, route_name='favicon')
222 config.add_view(favicon_redirect, route_name='favicon')
224 config.add_route('favicon', '/favicon.ico')
223 config.add_route('favicon', '/favicon.ico')
225
224
226 def robots_redirect(context, request):
225 def robots_redirect(context, request):
227 return HTTPFound(
226 return HTTPFound(
228 request.static_path('rhodecode:public/robots.txt'))
227 request.static_path('rhodecode:public/robots.txt'))
229
228
230 config.add_view(robots_redirect, route_name='robots')
229 config.add_view(robots_redirect, route_name='robots')
231 config.add_route('robots', '/robots.txt')
230 config.add_route('robots', '/robots.txt')
232
231
233 config.add_static_view(
232 config.add_static_view(
234 '_static/deform', 'deform:static')
233 '_static/deform', 'deform:static')
235 config.add_static_view(
234 config.add_static_view(
236 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
235 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
237
236
238
237
239 def includeme(config, auth_resources=None):
238 def includeme(config, auth_resources=None):
239 from rhodecode.lib.celerylib.loader import configure_celery
240 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
240 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
241 settings = config.registry.settings
241 settings = config.registry.settings
242 config.set_request_factory(Request)
242 config.set_request_factory(Request)
243
243
244 # plugin information
244 # plugin information
245 config.registry.rhodecode_plugins = collections.OrderedDict()
245 config.registry.rhodecode_plugins = collections.OrderedDict()
246
246
247 config.add_directive(
247 config.add_directive(
248 'register_rhodecode_plugin', register_rhodecode_plugin)
248 'register_rhodecode_plugin', register_rhodecode_plugin)
249
249
250 config.add_directive('configure_celery', configure_celery)
250 config.add_directive('configure_celery', configure_celery)
251
251
252 if asbool(settings.get('appenlight', 'false')):
252 if asbool(settings.get('appenlight', 'false')):
253 config.include('appenlight_client.ext.pyramid_tween')
253 config.include('appenlight_client.ext.pyramid_tween')
254
254
255 load_all = should_load_all()
255 load_all = should_load_all()
256
256
257 # Includes which are required. The application would fail without them.
257 # Includes which are required. The application would fail without them.
258 config.include('pyramid_mako')
258 config.include('pyramid_mako')
259 config.include('rhodecode.lib.rc_beaker')
259 config.include('rhodecode.lib.rc_beaker')
260 config.include('rhodecode.lib.rc_cache')
260 config.include('rhodecode.lib.rc_cache')
261 config.include('rhodecode.apps._base.navigation')
261 config.include('rhodecode.apps._base.navigation')
262 config.include('rhodecode.apps._base.subscribers')
262 config.include('rhodecode.apps._base.subscribers')
263 config.include('rhodecode.tweens')
263 config.include('rhodecode.tweens')
264 config.include('rhodecode.authentication')
264 config.include('rhodecode.authentication')
265
265
266 if load_all:
266 if load_all:
267 ce_auth_resources = [
267 ce_auth_resources = [
268 'rhodecode.authentication.plugins.auth_crowd',
268 'rhodecode.authentication.plugins.auth_crowd',
269 'rhodecode.authentication.plugins.auth_headers',
269 'rhodecode.authentication.plugins.auth_headers',
270 'rhodecode.authentication.plugins.auth_jasig_cas',
270 'rhodecode.authentication.plugins.auth_jasig_cas',
271 'rhodecode.authentication.plugins.auth_ldap',
271 'rhodecode.authentication.plugins.auth_ldap',
272 'rhodecode.authentication.plugins.auth_pam',
272 'rhodecode.authentication.plugins.auth_pam',
273 'rhodecode.authentication.plugins.auth_rhodecode',
273 'rhodecode.authentication.plugins.auth_rhodecode',
274 'rhodecode.authentication.plugins.auth_token',
274 'rhodecode.authentication.plugins.auth_token',
275 ]
275 ]
276
276
277 # load CE authentication plugins
277 # load CE authentication plugins
278
278
279 if auth_resources:
279 if auth_resources:
280 ce_auth_resources.extend(auth_resources)
280 ce_auth_resources.extend(auth_resources)
281
281
282 for resource in ce_auth_resources:
282 for resource in ce_auth_resources:
283 config.include(resource)
283 config.include(resource)
284
284
285 # Auto discover authentication plugins and include their configuration.
285 # Auto discover authentication plugins and include their configuration.
286 if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
286 if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
287 from rhodecode.authentication import discover_legacy_plugins
287 from rhodecode.authentication import discover_legacy_plugins
288 discover_legacy_plugins(config)
288 discover_legacy_plugins(config)
289
289
290 # apps
290 # apps
291 if load_all:
291 if load_all:
292 config.include('rhodecode.api')
292 config.include('rhodecode.api')
293 config.include('rhodecode.apps._base')
293 config.include('rhodecode.apps._base')
294 config.include('rhodecode.apps.hovercards')
294 config.include('rhodecode.apps.hovercards')
295 config.include('rhodecode.apps.ops')
295 config.include('rhodecode.apps.ops')
296 config.include('rhodecode.apps.channelstream')
296 config.include('rhodecode.apps.channelstream')
297 config.include('rhodecode.apps.file_store')
297 config.include('rhodecode.apps.file_store')
298 config.include('rhodecode.apps.admin')
298 config.include('rhodecode.apps.admin')
299 config.include('rhodecode.apps.login')
299 config.include('rhodecode.apps.login')
300 config.include('rhodecode.apps.home')
300 config.include('rhodecode.apps.home')
301 config.include('rhodecode.apps.journal')
301 config.include('rhodecode.apps.journal')
302
302
303 config.include('rhodecode.apps.repository')
303 config.include('rhodecode.apps.repository')
304 config.include('rhodecode.apps.repo_group')
304 config.include('rhodecode.apps.repo_group')
305 config.include('rhodecode.apps.user_group')
305 config.include('rhodecode.apps.user_group')
306 config.include('rhodecode.apps.search')
306 config.include('rhodecode.apps.search')
307 config.include('rhodecode.apps.user_profile')
307 config.include('rhodecode.apps.user_profile')
308 config.include('rhodecode.apps.user_group_profile')
308 config.include('rhodecode.apps.user_group_profile')
309 config.include('rhodecode.apps.my_account')
309 config.include('rhodecode.apps.my_account')
310 config.include('rhodecode.apps.gist')
310 config.include('rhodecode.apps.gist')
311
311
312 config.include('rhodecode.apps.svn_support')
312 config.include('rhodecode.apps.svn_support')
313 config.include('rhodecode.apps.ssh_support')
313 config.include('rhodecode.apps.ssh_support')
314 config.include('rhodecode.apps.debug_style')
314 config.include('rhodecode.apps.debug_style')
315
315
316 if load_all:
316 if load_all:
317 config.include('rhodecode.integrations')
317 config.include('rhodecode.integrations')
318
318
319 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
319 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
320 config.add_translation_dirs('rhodecode:i18n/')
320 config.add_translation_dirs('rhodecode:i18n/')
321 settings['default_locale_name'] = settings.get('lang', 'en')
321 settings['default_locale_name'] = settings.get('lang', 'en')
322
322
323 # Add subscribers.
323 # Add subscribers.
324 if load_all:
324 if load_all:
325 config.add_subscriber(scan_repositories_if_enabled,
325 config.add_subscriber(scan_repositories_if_enabled,
326 pyramid.events.ApplicationCreated)
326 pyramid.events.ApplicationCreated)
327 config.add_subscriber(write_metadata_if_needed,
327 config.add_subscriber(write_metadata_if_needed,
328 pyramid.events.ApplicationCreated)
328 pyramid.events.ApplicationCreated)
329 config.add_subscriber(write_usage_data,
329 config.add_subscriber(write_usage_data,
330 pyramid.events.ApplicationCreated)
330 pyramid.events.ApplicationCreated)
331 config.add_subscriber(write_js_routes_if_enabled,
331 config.add_subscriber(write_js_routes_if_enabled,
332 pyramid.events.ApplicationCreated)
332 pyramid.events.ApplicationCreated)
333
333
334 # request custom methods
334 # request custom methods
335 config.add_request_method(
335 config.add_request_method(
336 'rhodecode.lib.partial_renderer.get_partial_renderer',
336 'rhodecode.lib.partial_renderer.get_partial_renderer',
337 'get_partial_renderer')
337 'get_partial_renderer')
338
338
339 config.add_request_method(
339 config.add_request_method(
340 'rhodecode.lib.request_counter.get_request_counter',
340 'rhodecode.lib.request_counter.get_request_counter',
341 'request_count')
341 'request_count')
342
342
343 # Set the authorization policy.
343 # Set the authorization policy.
344 authz_policy = ACLAuthorizationPolicy()
344 authz_policy = ACLAuthorizationPolicy()
345 config.set_authorization_policy(authz_policy)
345 config.set_authorization_policy(authz_policy)
346
346
347 # Set the default renderer for HTML templates to mako.
347 # Set the default renderer for HTML templates to mako.
348 config.add_mako_renderer('.html')
348 config.add_mako_renderer('.html')
349
349
350 config.add_renderer(
350 config.add_renderer(
351 name='json_ext',
351 name='json_ext',
352 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
352 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
353
353
354 config.add_renderer(
354 config.add_renderer(
355 name='string_html',
355 name='string_html',
356 factory='rhodecode.lib.string_renderer.html')
356 factory='rhodecode.lib.string_renderer.html')
357
357
358 # include RhodeCode plugins
358 # include RhodeCode plugins
359 includes = aslist(settings.get('rhodecode.includes', []))
359 includes = aslist(settings.get('rhodecode.includes', []))
360 for inc in includes:
360 for inc in includes:
361 config.include(inc)
361 config.include(inc)
362
362
363 # custom not found view; if our pyramid app doesn't know how to handle
363 # custom not found view; if our pyramid app doesn't know how to handle
364 # the request, pass it to the potential VCS handling app
364 # the request, pass it to the potential VCS handling app
365 config.add_notfound_view(not_found_view)
365 config.add_notfound_view(not_found_view)
366 if not settings.get('debugtoolbar.enabled', False):
366 if not settings.get('debugtoolbar.enabled', False):
367 # when the debugtoolbar is disabled, handle all exceptions via the error_handler
367 # when the debugtoolbar is disabled, handle all exceptions via the error_handler
368 config.add_view(error_handler, context=Exception)
368 config.add_view(error_handler, context=Exception)
369
369
370 # all errors including 403/404/50X
370 # all errors including 403/404/50X
371 config.add_view(error_handler, context=HTTPError)
371 config.add_view(error_handler, context=HTTPError)
372
372
373
373
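A small but notable change in includeme() above is that the configure_celery import moves from module level to inside the function, so the Celery loader is only imported once the app is actually being configured. It is then registered with config.add_directive(), which is what makes the later config.configure_celery(...) call in make_pyramid_app() possible. A minimal, self-contained illustration of the directive mechanism, with a stand-in directive body rather than the real Celery loader:

    from pyramid.config import Configurator

    def configure_celery(config, ini_location):
        # Stand-in body: the real loader parses the .ini file and wires up Celery;
        # here we only record the path in the registry settings.
        config.registry.settings['celery.ini_location'] = ini_location

    config = Configurator(settings={})
    config.add_directive('configure_celery', configure_celery)
    # after add_directive(), the function is callable as a Configurator method
    config.configure_celery('/etc/rhodecode/rhodecode.ini')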
374 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
374 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
375 """
375 """
376 Apply outer WSGI middlewares around the application.
376 Apply outer WSGI middlewares around the application.
377 """
377 """
378 registry = config.registry
378 registry = config.registry
379 settings = registry.settings
379 settings = registry.settings
380
380
381 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
381 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
382 pyramid_app = HttpsFixup(pyramid_app, settings)
382 pyramid_app = HttpsFixup(pyramid_app, settings)
383
383
384 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
384 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
385 pyramid_app, settings)
385 pyramid_app, settings)
386 registry.ae_client = _ae_client
386 registry.ae_client = _ae_client
387
387
388 if settings['gzip_responses']:
388 if settings['gzip_responses']:
389 pyramid_app = make_gzip_middleware(
389 pyramid_app = make_gzip_middleware(
390 pyramid_app, settings, compress_level=1)
390 pyramid_app, settings, compress_level=1)
391
391
392 # this should be the outermost middleware in the wsgi stack, since
392 # this should be the outermost middleware in the wsgi stack, since
393 # middleware like Routes makes database calls
393 # middleware like Routes makes database calls
394 def pyramid_app_with_cleanup(environ, start_response):
394 def pyramid_app_with_cleanup(environ, start_response):
395 try:
395 try:
396 return pyramid_app(environ, start_response)
396 return pyramid_app(environ, start_response)
397 finally:
397 finally:
398 # Dispose current database session and rollback uncommitted
398 # Dispose current database session and rollback uncommitted
399 # transactions.
399 # transactions.
400 meta.Session.remove()
400 meta.Session.remove()
401
401
402 # On a single-threaded server with a non-sqlite db we should see
402 # On a single-threaded server with a non-sqlite db we should see
403 # '0 Current Checked out connections' at the end of a request;
403 # '0 Current Checked out connections' at the end of a request;
404 # if not, then something, somewhere is leaving a connection open
404 # if not, then something, somewhere is leaving a connection open
405 pool = meta.Base.metadata.bind.engine.pool
405 pool = meta.Base.metadata.bind.engine.pool
406 log.debug('sa pool status: %s', pool.status())
406 log.debug('sa pool status: %s', pool.status())
407 log.debug('Request processing finalized')
407 log.debug('Request processing finalized')
408
408
409 return pyramid_app_with_cleanup
409 return pyramid_app_with_cleanup
410
410
411
411
412 def sanitize_settings_and_apply_defaults(global_config, settings):
412 def sanitize_settings_and_apply_defaults(global_config, settings):
413 """
413 """
414 Applies settings defaults and does all type conversion.
414 Applies settings defaults and does all type conversion.
415
415
416 We would move all settings parsing and preparation into this place, so that
416 We would move all settings parsing and preparation into this place, so that
417 we have only one place left which deals with this part. The remaining parts
417 we have only one place left which deals with this part. The remaining parts
418 of the application would start to rely fully on well prepared settings.
418 of the application would start to rely fully on well prepared settings.
419
419
420 This piece would later be split up per topic to avoid a big fat monster
420 This piece would later be split up per topic to avoid a big fat monster
421 function.
421 function.
422 """
422 """
423
423
424 settings.setdefault('rhodecode.edition', 'Community Edition')
424 settings.setdefault('rhodecode.edition', 'Community Edition')
425 settings.setdefault('rhodecode.edition_id', 'CE')
425 settings.setdefault('rhodecode.edition_id', 'CE')
426
426
427 if 'mako.default_filters' not in settings:
427 if 'mako.default_filters' not in settings:
428 # set custom default filters if we don't have it defined
428 # set custom default filters if we don't have it defined
429 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
429 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
430 settings['mako.default_filters'] = 'h_filter'
430 settings['mako.default_filters'] = 'h_filter'
431
431
432 if 'mako.directories' not in settings:
432 if 'mako.directories' not in settings:
433 mako_directories = settings.setdefault('mako.directories', [
433 mako_directories = settings.setdefault('mako.directories', [
434 # Base templates of the original application
434 # Base templates of the original application
435 'rhodecode:templates',
435 'rhodecode:templates',
436 ])
436 ])
437 log.debug(
437 log.debug(
438 "Using the following Mako template directories: %s",
438 "Using the following Mako template directories: %s",
439 mako_directories)
439 mako_directories)
440
440
441 # NOTE(marcink): redis 3.X requires a scheme in the connection URL
441 # NOTE(marcink): redis 3.X requires a scheme in the connection URL
442 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
442 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
443 raw_url = settings['beaker.session.url']
443 raw_url = settings['beaker.session.url']
444 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
444 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
445 settings['beaker.session.url'] = 'redis://' + raw_url
445 settings['beaker.session.url'] = 'redis://' + raw_url
446
446
447 # Default includes, possible to change as a user
447 # Default includes, possible to change as a user
448 pyramid_includes = settings.setdefault('pyramid.includes', [])
448 pyramid_includes = settings.setdefault('pyramid.includes', [])
449 log.debug(
449 log.debug(
450 "Using the following pyramid.includes: %s",
450 "Using the following pyramid.includes: %s",
451 pyramid_includes)
451 pyramid_includes)
452
452
453 # TODO: johbo: Re-think this, usually the call to config.include
453 # TODO: johbo: Re-think this, usually the call to config.include
454 # should allow to pass in a prefix.
454 # should allow to pass in a prefix.
455 settings.setdefault('rhodecode.api.url', '/_admin/api')
455 settings.setdefault('rhodecode.api.url', '/_admin/api')
456 settings.setdefault('__file__', global_config.get('__file__'))
456 settings.setdefault('__file__', global_config.get('__file__'))
457
457
458 # Sanitize generic settings.
458 # Sanitize generic settings.
459 _list_setting(settings, 'default_encoding', 'UTF-8')
459 _list_setting(settings, 'default_encoding', 'UTF-8')
460 _bool_setting(settings, 'is_test', 'false')
460 _bool_setting(settings, 'is_test', 'false')
461 _bool_setting(settings, 'gzip_responses', 'false')
461 _bool_setting(settings, 'gzip_responses', 'false')
462
462
463 # Call split out functions that sanitize settings for each topic.
463 # Call split out functions that sanitize settings for each topic.
464 _sanitize_appenlight_settings(settings)
464 _sanitize_appenlight_settings(settings)
465 _sanitize_vcs_settings(settings)
465 _sanitize_vcs_settings(settings)
466 _sanitize_cache_settings(settings)
466 _sanitize_cache_settings(settings)
467
467
468 # configure instance id
468 # configure instance id
469 config_utils.set_instance_id(settings)
469 config_utils.set_instance_id(settings)
470
470
471 return settings
471 return settings
472
472
473
473
474 def enable_debug():
474 def enable_debug():
475 """
475 """
476 Helper to enable debug on running instance
476 Helper to enable debug on running instance
477 :return:
477 :return:
478 """
478 """
479 import tempfile
479 import tempfile
480 import textwrap
480 import textwrap
481 import logging.config
481 import logging.config
482
482
483 ini_template = textwrap.dedent("""
483 ini_template = textwrap.dedent("""
484 #####################################
484 #####################################
485 ### DEBUG LOGGING CONFIGURATION ####
485 ### DEBUG LOGGING CONFIGURATION ####
486 #####################################
486 #####################################
487 [loggers]
487 [loggers]
488 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
488 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
489
489
490 [handlers]
490 [handlers]
491 keys = console, console_sql
491 keys = console, console_sql
492
492
493 [formatters]
493 [formatters]
494 keys = generic, color_formatter, color_formatter_sql
494 keys = generic, color_formatter, color_formatter_sql
495
495
496 #############
496 #############
497 ## LOGGERS ##
497 ## LOGGERS ##
498 #############
498 #############
499 [logger_root]
499 [logger_root]
500 level = NOTSET
500 level = NOTSET
501 handlers = console
501 handlers = console
502
502
503 [logger_sqlalchemy]
503 [logger_sqlalchemy]
504 level = INFO
504 level = INFO
505 handlers = console_sql
505 handlers = console_sql
506 qualname = sqlalchemy.engine
506 qualname = sqlalchemy.engine
507 propagate = 0
507 propagate = 0
508
508
509 [logger_beaker]
509 [logger_beaker]
510 level = DEBUG
510 level = DEBUG
511 handlers =
511 handlers =
512 qualname = beaker.container
512 qualname = beaker.container
513 propagate = 1
513 propagate = 1
514
514
515 [logger_rhodecode]
515 [logger_rhodecode]
516 level = DEBUG
516 level = DEBUG
517 handlers =
517 handlers =
518 qualname = rhodecode
518 qualname = rhodecode
519 propagate = 1
519 propagate = 1
520
520
521 [logger_ssh_wrapper]
521 [logger_ssh_wrapper]
522 level = DEBUG
522 level = DEBUG
523 handlers =
523 handlers =
524 qualname = ssh_wrapper
524 qualname = ssh_wrapper
525 propagate = 1
525 propagate = 1
526
526
527 [logger_celery]
527 [logger_celery]
528 level = DEBUG
528 level = DEBUG
529 handlers =
529 handlers =
530 qualname = celery
530 qualname = celery
531
531
532
532
533 ##############
533 ##############
534 ## HANDLERS ##
534 ## HANDLERS ##
535 ##############
535 ##############
536
536
537 [handler_console]
537 [handler_console]
538 class = StreamHandler
538 class = StreamHandler
539 args = (sys.stderr, )
539 args = (sys.stderr, )
540 level = DEBUG
540 level = DEBUG
541 formatter = color_formatter
541 formatter = color_formatter
542
542
543 [handler_console_sql]
543 [handler_console_sql]
544 # "level = DEBUG" logs SQL queries and results.
544 # "level = DEBUG" logs SQL queries and results.
545 # "level = INFO" logs SQL queries.
545 # "level = INFO" logs SQL queries.
546 # "level = WARN" logs neither. (Recommended for production systems.)
546 # "level = WARN" logs neither. (Recommended for production systems.)
547 class = StreamHandler
547 class = StreamHandler
548 args = (sys.stderr, )
548 args = (sys.stderr, )
549 level = WARN
549 level = WARN
550 formatter = color_formatter_sql
550 formatter = color_formatter_sql
551
551
552 ################
552 ################
553 ## FORMATTERS ##
553 ## FORMATTERS ##
554 ################
554 ################
555
555
556 [formatter_generic]
556 [formatter_generic]
557 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
557 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
558 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
558 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
559 datefmt = %Y-%m-%d %H:%M:%S
559 datefmt = %Y-%m-%d %H:%M:%S
560
560
561 [formatter_color_formatter]
561 [formatter_color_formatter]
562 class = rhodecode.lib.logging_formatter.ColorRequestTrackingFormatter
562 class = rhodecode.lib.logging_formatter.ColorRequestTrackingFormatter
563 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
563 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | %(req_id)s
564 datefmt = %Y-%m-%d %H:%M:%S
564 datefmt = %Y-%m-%d %H:%M:%S
565
565
566 [formatter_color_formatter_sql]
566 [formatter_color_formatter_sql]
567 class = rhodecode.lib.logging_formatter.ColorFormatterSql
567 class = rhodecode.lib.logging_formatter.ColorFormatterSql
568 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
568 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
569 datefmt = %Y-%m-%d %H:%M:%S
569 datefmt = %Y-%m-%d %H:%M:%S
570 """)
570 """)
571
571
572 with tempfile.NamedTemporaryFile(prefix='rc_debug_logging_', suffix='.ini',
572 with tempfile.NamedTemporaryFile(prefix='rc_debug_logging_', suffix='.ini',
573 delete=False) as f:
573 delete=False) as f:
574 log.info('Saved Temporary DEBUG config at %s', f.name)
574 log.info('Saved Temporary DEBUG config at %s', f.name)
575 f.write(ini_template)
575 f.write(ini_template)
576
576
577 logging.config.fileConfig(f.name)
577 logging.config.fileConfig(f.name)
578 log.debug('DEBUG MODE ON')
578 log.debug('DEBUG MODE ON')
579 os.remove(f.name)
579 os.remove(f.name)
580
580
581
581
582 def _sanitize_appenlight_settings(settings):
582 def _sanitize_appenlight_settings(settings):
583 _bool_setting(settings, 'appenlight', 'false')
583 _bool_setting(settings, 'appenlight', 'false')
584
584
585
585
586 def _sanitize_vcs_settings(settings):
586 def _sanitize_vcs_settings(settings):
587 """
587 """
588 Applies settings defaults and does type conversion for all VCS related
588 Applies settings defaults and does type conversion for all VCS related
589 settings.
589 settings.
590 """
590 """
591 _string_setting(settings, 'vcs.svn.compatible_version', '')
591 _string_setting(settings, 'vcs.svn.compatible_version', '')
592 _string_setting(settings, 'vcs.hooks.protocol', 'http')
592 _string_setting(settings, 'vcs.hooks.protocol', 'http')
593 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
593 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
594 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
594 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
595 _string_setting(settings, 'vcs.server', '')
595 _string_setting(settings, 'vcs.server', '')
596 _string_setting(settings, 'vcs.server.protocol', 'http')
596 _string_setting(settings, 'vcs.server.protocol', 'http')
597 _bool_setting(settings, 'startup.import_repos', 'false')
597 _bool_setting(settings, 'startup.import_repos', 'false')
598 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
598 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
599 _bool_setting(settings, 'vcs.server.enable', 'true')
599 _bool_setting(settings, 'vcs.server.enable', 'true')
600 _bool_setting(settings, 'vcs.start_server', 'false')
600 _bool_setting(settings, 'vcs.start_server', 'false')
601 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
601 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
602 _int_setting(settings, 'vcs.connection_timeout', 3600)
602 _int_setting(settings, 'vcs.connection_timeout', 3600)
603
603
604 # Support legacy values of vcs.scm_app_implementation. Legacy
604 # Support legacy values of vcs.scm_app_implementation. Legacy
605 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' or
605 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' or
606 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
606 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
607 scm_app_impl = settings['vcs.scm_app_implementation']
607 scm_app_impl = settings['vcs.scm_app_implementation']
608 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
608 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
609 settings['vcs.scm_app_implementation'] = 'http'
609 settings['vcs.scm_app_implementation'] = 'http'
610
610
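# --- editor's sketch, not part of the original module ---
# Illustrates the normalisation performed by _sanitize_vcs_settings(): legacy
# scm_app values are rewritten to 'http' and all other keys get typed defaults.
def _demo_vcs_defaults():
    settings = {'vcs.scm_app_implementation': 'vcsserver.scm_app'}
    _sanitize_vcs_settings(settings)
    assert settings['vcs.scm_app_implementation'] == 'http'
    assert settings['vcs.connection_timeout'] == 3600  # coerced to int
    assert settings['vcs.server.enable'] is True        # coerced to bool
    return settings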
611
611
612 def _sanitize_cache_settings(settings):
612 def _sanitize_cache_settings(settings):
613 temp_store = tempfile.gettempdir()
613 temp_store = tempfile.gettempdir()
614 default_cache_dir = os.path.join(temp_store, 'rc_cache')
614 default_cache_dir = os.path.join(temp_store, 'rc_cache')
615
615
616 # save the default cache dir and use it for all backends later.
616 # save the default cache dir and use it for all backends later.
617 default_cache_dir = _string_setting(
617 default_cache_dir = _string_setting(
618 settings,
618 settings,
619 'cache_dir',
619 'cache_dir',
620 default_cache_dir, lower=False, default_when_empty=True)
620 default_cache_dir, lower=False, default_when_empty=True)
621
621
622 # ensure we have our dir created
622 # ensure we have our dir created
623 if not os.path.isdir(default_cache_dir):
623 if not os.path.isdir(default_cache_dir):
624 os.makedirs(default_cache_dir, mode=0o755)
624 os.makedirs(default_cache_dir, mode=0o755)
625
625
626 # exception store cache
626 # exception store cache
627 _string_setting(
627 _string_setting(
628 settings,
628 settings,
629 'exception_tracker.store_path',
629 'exception_tracker.store_path',
630 temp_store, lower=False, default_when_empty=True)
630 temp_store, lower=False, default_when_empty=True)
631 _bool_setting(
631 _bool_setting(
632 settings,
632 settings,
633 'exception_tracker.send_email',
633 'exception_tracker.send_email',
634 'false')
634 'false')
635 _string_setting(
635 _string_setting(
636 settings,
636 settings,
637 'exception_tracker.email_prefix',
637 'exception_tracker.email_prefix',
638 '[RHODECODE ERROR]', lower=False, default_when_empty=True)
638 '[RHODECODE ERROR]', lower=False, default_when_empty=True)
639
639
640 # cache_perms
640 # cache_perms
641 _string_setting(
641 _string_setting(
642 settings,
642 settings,
643 'rc_cache.cache_perms.backend',
643 'rc_cache.cache_perms.backend',
644 'dogpile.cache.rc.file_namespace', lower=False)
644 'dogpile.cache.rc.file_namespace', lower=False)
645 _int_setting(
645 _int_setting(
646 settings,
646 settings,
647 'rc_cache.cache_perms.expiration_time',
647 'rc_cache.cache_perms.expiration_time',
648 60)
648 60)
649 _string_setting(
649 _string_setting(
650 settings,
650 settings,
651 'rc_cache.cache_perms.arguments.filename',
651 'rc_cache.cache_perms.arguments.filename',
652 os.path.join(default_cache_dir, 'rc_cache_1'), lower=False)
652 os.path.join(default_cache_dir, 'rc_cache_1'), lower=False)
653
653
654 # cache_repo
654 # cache_repo
655 _string_setting(
655 _string_setting(
656 settings,
656 settings,
657 'rc_cache.cache_repo.backend',
657 'rc_cache.cache_repo.backend',
658 'dogpile.cache.rc.file_namespace', lower=False)
658 'dogpile.cache.rc.file_namespace', lower=False)
659 _int_setting(
659 _int_setting(
660 settings,
660 settings,
661 'rc_cache.cache_repo.expiration_time',
661 'rc_cache.cache_repo.expiration_time',
662 60)
662 60)
663 _string_setting(
663 _string_setting(
664 settings,
664 settings,
665 'rc_cache.cache_repo.arguments.filename',
665 'rc_cache.cache_repo.arguments.filename',
666 os.path.join(default_cache_dir, 'rc_cache_2'), lower=False)
666 os.path.join(default_cache_dir, 'rc_cache_2'), lower=False)
667
667
668 # cache_license
668 # cache_license
669 _string_setting(
669 _string_setting(
670 settings,
670 settings,
671 'rc_cache.cache_license.backend',
671 'rc_cache.cache_license.backend',
672 'dogpile.cache.rc.file_namespace', lower=False)
672 'dogpile.cache.rc.file_namespace', lower=False)
673 _int_setting(
673 _int_setting(
674 settings,
674 settings,
675 'rc_cache.cache_license.expiration_time',
675 'rc_cache.cache_license.expiration_time',
676 5*60)
676 5*60)
677 _string_setting(
677 _string_setting(
678 settings,
678 settings,
679 'rc_cache.cache_license.arguments.filename',
679 'rc_cache.cache_license.arguments.filename',
680 os.path.join(default_cache_dir, 'rc_cache_3'), lower=False)
680 os.path.join(default_cache_dir, 'rc_cache_3'), lower=False)
681
681
682 # cache_repo_longterm memory, 96H
682 # cache_repo_longterm memory, 96H
683 _string_setting(
683 _string_setting(
684 settings,
684 settings,
685 'rc_cache.cache_repo_longterm.backend',
685 'rc_cache.cache_repo_longterm.backend',
686 'dogpile.cache.rc.memory_lru', lower=False)
686 'dogpile.cache.rc.memory_lru', lower=False)
687 _int_setting(
687 _int_setting(
688 settings,
688 settings,
689 'rc_cache.cache_repo_longterm.expiration_time',
689 'rc_cache.cache_repo_longterm.expiration_time',
690 345600)
690 345600)
691 _int_setting(
691 _int_setting(
692 settings,
692 settings,
693 'rc_cache.cache_repo_longterm.max_size',
693 'rc_cache.cache_repo_longterm.max_size',
694 10000)
694 10000)
695
695
696 # sql_cache_short
696 # sql_cache_short
697 _string_setting(
697 _string_setting(
698 settings,
698 settings,
699 'rc_cache.sql_cache_short.backend',
699 'rc_cache.sql_cache_short.backend',
700 'dogpile.cache.rc.memory_lru', lower=False)
700 'dogpile.cache.rc.memory_lru', lower=False)
701 _int_setting(
701 _int_setting(
702 settings,
702 settings,
703 'rc_cache.sql_cache_short.expiration_time',
703 'rc_cache.sql_cache_short.expiration_time',
704 30)
704 30)
705 _int_setting(
705 _int_setting(
706 settings,
706 settings,
707 'rc_cache.sql_cache_short.max_size',
707 'rc_cache.sql_cache_short.max_size',
708 10000)
708 10000)
709
709
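# --- editor's sketch, not part of the original module ---
# With an empty configuration, _sanitize_cache_settings() fills in file-namespace
# dogpile backends rooted at <tempdir>/rc_cache (creating that directory as a
# side effect) plus memory_lru backends for the long-term and sql caches.
def _demo_cache_defaults():
    settings = {}
    _sanitize_cache_settings(settings)
    assert settings['rc_cache.cache_perms.backend'] == 'dogpile.cache.rc.file_namespace'
    assert settings['rc_cache.sql_cache_short.expiration_time'] == 30
    return settings['cache_dir']  # e.g. /tmp/rc_cache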
710
710
711 def _int_setting(settings, name, default):
711 def _int_setting(settings, name, default):
712 settings[name] = int(settings.get(name, default))
712 settings[name] = int(settings.get(name, default))
713 return settings[name]
713 return settings[name]
714
714
715
715
716 def _bool_setting(settings, name, default):
716 def _bool_setting(settings, name, default):
717 input_val = settings.get(name, default)
717 input_val = settings.get(name, default)
718 if isinstance(input_val, unicode):
718 if isinstance(input_val, unicode):
719 input_val = input_val.encode('utf8')
719 input_val = input_val.encode('utf8')
720 settings[name] = asbool(input_val)
720 settings[name] = asbool(input_val)
721 return settings[name]
721 return settings[name]
722
722
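# --- editor's sketch, not part of the original module ---
# _bool_setting() coerces the usual .ini string spellings of a boolean
# ('true'/'false', 'on'/'off', '1'/'0') via the asbool helper.
def _demo_bool_setting():
    settings = {'vcs.server.enable': 'off'}
    assert _bool_setting(settings, 'vcs.server.enable', 'true') is False
    assert _bool_setting(settings, 'missing.key', 'true') is True
    return settings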
723
723
724 def _list_setting(settings, name, default):
724 def _list_setting(settings, name, default):
725 raw_value = settings.get(name, default)
725 raw_value = settings.get(name, default)
726
726
727 old_separator = ','
727 old_separator = ','
728 if old_separator in raw_value:
728 if old_separator in raw_value:
729 # If we get a comma separated list, pass it to our own function.
729 # If we get a comma separated list, pass it to our own function.
730 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
730 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
731 else:
731 else:
732 # Otherwise we assume it uses Pyramid's space/newline separation.
732 # Otherwise we assume it uses Pyramid's space/newline separation.
733 settings[name] = aslist(raw_value)
733 settings[name] = aslist(raw_value)
734 return settings[name]
734 return settings[name]
735
735
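# --- editor's sketch, not part of the original module ---
# Comma-separated values go through rhodecode_aslist(), anything else falls back
# to Pyramid's aslist() (space/newline separated); either way a list comes out.
def _demo_list_setting():
    comma = {'vcs.backends': 'hg,git'}
    spaces = {'vcs.backends': 'hg git svn'}
    _list_setting(comma, 'vcs.backends', 'hg, git, svn')
    _list_setting(spaces, 'vcs.backends', 'hg, git, svn')
    return comma['vcs.backends'], spaces['vcs.backends']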
736
736
737 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
737 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
738 value = settings.get(name, default)
738 value = settings.get(name, default)
739
739
740 if default_when_empty and not value:
740 if default_when_empty and not value:
741 # use default value when value is empty
741 # use default value when value is empty
742 value = default
742 value = default
743
743
744 if lower:
744 if lower:
745 value = value.lower()
745 value = value.lower()
746 settings[name] = value
746 settings[name] = value
747 return settings[name]
747 return settings[name]
748
748
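# --- editor's sketch, not part of the original module ---
# _string_setting() lower-cases values unless lower=False, and an empty value
# only falls back to the default when default_when_empty=True.
def _demo_string_setting():
    settings = {'vcs.hooks.protocol': 'HTTP', 'cache_dir': ''}
    assert _string_setting(settings, 'vcs.hooks.protocol', 'http') == 'http'
    assert _string_setting(
        settings, 'cache_dir', '/tmp/rc_cache',
        lower=False, default_when_empty=True) == '/tmp/rc_cache'
    return settings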
749
749
750 def _substitute_values(mapping, substitutions):
750 def _substitute_values(mapping, substitutions):
751 result = {}
751 result = {}
752
752
753 try:
753 try:
754 for key, value in mapping.items():
754 for key, value in mapping.items():
755 # initialize without substitution first
755 # initialize without substitution first
756 result[key] = value
756 result[key] = value
757
757
758 # Note: Cannot use regular replacements, since they would clash
758 # Note: Cannot use regular replacements, since they would clash
759 # with the implementation of ConfigParser. Using "format" instead.
759 # with the implementation of ConfigParser. Using "format" instead.
760 try:
760 try:
761 result[key] = value.format(**substitutions)
761 result[key] = value.format(**substitutions)
762 except KeyError as e:
762 except KeyError as e:
763 env_var = '{}'.format(e.args[0])
763 env_var = '{}'.format(e.args[0])
764
764
765 msg = 'Failed to substitute: `{key}={{{var}}}` with environment entry. ' \
765 msg = 'Failed to substitute: `{key}={{{var}}}` with environment entry. ' \
766 'Make sure your environment has {var} set, or remove this ' \
766 'Make sure your environment has {var} set, or remove this ' \
767 'variable from config file'.format(key=key, var=env_var)
767 'variable from config file'.format(key=key, var=env_var)
768
768
769 if env_var.startswith('ENV_'):
769 if env_var.startswith('ENV_'):
770 raise ValueError(msg)
770 raise ValueError(msg)
771 else:
771 else:
772 log.warning(msg)
772 log.warning(msg)
773
773
774 except ValueError as e:
774 except ValueError as e:
775 log.warning('Failed to substitute ENV variable: %s', e)
775 log.warning('Failed to substitute ENV variable: %s', e)
776 result = mapping
776 result = mapping
777
777
778 return result
778 return result
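# --- editor's sketch, not part of the original module ---
# Placeholders such as {ENV_RC_DB_URL} are filled from `substitutions` (normally
# the process environment). A missing ENV_* key aborts substitution for the whole
# mapping (a warning is logged and the original mapping is returned); other
# missing keys only log a warning and keep the raw value.
def _demo_substitute_values():
    mapping = {'sqlalchemy.db1.url': '{ENV_RC_DB_URL}'}
    ok = _substitute_values(mapping, {'ENV_RC_DB_URL': 'sqlite:///rc.db'})
    broken = _substitute_values(mapping, {})  # warns, returns mapping unchanged
    return ok, broken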
@@ -1,666 +1,678 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database creation, and setup module for RhodeCode Enterprise. Used for creation
22 Database creation, and setup module for RhodeCode Enterprise. Used for creation
23 of database as well as for migration operations
23 of database as well as for migration operations
24 """
24 """
25
25
26 import os
26 import os
27 import sys
27 import sys
28 import time
28 import time
29 import uuid
29 import uuid
30 import logging
30 import logging
31 import getpass
31 import getpass
32 from os.path import dirname as dn, join as jn
32 from os.path import dirname as dn, join as jn
33
33
34 from sqlalchemy.engine import create_engine
34 from sqlalchemy.engine import create_engine
35
35
36 from rhodecode import __dbversion__
36 from rhodecode import __dbversion__
37 from rhodecode.model import init_model
37 from rhodecode.model import init_model
38 from rhodecode.model.user import UserModel
38 from rhodecode.model.user import UserModel
39 from rhodecode.model.db import (
39 from rhodecode.model.db import (
40 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
40 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
41 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
41 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
42 from rhodecode.model.meta import Session, Base
42 from rhodecode.model.meta import Session, Base
43 from rhodecode.model.permission import PermissionModel
43 from rhodecode.model.permission import PermissionModel
44 from rhodecode.model.repo import RepoModel
44 from rhodecode.model.repo import RepoModel
45 from rhodecode.model.repo_group import RepoGroupModel
45 from rhodecode.model.repo_group import RepoGroupModel
46 from rhodecode.model.settings import SettingsModel
46 from rhodecode.model.settings import SettingsModel
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 def notify(msg):
52 def notify(msg):
53 """
53 """
54 Notification for migration messages
54 Notification for migration messages
55 """
55 """
56 ml = len(msg) + (4 * 2)
56 ml = len(msg) + (4 * 2)
57 print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
57 print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
58
58
59
59
60 class DbManage(object):
60 class DbManage(object):
61
61
62 def __init__(self, log_sql, dbconf, root, tests=False,
62 def __init__(self, log_sql, dbconf, root, tests=False,
63 SESSION=None, cli_args=None):
63 SESSION=None, cli_args=None):
64 self.dbname = dbconf.split('/')[-1]
64 self.dbname = dbconf.split('/')[-1]
65 self.tests = tests
65 self.tests = tests
66 self.root = root
66 self.root = root
67 self.dburi = dbconf
67 self.dburi = dbconf
68 self.log_sql = log_sql
68 self.log_sql = log_sql
69 self.db_exists = False
70 self.cli_args = cli_args or {}
69 self.cli_args = cli_args or {}
71 self.init_db(SESSION=SESSION)
70 self.init_db(SESSION=SESSION)
72 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
71 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
73
72
73 def db_exists(self):
74 if not self.sa:
75 self.init_db()
76 try:
77 self.sa.query(RhodeCodeUi)\
78 .filter(RhodeCodeUi.ui_key == '/')\
79 .scalar()
80 return True
81 except Exception:
82 return False
83 finally:
84 self.sa.rollback()
85
74 def get_ask_ok_func(self, param):
86 def get_ask_ok_func(self, param):
75 if param is not None:
87 if param is not None:
76 # return a lambda that ignores its arguments and returns the preset value
88 # return a lambda that ignores its arguments and returns the preset value
77 return lambda *args, **kwargs: param
89 return lambda *args, **kwargs: param
78 else:
90 else:
79 from rhodecode.lib.utils import ask_ok
91 from rhodecode.lib.utils import ask_ok
80 return ask_ok
92 return ask_ok
81
93
82 def init_db(self, SESSION=None):
94 def init_db(self, SESSION=None):
83 if SESSION:
95 if SESSION:
84 self.sa = SESSION
96 self.sa = SESSION
85 else:
97 else:
86 # init new sessions
98 # init new sessions
87 engine = create_engine(self.dburi, echo=self.log_sql)
99 engine = create_engine(self.dburi, echo=self.log_sql)
88 init_model(engine)
100 init_model(engine)
89 self.sa = Session()
101 self.sa = Session()
90
102
91 def create_tables(self, override=False):
103 def create_tables(self, override=False):
92 """
104 """
93 Create an auth database
105 Create an auth database
94 """
106 """
95
107
96 log.info("Existing database with the same name is going to be destroyed.")
108 log.info("Existing database with the same name is going to be destroyed.")
97 log.info("Setup command will run DROP ALL command on that database.")
109 log.info("Setup command will run DROP ALL command on that database.")
98 if self.tests:
110 if self.tests:
99 destroy = True
111 destroy = True
100 else:
112 else:
101 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
113 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
102 if not destroy:
114 if not destroy:
103 log.info('Nothing done.')
115 log.info('Nothing done.')
104 sys.exit(0)
116 sys.exit(0)
105 if destroy:
117 if destroy:
106 Base.metadata.drop_all()
118 Base.metadata.drop_all()
107
119
108 checkfirst = not override
120 checkfirst = not override
109 Base.metadata.create_all(checkfirst=checkfirst)
121 Base.metadata.create_all(checkfirst=checkfirst)
110 log.info('Created tables for %s', self.dbname)
122 log.info('Created tables for %s', self.dbname)
111
123
112 def set_db_version(self):
124 def set_db_version(self):
113 ver = DbMigrateVersion()
125 ver = DbMigrateVersion()
114 ver.version = __dbversion__
126 ver.version = __dbversion__
115 ver.repository_id = 'rhodecode_db_migrations'
127 ver.repository_id = 'rhodecode_db_migrations'
116 ver.repository_path = 'versions'
128 ver.repository_path = 'versions'
117 self.sa.add(ver)
129 self.sa.add(ver)
118 log.info('db version set to: %s', __dbversion__)
130 log.info('db version set to: %s', __dbversion__)
119
131
120 def run_post_migration_tasks(self):
132 def run_post_migration_tasks(self):
121 """
133 """
122 Run various tasks after the migrations have been performed
134 Run various tasks after the migrations have been performed
123 """
135 """
124 # delete cache keys on each upgrade
136 # delete cache keys on each upgrade
125 total = CacheKey.query().count()
137 total = CacheKey.query().count()
126 log.info("Deleting (%s) cache keys now...", total)
138 log.info("Deleting (%s) cache keys now...", total)
127 CacheKey.delete_all_cache()
139 CacheKey.delete_all_cache()
128
140
129 def upgrade(self, version=None):
141 def upgrade(self, version=None):
130 """
142 """
131 Upgrades the given database schema to the given revision,
143 Upgrades the given database schema to the given revision,
132 following all steps needed to perform the upgrade.
144 following all steps needed to perform the upgrade.
133
145
134 """
146 """
135
147
136 from rhodecode.lib.dbmigrate.migrate.versioning import api
148 from rhodecode.lib.dbmigrate.migrate.versioning import api
137 from rhodecode.lib.dbmigrate.migrate.exceptions import \
149 from rhodecode.lib.dbmigrate.migrate.exceptions import \
138 DatabaseNotControlledError
150 DatabaseNotControlledError
139
151
140 if 'sqlite' in self.dburi:
152 if 'sqlite' in self.dburi:
141 print(
153 print(
142 '********************** WARNING **********************\n'
154 '********************** WARNING **********************\n'
143 'Make sure your version of sqlite is at least 3.7.X. \n'
155 'Make sure your version of sqlite is at least 3.7.X. \n'
144 'Earlier versions are known to fail on some migrations\n'
156 'Earlier versions are known to fail on some migrations\n'
145 '*****************************************************\n')
157 '*****************************************************\n')
146
158
147 upgrade = self.ask_ok(
159 upgrade = self.ask_ok(
148 'You are about to perform a database upgrade. Make '
160 'You are about to perform a database upgrade. Make '
149 'sure you have backed up your database. '
161 'sure you have backed up your database. '
150 'Continue ? [y/n]')
162 'Continue ? [y/n]')
151 if not upgrade:
163 if not upgrade:
152 log.info('No upgrade performed')
164 log.info('No upgrade performed')
153 sys.exit(0)
165 sys.exit(0)
154
166
155 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
167 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
156 'rhodecode/lib/dbmigrate')
168 'rhodecode/lib/dbmigrate')
157 db_uri = self.dburi
169 db_uri = self.dburi
158
170
159 if version:
171 if version:
160 DbMigrateVersion.set_version(version)
172 DbMigrateVersion.set_version(version)
161
173
162 try:
174 try:
163 curr_version = api.db_version(db_uri, repository_path)
175 curr_version = api.db_version(db_uri, repository_path)
164 msg = ('Found the current database under version '
176 msg = ('Found the current database under version '
165 'control with version {}'.format(curr_version))
177 'control with version {}'.format(curr_version))
166
178
167 except (RuntimeError, DatabaseNotControlledError):
179 except (RuntimeError, DatabaseNotControlledError):
168 curr_version = 1
180 curr_version = 1
169 msg = ('Current database is not under version control. Setting '
181 msg = ('Current database is not under version control. Setting '
170 'as version %s' % curr_version)
182 'as version %s' % curr_version)
171 api.version_control(db_uri, repository_path, curr_version)
183 api.version_control(db_uri, repository_path, curr_version)
172
184
173 notify(msg)
185 notify(msg)
174
186
175
187
176 if curr_version == __dbversion__:
188 if curr_version == __dbversion__:
177 log.info('This database is already at the newest version')
189 log.info('This database is already at the newest version')
178 sys.exit(0)
190 sys.exit(0)
179
191
180 upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
192 upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
181 notify('attempting to upgrade database from '
193 notify('attempting to upgrade database from '
182 'version %s to version %s' % (curr_version, __dbversion__))
194 'version %s to version %s' % (curr_version, __dbversion__))
183
195
184 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
196 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
185 _step = None
197 _step = None
186 for step in upgrade_steps:
198 for step in upgrade_steps:
187 notify('performing upgrade step %s' % step)
199 notify('performing upgrade step %s' % step)
188 time.sleep(0.5)
200 time.sleep(0.5)
189
201
190 api.upgrade(db_uri, repository_path, step)
202 api.upgrade(db_uri, repository_path, step)
191 self.sa.rollback()
203 self.sa.rollback()
192 notify('schema upgrade for step %s completed' % (step,))
204 notify('schema upgrade for step %s completed' % (step,))
193
205
194 _step = step
206 _step = step
195
207
196 self.run_post_migration_tasks()
208 self.run_post_migration_tasks()
197 notify('upgrade to version %s successful' % _step)
209 notify('upgrade to version %s successful' % _step)
198
210
199 def fix_repo_paths(self):
211 def fix_repo_paths(self):
200 """
212 """
201 Fixes an old RhodeCode version path into a new one without a '*'
213 Fixes an old RhodeCode version path into a new one without a '*'
202 """
214 """
203
215
204 paths = self.sa.query(RhodeCodeUi)\
216 paths = self.sa.query(RhodeCodeUi)\
205 .filter(RhodeCodeUi.ui_key == '/')\
217 .filter(RhodeCodeUi.ui_key == '/')\
206 .scalar()
218 .scalar()
207
219
208 paths.ui_value = paths.ui_value.replace('*', '')
220 paths.ui_value = paths.ui_value.replace('*', '')
209
221
210 try:
222 try:
211 self.sa.add(paths)
223 self.sa.add(paths)
212 self.sa.commit()
224 self.sa.commit()
213 except Exception:
225 except Exception:
214 self.sa.rollback()
226 self.sa.rollback()
215 raise
227 raise
216
228
217 def fix_default_user(self):
229 def fix_default_user(self):
218 """
230 """
219 Fixes an old default user with some 'nicer' default values,
231 Fixes an old default user with some 'nicer' default values,
220 used mostly for anonymous access
232 used mostly for anonymous access
221 """
233 """
222 def_user = self.sa.query(User)\
234 def_user = self.sa.query(User)\
223 .filter(User.username == User.DEFAULT_USER)\
235 .filter(User.username == User.DEFAULT_USER)\
224 .one()
236 .one()
225
237
226 def_user.name = 'Anonymous'
238 def_user.name = 'Anonymous'
227 def_user.lastname = 'User'
239 def_user.lastname = 'User'
228 def_user.email = User.DEFAULT_USER_EMAIL
240 def_user.email = User.DEFAULT_USER_EMAIL
229
241
230 try:
242 try:
231 self.sa.add(def_user)
243 self.sa.add(def_user)
232 self.sa.commit()
244 self.sa.commit()
233 except Exception:
245 except Exception:
234 self.sa.rollback()
246 self.sa.rollback()
235 raise
247 raise
236
248
237 def fix_settings(self):
249 def fix_settings(self):
238 """
250 """
239 Fixes rhodecode settings and adds ga_code key for google analytics
251 Fixes rhodecode settings and adds ga_code key for google analytics
240 """
252 """
241
253
242 hgsettings3 = RhodeCodeSetting('ga_code', '')
254 hgsettings3 = RhodeCodeSetting('ga_code', '')
243
255
244 try:
256 try:
245 self.sa.add(hgsettings3)
257 self.sa.add(hgsettings3)
246 self.sa.commit()
258 self.sa.commit()
247 except Exception:
259 except Exception:
248 self.sa.rollback()
260 self.sa.rollback()
249 raise
261 raise
250
262
251 def create_admin_and_prompt(self):
263 def create_admin_and_prompt(self):
252
264
253 # defaults
265 # defaults
254 defaults = self.cli_args
266 defaults = self.cli_args
255 username = defaults.get('username')
267 username = defaults.get('username')
256 password = defaults.get('password')
268 password = defaults.get('password')
257 email = defaults.get('email')
269 email = defaults.get('email')
258
270
259 if username is None:
271 if username is None:
260 username = raw_input('Specify admin username:')
272 username = raw_input('Specify admin username:')
261 if password is None:
273 if password is None:
262 password = self._get_admin_password()
274 password = self._get_admin_password()
263 if not password:
275 if not password:
264 # second try
276 # second try
265 password = self._get_admin_password()
277 password = self._get_admin_password()
266 if not password:
278 if not password:
267 sys.exit()
279 sys.exit()
268 if email is None:
280 if email is None:
269 email = raw_input('Specify admin email:')
281 email = raw_input('Specify admin email:')
270 api_key = self.cli_args.get('api_key')
282 api_key = self.cli_args.get('api_key')
271 self.create_user(username, password, email, True,
283 self.create_user(username, password, email, True,
272 strict_creation_check=False,
284 strict_creation_check=False,
273 api_key=api_key)
285 api_key=api_key)
274
286
275 def _get_admin_password(self):
287 def _get_admin_password(self):
276 password = getpass.getpass('Specify admin password '
288 password = getpass.getpass('Specify admin password '
277 '(min 6 chars):')
289 '(min 6 chars):')
278 confirm = getpass.getpass('Confirm password:')
290 confirm = getpass.getpass('Confirm password:')
279
291
280 if password != confirm:
292 if password != confirm:
281 log.error('passwords mismatch')
293 log.error('passwords mismatch')
282 return False
294 return False
283 if len(password) < 6:
295 if len(password) < 6:
284 log.error('password is too short - use at least 6 characters')
296 log.error('password is too short - use at least 6 characters')
285 return False
297 return False
286
298
287 return password
299 return password
288
300
289 def create_test_admin_and_users(self):
301 def create_test_admin_and_users(self):
290 log.info('creating admin and regular test users')
302 log.info('creating admin and regular test users')
291 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
303 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
292 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
304 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
293 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
305 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
294 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
306 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
295 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
307 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
296
308
297 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
309 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
298 TEST_USER_ADMIN_EMAIL, True, api_key=True)
310 TEST_USER_ADMIN_EMAIL, True, api_key=True)
299
311
300 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
312 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
301 TEST_USER_REGULAR_EMAIL, False, api_key=True)
313 TEST_USER_REGULAR_EMAIL, False, api_key=True)
302
314
303 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
315 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
304 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
316 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
305
317
306 def create_ui_settings(self, repo_store_path):
318 def create_ui_settings(self, repo_store_path):
307 """
319 """
308 Creates ui settings, fills out hooks
320 Creates ui settings, fills out hooks
309 and disables dotencode
321 and disables dotencode
310 """
322 """
311 settings_model = SettingsModel(sa=self.sa)
323 settings_model = SettingsModel(sa=self.sa)
312 from rhodecode.lib.vcs.backends.hg import largefiles_store
324 from rhodecode.lib.vcs.backends.hg import largefiles_store
313 from rhodecode.lib.vcs.backends.git import lfs_store
325 from rhodecode.lib.vcs.backends.git import lfs_store
314
326
315 # Build HOOKS
327 # Build HOOKS
316 hooks = [
328 hooks = [
317 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
329 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
318
330
319 # HG
331 # HG
320 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
332 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
321 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
333 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
322 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
334 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
323 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
335 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
324 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
336 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
325 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
337 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
326
338
327 ]
339 ]
328
340
329 for key, value in hooks:
341 for key, value in hooks:
330 hook_obj = settings_model.get_ui_by_key(key)
342 hook_obj = settings_model.get_ui_by_key(key)
331 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
343 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
332 hooks2.ui_section = 'hooks'
344 hooks2.ui_section = 'hooks'
333 hooks2.ui_key = key
345 hooks2.ui_key = key
334 hooks2.ui_value = value
346 hooks2.ui_value = value
335 self.sa.add(hooks2)
347 self.sa.add(hooks2)
336
348
337 # enable largefiles
349 # enable largefiles
338 largefiles = RhodeCodeUi()
350 largefiles = RhodeCodeUi()
339 largefiles.ui_section = 'extensions'
351 largefiles.ui_section = 'extensions'
340 largefiles.ui_key = 'largefiles'
352 largefiles.ui_key = 'largefiles'
341 largefiles.ui_value = ''
353 largefiles.ui_value = ''
342 self.sa.add(largefiles)
354 self.sa.add(largefiles)
343
355
344 # set default largefiles cache dir, defaults to
356 # set default largefiles cache dir, defaults to
345 # /repo_store_location/.cache/largefiles
357 # /repo_store_location/.cache/largefiles
346 largefiles = RhodeCodeUi()
358 largefiles = RhodeCodeUi()
347 largefiles.ui_section = 'largefiles'
359 largefiles.ui_section = 'largefiles'
348 largefiles.ui_key = 'usercache'
360 largefiles.ui_key = 'usercache'
349 largefiles.ui_value = largefiles_store(repo_store_path)
361 largefiles.ui_value = largefiles_store(repo_store_path)
350
362
351 self.sa.add(largefiles)
363 self.sa.add(largefiles)
352
364
353 # set default lfs cache dir, defaults to
365 # set default lfs cache dir, defaults to
354 # /repo_store_location/.cache/lfs_store
366 # /repo_store_location/.cache/lfs_store
355 lfsstore = RhodeCodeUi()
367 lfsstore = RhodeCodeUi()
356 lfsstore.ui_section = 'vcs_git_lfs'
368 lfsstore.ui_section = 'vcs_git_lfs'
357 lfsstore.ui_key = 'store_location'
369 lfsstore.ui_key = 'store_location'
358 lfsstore.ui_value = lfs_store(repo_store_path)
370 lfsstore.ui_value = lfs_store(repo_store_path)
359
371
360 self.sa.add(lfsstore)
372 self.sa.add(lfsstore)
361
373
362 # register the hgsubversion extension, disabled by default
374 # register the hgsubversion extension, disabled by default
363 hgsubversion = RhodeCodeUi()
375 hgsubversion = RhodeCodeUi()
364 hgsubversion.ui_section = 'extensions'
376 hgsubversion.ui_section = 'extensions'
365 hgsubversion.ui_key = 'hgsubversion'
377 hgsubversion.ui_key = 'hgsubversion'
366 hgsubversion.ui_value = ''
378 hgsubversion.ui_value = ''
367 hgsubversion.ui_active = False
379 hgsubversion.ui_active = False
368 self.sa.add(hgsubversion)
380 self.sa.add(hgsubversion)
369
381
370 # register the hgevolve extension, disabled by default
382 # register the hgevolve extension, disabled by default
371 hgevolve = RhodeCodeUi()
383 hgevolve = RhodeCodeUi()
372 hgevolve.ui_section = 'extensions'
384 hgevolve.ui_section = 'extensions'
373 hgevolve.ui_key = 'evolve'
385 hgevolve.ui_key = 'evolve'
374 hgevolve.ui_value = ''
386 hgevolve.ui_value = ''
375 hgevolve.ui_active = False
387 hgevolve.ui_active = False
376 self.sa.add(hgevolve)
388 self.sa.add(hgevolve)
377
389
378 hgevolve = RhodeCodeUi()
390 hgevolve = RhodeCodeUi()
379 hgevolve.ui_section = 'experimental'
391 hgevolve.ui_section = 'experimental'
380 hgevolve.ui_key = 'evolution'
392 hgevolve.ui_key = 'evolution'
381 hgevolve.ui_value = ''
393 hgevolve.ui_value = ''
382 hgevolve.ui_active = False
394 hgevolve.ui_active = False
383 self.sa.add(hgevolve)
395 self.sa.add(hgevolve)
384
396
385 hgevolve = RhodeCodeUi()
397 hgevolve = RhodeCodeUi()
386 hgevolve.ui_section = 'experimental'
398 hgevolve.ui_section = 'experimental'
387 hgevolve.ui_key = 'evolution.exchange'
399 hgevolve.ui_key = 'evolution.exchange'
388 hgevolve.ui_value = ''
400 hgevolve.ui_value = ''
389 hgevolve.ui_active = False
401 hgevolve.ui_active = False
390 self.sa.add(hgevolve)
402 self.sa.add(hgevolve)
391
403
392 hgevolve = RhodeCodeUi()
404 hgevolve = RhodeCodeUi()
393 hgevolve.ui_section = 'extensions'
405 hgevolve.ui_section = 'extensions'
394 hgevolve.ui_key = 'topic'
406 hgevolve.ui_key = 'topic'
395 hgevolve.ui_value = ''
407 hgevolve.ui_value = ''
396 hgevolve.ui_active = False
408 hgevolve.ui_active = False
397 self.sa.add(hgevolve)
409 self.sa.add(hgevolve)
398
410
399 # register the hggit extension, disabled by default
411 # register the hggit extension, disabled by default
400 hggit = RhodeCodeUi()
412 hggit = RhodeCodeUi()
401 hggit.ui_section = 'extensions'
413 hggit.ui_section = 'extensions'
402 hggit.ui_key = 'hggit'
414 hggit.ui_key = 'hggit'
403 hggit.ui_value = ''
415 hggit.ui_value = ''
404 hggit.ui_active = False
416 hggit.ui_active = False
405 self.sa.add(hggit)
417 self.sa.add(hggit)
406
418
407 # set svn branch defaults
419 # set svn branch defaults
408 branches = ["/branches/*", "/trunk"]
420 branches = ["/branches/*", "/trunk"]
409 tags = ["/tags/*"]
421 tags = ["/tags/*"]
410
422
411 for branch in branches:
423 for branch in branches:
412 settings_model.create_ui_section_value(
424 settings_model.create_ui_section_value(
413 RhodeCodeUi.SVN_BRANCH_ID, branch)
425 RhodeCodeUi.SVN_BRANCH_ID, branch)
414
426
415 for tag in tags:
427 for tag in tags:
416 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
428 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
417
429
418 def create_auth_plugin_options(self, skip_existing=False):
430 def create_auth_plugin_options(self, skip_existing=False):
419 """
431 """
420 Create default auth plugin settings, and make it active
432 Create default auth plugin settings, and make it active
421
433
422 :param skip_existing:
434 :param skip_existing:
423 """
435 """
424 defaults = [
436 defaults = [
425 ('auth_plugins',
437 ('auth_plugins',
426 'egg:rhodecode-enterprise-ce#token,egg:rhodecode-enterprise-ce#rhodecode',
438 'egg:rhodecode-enterprise-ce#token,egg:rhodecode-enterprise-ce#rhodecode',
427 'list'),
439 'list'),
428
440
429 ('auth_authtoken_enabled',
441 ('auth_authtoken_enabled',
430 'True',
442 'True',
431 'bool'),
443 'bool'),
432
444
433 ('auth_rhodecode_enabled',
445 ('auth_rhodecode_enabled',
434 'True',
446 'True',
435 'bool'),
447 'bool'),
436 ]
448 ]
437 for k, v, t in defaults:
449 for k, v, t in defaults:
438 if (skip_existing and
450 if (skip_existing and
439 SettingsModel().get_setting_by_name(k) is not None):
451 SettingsModel().get_setting_by_name(k) is not None):
440 log.debug('Skipping option %s', k)
452 log.debug('Skipping option %s', k)
441 continue
453 continue
442 setting = RhodeCodeSetting(k, v, t)
454 setting = RhodeCodeSetting(k, v, t)
443 self.sa.add(setting)
455 self.sa.add(setting)
444
456
445 def create_default_options(self, skip_existing=False):
457 def create_default_options(self, skip_existing=False):
446 """Creates default settings"""
458 """Creates default settings"""
447
459
448 for k, v, t in [
460 for k, v, t in [
449 ('default_repo_enable_locking', False, 'bool'),
461 ('default_repo_enable_locking', False, 'bool'),
450 ('default_repo_enable_downloads', False, 'bool'),
462 ('default_repo_enable_downloads', False, 'bool'),
451 ('default_repo_enable_statistics', False, 'bool'),
463 ('default_repo_enable_statistics', False, 'bool'),
452 ('default_repo_private', False, 'bool'),
464 ('default_repo_private', False, 'bool'),
453 ('default_repo_type', 'hg', 'unicode')]:
465 ('default_repo_type', 'hg', 'unicode')]:
454
466
455 if (skip_existing and
467 if (skip_existing and
456 SettingsModel().get_setting_by_name(k) is not None):
468 SettingsModel().get_setting_by_name(k) is not None):
457 log.debug('Skipping option %s', k)
469 log.debug('Skipping option %s', k)
458 continue
470 continue
459 setting = RhodeCodeSetting(k, v, t)
471 setting = RhodeCodeSetting(k, v, t)
460 self.sa.add(setting)
472 self.sa.add(setting)
461
473
462 def fixup_groups(self):
474 def fixup_groups(self):
463 def_usr = User.get_default_user()
475 def_usr = User.get_default_user()
464 for g in RepoGroup.query().all():
476 for g in RepoGroup.query().all():
465 g.group_name = g.get_new_name(g.name)
477 g.group_name = g.get_new_name(g.name)
466 self.sa.add(g)
478 self.sa.add(g)
467 # get default perm
479 # get default perm
468 default = UserRepoGroupToPerm.query()\
480 default = UserRepoGroupToPerm.query()\
469 .filter(UserRepoGroupToPerm.group == g)\
481 .filter(UserRepoGroupToPerm.group == g)\
470 .filter(UserRepoGroupToPerm.user == def_usr)\
482 .filter(UserRepoGroupToPerm.user == def_usr)\
471 .scalar()
483 .scalar()
472
484
473 if default is None:
485 if default is None:
474 log.debug('missing default permission for group %s adding', g)
486 log.debug('missing default permission for group %s adding', g)
475 perm_obj = RepoGroupModel()._create_default_perms(g)
487 perm_obj = RepoGroupModel()._create_default_perms(g)
476 self.sa.add(perm_obj)
488 self.sa.add(perm_obj)
477
489
478 def reset_permissions(self, username):
490 def reset_permissions(self, username):
479 """
491 """
480 Resets permissions to the default state. Useful when old systems had
492 Resets permissions to the default state. Useful when old systems had
481 bad permissions that we must clean up.
493 bad permissions that we must clean up.
482
494
483 :param username:
495 :param username:
484 """
496 """
485 default_user = User.get_by_username(username)
497 default_user = User.get_by_username(username)
486 if not default_user:
498 if not default_user:
487 return
499 return
488
500
489 u2p = UserToPerm.query()\
501 u2p = UserToPerm.query()\
490 .filter(UserToPerm.user == default_user).all()
502 .filter(UserToPerm.user == default_user).all()
491 fixed = False
503 fixed = False
492 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
504 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
493 for p in u2p:
505 for p in u2p:
494 Session().delete(p)
506 Session().delete(p)
495 fixed = True
507 fixed = True
496 self.populate_default_permissions()
508 self.populate_default_permissions()
497 return fixed
509 return fixed
498
510
499 def config_prompt(self, test_repo_path='', retries=3):
511 def config_prompt(self, test_repo_path='', retries=3):
500 defaults = self.cli_args
512 defaults = self.cli_args
501 _path = defaults.get('repos_location')
513 _path = defaults.get('repos_location')
502 if retries == 3:
514 if retries == 3:
503 log.info('Setting up repositories config')
515 log.info('Setting up repositories config')
504
516
505 if _path is not None:
517 if _path is not None:
506 path = _path
518 path = _path
507 elif not self.tests and not test_repo_path:
519 elif not self.tests and not test_repo_path:
508 path = raw_input(
520 path = raw_input(
509 'Enter a valid absolute path to store repositories. '
521 'Enter a valid absolute path to store repositories. '
510 'All repositories in that path will be added automatically:'
522 'All repositories in that path will be added automatically:'
511 )
523 )
512 else:
524 else:
513 path = test_repo_path
525 path = test_repo_path
514 path_ok = True
526 path_ok = True
515
527
516 # check proper dir
528 # check proper dir
517 if not os.path.isdir(path):
529 if not os.path.isdir(path):
518 path_ok = False
530 path_ok = False
519 log.error('Given path %s is not a valid directory', path)
531 log.error('Given path %s is not a valid directory', path)
520
532
521 elif not os.path.isabs(path):
533 elif not os.path.isabs(path):
522 path_ok = False
534 path_ok = False
523 log.error('Given path %s is not an absolute path', path)
535 log.error('Given path %s is not an absolute path', path)
524
536
525 # check if path is at least readable.
537 # check if path is at least readable.
526 if not os.access(path, os.R_OK):
538 if not os.access(path, os.R_OK):
527 path_ok = False
539 path_ok = False
528 log.error('Given path %s is not readable', path)
540 log.error('Given path %s is not readable', path)
529
541
530 # check write access, warn user about non writeable paths
542 # check write access, warn user about non writeable paths
531 elif not os.access(path, os.W_OK) and path_ok:
543 elif not os.access(path, os.W_OK) and path_ok:
532 log.warning('No write permission to given path %s', path)
544 log.warning('No write permission to given path %s', path)
533
545
534 q = ('Given path %s is not writeable, do you want to '
546 q = ('Given path %s is not writeable, do you want to '
535 'continue with read only mode ? [y/n]' % (path,))
547 'continue with read only mode ? [y/n]' % (path,))
536 if not self.ask_ok(q):
548 if not self.ask_ok(q):
537 log.error('Canceled by user')
549 log.error('Canceled by user')
538 sys.exit(-1)
550 sys.exit(-1)
539
551
540 if retries == 0:
552 if retries == 0:
541 sys.exit('max retries reached')
553 sys.exit('max retries reached')
542 if not path_ok:
554 if not path_ok:
543 retries -= 1
555 retries -= 1
544 return self.config_prompt(test_repo_path, retries)
556 return self.config_prompt(test_repo_path, retries)
545
557
546 real_path = os.path.normpath(os.path.realpath(path))
558 real_path = os.path.normpath(os.path.realpath(path))
547
559
548 if real_path != os.path.normpath(path):
560 if real_path != os.path.normpath(path):
549 q = ('Path looks like a symlink, RhodeCode Enterprise will store '
561 q = ('Path looks like a symlink, RhodeCode Enterprise will store '
550 'given path as %s ? [y/n]') % (real_path,)
562 'given path as %s ? [y/n]') % (real_path,)
551 if not self.ask_ok(q):
563 if not self.ask_ok(q):
552 log.error('Canceled by user')
564 log.error('Canceled by user')
553 sys.exit(-1)
565 sys.exit(-1)
554
566
555 return real_path
567 return real_path
556
568
557 def create_settings(self, path):
569 def create_settings(self, path):
558
570
559 self.create_ui_settings(path)
571 self.create_ui_settings(path)
560
572
561 ui_config = [
573 ui_config = [
562 ('web', 'push_ssl', 'False'),
574 ('web', 'push_ssl', 'False'),
563 ('web', 'allow_archive', 'gz zip bz2'),
575 ('web', 'allow_archive', 'gz zip bz2'),
564 ('web', 'allow_push', '*'),
576 ('web', 'allow_push', '*'),
565 ('web', 'baseurl', '/'),
577 ('web', 'baseurl', '/'),
566 ('paths', '/', path),
578 ('paths', '/', path),
567 ('phases', 'publish', 'True')
579 ('phases', 'publish', 'True')
568 ]
580 ]
569 for section, key, value in ui_config:
581 for section, key, value in ui_config:
570 ui_conf = RhodeCodeUi()
582 ui_conf = RhodeCodeUi()
571 setattr(ui_conf, 'ui_section', section)
583 setattr(ui_conf, 'ui_section', section)
572 setattr(ui_conf, 'ui_key', key)
584 setattr(ui_conf, 'ui_key', key)
573 setattr(ui_conf, 'ui_value', value)
585 setattr(ui_conf, 'ui_value', value)
574 self.sa.add(ui_conf)
586 self.sa.add(ui_conf)
575
587
576 # rhodecode app settings
588 # rhodecode app settings
577 settings = [
589 settings = [
578 ('realm', 'RhodeCode', 'unicode'),
590 ('realm', 'RhodeCode', 'unicode'),
579 ('title', '', 'unicode'),
591 ('title', '', 'unicode'),
580 ('pre_code', '', 'unicode'),
592 ('pre_code', '', 'unicode'),
581 ('post_code', '', 'unicode'),
593 ('post_code', '', 'unicode'),
582
594
583 # Visual
595 # Visual
584 ('show_public_icon', True, 'bool'),
596 ('show_public_icon', True, 'bool'),
585 ('show_private_icon', True, 'bool'),
597 ('show_private_icon', True, 'bool'),
586 ('stylify_metatags', False, 'bool'),
598 ('stylify_metatags', False, 'bool'),
587 ('dashboard_items', 100, 'int'),
599 ('dashboard_items', 100, 'int'),
588 ('admin_grid_items', 25, 'int'),
600 ('admin_grid_items', 25, 'int'),
589
601
590 ('markup_renderer', 'markdown', 'unicode'),
602 ('markup_renderer', 'markdown', 'unicode'),
591
603
592 ('show_version', True, 'bool'),
604 ('show_version', True, 'bool'),
593 ('show_revision_number', True, 'bool'),
605 ('show_revision_number', True, 'bool'),
594 ('show_sha_length', 12, 'int'),
606 ('show_sha_length', 12, 'int'),
595
607
596 ('use_gravatar', False, 'bool'),
608 ('use_gravatar', False, 'bool'),
597 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
609 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
598
610
599 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
611 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
600 ('clone_uri_ssh_tmpl', Repository.DEFAULT_CLONE_URI_SSH, 'unicode'),
612 ('clone_uri_ssh_tmpl', Repository.DEFAULT_CLONE_URI_SSH, 'unicode'),
601 ('support_url', '', 'unicode'),
613 ('support_url', '', 'unicode'),
602 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
614 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
603
615
604 # VCS Settings
616 # VCS Settings
605 ('pr_merge_enabled', True, 'bool'),
617 ('pr_merge_enabled', True, 'bool'),
606 ('use_outdated_comments', True, 'bool'),
618 ('use_outdated_comments', True, 'bool'),
607 ('diff_cache', True, 'bool'),
619 ('diff_cache', True, 'bool'),
608 ]
620 ]
609
621
610 for key, val, type_ in settings:
622 for key, val, type_ in settings:
611 sett = RhodeCodeSetting(key, val, type_)
623 sett = RhodeCodeSetting(key, val, type_)
612 self.sa.add(sett)
624 self.sa.add(sett)
613
625
614 self.create_auth_plugin_options()
626 self.create_auth_plugin_options()
615 self.create_default_options()
627 self.create_default_options()
616
628
617 log.info('created ui config')
629 log.info('created ui config')
618
630
619 def create_user(self, username, password, email='', admin=False,
631 def create_user(self, username, password, email='', admin=False,
620 strict_creation_check=True, api_key=None):
632 strict_creation_check=True, api_key=None):
621 log.info('creating user `%s`', username)
633 log.info('creating user `%s`', username)
622 user = UserModel().create_or_update(
634 user = UserModel().create_or_update(
623 username, password, email, firstname=u'RhodeCode', lastname=u'Admin',
635 username, password, email, firstname=u'RhodeCode', lastname=u'Admin',
624 active=True, admin=admin, extern_type="rhodecode",
636 active=True, admin=admin, extern_type="rhodecode",
625 strict_creation_check=strict_creation_check)
637 strict_creation_check=strict_creation_check)
626
638
627 if api_key:
639 if api_key:
628 log.info('setting a new default auth token for user `%s`', username)
640 log.info('setting a new default auth token for user `%s`', username)
629 UserModel().add_auth_token(
641 UserModel().add_auth_token(
630 user=user, lifetime_minutes=-1,
642 user=user, lifetime_minutes=-1,
631 role=UserModel.auth_token_role.ROLE_ALL,
643 role=UserModel.auth_token_role.ROLE_ALL,
632 description=u'BUILTIN TOKEN')
644 description=u'BUILTIN TOKEN')
633
645
634 def create_default_user(self):
646 def create_default_user(self):
635 log.info('creating default user')
647 log.info('creating default user')
636 # create default user for handling default permissions.
648 # create default user for handling default permissions.
637 user = UserModel().create_or_update(username=User.DEFAULT_USER,
649 user = UserModel().create_or_update(username=User.DEFAULT_USER,
638 password=str(uuid.uuid1())[:20],
650 password=str(uuid.uuid1())[:20],
639 email=User.DEFAULT_USER_EMAIL,
651 email=User.DEFAULT_USER_EMAIL,
640 firstname=u'Anonymous',
652 firstname=u'Anonymous',
641 lastname=u'User',
653 lastname=u'User',
642 strict_creation_check=False)
654 strict_creation_check=False)
643 # based on configuration options, activate/deactivate this user, which
655 # based on configuration options, activate/deactivate this user, which
644 # controls anonymous access
656 # controls anonymous access
645 if self.cli_args.get('public_access') is False:
657 if self.cli_args.get('public_access') is False:
646 log.info('Public access disabled')
658 log.info('Public access disabled')
647 user.active = False
659 user.active = False
648 Session().add(user)
660 Session().add(user)
649 Session().commit()
661 Session().commit()
650
662
651 def create_permissions(self):
663 def create_permissions(self):
652 """
664 """
653 Creates all permissions defined in the system
665 Creates all permissions defined in the system
654 """
666 """
655 # module.(access|create|change|delete)_[name]
667 # module.(access|create|change|delete)_[name]
656 # module.(none|read|write|admin)
668 # module.(none|read|write|admin)
657 log.info('creating permissions')
669 log.info('creating permissions')
658 PermissionModel(self.sa).create_permissions()
670 PermissionModel(self.sa).create_permissions()
659
671
660 def populate_default_permissions(self):
672 def populate_default_permissions(self):
661 """
673 """
662 Populate default permissions. It will create only the default
674 Populate default permissions. It will create only the default
663 permissions that are missing, and not alter already defined ones
675 permissions that are missing, and not alter already defined ones
664 """
676 """
665 log.info('creating default user permissions')
677 log.info('creating default user permissions')
666 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
678 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
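# --- editor's usage sketch, not part of the original module ---
# A rough outline of how DbManage might be driven for a fresh install. The URI,
# repository root, and the call order are assumptions for illustration; the real
# setup command may differ, and the repo root is assumed to already exist.
def _demo_dbmanage_setup(db_uri='sqlite:////tmp/rhodecode_demo.db',
                         repo_root='/tmp/repos'):
    dbmanage = DbManage(log_sql=False, dbconf=db_uri, root=repo_root,
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    dbmanage.create_settings(repo_root)
    dbmanage.create_default_user()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    dbmanage.sa.commit()  # persist everything added to the session above
    return dbmanage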
@@ -1,333 +1,333 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import traceback
25 import traceback
26 import threading
26 import threading
27
27
28 from BaseHTTPServer import BaseHTTPRequestHandler
28 from BaseHTTPServer import BaseHTTPRequestHandler
29 from SocketServer import TCPServer
29 from SocketServer import TCPServer
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
32 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
33 from rhodecode.model import meta
33 from rhodecode.model import meta
34 from rhodecode.lib.base import bootstrap_request, bootstrap_config
34 from rhodecode.lib.base import bootstrap_request, bootstrap_config
35 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
36 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.utils2 import AttributeDict
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib import rc_cache
38 from rhodecode.lib import rc_cache
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HooksHttpHandler(BaseHTTPRequestHandler):
43 class HooksHttpHandler(BaseHTTPRequestHandler):
44
44
45 def do_POST(self):
45 def do_POST(self):
46 method, extras = self._read_request()
46 method, extras = self._read_request()
47 txn_id = getattr(self.server, 'txn_id', None)
47 txn_id = getattr(self.server, 'txn_id', None)
48 if txn_id:
48 if txn_id:
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
50 extras['repository'], extras['txn_id'])
50 extras['repository'], extras['txn_id'])
51 computed_txn_id = rc_cache.utils.compute_key_from_params(
51 computed_txn_id = rc_cache.utils.compute_key_from_params(
52 extras['repository'], extras['txn_id'])
52 extras['repository'], extras['txn_id'])
53 if txn_id != computed_txn_id:
53 if txn_id != computed_txn_id:
54 raise Exception(
54 raise Exception(
55 'TXN ID fail: expected {} got {} instead'.format(
55 'TXN ID fail: expected {} got {} instead'.format(
56 txn_id, computed_txn_id))
56 txn_id, computed_txn_id))
57
57
58 try:
58 try:
59 result = self._call_hook(method, extras)
59 result = self._call_hook(method, extras)
60 except Exception as e:
60 except Exception as e:
61 exc_tb = traceback.format_exc()
61 exc_tb = traceback.format_exc()
62 result = {
62 result = {
63 'exception': e.__class__.__name__,
63 'exception': e.__class__.__name__,
64 'exception_traceback': exc_tb,
64 'exception_traceback': exc_tb,
65 'exception_args': e.args
65 'exception_args': e.args
66 }
66 }
67 self._write_response(result)
67 self._write_response(result)
68
68
69 def _read_request(self):
69 def _read_request(self):
70 length = int(self.headers['Content-Length'])
70 length = int(self.headers['Content-Length'])
71 body = self.rfile.read(length).decode('utf-8')
71 body = self.rfile.read(length).decode('utf-8')
72 data = json.loads(body)
72 data = json.loads(body)
73 return data['method'], data['extras']
73 return data['method'], data['extras']
74
74
75 def _write_response(self, result):
75 def _write_response(self, result):
76 self.send_response(200)
76 self.send_response(200)
77 self.send_header("Content-type", "text/json")
77 self.send_header("Content-type", "text/json")
78 self.end_headers()
78 self.end_headers()
79 self.wfile.write(json.dumps(result))
79 self.wfile.write(json.dumps(result))
80
80
81 def _call_hook(self, method, extras):
81 def _call_hook(self, method, extras):
82 hooks = Hooks()
82 hooks = Hooks()
83 try:
83 try:
84 result = getattr(hooks, method)(extras)
84 result = getattr(hooks, method)(extras)
85 finally:
85 finally:
86 meta.Session.remove()
86 meta.Session.remove()
87 return result
87 return result
88
88
89 def log_message(self, format, *args):
89 def log_message(self, format, *args):
90 """
90 """
91 This is an overridden method of BaseHTTPRequestHandler which logs using
91 This is an overridden method of BaseHTTPRequestHandler which logs using
92 the logging library instead of writing directly to stderr.
92 the logging library instead of writing directly to stderr.
93 """
93 """
94
94
95 message = format % args
95 message = format % args
96
96
97 log.debug(
97 log.debug(
98 "%s - - [%s] %s", self.client_address[0],
98 "%s - - [%s] %s", self.client_address[0],
99 self.log_date_time_string(), message)
99 self.log_date_time_string(), message)
100
100
101
101
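As an illustrative sketch (not part of the patch above), this is the wire format that do_POST() accepts: a JSON body with a `method` naming a Hooks method and an `extras` dict. The address and the extras values are assumptions for illustration only.

# Python 2 sketch, matching the codebase; the reply may carry the hook
# result or an exception payload built by do_POST().
import json
import urllib2

payload = json.dumps({
    'method': 'repo_size',   # must name a method on the Hooks class below
    'extras': {'server_url': 'http://localhost:5000', 'username': 'admin',
               'ip': '127.0.0.1', 'user_id': 2, 'repository': 'some-repo'},
})
request = urllib2.Request('http://127.0.0.1:54321/', data=payload)
reply = json.loads(urllib2.urlopen(request).read())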
102 class DummyHooksCallbackDaemon(object):
102 class DummyHooksCallbackDaemon(object):
103 hooks_uri = ''
103 hooks_uri = ''
104
104
105 def __init__(self):
105 def __init__(self):
106 self.hooks_module = Hooks.__module__
106 self.hooks_module = Hooks.__module__
107
107
108 def __enter__(self):
108 def __enter__(self):
109 log.debug('Running `%s` callback daemon', self.__class__.__name__)
109 log.debug('Running `%s` callback daemon', self.__class__.__name__)
110 return self
110 return self
111
111
112 def __exit__(self, exc_type, exc_val, exc_tb):
112 def __exit__(self, exc_type, exc_val, exc_tb):
113 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
113 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
114
114
115
115
116 class ThreadedHookCallbackDaemon(object):
116 class ThreadedHookCallbackDaemon(object):
117
117
118 _callback_thread = None
118 _callback_thread = None
119 _daemon = None
119 _daemon = None
120 _done = False
120 _done = False
121
121
122 def __init__(self, txn_id=None, host=None, port=None):
122 def __init__(self, txn_id=None, host=None, port=None):
123 self._prepare(txn_id=txn_id, host=None, port=port)
123 self._prepare(txn_id=txn_id, host=host, port=port)
124
124
125 def __enter__(self):
125 def __enter__(self):
126 log.debug('Running `%s` callback daemon', self.__class__.__name__)
126 log.debug('Running `%s` callback daemon', self.__class__.__name__)
127 self._run()
127 self._run()
128 return self
128 return self
129
129
130 def __exit__(self, exc_type, exc_val, exc_tb):
130 def __exit__(self, exc_type, exc_val, exc_tb):
131 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
131 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
132 self._stop()
132 self._stop()
133
133
134 def _prepare(self, txn_id=None, host=None, port=None):
134 def _prepare(self, txn_id=None, host=None, port=None):
135 raise NotImplementedError()
135 raise NotImplementedError()
136
136
137 def _run(self):
137 def _run(self):
138 raise NotImplementedError()
138 raise NotImplementedError()
139
139
140 def _stop(self):
140 def _stop(self):
141 raise NotImplementedError()
141 raise NotImplementedError()
142
142
143
143
144 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
144 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
145 """
145 """
146 Context manager which will run a callback daemon in a background thread.
146 Context manager which will run a callback daemon in a background thread.
147 """
147 """
148
148
149 hooks_uri = None
149 hooks_uri = None
150
150
151 # From Python docs: Polling reduces our responsiveness to a shutdown
151 # From Python docs: Polling reduces our responsiveness to a shutdown
152 # request and wastes cpu at all other times.
152 # request and wastes cpu at all other times.
153 POLL_INTERVAL = 0.01
153 POLL_INTERVAL = 0.01
154
154
155 def _prepare(self, txn_id=None, host=None, port=None):
155 def _prepare(self, txn_id=None, host=None, port=None):
156 host = host or '127.0.0.1'
156 host = host or '127.0.0.1'
157 self._done = False
157 self._done = False
158 self._daemon = TCPServer((host, port or 0), HooksHttpHandler)
158 self._daemon = TCPServer((host, port or 0), HooksHttpHandler)
159 _, port = self._daemon.server_address
159 _, port = self._daemon.server_address
160 self.hooks_uri = '{}:{}'.format(host, port)
160 self.hooks_uri = '{}:{}'.format(host, port)
161 self.txn_id = txn_id
161 self.txn_id = txn_id
162 # inject transaction_id for later verification
162 # inject transaction_id for later verification
163 self._daemon.txn_id = self.txn_id
163 self._daemon.txn_id = self.txn_id
164
164
165 log.debug(
165 log.debug(
166 "Preparing HTTP callback daemon at `%s` and registering hook object",
166 "Preparing HTTP callback daemon at `%s` and registering hook object",
167 self.hooks_uri)
167 self.hooks_uri)
168
168
169 def _run(self):
169 def _run(self):
170 log.debug("Running event loop of callback daemon in background thread")
170 log.debug("Running event loop of callback daemon in background thread")
171 callback_thread = threading.Thread(
171 callback_thread = threading.Thread(
172 target=self._daemon.serve_forever,
172 target=self._daemon.serve_forever,
173 kwargs={'poll_interval': self.POLL_INTERVAL})
173 kwargs={'poll_interval': self.POLL_INTERVAL})
174 callback_thread.daemon = True
174 callback_thread.daemon = True
175 callback_thread.start()
175 callback_thread.start()
176 self._callback_thread = callback_thread
176 self._callback_thread = callback_thread
177
177
178 def _stop(self):
178 def _stop(self):
179 log.debug("Waiting for background thread to finish.")
179 log.debug("Waiting for background thread to finish.")
180 self._daemon.shutdown()
180 self._daemon.shutdown()
181 self._callback_thread.join()
181 self._callback_thread.join()
182 self._daemon = None
182 self._daemon = None
183 self._callback_thread = None
183 self._callback_thread = None
184 if self.txn_id:
184 if self.txn_id:
185 txn_id_file = get_txn_id_data_path(self.txn_id)
185 txn_id_file = get_txn_id_data_path(self.txn_id)
186 log.debug('Cleaning up TXN ID %s', txn_id_file)
186 log.debug('Cleaning up TXN ID %s', txn_id_file)
187 if os.path.isfile(txn_id_file):
187 if os.path.isfile(txn_id_file):
188 os.remove(txn_id_file)
188 os.remove(txn_id_file)
189
189
190 log.debug("Background thread done.")
190 log.debug("Background thread done.")
191
191
192
192
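A rough usage sketch of the context manager above (values assumed): with port=0 the daemon binds to an ephemeral port and exposes the resulting address via hooks_uri.

with HttpHooksCallbackDaemon(txn_id=None, host='127.0.0.1', port=0) as daemon:
    print(daemon.hooks_uri)   # e.g. '127.0.0.1:54321'
    # run the VCS operation that calls back into this daemon here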
193 def get_txn_id_data_path(txn_id):
193 def get_txn_id_data_path(txn_id):
194 import rhodecode
194 import rhodecode
195
195
196 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
196 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
197 final_dir = os.path.join(root, 'svn_txn_id')
197 final_dir = os.path.join(root, 'svn_txn_id')
198
198
199 if not os.path.isdir(final_dir):
199 if not os.path.isdir(final_dir):
200 os.makedirs(final_dir)
200 os.makedirs(final_dir)
201 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
201 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
202
202
203
203
204 def store_txn_id_data(txn_id, data_dict):
204 def store_txn_id_data(txn_id, data_dict):
205 if not txn_id:
205 if not txn_id:
206 log.warning('Cannot store txn_id because it is empty')
206 log.warning('Cannot store txn_id because it is empty')
207 return
207 return
208
208
209 path = get_txn_id_data_path(txn_id)
209 path = get_txn_id_data_path(txn_id)
210 try:
210 try:
211 with open(path, 'wb') as f:
211 with open(path, 'wb') as f:
212 f.write(json.dumps(data_dict))
212 f.write(json.dumps(data_dict))
213 except Exception:
213 except Exception:
214 log.exception('Failed to write txn_id metadata')
214 log.exception('Failed to write txn_id metadata')
215
215
216
216
217 def get_txn_id_from_store(txn_id):
217 def get_txn_id_from_store(txn_id):
218 """
218 """
219 Reads txn_id from the store and, if present, returns the data for the callback manager
219 Reads txn_id from the store and, if present, returns the data for the callback manager
220 """
220 """
221 path = get_txn_id_data_path(txn_id)
221 path = get_txn_id_data_path(txn_id)
222 try:
222 try:
223 with open(path, 'rb') as f:
223 with open(path, 'rb') as f:
224 return json.loads(f.read())
224 return json.loads(f.read())
225 except Exception:
225 except Exception:
226 return {}
226 return {}
227
227
228
228
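A hedged round-trip sketch for the two helpers above; the txn_id and the payload are made up for illustration.

store_txn_id_data('deadbeef01', {'port': 54321})
txn_details = get_txn_id_from_store('deadbeef01')
# -> roughly {u'port': 54321} if the write succeeded, else {}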
229 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
229 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
230 txn_details = get_txn_id_from_store(txn_id)
230 txn_details = get_txn_id_from_store(txn_id)
231 port = txn_details.get('port', 0)
231 port = txn_details.get('port', 0)
232 if use_direct_calls:
232 if use_direct_calls:
233 callback_daemon = DummyHooksCallbackDaemon()
233 callback_daemon = DummyHooksCallbackDaemon()
234 extras['hooks_module'] = callback_daemon.hooks_module
234 extras['hooks_module'] = callback_daemon.hooks_module
235 else:
235 else:
236 if protocol == 'http':
236 if protocol == 'http':
237 callback_daemon = HttpHooksCallbackDaemon(
237 callback_daemon = HttpHooksCallbackDaemon(
238 txn_id=txn_id, host=host, port=port)
238 txn_id=txn_id, host=host, port=port)
239 else:
239 else:
240 log.error('Unsupported callback daemon protocol "%s"', protocol)
240 log.error('Unsupported callback daemon protocol "%s"', protocol)
241 raise Exception('Unsupported callback daemon protocol.')
241 raise Exception('Unsupported callback daemon protocol.')
242
242
243 extras['hooks_uri'] = callback_daemon.hooks_uri
243 extras['hooks_uri'] = callback_daemon.hooks_uri
244 extras['hooks_protocol'] = protocol
244 extras['hooks_protocol'] = protocol
245 extras['time'] = time.time()
245 extras['time'] = time.time()
246
246
247 # register txn_id
247 # register txn_id
248 extras['txn_id'] = txn_id
248 extras['txn_id'] = txn_id
249 log.debug('Prepared a callback daemon: %s at url `%s`',
249 log.debug('Prepared a callback daemon: %s at url `%s`',
250 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
250 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
251 return callback_daemon, extras
251 return callback_daemon, extras
252
252
253
253
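A sketch of how a caller might use prepare_callback_daemon(); the extras dict is a minimal illustrative subset, not the full set RhodeCode passes in practice.

extras = {'repository': 'some-repo', 'username': 'admin', 'ip': '127.0.0.1',
          'user_id': 2, 'server_url': 'http://localhost:5000'}
callback_daemon, extras = prepare_callback_daemon(
    extras, protocol='http', host='127.0.0.1',
    use_direct_calls=False, txn_id=None)
with callback_daemon:
    pass  # perform the scm operation; hooks reach back via extras['hooks_uri']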
254 class Hooks(object):
254 class Hooks(object):
255 """
255 """
256 Exposes the hooks for remote callbacks
256 Exposes the hooks for remote callbacks
257 """
257 """
258
258
259 def repo_size(self, extras):
259 def repo_size(self, extras):
260 log.debug("Called repo_size of %s object", self)
260 log.debug("Called repo_size of %s object", self)
261 return self._call_hook(hooks_base.repo_size, extras)
261 return self._call_hook(hooks_base.repo_size, extras)
262
262
263 def pre_pull(self, extras):
263 def pre_pull(self, extras):
264 log.debug("Called pre_pull of %s object", self)
264 log.debug("Called pre_pull of %s object", self)
265 return self._call_hook(hooks_base.pre_pull, extras)
265 return self._call_hook(hooks_base.pre_pull, extras)
266
266
267 def post_pull(self, extras):
267 def post_pull(self, extras):
268 log.debug("Called post_pull of %s object", self)
268 log.debug("Called post_pull of %s object", self)
269 return self._call_hook(hooks_base.post_pull, extras)
269 return self._call_hook(hooks_base.post_pull, extras)
270
270
271 def pre_push(self, extras):
271 def pre_push(self, extras):
272 log.debug("Called pre_push of %s object", self)
272 log.debug("Called pre_push of %s object", self)
273 return self._call_hook(hooks_base.pre_push, extras)
273 return self._call_hook(hooks_base.pre_push, extras)
274
274
275 def post_push(self, extras):
275 def post_push(self, extras):
276 log.debug("Called post_push of %s object", self)
276 log.debug("Called post_push of %s object", self)
277 return self._call_hook(hooks_base.post_push, extras)
277 return self._call_hook(hooks_base.post_push, extras)
278
278
279 def _call_hook(self, hook, extras):
279 def _call_hook(self, hook, extras):
280 extras = AttributeDict(extras)
280 extras = AttributeDict(extras)
281 server_url = extras['server_url']
281 server_url = extras['server_url']
282 request = bootstrap_request(application_url=server_url)
282 request = bootstrap_request(application_url=server_url)
283
283
284 bootstrap_config(request) # inject routes and other interfaces
284 bootstrap_config(request) # inject routes and other interfaces
285
285
286 # inject the user for usage in hooks
286 # inject the user for usage in hooks
287 request.user = AttributeDict({'username': extras.username,
287 request.user = AttributeDict({'username': extras.username,
288 'ip_addr': extras.ip,
288 'ip_addr': extras.ip,
289 'user_id': extras.user_id})
289 'user_id': extras.user_id})
290
290
291 extras.request = request
291 extras.request = request
292
292
293 try:
293 try:
294 result = hook(extras)
294 result = hook(extras)
295 if result is None:
295 if result is None:
296 raise Exception(
296 raise Exception(
297 'Failed to obtain hook result from func: {}'.format(hook))
297 'Failed to obtain hook result from func: {}'.format(hook))
298 except HTTPBranchProtected as handled_error:
298 except HTTPBranchProtected as handled_error:
299 # These special cases don't need error reporting; they cover a
299 # These special cases don't need error reporting; they cover a
300 # locked repo or a protected branch
300 # locked repo or a protected branch
301 result = AttributeDict({
301 result = AttributeDict({
302 'status': handled_error.code,
302 'status': handled_error.code,
303 'output': handled_error.explanation
303 'output': handled_error.explanation
304 })
304 })
305 except (HTTPLockedRC, Exception) as error:
305 except (HTTPLockedRC, Exception) as error:
306 # locked needs different handling since we need to also
306 # locked needs different handling since we need to also
307 # handle PULL operations
307 # handle PULL operations
308 exc_tb = ''
308 exc_tb = ''
309 if not isinstance(error, HTTPLockedRC):
309 if not isinstance(error, HTTPLockedRC):
310 exc_tb = traceback.format_exc()
310 exc_tb = traceback.format_exc()
311 log.exception('Exception when handling hook %s', hook)
311 log.exception('Exception when handling hook %s', hook)
312 error_args = error.args
312 error_args = error.args
313 return {
313 return {
314 'status': 128,
314 'status': 128,
315 'output': '',
315 'output': '',
316 'exception': type(error).__name__,
316 'exception': type(error).__name__,
317 'exception_traceback': exc_tb,
317 'exception_traceback': exc_tb,
318 'exception_args': error_args,
318 'exception_args': error_args,
319 }
319 }
320 finally:
320 finally:
321 meta.Session.remove()
321 meta.Session.remove()
322
322
323 log.debug('Got hook call response %s', result)
323 log.debug('Got hook call response %s', result)
324 return {
324 return {
325 'status': result.status,
325 'status': result.status,
326 'output': result.output,
326 'output': result.output,
327 }
327 }
328
328
329 def __enter__(self):
329 def __enter__(self):
330 return self
330 return self
331
331
332 def __exit__(self, exc_type, exc_val, exc_tb):
332 def __exit__(self, exc_type, exc_val, exc_tb):
333 pass
333 pass
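The HTTP handler above resolves the posted method name onto this class with getattr, so an in-process call is essentially equivalent to the remote one; extras is again an illustrative dict as in the earlier sketches.

with Hooks() as hooks:
    result = getattr(hooks, 'post_pull')(extras)   # same as hooks.post_pull(extras)
    # on success, result is a dict of the form {'status': ..., 'output': ...}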
@@ -1,103 +1,123 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
20 import logging
21
21
22 import click
22 import click
23 import pyramid.paster
23 import pyramid.paster
24
24
25 from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
25 from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
26 from rhodecode.lib.db_manage import DbManage
26 from rhodecode.lib.db_manage import DbManage
27 from rhodecode.model.db import Session
27 from rhodecode.model.db import Session
28
28
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 @click.command()
33 @click.command()
34 @click.argument('ini_path', type=click.Path(exists=True))
34 @click.argument('ini_path', type=click.Path(exists=True))
35 @click.option(
35 @click.option(
36 '--force-yes/--force-no', default=None,
36 '--force-yes/--force-no', default=None,
37 help="Force yes/no to every question")
37 help="Force yes/no to every question")
38 @click.option(
38 @click.option(
39 '--user',
39 '--user',
40 default=None,
40 default=None,
41 help='Initial super-admin username')
41 help='Initial super-admin username')
42 @click.option(
42 @click.option(
43 '--email',
43 '--email',
44 default=None,
44 default=None,
45 help='Initial super-admin email address.')
45 help='Initial super-admin email address.')
46 @click.option(
46 @click.option(
47 '--password',
47 '--password',
48 default=None,
48 default=None,
49 help='Initial super-admin password. Minimum 6 chars.')
49 help='Initial super-admin password. Minimum 6 chars.')
50 @click.option(
50 @click.option(
51 '--api-key',
51 '--api-key',
52 help='Initial API key for the admin user')
52 help='Initial API key for the admin user')
53 @click.option(
53 @click.option(
54 '--repos',
54 '--repos',
55 default=None,
55 default=None,
56 help='Absolute path to storage location. This is storage for all '
56 help='Absolute path to storage location. This is storage for all '
57 'existing and future repositories, and repository groups.')
57 'existing and future repositories, and repository groups.')
58 @click.option(
58 @click.option(
59 '--public-access/--no-public-access',
59 '--public-access/--no-public-access',
60 default=None,
60 default=None,
61 help='Enable public access on this installation. '
61 help='Enable public access on this installation. '
62 'Default is public access enabled.')
62 'Default is public access enabled.')
63 @click.option(
64 '--skip-existing-db',
65 default=False,
66 is_flag=True,
67 help='Do not destroy and re-initialize the database if it already exists.')
68 @click.option(
69 '--apply-license-key',
70 default=False,
71 is_flag=True,
72 help='Get the license key from a license file or ENV and apply it during DB creation.')
63 def main(ini_path, force_yes, user, email, password, api_key, repos,
73 def main(ini_path, force_yes, user, email, password, api_key, repos,
64 public_access):
74 public_access, skip_existing_db, apply_license_key):
65 return command(ini_path, force_yes, user, email, password, api_key,
75 return command(ini_path, force_yes, user, email, password, api_key,
66 repos, public_access)
76 repos, public_access, skip_existing_db, apply_license_key)
67
77
68
78
69 def command(ini_path, force_yes, user, email, password, api_key, repos,
79 def command(ini_path, force_yes, user, email, password, api_key, repos,
70 public_access):
80 public_access, skip_existing_db, apply_license_key):
71 # mapping of old parameters to new CLI from click
81 # mapping of old parameters to new CLI from click
72 options = dict(
82 options = dict(
73 username=user,
83 username=user,
74 email=email,
84 email=email,
75 password=password,
85 password=password,
76 api_key=api_key,
86 api_key=api_key,
77 repos_location=repos,
87 repos_location=repos,
78 force_ask=force_yes,
88 force_ask=force_yes,
79 public_access=public_access
89 public_access=public_access
80 )
90 )
81 pyramid.paster.setup_logging(ini_path)
91 pyramid.paster.setup_logging(ini_path)
82
92
83 config = get_app_config(ini_path)
93 config = get_app_config(ini_path)
84
94
85 db_uri = config['sqlalchemy.db1.url']
95 db_uri = config['sqlalchemy.db1.url']
86 dbmanage = DbManage(log_sql=True, dbconf=db_uri, root='.',
96 dbmanage = DbManage(log_sql=True, dbconf=db_uri, root='.',
87 tests=False, cli_args=options)
97 tests=False, cli_args=options)
98 if skip_existing_db and dbmanage.db_exists():
99 return
100
88 dbmanage.create_tables(override=True)
101 dbmanage.create_tables(override=True)
89 dbmanage.set_db_version()
102 dbmanage.set_db_version()
90 opts = dbmanage.config_prompt(None)
103 opts = dbmanage.config_prompt(None)
91 dbmanage.create_settings(opts)
104 dbmanage.create_settings(opts)
92 dbmanage.create_default_user()
105 dbmanage.create_default_user()
93 dbmanage.create_admin_and_prompt()
106 dbmanage.create_admin_and_prompt()
94 dbmanage.create_permissions()
107 dbmanage.create_permissions()
95 dbmanage.populate_default_permissions()
108 dbmanage.populate_default_permissions()
109 if apply_license_key:
110 try:
111 from rc_license.models import apply_trial_license_if_missing
112 apply_trial_license_if_missing(force=True)
113 except ImportError:
114 pass
115
96 Session().commit()
116 Session().commit()
97
117
98 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
118 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
99 msg = 'Successfully initialized database, schema and default data.'
119 msg = 'Successfully initialized database, schema and default data.'
100 print()
120 print()
101 print('*' * len(msg))
121 print('*' * len(msg))
102 print(msg.upper())
122 print(msg.upper())
103 print('*' * len(msg))
123 print('*' * len(msg))
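An illustrative direct call of command() exercising the two new options; the ini path, credentials, and storage location are assumptions, and the interactive prompts still run as in the unmodified flow.

command('/home/rhodecode/rhodecode.ini', force_yes=True, user='admin',
        email='admin@example.com', password='secret123', api_key=None,
        repos='/home/rhodecode/repos', public_access=True,
        skip_existing_db=True, apply_license_key=True)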
Four additional modified files are not shown: their content was too big and was truncated.